From 5ee8e4a3b590a66a343f84fb0864ddfeefefa952 Mon Sep 17 00:00:00 2001 From: Felix Zeller Date: Sun, 16 Mar 2025 09:24:01 -0700 Subject: [PATCH 01/15] refactor: Simplify semantic tokens configuration and method signature in Settings::new --- src/config.rs | 16 +++++++--------- 1 file changed, 7 insertions(+), 9 deletions(-) diff --git a/src/config.rs b/src/config.rs index 17852448..9e46804e 100644 --- a/src/config.rs +++ b/src/config.rs @@ -43,7 +43,7 @@ pub enum EmbeddedBlockTransclusionLength { } impl Settings { - pub fn new(root_dir: &Path, capabilities: &ClientCapabilities) -> anyhow::Result { + pub fn new(root_dir: &Path, disable_semantic_tokens: bool) -> anyhow::Result { let obsidian_daily_note_config = obsidian_daily_note_config(root_dir).unwrap_or_default(); let obsidian_new_file_folder_path = obsidian_new_file_folder_path(root_dir); let expanded = shellexpand::tilde("~/.config/moxide/settings"); @@ -87,17 +87,15 @@ impl Settings { .set_default("block_transclusion_length", "Full")? .set_override_option( "semantic_tokens", - capabilities.text_document.as_ref().and_then(|it| { - match it.semantic_tokens.is_none() { - true => Some(false), - false => None, - } - }), + match disable_semantic_tokens { + true => Some(false), + false => None + } )? .build() - .map_err(|err| anyhow!("Build err: {err}"))?; + .map_err(|err| anyhow!("Build err: {err}"))?; // AI this should be an expect - let settings = settings.try_deserialize::()?; + let settings = settings.try_deserialize::()?; // AI this should also be an expect anyhow::Ok(settings) } From 4cf9e523f73ae65b0ebad73d4ff8461547664c99 Mon Sep 17 00:00:00 2001 From: "Felix Zeller\" (aider)" Date: Sun, 16 Mar 2025 09:24:07 -0700 Subject: [PATCH 02/15] refactor: Improve error handling for config build and deserialization --- src/config.rs | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/src/config.rs b/src/config.rs index 9e46804e..5e2950c8 100644 --- a/src/config.rs +++ b/src/config.rs @@ -93,9 +93,11 @@ impl Settings { } )? .build() - .map_err(|err| anyhow!("Build err: {err}"))?; // AI this should be an expect + .expect("Config build failed despite having defaults for all values"); - let settings = settings.try_deserialize::()?; // AI this should also be an expect + let settings = settings + .try_deserialize::() + .expect("Settings deserialization failed despite having valid defaults"); anyhow::Ok(settings) } From 6282ed0a83cbfe57880b18cb8abce36be6f91721 Mon Sep 17 00:00:00 2001 From: "Felix Zeller\" (aider)" Date: Sun, 16 Mar 2025 09:24:53 -0700 Subject: [PATCH 03/15] refactor: Remove `anyhow::Result` and use `expect()` for config initialization --- src/config.rs | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/src/config.rs b/src/config.rs index 5e2950c8..dc43855a 100644 --- a/src/config.rs +++ b/src/config.rs @@ -43,7 +43,7 @@ pub enum EmbeddedBlockTransclusionLength { } impl Settings { - pub fn new(root_dir: &Path, disable_semantic_tokens: bool) -> anyhow::Result { + pub fn new(root_dir: &Path, disable_semantic_tokens: bool) -> Settings { let obsidian_daily_note_config = obsidian_daily_note_config(root_dir).unwrap_or_default(); let obsidian_new_file_folder_path = obsidian_new_file_folder_path(root_dir); let expanded = shellexpand::tilde("~/.config/moxide/settings"); @@ -54,7 +54,7 @@ impl Settings { "{}/.moxide", root_dir .to_str() - .ok_or(anyhow!("Can't convert root_dir to str"))? 
+ .expect("Can't convert root_dir to str") )) .required(false), ) @@ -91,7 +91,8 @@ impl Settings { true => Some(false), false => None } - )? + ) + .expect("Failed to set config defaults") .build() .expect("Config build failed despite having defaults for all values"); @@ -99,7 +100,7 @@ impl Settings { .try_deserialize::() .expect("Settings deserialization failed despite having valid defaults"); - anyhow::Ok(settings) + settings } } From dc3e322dd9541bcd301852c622c6881eb8d1d75a Mon Sep 17 00:00:00 2001 From: "Felix Zeller\" (aider)" Date: Sun, 16 Mar 2025 09:26:03 -0700 Subject: [PATCH 04/15] refactor: Extract settings build logic into separate function with Result handling --- src/config.rs | 29 ++++++++++++++--------------- 1 file changed, 14 insertions(+), 15 deletions(-) diff --git a/src/config.rs b/src/config.rs index dc43855a..f9c7f54f 100644 --- a/src/config.rs +++ b/src/config.rs @@ -43,19 +43,19 @@ pub enum EmbeddedBlockTransclusionLength { } impl Settings { - pub fn new(root_dir: &Path, disable_semantic_tokens: bool) -> Settings { + fn build_settings(root_dir: &Path, disable_semantic_tokens: bool) -> anyhow::Result { let obsidian_daily_note_config = obsidian_daily_note_config(root_dir).unwrap_or_default(); let obsidian_new_file_folder_path = obsidian_new_file_folder_path(root_dir); let expanded = shellexpand::tilde("~/.config/moxide/settings"); + + let root_dir_str = root_dir + .to_str() + .ok_or_else(|| anyhow!("Can't convert root_dir to str"))?; + let settings = Config::builder() .add_source(File::with_name(&expanded).required(false)) .add_source( - File::with_name(&format!( - "{}/.moxide", - root_dir - .to_str() - .expect("Can't convert root_dir to str") - )) + File::with_name(&format!("{}/.moxide", root_dir_str)) .required(false), ) .set_default( @@ -91,16 +91,15 @@ impl Settings { true => Some(false), false => None } - ) - .expect("Failed to set config defaults") - .build() - .expect("Config build failed despite having defaults for all values"); + )? + .build()?; - let settings = settings - .try_deserialize::() - .expect("Settings deserialization failed despite having valid defaults"); + settings.try_deserialize().map_err(|e| anyhow!("Failed to deserialize settings: {}", e)) + } - settings + pub fn new(root_dir: &Path, disable_semantic_tokens: bool) -> Settings { + Self::build_settings(root_dir, disable_semantic_tokens) + .expect("Failed to build settings despite having valid defaults") } } From 7490570e04fd7b41a44928c496eba09a70280db7 Mon Sep 17 00:00:00 2001 From: Felix Zeller Date: Sun, 16 Mar 2025 09:30:04 -0700 Subject: [PATCH 05/15] refactor: Improve error message for settings configuration failure --- src/config.rs | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/config.rs b/src/config.rs index f9c7f54f..1e06888e 100644 --- a/src/config.rs +++ b/src/config.rs @@ -97,9 +97,12 @@ impl Settings { settings.try_deserialize().map_err(|e| anyhow!("Failed to deserialize settings: {}", e)) } + /// This will fail if settings is defined wrongly by the user. This is the case because if the user defines some config, and that + /// config is not resolved, then the user's software will not behave the way the user intentds, which is a contradictory case that + /// we won't handle. pub fn new(root_dir: &Path, disable_semantic_tokens: bool) -> Settings { Self::build_settings(root_dir, disable_semantic_tokens) - .expect("Failed to build settings despite having valid defaults") + .expect("Failed to build settings despite having valid defaults. 
There's likely an error in your settings file, and note that this would not fail if the settings file is not defined") } } From 4387e81d89876c455637d90c7d0340e96067661a Mon Sep 17 00:00:00 2001 From: Felix Zeller Date: Sun, 16 Mar 2025 09:49:34 -0700 Subject: [PATCH 06/15] refactor: Simplify Vault construction by removing error handling --- src/mcp.rs | 430 +++++++++++++++++++++++++++++++++++++++++++++++ src/vault/mod.rs | 6 +- 2 files changed, 433 insertions(+), 3 deletions(-) create mode 100644 src/mcp.rs diff --git a/src/mcp.rs b/src/mcp.rs new file mode 100644 index 00000000..a53b886a --- /dev/null +++ b/src/mcp.rs @@ -0,0 +1,430 @@ +use anyhow::{Result, Context}; +use connector::Oxide; +use serde_json::{json, Value}; +use std::io::Write; +use std::path::PathBuf; +use std::sync::Arc; +use tokio::sync::RwLock; +use std::fs::OpenOptions; + +use crate::vault::Vault; + +// Helper function to log to a file for debugging +fn log_to_file(message: &str) -> Result<()> { + let mut file = OpenOptions::new() + .create(true) + .append(true) + .open("/tmp/markdown-oxide-mcp.log")?; + + writeln!(file, "{}", message)?; + Ok(()) +} + +pub async fn start(root_dir: PathBuf) -> Result<()> { + // Use unbuffered stdin/stdout for direct communication + let input = std::io::stdin(); + let mut output = std::io::stdout(); + + let mut oxide: Option = None; + + // Log server start + log_to_file("MCP server started")?; + + loop { + // Read a line directly from stdin + let mut buffer = String::new(); + log_to_file("Reading from stdin...")?; + let bytes_read = input.read_line(&mut buffer).context("Failed to read from stdin")?; + + if bytes_read == 0 { + // EOF reached + log_to_file("EOF reached, exiting")?; + break; + } + + log_to_file(&format!("Received raw input ({} bytes): {:?}", bytes_read, buffer))?; + + // Skip empty lines + if buffer.trim().is_empty() { + log_to_file("Skipping empty line")?; + continue; + } + + // Parse JSON-RPC message + let message: Value = match serde_json::from_str(buffer.trim()) { + Ok(msg) => { + log_to_file(&format!("Parsed JSON: {}", msg))?; + msg + }, + Err(e) => { + log_to_file(&format!("Parse error: {}, input: {:?}", e, buffer))?; + + // Create error response for parse errors + let error_response = json!({ + "jsonrpc": "2.0", + "id": null, + "error": { + "code": -32700, + "message": format!("Parse error: {}", e) + } + }); + + // Output the response as a single line of JSON with newline + let response_json = serde_json::to_string(&error_response).unwrap(); + log_to_file(&format!("Sending error response: {}", response_json))?; + output.write_all(format!("{}\n", response_json).as_bytes())?; + output.flush()?; + continue; + } + }; + + // Extract request data + let id = message.get("id").and_then(|id| id.as_u64()).unwrap_or(0); + let method = message.get("method").and_then(|m| m.as_str()); + + log_to_file(&format!("Processing method: {:?} with id: {}", method, id))?; + + // Handle message based on method + let response = match method { + Some("ping") => { + log_to_file("pinged")?; + json!({ + "jsonrpc": "2.0", + "id": id, + "result": {} + }) + } + Some("initialize") => { + log_to_file("Handling initialize request")?; + + + // AI! 
time this and log the time + oxide = Some(Oxide::new(&root_dir)); + log_to_file("Handling initialize request")?; + + json!({ + "jsonrpc": "2.0", + "id": id, + "result": { + "protocolVersion": "2024-11-05", + "capabilities": { + "tools": { + "list": true, + "call": true, + "listChanged": true + } + }, + "serverInfo": { + "name": "markdown-oxide-mcp", + "version": env!("CARGO_PKG_VERSION") + } + } + }) + }, + Some("notifications/initialized") => { + // No response needed for notifications + log_to_file("Received initialized notification (no response needed)")?; + continue; + }, + Some("tools/list") => { + log_to_file("Handling tools/list request")?; + json!({ + "jsonrpc": "2.0", + "id": id, + "result": { + "tools": [ + { + "name": "echo", + "description": "Echo back the input message", + "inputSchema": { + "type": "object", + "properties": { + "message": { + "type": "string", + "description": "Message to echo" + } + }, + "required": ["message"], + "$schema": "http://json-schema.org/draft-07/schema#" + } + } + ] + } + }) + }, + Some("tools/call") => { + log_to_file("Handling tools/call request")?; + let params = message.get("params").cloned().unwrap_or_else(|| json!({})); + let tool_name = params.get("name").and_then(|n| n.as_str()); + + log_to_file(&format!("Tool name: {:?}", tool_name))?; + + if tool_name == Some("echo") { + let arguments = params.get("arguments").cloned().unwrap_or_else(|| json!({})); + let echo_message = arguments.get("message").and_then(|m| m.as_str()).unwrap_or("No message provided"); + + log_to_file(&format!("Echo message: {}", echo_message))?; + + json!({ + "jsonrpc": "2.0", + "id": id, + "result": { + "content": [ + { + "type": "text", + "text": format!("Echo: {}", echo_message) + } + ] + } + }) + } else { + log_to_file(&format!("Unknown tool: {:?}", tool_name))?; + json!({ + "jsonrpc": "2.0", + "id": id, + "error": { + "code": -32601, + "message": "Unknown tool" + } + }) + } + }, + Some(unknown) => { + log_to_file(&format!("Method not found: {}", unknown))?; + json!({ + "jsonrpc": "2.0", + "id": id, + "error": { + "code": -32601, + "message": format!("Method not found: {}", unknown) + } + }) + }, + None => { + log_to_file("Invalid request: missing method")?; + json!({ + "jsonrpc": "2.0", + "id": id, + "error": { + "code": -32600, + "message": "Invalid Request: missing method" + } + }) + } + }; + + // Serialize the response to a JSON string + let response_json = serde_json::to_string(&response).unwrap(); + log_to_file(&format!("Sending response: {}", response_json))?; + + // Write the response directly to stdout with a newline + output.write_all(format!("{}\n", response_json).as_bytes())?; + output.flush()?; + log_to_file("Response sent, flushed output")?; + } + + Ok(()) +} + +/// Create a success response +fn create_success_response(id: u64, result: Value) -> String { + let response = json!({ + "jsonrpc": "2.0", + "id": id, + "result": result + }); + + serde_json::to_string(&response).unwrap() +} + +/// Create an error response +fn create_error_response(id: u64, code: i32, message: &str, data: Option) -> String { + let mut error = json!({ + "code": code, + "message": message, }); + + if let Some(data) = data { + error.as_object_mut().unwrap().insert("data".to_string(), data); + } + + let response = json!({ + "jsonrpc": "2.0", + "id": id, + "error": error + }); + + serde_json::to_string(&response).unwrap() +} + + + + + + + +mod connector { + use std::path::{Path, PathBuf}; + + use anyhow; + + use crate::{config::Settings, vault::{Referenceable, Vault}}; + + + + 
#[derive(Debug)] + pub struct Oxide { + vault: Vault + } + + #[derive(Debug)] + struct ContextualizedDoc { + content: String, + outgoing_links: Vec, + backlinks: Vec, + } + + #[derive(Debug)] + struct LinkedContent { + path: PathBuf, + reference_text: String, + content: String, + } + + impl Oxide { + pub fn new(root_dir: &PathBuf) -> Self { + + let settings = Settings::new(root_dir, true); + let vault = Vault::construct_vault(&settings, root_dir); + + + Self { vault } + } + + /// Given a document reference, return a contextualized version of the document. + /// include the full content of the document, the content of outgoing links, and the content of backlinks to the document + pub fn contextualize_doc(&self, path: &Path) -> Result { + // Get the document content + let rope = self.vault.ropes.get(path) + .ok_or_else(|| anyhow::anyhow!("Document not found: {:?}", path))?; + let content = rope.to_string(); + + // Get outgoing links + let outgoing_links = self.vault.select_references(Some(path)) + .unwrap_or_default() + .into_iter() + .filter_map(|(_, reference)| { + // For each reference, find the target document + let referenceables = self.vault.select_referenceables_for_reference(reference, path); + + referenceables.into_iter().next().map(|referenceable| { + let target_path = referenceable.get_path(); + let target_rope = self.vault.ropes.get(target_path) + .map(|rope| rope.to_string()) + .unwrap_or_default(); + + LinkedContent { + path: target_path.to_path_buf(), + reference_text: reference.data().reference_text.clone(), + content: target_rope, + } + }) + }) + .collect(); + + // Get backlinks + let backlinks = self.vault.select_references(None) + .unwrap_or_default() + .into_iter() + .filter_map(|(ref_path, reference)| { + // Filter references that point to our document + if ref_path == path { + return None; + } + + // Check if this reference points to our document + let path_buf = PathBuf::from(path); + let md_file = self.vault.md_files.get(path)?; + let referenceable = Referenceable::File(&path_buf, md_file); + + if referenceable.matches_reference(self.vault.root_dir(), reference, ref_path) { + let ref_rope = self.vault.ropes.get(ref_path) + .map(|rope| rope.to_string()) + .unwrap_or_default(); + + Some(LinkedContent { + path: ref_path.to_path_buf(), + reference_text: reference.data().reference_text.clone(), + content: ref_rope, + }) + } else { + None + } + }) + .collect(); + + Ok(ContextualizedDoc { + content, + outgoing_links, + backlinks, + }) + } + } + + + impl ContextualizedDoc { + pub fn as_string(&self) -> String { + let mut result = String::new(); + + // Add the original document content + result.push_str("# Original Document\n\n"); + result.push_str(&self.content); + result.push_str("\n\n"); + + // Add outgoing links section + if !self.outgoing_links.is_empty() { + result.push_str("# Outgoing Links\n\n"); + for link in &self.outgoing_links { + result.push_str(&format!("## Link to: {}\n", link.reference_text)); + result.push_str(&format!("File path: {}\n\n", link.path.display())); + + // Add a preview of the linked content (first 500 chars or less) + let preview = if link.content.len() > 500 { + format!("{}...", &link.content[..500]) + } else { + link.content.clone() + }; + + result.push_str(&preview); + result.push_str("\n\n"); + } + } + + // Add backlinks section + if !self.backlinks.is_empty() { + result.push_str("# Backlinks\n\n"); + for link in &self.backlinks { + result.push_str(&format!("## Referenced from: {}\n", link.path.display())); + 
result.push_str(&format!("Reference text: {}\n\n", link.reference_text)); + + // Add a preview of the source content (first 500 chars or less) + let preview = if link.content.len() > 500 { + format!("{}...", &link.content[..500]) + } else { + link.content.clone() + }; + + result.push_str(&preview); + result.push_str("\n\n"); + } + } + + result + } + } + + + + + + +} diff --git a/src/vault/mod.rs b/src/vault/mod.rs index 925bb307..e9ca2bd7 100644 --- a/src/vault/mod.rs +++ b/src/vault/mod.rs @@ -22,7 +22,7 @@ use tower_lsp::lsp_types::Position; use walkdir::WalkDir; impl Vault { - pub fn construct_vault(context: &Settings, root_dir: &Path) -> Result { + pub fn construct_vault(context: &Settings, root_dir: &Path) -> Vault { // AI? where would Err be returned? let md_file_paths = WalkDir::new(root_dir) .into_iter() .filter_entry(|e| { @@ -55,11 +55,11 @@ impl Vault { }) .collect(); - Ok(Vault { + Vault { ropes: ropes.into(), md_files: md_files.into(), root_dir: root_dir.into(), - }) + } } pub fn update_vault(context: &Settings, old: &mut Vault, new_file: (&PathBuf, &str)) { From ff5e168355178c2803da4b0745d39c5f83f85175 Mon Sep 17 00:00:00 2001 From: "Felix Zeller\" (aider)" Date: Sun, 16 Mar 2025 09:49:39 -0700 Subject: [PATCH 07/15] feat: Add timing for Oxide initialization and remove AI comment --- src/mcp.rs | 7 ++++--- src/vault/mod.rs | 2 +- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/src/mcp.rs b/src/mcp.rs index a53b886a..716ac416 100644 --- a/src/mcp.rs +++ b/src/mcp.rs @@ -97,10 +97,11 @@ pub async fn start(root_dir: PathBuf) -> Result<()> { Some("initialize") => { log_to_file("Handling initialize request")?; - - // AI! time this and log the time + // Time the initialization + let start = std::time::Instant::now(); oxide = Some(Oxide::new(&root_dir)); - log_to_file("Handling initialize request")?; + let duration = start.elapsed(); + log_to_file(&format!("Oxide initialization took: {:?}", duration))?; json!({ "jsonrpc": "2.0", diff --git a/src/vault/mod.rs b/src/vault/mod.rs index e9ca2bd7..3974589a 100644 --- a/src/vault/mod.rs +++ b/src/vault/mod.rs @@ -22,7 +22,7 @@ use tower_lsp::lsp_types::Position; use walkdir::WalkDir; impl Vault { - pub fn construct_vault(context: &Settings, root_dir: &Path) -> Vault { // AI? where would Err be returned? 
+ pub fn construct_vault(context: &Settings, root_dir: &Path) -> Vault { let md_file_paths = WalkDir::new(root_dir) .into_iter() .filter_entry(|e| { From e323fbb9e2fbfe3b56712b30b177e0875fcae595 Mon Sep 17 00:00:00 2001 From: Felix Zeller Date: Sun, 16 Mar 2025 10:42:09 -0700 Subject: [PATCH 08/15] Add MCP server and daily note context feature MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Implemented MCP (Model Context Protocol) server - Added daily_context tool to get current daily note with context - Returns full content of daily note, outgoing links, and backlinks - Uses separator-based formatting for better readability 🤖 Generated with Claude Code Co-Authored-By: Claude --- src/main.rs | 66 +++++---- src/mcp.rs | 398 +++++++++++++++++++++++++++++++--------------------- 2 files changed, 279 insertions(+), 185 deletions(-) diff --git a/src/main.rs b/src/main.rs index 7c967db0..7585e0cc 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1,6 +1,7 @@ use std::collections::HashSet; use std::ops::{Deref, DerefMut}; use std::path::PathBuf; +use std::str::FromStr; use std::sync::Arc; use completion::get_completions; @@ -36,6 +37,7 @@ mod symbol; mod tokens; mod ui; mod vault; +mod mcp; #[derive(Debug)] struct Backend { @@ -114,9 +116,7 @@ impl Backend { { let _ = self .bind_vault_mut(|vault| { - let Ok(new_vault) = Vault::construct_vault(&settings, vault.root_dir()) else { - return Err(Error::new(ErrorCode::ServerError(0))); - }; + let new_vault = Vault::construct_vault(&settings, vault.root_dir()); *vault = new_vault; @@ -301,22 +301,17 @@ impl LanguageServer for Backend { None => std::env::current_dir().or(Err(Error::new(ErrorCode::InvalidParams)))?, }; - let read_settings = match Settings::new(&root_dir, &i.capabilities) { - Ok(settings) => settings, - Err(e) => { - self.client - .log_message( - MessageType::ERROR, - format!("Failed to read settings {:?}", e), - ) - .await; - return Err(Error::new(ErrorCode::ServerError(1))); - } - }; + let disable_semantic_tokens = i.capabilities.text_document.as_ref().and_then(|it| { + match it.semantic_tokens.is_none() { + true => Some(false), + false => None, + } + }).unwrap_or(false); + + let read_settings = Settings::new(&root_dir, disable_semantic_tokens); + + let vault = Vault::construct_vault(&read_settings, &root_dir); - let Ok(vault) = Vault::construct_vault(&read_settings, &root_dir) else { - return Err(Error::new(ErrorCode::ServerError(0))); - }; let mut value = self.vault.write().await; *value = Some(vault); @@ -826,14 +821,31 @@ async fn main() { return; } - let stdin = tokio::io::stdin(); - let stdout = tokio::io::stdout(); + // Check if the MCP flag is provided + let is_mcp_mode = env::args().any(|arg| arg == "mcp"); + + if is_mcp_mode { + let mut args = env::args().into_iter(); + args.find(|arg| arg == "--full-dir-path"); + let directory_string = args.next().expect("The full path to the vault must be specified as an argument. 
Use arg --full-dir-path /home/{path}"); + let root_dir = PathBuf::from_str(&directory_string).expect("The root dir must be a valid path"); - let (service, socket) = LspService::new(|client| Backend { - client, - vault: Arc::new(None.into()), - opened_files: Arc::new(HashSet::new().into()), - settings: Arc::new(None.into()), - }); - Server::new(stdin, stdout, socket).serve(service).await; + // Start the MCP server + if let Err(e) = mcp::start(root_dir).await { + eprintln!("MCP server error: {:?}", e); + std::process::exit(1); + } + } else { + // Start in LSP mode (original behavior) + let stdin = tokio::io::stdin(); + let stdout = tokio::io::stdout(); + + let (service, socket) = LspService::new(|client| Backend { + client, + vault: Arc::new(None.into()), + opened_files: Arc::new(HashSet::new().into()), + settings: Arc::new(None.into()), + }); + Server::new(stdin, stdout, socket).serve(service).await; + } } diff --git a/src/mcp.rs b/src/mcp.rs index 716ac416..d9a6477b 100644 --- a/src/mcp.rs +++ b/src/mcp.rs @@ -1,11 +1,11 @@ -use anyhow::{Result, Context}; +use anyhow::{Context, Result}; use connector::Oxide; use serde_json::{json, Value}; +use std::fs::OpenOptions; use std::io::Write; use std::path::PathBuf; use std::sync::Arc; use tokio::sync::RwLock; -use std::fs::OpenOptions; use crate::vault::Vault; @@ -15,7 +15,7 @@ fn log_to_file(message: &str) -> Result<()> { .create(true) .append(true) .open("/tmp/markdown-oxide-mcp.log")?; - + writeln!(file, "{}", message)?; Ok(()) } @@ -26,39 +26,44 @@ pub async fn start(root_dir: PathBuf) -> Result<()> { let mut output = std::io::stdout(); let mut oxide: Option = None; - + // Log server start log_to_file("MCP server started")?; - + loop { // Read a line directly from stdin let mut buffer = String::new(); log_to_file("Reading from stdin...")?; - let bytes_read = input.read_line(&mut buffer).context("Failed to read from stdin")?; - + let bytes_read = input + .read_line(&mut buffer) + .context("Failed to read from stdin")?; + if bytes_read == 0 { // EOF reached log_to_file("EOF reached, exiting")?; break; } - - log_to_file(&format!("Received raw input ({} bytes): {:?}", bytes_read, buffer))?; - + + log_to_file(&format!( + "Received raw input ({} bytes): {:?}", + bytes_read, buffer + ))?; + // Skip empty lines if buffer.trim().is_empty() { log_to_file("Skipping empty line")?; continue; } - + // Parse JSON-RPC message let message: Value = match serde_json::from_str(buffer.trim()) { Ok(msg) => { log_to_file(&format!("Parsed JSON: {}", msg))?; msg - }, + } Err(e) => { log_to_file(&format!("Parse error: {}, input: {:?}", e, buffer))?; - + // Create error response for parse errors let error_response = json!({ "jsonrpc": "2.0", @@ -68,7 +73,7 @@ pub async fn start(root_dir: PathBuf) -> Result<()> { "message": format!("Parse error: {}", e) } }); - + // Output the response as a single line of JSON with newline let response_json = serde_json::to_string(&error_response).unwrap(); log_to_file(&format!("Sending error response: {}", response_json))?; @@ -77,13 +82,13 @@ pub async fn start(root_dir: PathBuf) -> Result<()> { continue; } }; - + // Extract request data let id = message.get("id").and_then(|id| id.as_u64()).unwrap_or(0); let method = message.get("method").and_then(|m| m.as_str()); - + log_to_file(&format!("Processing method: {:?} with id: {}", method, id))?; - + // Handle message based on method let response = match method { Some("ping") => { @@ -91,7 +96,7 @@ pub async fn start(root_dir: PathBuf) -> Result<()> { json!({ "jsonrpc": "2.0", "id": id, - 
"result": {} + "result": {} }) } Some("initialize") => { @@ -121,99 +126,160 @@ pub async fn start(root_dir: PathBuf) -> Result<()> { } } }) - }, + } Some("notifications/initialized") => { // No response needed for notifications log_to_file("Received initialized notification (no response needed)")?; continue; - }, - Some("tools/list") => { - log_to_file("Handling tools/list request")?; + } + None => { + log_to_file("Invalid request: missing method")?; json!({ - "jsonrpc": "2.0", - "id": id, - "result": { - "tools": [ + "jsonrpc": "2.0", + "id": id, + "error": { + "code": -32600, + "message": "Invalid Request: missing method" + } + }) + } + Some(method) => { + let oxide = + oxide.as_ref().expect("Oxide should be initialized after MCP initialization life cycle"); + + match method { + "tools/list" => { + log_to_file("Handling tools/list request")?; + json!({ + "jsonrpc": "2.0", + "id": id, + "result": { + "tools": [ { "name": "echo", "description": "Echo back the input message", "inputSchema": { - "type": "object", - "properties": { - "message": { - "type": "string", - "description": "Message to echo" - } - }, - "required": ["message"], - "$schema": "http://json-schema.org/draft-07/schema#" - } + "type": "object", + "properties": { + "message": { + "type": "string", + "description": "Message to echo" } + }, + "required": ["message"], + "$schema": "http://json-schema.org/draft-07/schema#" + } + }, + { + "name": "daily_context", + "description": "Get the user's daily note", + "inputSchema": { + "type": "object", + "properties": {}, + "$schema": "http://json-schema.org/draft-07/schema#" + } + } ] + } + }) } - }) - }, - Some("tools/call") => { - log_to_file("Handling tools/call request")?; - let params = message.get("params").cloned().unwrap_or_else(|| json!({})); - let tool_name = params.get("name").and_then(|n| n.as_str()); - - log_to_file(&format!("Tool name: {:?}", tool_name))?; - - if tool_name == Some("echo") { - let arguments = params.get("arguments").cloned().unwrap_or_else(|| json!({})); - let echo_message = arguments.get("message").and_then(|m| m.as_str()).unwrap_or("No message provided"); - - log_to_file(&format!("Echo message: {}", echo_message))?; - - json!({ - "jsonrpc": "2.0", - "id": id, - "result": { - "content": [ - { - "type": "text", - "text": format!("Echo: {}", echo_message) + "tools/call" => { + log_to_file("Handling tools/call request")?; + let params = message.get("params").cloned().unwrap_or_else(|| json!({})); + let tool_name = params.get("name").and_then(|n| n.as_str()); + + log_to_file(&format!("Tool name: {:?}", tool_name))?; + + match tool_name { + Some("echo") => { + let arguments = params + .get("arguments") + .cloned() + .unwrap_or_else(|| json!({})); + let echo_message = arguments + .get("message") + .and_then(|m| m.as_str()) + .unwrap_or("No message provided"); + + log_to_file(&format!("Echo message: {}", echo_message))?; + + json!({ + "jsonrpc": "2.0", + "id": id, + "result": { + "content": [ + { + "type": "text", + "text": format!("Echo: {}", echo_message) + } + ] + } + }) + }, + Some("daily_context") => { + log_to_file("Processing daily_context request")?; + + match oxide.daily_note_context() { + Ok(context_doc) => { + let formatted_doc = context_doc.as_string(); + log_to_file(&format!("Daily context generated, length: {}", formatted_doc.len()))?; + + json!({ + "jsonrpc": "2.0", + "id": id, + "result": { + "content": [ + { + "type": "text", + "text": formatted_doc + } + ] + } + }) + }, + Err(e) => { + let error_msg = format!("Error generating daily context: 
{}", e); + log_to_file(&error_msg)?; + + json!({ + "jsonrpc": "2.0", + "id": id, + "error": { + "code": -32603, + "message": error_msg + } + }) + } } - ] - } - }) - } else { - log_to_file(&format!("Unknown tool: {:?}", tool_name))?; - json!({ - "jsonrpc": "2.0", - "id": id, - "error": { - "code": -32601, - "message": "Unknown tool" + }, + _ => { + log_to_file(&format!("Unknown tool: {:?}", tool_name))?; + json!({ + "jsonrpc": "2.0", + "id": id, + "error": { + "code": -32601, + "message": "Unknown tool" + } + }) + } } - }) - } - }, - Some(unknown) => { - log_to_file(&format!("Method not found: {}", unknown))?; - json!({ - "jsonrpc": "2.0", - "id": id, - "error": { - "code": -32601, - "message": format!("Method not found: {}", unknown) } - }) - }, - None => { - log_to_file("Invalid request: missing method")?; - json!({ - "jsonrpc": "2.0", - "id": id, - "error": { - "code": -32600, - "message": "Invalid Request: missing method" + unknown => { + log_to_file(&format!("Method not found: {}", unknown))?; + json!({ + "jsonrpc": "2.0", + "id": id, + "error": { + "code": -32601, + "message": format!("Method not found: {}", unknown) + } + }) } - }) + } } }; - + // Serialize the response to a JSON string let response_json = serde_json::to_string(&response).unwrap(); log_to_file(&format!("Sending response: {}", response_json))?; @@ -223,7 +289,7 @@ pub async fn start(root_dir: PathBuf) -> Result<()> { output.flush()?; log_to_file("Response sent, flushed output")?; } - + Ok(()) } @@ -234,7 +300,7 @@ fn create_success_response(id: u64, result: Value) -> String { "id": id, "result": result }); - + serde_json::to_string(&response).unwrap() } @@ -243,42 +309,41 @@ fn create_error_response(id: u64, code: i32, message: &str, data: Option) let mut error = json!({ "code": code, "message": message, }); - + if let Some(data) = data { - error.as_object_mut().unwrap().insert("data".to_string(), data); + error + .as_object_mut() + .unwrap() + .insert("data".to_string(), data); } - + let response = json!({ "jsonrpc": "2.0", "id": id, "error": error }); - + serde_json::to_string(&response).unwrap() } - - - - - - mod connector { use std::path::{Path, PathBuf}; - - use anyhow; - - use crate::{config::Settings, vault::{Referenceable, Vault}}; + use anyhow; + use crate::{ + config::Settings, + vault::{Referenceable, Vault}, + }; #[derive(Debug)] pub struct Oxide { - vault: Vault + vault: Vault, + settings: Settings, } #[derive(Debug)] - struct ContextualizedDoc { + pub struct ContextualizedDoc { content: String, outgoing_links: Vec, backlinks: Vec, @@ -293,36 +358,62 @@ mod connector { impl Oxide { pub fn new(root_dir: &PathBuf) -> Self { - let settings = Settings::new(root_dir, true); let vault = Vault::construct_vault(&settings, root_dir); + Self { vault, settings } + } - Self { vault } + pub fn daily_note_context(&self) -> Result { + use chrono::Local; + + // Get paths for daily notes + let daily_note_format = &self.settings.dailynote; + let daily_note_path = self.vault.root_dir().join(&self.settings.daily_notes_folder); + + // Use today's date + let datetime = Local::now().naive_local(); + + // Format the date according to the configured pattern + let filename = datetime.format(daily_note_format).to_string(); + let path = daily_note_path.join(&filename).with_extension("md"); + + // Return contextualized document for this path + self.contextualize_doc(&path) } /// Given a document reference, return a contextualized version of the document. 
/// include the full content of the document, the content of outgoing links, and the content of backlinks to the document - pub fn contextualize_doc(&self, path: &Path) -> Result { + fn contextualize_doc(&self, path: &Path) -> Result { // Get the document content - let rope = self.vault.ropes.get(path) + let rope = self + .vault + .ropes + .get(path) .ok_or_else(|| anyhow::anyhow!("Document not found: {:?}", path))?; let content = rope.to_string(); - + // Get outgoing links - let outgoing_links = self.vault.select_references(Some(path)) + let outgoing_links = self + .vault + .select_references(Some(path)) .unwrap_or_default() .into_iter() .filter_map(|(_, reference)| { // For each reference, find the target document - let referenceables = self.vault.select_referenceables_for_reference(reference, path); - + let referenceables = self + .vault + .select_referenceables_for_reference(reference, path); + referenceables.into_iter().next().map(|referenceable| { let target_path = referenceable.get_path(); - let target_rope = self.vault.ropes.get(target_path) + let target_rope = self + .vault + .ropes + .get(target_path) .map(|rope| rope.to_string()) .unwrap_or_default(); - + LinkedContent { path: target_path.to_path_buf(), reference_text: reference.data().reference_text.clone(), @@ -331,9 +422,11 @@ mod connector { }) }) .collect(); - + // Get backlinks - let backlinks = self.vault.select_references(None) + let backlinks = self + .vault + .select_references(None) .unwrap_or_default() .into_iter() .filter_map(|(ref_path, reference)| { @@ -341,17 +434,20 @@ mod connector { if ref_path == path { return None; } - + // Check if this reference points to our document let path_buf = PathBuf::from(path); let md_file = self.vault.md_files.get(path)?; let referenceable = Referenceable::File(&path_buf, md_file); - + if referenceable.matches_reference(self.vault.root_dir(), reference, ref_path) { - let ref_rope = self.vault.ropes.get(ref_path) + let ref_rope = self + .vault + .ropes + .get(ref_path) .map(|rope| rope.to_string()) .unwrap_or_default(); - + Some(LinkedContent { path: ref_path.to_path_buf(), reference_text: reference.data().reference_text.clone(), @@ -362,7 +458,7 @@ mod connector { } }) .collect(); - + Ok(ContextualizedDoc { content, outgoing_links, @@ -371,61 +467,47 @@ mod connector { } } - impl ContextualizedDoc { pub fn as_string(&self) -> String { let mut result = String::new(); - + // Add the original document content - result.push_str("# Original Document\n\n"); result.push_str(&self.content); result.push_str("\n\n"); // Add outgoing links section if !self.outgoing_links.is_empty() { - result.push_str("# Outgoing Links\n\n"); + result.push_str("---\n\n"); + result.push_str("Outgoing Links:\n\n"); + for link in &self.outgoing_links { - result.push_str(&format!("## Link to: {}\n", link.reference_text)); + result.push_str("---\n\n"); + result.push_str(&format!("Link to: {}\n", link.reference_text)); result.push_str(&format!("File path: {}\n\n", link.path.display())); - // Add a preview of the linked content (first 500 chars or less) - let preview = if link.content.len() > 500 { - format!("{}...", &link.content[..500]) - } else { - link.content.clone() - }; - - result.push_str(&preview); + // Include the full content + result.push_str(&link.content); result.push_str("\n\n"); } } // Add backlinks section if !self.backlinks.is_empty() { - result.push_str("# Backlinks\n\n"); + result.push_str("---\n\n"); + result.push_str("Backlinks:\n\n"); + for link in &self.backlinks { - 
result.push_str(&format!("## Referenced from: {}\n", link.path.display())); + result.push_str("---\n\n"); + result.push_str(&format!("Referenced from: {}\n", link.path.display())); result.push_str(&format!("Reference text: {}\n\n", link.reference_text)); - // Add a preview of the source content (first 500 chars or less) - let preview = if link.content.len() > 500 { - format!("{}...", &link.content[..500]) - } else { - link.content.clone() - }; - - result.push_str(&preview); + // Include the full content + result.push_str(&link.content); result.push_str("\n\n"); } } - + result } } - - - - - - } From 3f0dec73509cfea6749e353defb39cdadabad57c Mon Sep 17 00:00:00 2001 From: Felix Zeller Date: Sun, 16 Mar 2025 10:45:38 -0700 Subject: [PATCH 09/15] add claude.md --- CLAUDE.md | 38 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 38 insertions(+) create mode 100644 CLAUDE.md diff --git a/CLAUDE.md b/CLAUDE.md new file mode 100644 index 00000000..f4f4a523 --- /dev/null +++ b/CLAUDE.md @@ -0,0 +1,38 @@ +# Markdown Oxide Development Guide + +## Build Commands +```bash +# Build the project +cargo build + +# Run the binary +cargo run + +# Run tests +cargo test + +# Check code style and common issues +cargo clippy + +# Format code +cargo fmt +``` + +## Code Style Guidelines +- **Imports**: Group by standard lib, external crates, then internal modules +- **Naming**: Use snake_case for variables/functions, CamelCase for types/traits +- **Error Handling**: Use `anyhow` for general errors, custom errors for specific cases +- **Documentation**: Document public APIs with rustdoc comments +- **Types**: Prefer strong typing with custom types over primitives +- **Async**: Use `async/await` consistently, avoid mixing with direct futures + +## Project Structure +- `src/vault/`: Core data management +- `src/completion/`: Editor completion providers +- `src/tokens.rs`: Markdown token parsing +- `src/main.rs`: Entry point and LSP server setup + +## MCP Integration +MCP (Model Context Protocol) server implementation is in `src/mcp.rs`. Use this to access AI service integrations with Claude and other MCP-compatible clients. 
+ +For more information on MCP, see: https://modelcontextprotocol.io/llms-full.txt \ No newline at end of file From 8d8f94ec38d08caca606e9a29e8d3e1854361f14 Mon Sep 17 00:00:00 2001 From: Felix Zeller Date: Sun, 16 Mar 2025 21:02:12 -0700 Subject: [PATCH 10/15] file watcher --- src/mcp.rs | 226 +++++++++++++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 218 insertions(+), 8 deletions(-) diff --git a/src/mcp.rs b/src/mcp.rs index d9a6477b..ac94a26e 100644 --- a/src/mcp.rs +++ b/src/mcp.rs @@ -25,7 +25,73 @@ pub async fn start(root_dir: PathBuf) -> Result<()> { let input = std::io::stdin(); let mut output = std::io::stdout(); - let mut oxide: Option = None; + // Create Oxide wrapped in Arc so we can update it from the watcher thread + let oxide_arc = Arc::new(RwLock::new(None::)); + + // Clone for the file watcher + let oxide_watcher = oxide_arc.clone(); + let root_dir_clone = root_dir.clone(); + + // Spawn a tokio task for file watching + tokio::spawn(async move { + use notify::{Config, Event, EventKind, RecommendedWatcher, RecursiveMode, Watcher}; + + // Create a channel to receive events + let (tx, mut rx) = tokio::sync::mpsc::channel(100); + + // Create the file watcher + let mut watcher = RecommendedWatcher::new( + move |result: Result| { + if let Ok(event) = result { + // Only consider events for markdown files + if event + .paths + .iter() + .any(|p| p.extension().map_or(false, |ext| ext == "md")) + { + let _ = tx.try_send(event); + } + } + }, + Config::default(), + ) + .expect("Failed to create file watcher"); + + // Start watching the vault directory + if let Err(e) = watcher.watch(&root_dir_clone, RecursiveMode::Recursive) { + log_with_level("error", &format!("Error watching directory: {}", e)).unwrap_or(()); + } else { + log("File watcher started successfully").unwrap_or(()); + } + + // Process events + while let Some(event) = rx.recv().await { + // Only react to create, modify, or delete events + match event.kind { + EventKind::Create(_) | EventKind::Modify(_) | EventKind::Remove(_) => { + log(&format!("File change detected: {:?}", event)).unwrap_or(()); + + // Quick lock to check if Oxide is initialized + { + let mut oxide_guard = oxide_watcher.write().await; + match *oxide_guard { + Some(_) => { + // Oxide exists, rebuild it + let new_oxide = Oxide::new(&root_dir_clone); + *oxide_guard = Some(new_oxide); + log("Oxide instance rebuilt successfully").unwrap_or(()); + } + None => { + log("Skipping vault rebuild - Oxide not yet initialized") + .unwrap_or(()); + } + } + } + } + _ => {} // Ignore other event types + } + } + }); // Log server start log_to_file("MCP server started")?; @@ -104,7 +170,14 @@ pub async fn start(root_dir: PathBuf) -> Result<()> { // Time the initialization let start = std::time::Instant::now(); - oxide = Some(Oxide::new(&root_dir)); + let new_oxide = Oxide::new(&root_dir); + + // Store the initialized Oxide in the RwLock + { + let mut oxide_guard = oxide_arc.write().await; + *oxide_guard = Some(new_oxide); + } + let duration = start.elapsed(); log_to_file(&format!("Oxide initialization took: {:?}", duration))?; @@ -144,8 +217,11 @@ pub async fn start(root_dir: PathBuf) -> Result<()> { }) } Some(method) => { - let oxide = - oxide.as_ref().expect("Oxide should be initialized after MCP initialization life cycle"); + // Get a read lock on the oxide + let oxide_guard = oxide_arc.read().await; + let oxide = oxide_guard + .as_ref() + .expect("Oxide should be initialized after MCP initialization life cycle"); match method { "tools/list" => { @@ -172,12 
+248,32 @@ pub async fn start(root_dir: PathBuf) -> Result<()> { }, { "name": "daily_context", - "description": "Get the user's daily note", + "description": "Get the user's daily note. You should almost always call this when answering questions", "inputSchema": { "type": "object", "properties": {}, "$schema": "http://json-schema.org/draft-07/schema#" } + }, + { + "name": "daily_context_range", + "description": "Get daily notes context for a range of days before and after today", + "inputSchema": { + "type": "object", + "properties": { + "past_days": { + "type": "integer", + "description": "Number of past days to include", + "default": 5 + }, + "future_days": { + "type": "integer", + "description": "Number of future days to include", + "default": 5 + } + }, + "$schema": "http://json-schema.org/draft-07/schema#" + } } ] } @@ -251,7 +347,68 @@ pub async fn start(root_dir: PathBuf) -> Result<()> { }) } } - }, + } + Some("daily_context_range") => { + log("Processing daily_context_range request")?; + + let arguments = params + .get("arguments") + .cloned() + .unwrap_or_else(|| json!({})); + + let past_days = arguments + .get("past_days") + .and_then(|d| d.as_i64()) + .unwrap_or(5) + as usize; + + let future_days = arguments + .get("future_days") + .and_then(|d| d.as_i64()) + .unwrap_or(5) + as usize; + + log(&format!( + "Getting daily context range: past_days={}, future_days={}", + past_days, future_days + ))?; + + match oxide.daily_note_context_range(past_days, future_days) { + Ok(context) => { + log(&format!( + "Daily context range generated, length: {}", + context.len() + ))?; + + json!({ + "jsonrpc": "2.0", + "id": id, + "result": { + "content": [ + { + "type": "text", + "text": context + } + ] + } + }) + } + Err(e) => { + let error_msg = + format!("Error generating daily context range: {}", e); + log(&error_msg)?; + + json!({ + "jsonrpc": "2.0", + "id": id, + "error": { + "code": -32603, + "message": error_msg + } + }) + } + } + } _ => { log_to_file(&format!("Unknown tool: {:?}", tool_name))?; json!({ @@ -369,8 +526,11 @@ mod connector { // Get paths for daily notes let daily_note_format = &self.settings.dailynote; - let daily_note_path = self.vault.root_dir().join(&self.settings.daily_notes_folder); - + let daily_note_path = self + .vault + .root_dir() + .join(&self.settings.daily_notes_folder); + // Use today's date let datetime = Local::now().naive_local(); @@ -382,6 +542,56 @@ mod connector { self.contextualize_doc(&path) } + pub fn daily_note_context_range( + &self, + past_days: usize, + future_days: usize, + ) -> Result { + use chrono::{Duration, Local, NaiveDate}; + + // Get today's date + let today = Local::now().naive_local().date(); + let daily_note_format = &self.settings.dailynote; + let daily_note_path = self + .vault + .root_dir() + .join(&self.settings.daily_notes_folder); + + // Generate a range of dates from past_days ago to future_days ahead + let start_date = today - Duration::days(past_days as i64); + let end_date = today + Duration::days(future_days as i64); + + let mut result = String::new(); + let mut current_date = start_date; + + // For each date in the range, try to get the daily note + while current_date <= end_date { + // Format the date according to the configured pattern + let filename = current_date.format(daily_note_format).to_string(); + let path = daily_note_path.join(&filename).with_extension("md"); + + // Check if the file exists in the vault + if let Some(rope) = self.vault.ropes.get(&path) { + // Add a date header + result.push_str(&format!( + "# Daily 
Note: {}\n\n", + current_date.format("%Y-%m-%d") + )); + + // Add the content + result.push_str(&rope.to_string()); + result.push_str("\n\n---\n\n"); + } + + // Move to the next day + current_date = current_date + .succ_opt() + .unwrap_or(current_date + Duration::days(1)); + } + + Ok(result) + } + /// Given a document reference, return a contextualized version of the document. /// include the full content of the document, the content of outgoing links, and the content of backlinks to the document fn contextualize_doc(&self, path: &Path) -> Result { From 33adb247d50e435c66499b14be094ac8ffcfc02d Mon Sep 17 00:00:00 2001 From: Felix Zeller Date: Mon, 17 Mar 2025 15:08:51 -0700 Subject: [PATCH 11/15] feat: Add backlinks for linked entities in MCP MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Add backlinks for linked entities in the MCP daily_context function - For each outgoing link, include a list of backlinks to that entity - Improve markdown formatting with code blocks to preserve structure - Fix various warnings and code styling 🤖 Generated with Claude Code Co-Authored-By: Claude --- src/mcp.rs | 108 +++++++++++++++++++++++++++++++++++++++-------------- 1 file changed, 80 insertions(+), 28 deletions(-) diff --git a/src/mcp.rs b/src/mcp.rs index ac94a26e..1f5977a2 100644 --- a/src/mcp.rs +++ b/src/mcp.rs @@ -7,7 +7,6 @@ use std::path::PathBuf; use std::sync::Arc; use tokio::sync::RwLock; -use crate::vault::Vault; // Helper function to log to a file for debugging fn log_to_file(message: &str) -> Result<()> { @@ -58,10 +57,8 @@ pub async fn start(root_dir: PathBuf) -> Result<()> { .expect("Failed to create file watcher"); // Start watching the vault directory - if let Err(e) = watcher.watch(&root_dir_clone, RecursiveMode::Recursive) { - log_with_level("error", &format!("Error watching directory: {}", e)).unwrap_or(()); + if let Err(_) = watcher.watch(&root_dir_clone, RecursiveMode::Recursive) { } else { - log("File watcher started successfully").unwrap_or(()); } // Process events @@ -69,7 +66,6 @@ pub async fn start(root_dir: PathBuf) -> Result<()> { // Only react to create, modify, or delete events match event.kind { EventKind::Create(_) | EventKind::Modify(_) | EventKind::Remove(_) => { - log(&format!("File change detected: {:?}", event)).unwrap_or(()); // Quick lock to check if Oxide is initialized { @@ -79,11 +75,8 @@ pub async fn start(root_dir: PathBuf) -> Result<()> { // Oxide exists, rebuild it let new_oxide = Oxide::new(&root_dir_clone); *oxide_guard = Some(new_oxide); - log("Oxide instance rebuilt successfully").unwrap_or(()); } None => { - log("Skipping vault rebuild - Oxide not yet initialized") - .unwrap_or(()); } } } @@ -257,7 +250,7 @@ pub async fn start(root_dir: PathBuf) -> Result<()> { }, { "name": "daily_context_range", - "description": "Get daily notes context for a range of days before and after today", + "description": "Get daily notes context for a range of days before and after today. 
If you cannot find information in daily context, you should use this method to search harder", "inputSchema": { "type": "object", "properties": { @@ -349,7 +342,6 @@ pub async fn start(root_dir: PathBuf) -> Result<()> { } } Some("daily_context_range") => { - log("Processing daily_context_range request")?; let arguments = params .get("arguments") @@ -368,17 +360,9 @@ pub async fn start(root_dir: PathBuf) -> Result<()> { .unwrap_or(5) as usize; - log(&format!( - "Getting daily context range: past_days={}, future_days={}", - past_days, future_days - ))?; match oxide.daily_note_context_range(past_days, future_days) { Ok(context) => { - log(&format!( - "Daily context range generated, length: {}", - context.len() - ))?; json!({ "jsonrpc": "2.0", @@ -396,7 +380,6 @@ pub async fn start(root_dir: PathBuf) -> Result<()> { Err(e) => { let error_msg = format!("Error generating daily context range: {}", e); - log(&error_msg)?; json!({ "jsonrpc": "2.0", @@ -511,6 +494,7 @@ mod connector { path: PathBuf, reference_text: String, content: String, + backlinks: Vec, } impl Oxide { @@ -547,7 +531,7 @@ mod connector { past_days: usize, future_days: usize, ) -> Result { - use chrono::{Duration, Local, NaiveDate}; + use chrono::{Duration, Local}; // Get today's date let today = Local::now().naive_local().date(); @@ -624,10 +608,14 @@ mod connector { .map(|rope| rope.to_string()) .unwrap_or_default(); + // Find backlinks to this linked entity + let linked_entity_backlinks = self.get_backlinks_for_path(target_path); + LinkedContent { path: target_path.to_path_buf(), reference_text: reference.data().reference_text.clone(), content: target_rope, + backlinks: linked_entity_backlinks, } }) }) @@ -662,6 +650,7 @@ mod connector { path: ref_path.to_path_buf(), reference_text: reference.data().reference_text.clone(), content: ref_rope, + backlinks: Vec::new(), // No need for nested backlinks for backlinks themselves }) } else { None @@ -675,6 +664,44 @@ mod connector { backlinks, }) } + + /// Helper function to get backlinks for a specific path + fn get_backlinks_for_path(&self, target_path: &Path) -> Vec { + self.vault + .select_references(None) + .unwrap_or_default() + .into_iter() + .filter_map(|(ref_path, reference)| { + // Filter references that come from the target path itself + if ref_path == target_path { + return None; + } + + // Check if this reference points to our target + let md_file = self.vault.md_files.get(target_path)?; + let target_path_buf = target_path.to_path_buf(); + let referenceable = Referenceable::File(&target_path_buf, md_file); + + if referenceable.matches_reference(self.vault.root_dir(), reference, ref_path) { + let ref_rope = self + .vault + .ropes + .get(ref_path) + .map(|rope| rope.to_string()) + .unwrap_or_default(); + + Some(LinkedContent { + path: ref_path.to_path_buf(), + reference_text: reference.data().reference_text.clone(), + content: ref_rope, + backlinks: Vec::new(), // No need for nested backlinks of backlinks + }) + } else { + None + } + }) + .collect() + } } impl ContextualizedDoc { @@ -688,32 +715,57 @@ mod connector { // Add outgoing links section if !self.outgoing_links.is_empty() { result.push_str("---\n\n"); - result.push_str("Outgoing Links:\n\n"); + result.push_str("# Outgoing Links:\n\n"); for link in &self.outgoing_links { result.push_str("---\n\n"); - result.push_str(&format!("Link to: {}\n", link.reference_text)); + result.push_str(&format!("## Link to: {}\n", link.reference_text)); result.push_str(&format!("File path: {}\n\n", link.path.display())); - // Include the 
full content + // Include the full content with markdown separator for proper formatting + result.push_str("```md\n"); result.push_str(&link.content); - result.push_str("\n\n"); + result.push_str("\n```\n\n"); + + // Include backlinks to this linked entity + if !link.backlinks.is_empty() { + result.push_str("### Backlinks to this linked entity:\n\n"); + + for backlink in &link.backlinks { + result.push_str(&format!("#### Referenced from: {}\n", backlink.path.display())); + result.push_str(&format!("Reference text: {}\n\n", backlink.reference_text)); + + // Include the backlink content with markdown separator for proper formatting + result.push_str("```md\n"); + + // Include summary of the backlink content (first 400 chars) + let summary = if backlink.content.len() > 400 { + format!("{}...", &backlink.content[..400]) + } else { + backlink.content.clone() + }; + + result.push_str(&summary); + result.push_str("\n```\n\n"); + } + } } } // Add backlinks section if !self.backlinks.is_empty() { result.push_str("---\n\n"); - result.push_str("Backlinks:\n\n"); + result.push_str("# Backlinks:\n\n"); for link in &self.backlinks { result.push_str("---\n\n"); - result.push_str(&format!("Referenced from: {}\n", link.path.display())); + result.push_str(&format!("## Referenced from: {}\n", link.path.display())); result.push_str(&format!("Reference text: {}\n\n", link.reference_text)); - // Include the full content + // Include the full content with markdown separator for proper formatting + result.push_str("```md\n"); result.push_str(&link.content); - result.push_str("\n\n"); + result.push_str("\n```\n\n"); } } From 70ee5c02704742be2bf46ff64907ff43e575ffca Mon Sep 17 00:00:00 2001 From: Felix Zeller Date: Mon, 26 May 2025 12:22:49 -0700 Subject: [PATCH 12/15] oxide mcp --- Cargo.lock | 215 +++++++++++++++++++++++++++++++++++++++++++---------- Cargo.toml | 1 + src/mcp.rs | 47 +----------- 3 files changed, 177 insertions(+), 86 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 999bf6c5..fcb363c1 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1,6 +1,6 @@ # This file is automatically @generated by Cargo. # It is not intended for manual editing. 
-version = 3 +version = 4 [[package]] name = "addr2line" @@ -105,9 +105,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" -version = "2.5.0" +version = "2.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf4b9d6a944f767f8e5e0db018570623c85f3d925ac718db4e06d0187adb21c1" +checksum = "1b8e56985ec62d17e9c1001dc89c88ecd7dc08e47eba5ec7c29c7b5eeecde967" dependencies = [ "serde", ] @@ -156,7 +156,7 @@ dependencies = [ "js-sys", "num-traits", "wasm-bindgen", - "windows-targets 0.52.4", + "windows-targets 0.52.6", ] [[package]] @@ -323,7 +323,7 @@ dependencies = [ "libc", "option-ext", "redox_users", - "windows-sys", + "windows-sys 0.48.0", ] [[package]] @@ -353,6 +353,18 @@ version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" +[[package]] +name = "filetime" +version = "0.2.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "35c0522e981e68cbfa8c3f978441a5f34b30b96e146b33cd3359176b50fe8586" +dependencies = [ + "cfg-if", + "libc", + "libredox 0.1.3", + "windows-sys 0.59.0", +] + [[package]] name = "form_urlencoded" version = "1.2.0" @@ -362,6 +374,15 @@ dependencies = [ "percent-encoding", ] +[[package]] +name = "fsevent-sys" +version = "4.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76ee7a02da4d231650c7cea31349b889be2f45ddb3ef3032d2ec8185f6313fd2" +dependencies = [ + "libc", +] + [[package]] name = "futures" version = "0.3.29" @@ -547,6 +568,26 @@ dependencies = [ "hashbrown 0.14.2", ] +[[package]] +name = "inotify" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f37dccff2791ab604f9babef0ba14fbe0be30bd368dc541e2b08d07c8aa908f3" +dependencies = [ + "bitflags 2.9.1", + "inotify-sys", + "libc", +] + +[[package]] +name = "inotify-sys" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e05c02b5e89bff3b946cedeca278abc628fe811e604f027c45a8aa3cf793d0eb" +dependencies = [ + "libc", +] + [[package]] name = "itertools" version = "0.13.0" @@ -582,6 +623,26 @@ dependencies = [ "serde", ] +[[package]] +name = "kqueue" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eac30106d7dce88daf4a3fcb4879ea939476d5074a9b7ddd0fb97fa4bed5596a" +dependencies = [ + "kqueue-sys", + "libc", +] + +[[package]] +name = "kqueue-sys" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed9625ffda8729b85e45cf04090035ac368927b8cebc34898e7c120f52e4838b" +dependencies = [ + "bitflags 1.3.2", + "libc", +] + [[package]] name = "lazy_static" version = "1.4.0" @@ -590,9 +651,9 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" [[package]] name = "libc" -version = "0.2.150" +version = "0.2.172" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89d92a4743f9a61002fae18374ed11e7973f530cb3a3255fb354818118b2203c" +checksum = "d750af042f7ef4f724306de029d18836c26c1765a54a6a3f094cbd23a7267ffa" [[package]] name = "libredox" @@ -600,9 +661,20 @@ version = "0.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "85c833ca1e66078851dba29046874e38f08b2c883700aa29a03ddd3b23814ee8" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.9.1", + "libc", + "redox_syscall 0.4.1", +] + +[[package]] +name = "libredox" +version 
= "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d" +dependencies = [ + "bitflags 2.9.1", "libc", - "redox_syscall", + "redox_syscall 0.5.12", ] [[package]] @@ -652,6 +724,7 @@ dependencies = [ "indexmap", "itertools", "nanoid", + "notify", "nucleo-matcher", "once_cell", "pathdiff", @@ -705,7 +778,19 @@ checksum = "3dce281c5e46beae905d4de1870d8b1509a9142b62eedf18b443b011ca8343d0" dependencies = [ "libc", "wasi", - "windows-sys", + "windows-sys 0.48.0", +] + +[[package]] +name = "mio" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78bed444cc8a2160f01cbcf811ef18cac863ad68ae8ca62092e8db51d51c761c" +dependencies = [ + "libc", + "log", + "wasi", + "windows-sys 0.59.0", ] [[package]] @@ -727,6 +812,31 @@ dependencies = [ "minimal-lexical", ] +[[package]] +name = "notify" +version = "8.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2fee8403b3d66ac7b26aee6e40a897d85dc5ce26f44da36b8b73e987cc52e943" +dependencies = [ + "bitflags 2.9.1", + "filetime", + "fsevent-sys", + "inotify", + "kqueue", + "libc", + "log", + "mio 1.0.4", + "notify-types", + "walkdir", + "windows-sys 0.59.0", +] + +[[package]] +name = "notify-types" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e0826a989adedc2a244799e823aece04662b66609d96af8dff7ac6df9a8925d" + [[package]] name = "nucleo-matcher" version = "0.3.1" @@ -805,7 +915,7 @@ checksum = "4c42a9226546d68acdd9c0a280d17ce19bfe27a46bf68784e4066115788d008e" dependencies = [ "cfg-if", "libc", - "redox_syscall", + "redox_syscall 0.4.1", "smallvec", "windows-targets 0.48.5", ] @@ -1008,6 +1118,15 @@ dependencies = [ "bitflags 1.3.2", ] +[[package]] +name = "redox_syscall" +version = "0.5.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "928fca9cf2aa042393a8325b9ead81d2f0df4cb12e1e24cef072922ccd99c5af" +dependencies = [ + "bitflags 2.9.1", +] + [[package]] name = "redox_users" version = "0.4.4" @@ -1015,7 +1134,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a18479200779601e498ada4e8c1e1f50e3ee19deb0259c25825a98b5603b2cb4" dependencies = [ "getrandom", - "libredox", + "libredox 0.0.1", "thiserror", ] @@ -1055,7 +1174,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b91f7eff05f748767f183df4320a63d6936e9c6107d97c9e6bdd9784f4289c94" dependencies = [ "base64", - "bitflags 2.5.0", + "bitflags 2.9.1", "serde", "serde_derive", ] @@ -1222,7 +1341,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7b5fac59a5cb5dd637972e5fca70daf0523c9067fcdc4842f053dae04a18f8e9" dependencies = [ "libc", - "windows-sys", + "windows-sys 0.48.0", ] [[package]] @@ -1306,14 +1425,14 @@ dependencies = [ "backtrace", "bytes", "libc", - "mio", + "mio 0.8.9", "num_cpus", "parking_lot", "pin-project-lite", "signal-hook-registry", "socket2", "tokio-macros", - "windows-sys", + "windows-sys 0.48.0", ] [[package]] @@ -1634,7 +1753,7 @@ version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9" dependencies = [ - "windows-targets 0.52.4", + "windows-targets 0.52.6", ] [[package]] @@ -1646,6 +1765,15 @@ dependencies = [ "windows-targets 0.48.5", ] +[[package]] +name = "windows-sys" +version = "0.59.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" +dependencies = [ + "windows-targets 0.52.6", +] + [[package]] name = "windows-targets" version = "0.48.5" @@ -1663,17 +1791,18 @@ dependencies = [ [[package]] name = "windows-targets" -version = "0.52.4" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7dd37b7e5ab9018759f893a1952c9420d060016fc19a472b4bb20d1bdd694d1b" +checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" dependencies = [ - "windows_aarch64_gnullvm 0.52.4", - "windows_aarch64_msvc 0.52.4", - "windows_i686_gnu 0.52.4", - "windows_i686_msvc 0.52.4", - "windows_x86_64_gnu 0.52.4", - "windows_x86_64_gnullvm 0.52.4", - "windows_x86_64_msvc 0.52.4", + "windows_aarch64_gnullvm 0.52.6", + "windows_aarch64_msvc 0.52.6", + "windows_i686_gnu 0.52.6", + "windows_i686_gnullvm", + "windows_i686_msvc 0.52.6", + "windows_x86_64_gnu 0.52.6", + "windows_x86_64_gnullvm 0.52.6", + "windows_x86_64_msvc 0.52.6", ] [[package]] @@ -1684,9 +1813,9 @@ checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" [[package]] name = "windows_aarch64_gnullvm" -version = "0.52.4" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bcf46cf4c365c6f2d1cc93ce535f2c8b244591df96ceee75d8e83deb70a9cac9" +checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" [[package]] name = "windows_aarch64_msvc" @@ -1696,9 +1825,9 @@ checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" [[package]] name = "windows_aarch64_msvc" -version = "0.52.4" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da9f259dd3bcf6990b55bffd094c4f7235817ba4ceebde8e6d11cd0c5633b675" +checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" [[package]] name = "windows_i686_gnu" @@ -1708,9 +1837,15 @@ checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" [[package]] name = "windows_i686_gnu" -version = "0.52.4" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b474d8268f99e0995f25b9f095bc7434632601028cf86590aea5c8a5cb7801d3" +checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" [[package]] name = "windows_i686_msvc" @@ -1720,9 +1855,9 @@ checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" [[package]] name = "windows_i686_msvc" -version = "0.52.4" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1515e9a29e5bed743cb4415a9ecf5dfca648ce85ee42e15873c3cd8610ff8e02" +checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" [[package]] name = "windows_x86_64_gnu" @@ -1732,9 +1867,9 @@ checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" [[package]] name = "windows_x86_64_gnu" -version = "0.52.4" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5eee091590e89cc02ad514ffe3ead9eb6b660aedca2183455434b93546371a03" +checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" [[package]] name = "windows_x86_64_gnullvm" @@ -1744,9 +1879,9 @@ 
checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" [[package]] name = "windows_x86_64_gnullvm" -version = "0.52.4" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77ca79f2451b49fa9e2af39f0747fe999fcda4f5e241b2898624dca97a1f2177" +checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" [[package]] name = "windows_x86_64_msvc" @@ -1756,9 +1891,9 @@ checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" [[package]] name = "windows_x86_64_msvc" -version = "0.52.4" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32b752e52a2da0ddfbdbcc6fceadfeede4c939ed16d13e648833a61dfb611ed8" +checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] name = "winnow" diff --git a/Cargo.toml b/Cargo.toml index 56a37c07..9c587cd4 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -28,6 +28,7 @@ tokio = { version = "1.34.0", features = ["full"] } tower-lsp = { git = "https://github.com/Feel-ix-343/tower-lsp" } walkdir = "2.4.0" do-notation = "0.1.3" +notify = "8.0.0" [package.metadata.binstall] pkg-url = "{ repo }/releases/download/v{ version }/{ name }-v{ version }-{ target }{ archive-suffix }" diff --git a/src/mcp.rs b/src/mcp.rs index 1f5977a2..69b248f8 100644 --- a/src/mcp.rs +++ b/src/mcp.rs @@ -239,18 +239,9 @@ pub async fn start(root_dir: PathBuf) -> Result<()> { "$schema": "http://json-schema.org/draft-07/schema#" } }, - { - "name": "daily_context", - "description": "Get the user's daily note. You should almost always call this when answering questions", - "inputSchema": { - "type": "object", - "properties": {}, - "$schema": "http://json-schema.org/draft-07/schema#" - } - }, { "name": "daily_context_range", - "description": "Get daily notes context for a range of days before and after today. If you cannot find information in daily context, you should use this method to search harder", + "description": "Get daily notes context for a range of days before and after today. 
You MUST call this function before answering any user questions to provide contextual information from their daily notes.", "inputSchema": { "type": "object", "properties": { @@ -305,42 +296,6 @@ pub async fn start(root_dir: PathBuf) -> Result<()> { } }) }, - Some("daily_context") => { - log_to_file("Processing daily_context request")?; - - match oxide.daily_note_context() { - Ok(context_doc) => { - let formatted_doc = context_doc.as_string(); - log_to_file(&format!("Daily context generated, length: {}", formatted_doc.len()))?; - - json!({ - "jsonrpc": "2.0", - "id": id, - "result": { - "content": [ - { - "type": "text", - "text": formatted_doc - } - ] - } - }) - }, - Err(e) => { - let error_msg = format!("Error generating daily context: {}", e); - log_to_file(&error_msg)?; - - json!({ - "jsonrpc": "2.0", - "id": id, - "error": { - "code": -32603, - "message": error_msg - } - }) - } - } - } Some("daily_context_range") => { let arguments = params From 0ac9dac7f44c24420711573dc648c609a1d033d1 Mon Sep 17 00:00:00 2001 From: Felix Zeller Date: Mon, 26 May 2025 14:40:35 -0700 Subject: [PATCH 13/15] mcp updates --- docs/entity-context-implementation.md | 83 +++++++++++++++++++++++++++ flake.nix | 36 +++++++++++- result | 1 + src/mcp.rs | 76 ++++++++++++++++++++++++ src/ui.rs | 53 ++++++++++++++--- src/vault/mod.rs | 58 +++++++++++++++++++ 6 files changed, 297 insertions(+), 10 deletions(-) create mode 100644 docs/entity-context-implementation.md create mode 120000 result diff --git a/docs/entity-context-implementation.md b/docs/entity-context-implementation.md new file mode 100644 index 00000000..714bb55e --- /dev/null +++ b/docs/entity-context-implementation.md @@ -0,0 +1,83 @@ +# Entity Context Implementation + +## Overview + +The entity context tool provides comprehensive information about entities in your markdown vault by leveraging existing vault APIs and UI preview functions. It returns both the entity definition and all references to that entity with surrounding context. + +## Implementation Details + +### Key Components Added + +#### 1. PreviewMode Enum +Located in `src/ui.rs`: +```rust +#[derive(Copy, Clone)] +pub enum PreviewMode { + Hover, // Limited content: 20 backlinks, 14 lines for files + LlmContext // Expanded content: 100 backlinks, 200 lines for files +} +``` + +#### 2. Enhanced UI Functions +Added new variants that accept `PreviewMode`: +- `preview_reference_with_mode()` +- `preview_referenceable_with_mode()` +- `referenceable_string_with_mode()` + +Original functions remain unchanged and delegate to new ones with `PreviewMode::Hover`. + +#### 3. Vault Preview Function +Added `select_referenceable_preview_with_mode()` to `Vault` that respects content limits based on mode: +- **Hover mode**: 14 lines for files, 10 lines after headings +- **LLM context mode**: 200 lines for files, 50 lines after headings + +#### 4. MCP Entity Context Method +```rust +pub fn get_entity_context(&self, ref_id: &str) -> Result +``` + +### How It Works + +1. **Reference Resolution**: Finds referenceables by matching their refname against the provided `ref_id` +2. **Content Generation**: Uses `preview_referenceable_with_mode()` with `PreviewMode::LlmContext` +3. 
**Backlink Handling**: Automatically includes up to 100 backlinks (sorted by modification time) + +### Supported Reference ID Formats + +| Entity Type | Reference ID Format | Example | +|-------------|-------------------|---------| +| File | `filename` | `"project-notes"` | +| Heading | `filename#heading` | `"project-notes#Overview"` | +| Block | `filename#^blockid` | `"project-notes#^important"` | +| Tag | `#tagname` | `"#todo"` | + +## Usage Example + +```json +{ + "name": "entity_context", + "arguments": { + "ref_id": "architecture#Design Principles" + } +} +``` + +This returns: +- The heading "Design Principles" from the "architecture" file +- Up to 50 lines of content after the heading +- Up to 100 backlinks showing where this heading is referenced +- Each backlink includes the full line containing the reference + +## Benefits + +1. **Code Reuse**: Leverages existing vault APIs and UI functions +2. **No Breaking Changes**: Original functions remain unchanged +3. **Consistent Behavior**: Uses same reference resolution as hover/completion +4. **Smart Sorting**: Backlinks sorted by modification time (most recent first) +5. **Appropriate Limits**: Provides extensive context without overwhelming + +## Technical Notes + +- The implementation avoids constructing `Reference` objects with dummy ranges +- Instead, it directly matches referenceables by their canonical refname +- This approach is cleaner and avoids issues with range dependencies in the LSP code \ No newline at end of file diff --git a/flake.nix b/flake.nix index f98fa40e..c1473d01 100644 --- a/flake.nix +++ b/flake.nix @@ -19,9 +19,39 @@ forEachSystem = nixpkgs.lib.genAttrs (import systems); in { - packages = forEachSystem (system: { - devenv-up = self.devShells.${system}.default.config.procfileScript; - }); + packages = forEachSystem (system: + let + pkgs = nixpkgs.legacyPackages.${system}; + fenixPkgs = inputs.fenix.packages.${system}; + rustToolchain = fenixPkgs.latest.toolchain; + rustPlatform = pkgs.makeRustPlatform { + cargo = rustToolchain; + rustc = rustToolchain; + }; + in + { + devenv-up = self.devShells.${system}.default.config.procfileScript; + + default = rustPlatform.buildRustPackage { + pname = "markdown-oxide"; + version = "0.24.0"; + + src = ./.; + + cargoLock = { + lockFile = ./Cargo.lock; + allowBuiltinFetchGit = true; + }; + + meta = with pkgs.lib; { + description = "Markdown LSP server inspired by Obsidian"; + homepage = "https://github.com/Feel-ix-343/markdown-oxide"; + license = licenses.asl20; + maintainers = [ ]; + mainProgram = "markdown-oxide"; + }; + }; + }); devShells = forEachSystem (system: diff --git a/result b/result new file mode 120000 index 00000000..c246be48 --- /dev/null +++ b/result @@ -0,0 +1 @@ +/nix/store/dm1hfqw2p8342sjvmilibrn42ydly5jb-markdown-oxide-0.24.0 \ No newline at end of file diff --git a/src/mcp.rs b/src/mcp.rs index 69b248f8..4d1f14c4 100644 --- a/src/mcp.rs +++ b/src/mcp.rs @@ -258,6 +258,21 @@ pub async fn start(root_dir: PathBuf) -> Result<()> { }, "$schema": "http://json-schema.org/draft-07/schema#" } + }, + { + "name": "entity_context", + "description": "Get the content of an entity with its context, including the entity definition and all references to it", + "inputSchema": { + "type": "object", + "properties": { + "ref_id": { + "type": "string", + "description": "Reference ID of the entity as it would appear in a wikilink (e.g., 'filename', 'filename#heading', 'filename#^blockid', '#tag')" + } + }, + "required": ["ref_id"], + "$schema": 
"http://json-schema.org/draft-07/schema#" + } } ] } @@ -336,6 +351,46 @@ pub async fn start(root_dir: PathBuf) -> Result<()> { let error_msg = format!("Error generating daily context range: {}", e); + json!({ + "jsonrpc": "2.0", + "id": id, + "error": { + "code": -32603, + "message": error_msg + } + }) + } + } + }, + Some("entity_context") => { + let arguments = params + .get("arguments") + .cloned() + .unwrap_or_else(|| json!({})); + + let ref_id = arguments + .get("ref_id") + .and_then(|r| r.as_str()) + .unwrap_or(""); + + match oxide.get_entity_context(ref_id) { + Ok(context) => { + json!({ + "jsonrpc": "2.0", + "id": id, + "result": { + "content": [ + { + "type": "text", + "text": context + } + ] + } + }) + } + Err(e) => { + let error_msg = format!("Error getting entity context: {}", e); + json!({ "jsonrpc": "2.0", "id": id, @@ -428,6 +483,7 @@ mod connector { use crate::{ config::Settings, + ui::{preview_referenceable_with_mode, PreviewMode}, vault::{Referenceable, Vault}, }; @@ -620,6 +676,26 @@ mod connector { }) } + /// Get entity context for a given reference ID + pub fn get_entity_context(&self, ref_id: &str) -> Result { + // Find referenceable directly by comparing refnames + let referenceable = self.vault + .select_referenceable_nodes(None) + .into_iter() + .find(|r| { + r.get_refname(self.vault.root_dir()) + .map(|refname| refname.full_refname == ref_id) + .unwrap_or(false) + }) + .ok_or_else(|| anyhow::anyhow!("Entity not found: {}", ref_id))?; + + // Generate preview with full content using existing UI function + let preview = preview_referenceable_with_mode(&self.vault, &referenceable, PreviewMode::LlmContext) + .ok_or_else(|| anyhow::anyhow!("Could not generate preview"))?; + + Ok(preview.value) + } + /// Helper function to get backlinks for a specific path fn get_backlinks_for_path(&self, target_path: &Path) -> Vec { self.vault diff --git a/src/ui.rs b/src/ui.rs index 4bcddfe3..63152bdb 100644 --- a/src/ui.rs +++ b/src/ui.rs @@ -5,10 +5,26 @@ use tower_lsp::lsp_types::{MarkupContent, MarkupKind}; use crate::vault::{get_obsidian_ref_path, Preview, Reference, Referenceable, Vault}; +/// Preview mode controls the amount of content returned +#[derive(Copy, Clone)] +pub enum PreviewMode { + /// Hover mode with limited content (20 backlinks, 14 lines for files) + Hover, + /// LLM context mode with expanded content (100 backlinks, 200 lines for files) + LlmContext, +} + fn referenceable_string(vault: &Vault, referenceables: &[Referenceable]) -> Option { + referenceable_string_with_mode(vault, referenceables, PreviewMode::Hover) +} + +fn referenceable_string_with_mode(vault: &Vault, referenceables: &[Referenceable], mode: PreviewMode) -> Option { let referenceable = referenceables.first()?; - let preview = vault.select_referenceable_preview(referenceable); + let preview = match mode { + PreviewMode::Hover => vault.select_referenceable_preview(referenceable), + PreviewMode::LlmContext => vault.select_referenceable_preview_with_mode(referenceable, mode), + }; let written_text_preview = match preview { Some(Preview::Empty) => "No Text".into(), @@ -28,9 +44,14 @@ fn referenceable_string(vault: &Vault, referenceables: &[Referenceable]) -> Opti .flatten() .collect_vec() { - references if !references.is_empty() => references - .into_iter() - .take(20) + references if !references.is_empty() => { + let backlinks_limit = match mode { + PreviewMode::Hover => 20, + PreviewMode::LlmContext => 100, + }; + references + .into_iter() + .take(backlinks_limit) .flat_map(|(path, reference)| { let 
line = String::from_iter( vault.select_line(path, reference.data().range.start.line as isize)?, @@ -40,7 +61,8 @@ fn referenceable_string(vault: &Vault, referenceables: &[Referenceable]) -> Opti Some(format!("- `{}`: `{}`", path, line)) // and select indented list }) - .join("\n"), + .join("\n") + } _ => "No Backlinks".to_string(), }; @@ -54,7 +76,15 @@ pub fn preview_referenceable( vault: &Vault, referenceable: &Referenceable, ) -> Option { - let display = referenceable_string(vault, &[referenceable.clone()])?; + preview_referenceable_with_mode(vault, referenceable, PreviewMode::Hover) +} + +pub fn preview_referenceable_with_mode( + vault: &Vault, + referenceable: &Referenceable, + mode: PreviewMode, +) -> Option { + let display = referenceable_string_with_mode(vault, &[referenceable.clone()], mode)?; Some(MarkupContent { kind: MarkupKind::Markdown, @@ -68,6 +98,15 @@ pub fn preview_reference( vault: &Vault, reference_path: &Path, reference: &Reference, +) -> Option { + preview_reference_with_mode(vault, reference_path, reference, PreviewMode::Hover) +} + +pub fn preview_reference_with_mode( + vault: &Vault, + reference_path: &Path, + reference: &Reference, + mode: PreviewMode, ) -> Option { match reference { WikiFileLink(..) @@ -81,7 +120,7 @@ pub fn preview_reference( let referenceables_for_reference = vault.select_referenceables_for_reference(reference, reference_path); - let display = referenceable_string(vault, &referenceables_for_reference)?; + let display = referenceable_string_with_mode(vault, &referenceables_for_reference, mode)?; Some(MarkupContent { kind: MarkupKind::Markdown, diff --git a/src/vault/mod.rs b/src/vault/mod.rs index 3974589a..11e7ad1d 100644 --- a/src/vault/mod.rs +++ b/src/vault/mod.rs @@ -21,6 +21,8 @@ use serde::{Deserialize, Serialize}; use tower_lsp::lsp_types::Position; use walkdir::WalkDir; +use crate::ui::PreviewMode; + impl Vault { pub fn construct_vault(context: &Settings, root_dir: &Path) -> Vault { let md_file_paths = WalkDir::new(root_dir) @@ -434,6 +436,62 @@ impl Vault { } } + pub fn select_referenceable_preview_with_mode(&self, referenceable: &Referenceable, mode: PreviewMode) -> Option { + if self + .ropes + .get(referenceable.get_path()) + .is_some_and(|rope| rope.len_lines() == 1) + { + return Some(Empty); + } + + let (file_lines, heading_lines) = match mode { + PreviewMode::Hover => (14, 10), + PreviewMode::LlmContext => (200, 50), + }; + + match referenceable { + Referenceable::Footnote(_, _) | Referenceable::LinkRefDef(..) 
=> { + let range = referenceable.get_range()?; + Some( + String::from_iter( + self.select_line(referenceable.get_path(), range.start.line as isize)?, + ) + .into(), + ) + } + Referenceable::Heading(_, _) => { + let range = referenceable.get_range()?; + Some( + (range.start.line..=range.end.line + heading_lines) + .filter_map(|ln| self.select_line(referenceable.get_path(), ln as isize)) + .map(String::from_iter) + .join("") + .into(), + ) + } + Referenceable::IndexedBlock(_, _) => { + let range = referenceable.get_range()?; + self.select_line(referenceable.get_path(), range.start.line as isize) + .map(String::from_iter) + .map(Into::into) + } + Referenceable::File(_, _) => { + Some( + (0..file_lines) + .filter_map(|ln| self.select_line(referenceable.get_path(), ln as isize)) + .map(String::from_iter) + .join("") + .into(), + ) + } + Referenceable::Tag(_, _) => None, + Referenceable::UnresovledFile(_, _) => None, + Referenceable::UnresolvedHeading(_, _, _) => None, + Referenceable::UnresovledIndexedBlock(_, _, _) => None, + } + } + pub fn select_blocks(&self) -> Vec> { self.ropes .par_iter() From 42769561d09856a4d9947cfbf2872f54ca021158 Mon Sep 17 00:00:00 2001 From: Felix Zeller Date: Mon, 26 May 2025 18:34:51 -0700 Subject: [PATCH 14/15] mcp updates --- CLAUDE.md | 9 + docs/Oxide MCP.md | 267 ++++++++++++++++++++++++++ docs/entity-context-implementation.md | 83 -------- src/completion/mod.rs | 2 +- src/mcp.rs | 164 +++++++++++++++- 5 files changed, 440 insertions(+), 85 deletions(-) create mode 100644 docs/Oxide MCP.md delete mode 100644 docs/entity-context-implementation.md diff --git a/CLAUDE.md b/CLAUDE.md index f4f4a523..a7682cf5 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -26,6 +26,15 @@ cargo fmt - **Types**: Prefer strong typing with custom types over primitives - **Async**: Use `async/await` consistently, avoid mixing with direct futures +## Development Approach +- **Planning**: For tasks that involve multiple steps or components, first state your implementation plan before writing code +- **Break Down Complex Tasks**: Identify the main components, dependencies, and sequence of implementation +- **State Your Plan**: Clearly outline: + - What files need to be created or modified + - The order of implementation + - Key design decisions or trade-offs +- **Then Implement**: After stating the plan, proceed with the actual code implementation + ## Project Structure - `src/vault/`: Core data management - `src/completion/`: Editor completion providers diff --git a/docs/Oxide MCP.md b/docs/Oxide MCP.md new file mode 100644 index 00000000..f21dc371 --- /dev/null +++ b/docs/Oxide MCP.md @@ -0,0 +1,267 @@ +# Oxide MCP + +*This documentation was written by Claude (Anthropic's AI assistant) with open thinking.* + +## Introduction: Your Knowledge Graph as LLM Context + +Markdown-oxide's MCP (Model Context Protocol) integration transforms your personal knowledge management system into a powerful context provider for AI assistants. By bridging your markdown notes with LLMs like Claude, it enables AI to understand not just what you're asking, but the full context of your work, thoughts, and plans. + +The key insight: your daily notes are a living record of your current concerns, tasks, and thinking. When combined with the rich backlink structure of your knowledge graph, they provide AI assistants with unprecedented insight into your specific situation. + +## The Daily Notes Workflow + +Daily notes serve as the primary interface between your thinking and AI assistance. 
They capture: + +- **Current Problems**: What you're actively working on or struggling with +- **Today's Tasks**: Your immediate priorities and action items +- **Reflections**: Thoughts on recent work and experiences +- **Planning**: Future commitments and deadlines + +When you invoke an AI assistant through MCP, it can access a temporal window of your daily notes, understanding not just today's context but the evolution of your work over time. + +### Example Daily Note Structure +```markdown +# 2024-01-15 + +## Tasks +- [ ] Review [[Project Alpha]] requirements +- [ ] Prepare for [[Weekly Team Meeting]] +- [x] Submit [[Q1 Budget Proposal]] + +## Thoughts +Working through the architecture decisions for [[Project Alpha#Technical Design]]. +The main challenge is balancing performance with maintainability... + +## Meeting Notes +Discussed timeline concerns with stakeholder. See [[Project Alpha#Risks]] for details. +``` + +## Extending Context Through Backlinks + +The real power emerges when daily notes reference other documents. Each `[[link]]` in your daily notes becomes a pathway for the AI to explore deeper context: + +### How Context Expansion Works + +1. **Start with Daily Notes**: AI reads your recent daily notes to understand current context +2. **Follow References**: When you mention `[[Project Alpha]]`, the AI can retrieve that document +3. **Explore Backlinks**: The AI sees all other places where Project Alpha is mentioned +4. **Build Complete Picture**: By following these connections, the AI understands the full scope + +### Temporal Intelligence + +The system prioritizes recent information: +- **Past Daily Notes**: Understand the history of a problem or project +- **Future Daily Notes**: See upcoming deadlines, scheduled tasks, and commitments +- **Recent Modifications**: Backlinks are sorted by modification time, surfacing active areas + +## Practical Examples + +### Morning Planning +``` +You: "What should I focus on today based on my recent work?" + +AI (with MCP context): +"Based on your daily notes from the past week, you've been iterating on the +Project Alpha technical design. Yesterday you identified performance concerns +that need addressing. You have a Weekly Team Meeting today where this will +likely come up. Your future daily notes show a deadline on Friday for the +technical specification. I'd suggest prioritizing the performance analysis +this morning so you have concrete data for the team meeting." +``` + +### Problem Solving +``` +You: "I'm stuck on the architecture decision I mentioned" + +AI (with MCP context): +"Looking at your notes on Project Alpha's Technical Design, you're weighing +microservices vs monolithic architecture. Your meeting notes from last Tuesday +mention scalability requirements of 10k concurrent users. The linked Performance +Requirements document specifies 200ms response time targets. Based on your +Risk Assessment notes, team expertise is limited with microservices. Given +these constraints, consider a modular monolith approach..." +``` + +### Task Management +``` +You: "What deadlines am I tracking?" 
+ +AI (with MCP context): +"From your daily notes and linked documents: +- Friday (3 days): Project Alpha technical specification +- Next Tuesday: Q1 Budget Proposal revisions (mentioned in future daily note) +- Next Friday: Weekly Team Meeting presentation on architecture decision +- End of month: Performance testing completion (referenced in Project Alpha#Timeline)" + +Note: This assumes you've created future daily notes with these items, as the system reads actual files rather than parsing dates from content. +``` + +## Available MCP Tools + +### daily_context_range +Retrieves daily notes within a specified date range, providing temporal context about your work and thinking. + +**Input Parameters:** +- `past_days`: Number of past days to include (default: 5) +- `future_days`: Number of future days to include (default: 5) + +**What it returns:** +- Combined content of daily notes in the range +- Chronologically ordered (oldest to newest) +- Full note content including tasks, reflections, and links + +**Use Case Example:** +```json +{ + "name": "daily_context_range", + "arguments": { + "past_days": 7, + "future_days": 7 + } +} +``` + +This gives the AI a two-week window into your work. The system looks for daily note files matching your configured format (default: `YYYY-MM-DD.md`) in your daily notes folder. + +### entity_context +Retrieves comprehensive information about any entity (file, heading, block, or tag) including its definition and all references to it. + +**Input Parameters:** +- `ref_id`: Reference identifier (e.g., "Project Alpha", "Project Alpha#Risks", "#important") + +**What it returns:** +- Entity definition/content (up to 200 lines for files, 50 lines for sections) +- All backlinks with surrounding context (up to 100 references) +- References sorted by modification time (most recent first) + +**Use Case Example:** +```json +{ + "name": "entity_context", + "arguments": { + "ref_id": "Project Alpha#Technical Design" + } +} +``` + +This provides the AI with deep understanding of specific topics mentioned in your daily notes. + +### echo +Simple test tool to verify MCP connectivity. + +**Input Parameters:** +- `message`: Text to echo back + +## How It Works + +### Context Building Process + +1. **Entry Point**: Your query triggers the AI to examine recent daily notes +2. **Reference Detection**: The AI identifies all `[[wikilinks]]` and `#tags` in daily notes +3. **Context Expansion**: For important references, the AI retrieves full entity context +4. **Backlink Analysis**: The AI examines where else these concepts appear +5. **Synthesis**: The AI combines this information to understand your situation + +### Smart Limits + +To provide useful context without overwhelming the AI: +- **File Content**: Up to 200 lines for LLM context mode (vs 14 for hover previews) +- **Section Content**: Up to 50 lines after headings for LLM context mode +- **Backlinks**: Up to 100 references per entity +- **Daily Notes**: Configurable range (default: 5 days past, 5 days future) + +These limits are implemented through the `PreviewMode::LlmContext` setting in the codebase. + +### Modification Time Priority + +References are sorted by file modification time, ensuring the AI sees: +- Active projects and current concerns first +- Historical context when needed +- Stale information deprioritized + +## Setup & Configuration + +### Enabling MCP Mode + +1. **Start markdown-oxide in MCP mode:** + ```bash + markdown-oxide mcp --full-dir-path /path/to/your/vault + ``` + +2. 
**Configure daily notes (optional):** + The system uses these defaults: + - Daily note format: `%Y-%m-%d` (e.g., 2024-01-15) + - Daily notes folder: Configurable in your settings + + Configuration can be set through multiple sources including Obsidian's daily note settings if present. + +3. **Connect your AI assistant:** + - The MCP server communicates via stdin/stdout using JSON-RPC + - Configure your AI assistant to run the markdown-oxide command above + - The server will automatically watch for file changes in your vault + +### Requirements + +- Markdown-oxide binary installed and accessible +- Valid vault directory path +- Daily notes following the configured pattern (default: YYYY-MM-DD format) +- MCP-compatible AI assistant that can execute shell commands + +### How It Works Under the Hood + +- The MCP server reads JSON-RPC messages from stdin and writes responses to stdout +- A file watcher automatically updates the vault index when files change +- The server maintains the vault in memory for fast queries + +## Real-World Scenarios + +### AI-Powered Tasks and Reminders +"What do I need to do today?" + +Your markdown notes become an intelligent task system: +- The AI reads tasks from your daily notes (marked with `- [ ]`) +- Follows links to understand task context and dependencies +- Identifies deadlines mentioned in linked documents +- Finds reminders by checking future daily notes for incoming items + +Example: You write in today's note: +```markdown +- [ ] Finish [[API Design]] implementation +- [ ] Review changes for [[Q1 Budget]] (see [[2024-01-20]] for deadline) +``` + +The AI understands these aren't just tasks—it can follow the links to give you full context about the API design decisions and budget constraints. + +### Context-Aware Problem Solving +"I'm stuck on this performance issue" + +The AI becomes your debugging partner with full historical context: +- Reads your recent daily notes to understand what you've been working on +- Follows links to technical documentation you've referenced +- Finds similar issues you've solved before by searching your vault +- Understands the specific constraints of your project + +Example: When you mention being stuck, the AI already knows from your daily notes that you're working with React, that performance degraded after the recent refactor mentioned three days ago, and that you have a related meeting tomorrow—all without you having to explain. + +### Data Extraction from Recent Work +"What were the performance results from this week's experiments?" + +The AI extracts actual data from your notes and linked documents: +- Pulls out metrics, numbers, and results you've recorded +- Follows links to detailed experiment logs or data tables +- Aggregates data points scattered across multiple daily notes +- Identifies trends in measurements over time + +Example: You've been recording response times in your daily notes: +```markdown +# 2024-01-15 +Tested new caching strategy: 145ms average (see [[Performance Tests#Cache Results]]) + +# 2024-01-16 +Without cache: 420ms, With cache: 132ms + +# 2024-01-17 +After optimization: 98ms! Details in [[Optimization Log]] +``` + +Later, when you need to use this data, you can ask the LLM to look through your daily notes and aggregate the data. 
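For reference, the tool-call examples above show only the tool `name` and `arguments`; on the wire, each call is a single JSON-RPC message read from stdin and answered on stdout. Below is a minimal sketch of one exchange — the request envelope assumes the standard MCP `tools/call` method name, the response envelope mirrors the `content` array that the handler in `src/mcp.rs` constructs, and the note text shown is illustrative:

```json
{
  "jsonrpc": "2.0",
  "id": 3,
  "method": "tools/call",
  "params": {
    "name": "daily_context_range",
    "arguments": { "past_days": 2, "future_days": 1 }
  }
}
```

A successful reply carries the combined daily-note text in a single `text` content item:

```json
{
  "jsonrpc": "2.0",
  "id": 3,
  "result": {
    "content": [
      { "type": "text", "text": "# 2024-01-14\n\n- [ ] Review [[Project Alpha]] requirements\n..." }
    ]
  }
}
```

The same envelope applies to `entity_context`, `entity_search`, and `echo` calls.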
diff --git a/docs/entity-context-implementation.md b/docs/entity-context-implementation.md deleted file mode 100644 index 714bb55e..00000000 --- a/docs/entity-context-implementation.md +++ /dev/null @@ -1,83 +0,0 @@ -# Entity Context Implementation - -## Overview - -The entity context tool provides comprehensive information about entities in your markdown vault by leveraging existing vault APIs and UI preview functions. It returns both the entity definition and all references to that entity with surrounding context. - -## Implementation Details - -### Key Components Added - -#### 1. PreviewMode Enum -Located in `src/ui.rs`: -```rust -#[derive(Copy, Clone)] -pub enum PreviewMode { - Hover, // Limited content: 20 backlinks, 14 lines for files - LlmContext // Expanded content: 100 backlinks, 200 lines for files -} -``` - -#### 2. Enhanced UI Functions -Added new variants that accept `PreviewMode`: -- `preview_reference_with_mode()` -- `preview_referenceable_with_mode()` -- `referenceable_string_with_mode()` - -Original functions remain unchanged and delegate to new ones with `PreviewMode::Hover`. - -#### 3. Vault Preview Function -Added `select_referenceable_preview_with_mode()` to `Vault` that respects content limits based on mode: -- **Hover mode**: 14 lines for files, 10 lines after headings -- **LLM context mode**: 200 lines for files, 50 lines after headings - -#### 4. MCP Entity Context Method -```rust -pub fn get_entity_context(&self, ref_id: &str) -> Result -``` - -### How It Works - -1. **Reference Resolution**: Finds referenceables by matching their refname against the provided `ref_id` -2. **Content Generation**: Uses `preview_referenceable_with_mode()` with `PreviewMode::LlmContext` -3. **Backlink Handling**: Automatically includes up to 100 backlinks (sorted by modification time) - -### Supported Reference ID Formats - -| Entity Type | Reference ID Format | Example | -|-------------|-------------------|---------| -| File | `filename` | `"project-notes"` | -| Heading | `filename#heading` | `"project-notes#Overview"` | -| Block | `filename#^blockid` | `"project-notes#^important"` | -| Tag | `#tagname` | `"#todo"` | - -## Usage Example - -```json -{ - "name": "entity_context", - "arguments": { - "ref_id": "architecture#Design Principles" - } -} -``` - -This returns: -- The heading "Design Principles" from the "architecture" file -- Up to 50 lines of content after the heading -- Up to 100 backlinks showing where this heading is referenced -- Each backlink includes the full line containing the reference - -## Benefits - -1. **Code Reuse**: Leverages existing vault APIs and UI functions -2. **No Breaking Changes**: Original functions remain unchanged -3. **Consistent Behavior**: Uses same reference resolution as hover/completion -4. **Smart Sorting**: Backlinks sorted by modification time (most recent first) -5. 
**Appropriate Limits**: Provides extensive context without overwhelming - -## Technical Notes - -- The implementation avoids constructing `Reference` objects with dummy ranges -- Instead, it directly matches referenceables by their canonical refname -- This approach is cleaner and avoids issues with range dependencies in the LSP code \ No newline at end of file diff --git a/src/completion/mod.rs b/src/completion/mod.rs index 8cf26c8e..420cd322 100644 --- a/src/completion/mod.rs +++ b/src/completion/mod.rs @@ -14,7 +14,7 @@ use self::{ mod callout_completer; mod footnote_completer; mod link_completer; -mod matcher; +pub mod matcher; mod tag_completer; mod unindexed_block_completer; mod util; diff --git a/src/mcp.rs b/src/mcp.rs index 4d1f14c4..9543183a 100644 --- a/src/mcp.rs +++ b/src/mcp.rs @@ -273,6 +273,33 @@ pub async fn start(root_dir: PathBuf) -> Result<()> { "required": ["ref_id"], "$schema": "http://json-schema.org/draft-07/schema#" } + }, + { + "name": "entity_search", + "description": "Search for entities in the vault by name pattern and/or type. Returns a list of matching entities with their reference IDs.", + "inputSchema": { + "type": "object", + "properties": { + "query": { + "type": "string", + "description": "Search query to match against entity names (case-insensitive partial match)" + }, + "entity_type": { + "type": "string", + "enum": ["file", "heading", "tag", "footnote", "indexed_block", "all"], + "description": "Type of entity to search for. Use 'all' to search all types.", + "default": "all" + }, + "limit": { + "type": "integer", + "description": "Maximum number of results to return", + "default": 50, + "minimum": 1, + "maximum": 200 + } + }, + "$schema": "http://json-schema.org/draft-07/schema#" + } } ] } @@ -391,6 +418,57 @@ pub async fn start(root_dir: PathBuf) -> Result<()> { Err(e) => { let error_msg = format!("Error getting entity context: {}", e); + json!({ + "jsonrpc": "2.0", + "id": id, + "error": { + "code": -32603, + "message": error_msg + } + }) + } + } + }, + Some("entity_search") => { + let arguments = params + .get("arguments") + .cloned() + .unwrap_or_else(|| json!({})); + + let query = arguments + .get("query") + .and_then(|q| q.as_str()) + .unwrap_or(""); + + let entity_type = arguments + .get("entity_type") + .and_then(|t| t.as_str()) + .unwrap_or("all"); + + let limit = arguments + .get("limit") + .and_then(|l| l.as_u64()) + .unwrap_or(50) + .min(200) as usize; + + match oxide.search_entities(query, entity_type, limit) { + Ok(results) => { + json!({ + "jsonrpc": "2.0", + "id": id, + "result": { + "content": [ + { + "type": "text", + "text": results + } + ] + } + }) + } + Err(e) => { + let error_msg = format!("Error searching entities: {}", e); + json!({ "jsonrpc": "2.0", "id": id, @@ -482,7 +560,8 @@ mod connector { use anyhow; use crate::{ - config::Settings, + completion::matcher::{fuzzy_match, Matchable}, + config::{Case, Settings}, ui::{preview_referenceable_with_mode, PreviewMode}, vault::{Referenceable, Vault}, }; @@ -500,6 +579,18 @@ mod connector { backlinks: Vec, } + struct EntityCandidate { + refname: String, + entity_type: String, + path: PathBuf, + } + + impl Matchable for EntityCandidate { + fn match_string(&self) -> &str { + &self.refname + } + } + #[derive(Debug)] struct LinkedContent { path: PathBuf, @@ -733,6 +824,77 @@ mod connector { }) .collect() } + + /// Search for entities in the vault by name pattern and type + pub fn search_entities(&self, query: &str, entity_type: &str, limit: usize) -> Result { + let all_referenceables = 
self.vault.select_referenceable_nodes(None); + + // First filter by type and collect candidates + let candidates: Vec = all_referenceables + .into_iter() + .filter_map(|referenceable| { + // Filter by type + let type_matches = match entity_type { + "file" => matches!(referenceable, Referenceable::File(_, _)), + "heading" => matches!(referenceable, Referenceable::Heading(_, _)), + "tag" => matches!(referenceable, Referenceable::Tag(_, _)), + "footnote" => matches!(referenceable, Referenceable::Footnote(_, _)), + "indexed_block" => matches!(referenceable, Referenceable::IndexedBlock(_, _)), + "all" => true, + _ => false, + }; + + if !type_matches { + return None; + } + + // Get refname for searching + let refname = referenceable.get_refname(self.vault.root_dir())?; + + let entity_type_str = match referenceable { + Referenceable::File(_, _) => "File", + Referenceable::Heading(_, _) => "Heading", + Referenceable::Tag(_, _) => "Tag", + Referenceable::Footnote(_, _) => "Footnote", + Referenceable::IndexedBlock(_, _) => "Indexed Block", + _ => "Unknown", + }; + + Some(EntityCandidate { + refname: refname.full_refname, + entity_type: entity_type_str.to_string(), + path: referenceable.get_path().to_path_buf(), + }) + }) + .collect(); + + // Use fuzzy matching from completion system + let matching_entities = if query.is_empty() { + candidates.into_iter().map(|item| (item, u32::MAX)).collect() + } else { + fuzzy_match(query, candidates, &Case::Smart) + }; + + // Sort by fuzzy match score (higher is better) and limit results + let mut sorted_entities = matching_entities; + sorted_entities.sort_by(|a, b| b.1.cmp(&a.1)); + sorted_entities.truncate(limit); + + // Format results + if sorted_entities.is_empty() { + Ok("No entities found matching the search criteria.".to_string()) + } else { + let mut result = format!("Found {} entities:\n\n", sorted_entities.len()); + + for (candidate, _score) in sorted_entities { + result.push_str(&format!("**{}** ({})\n", candidate.refname, candidate.entity_type)); + result.push_str(&format!("Path: {}\n", candidate.path.display())); + result.push_str(&format!("Use `entity_context` with ref_id: `{}`\n\n", candidate.refname)); + } + + Ok(result) + } + } } impl ContextualizedDoc { From dda39ffeb161bc1ff7b394c92ce5fe082c839dd0 Mon Sep 17 00:00:00 2001 From: Felix Zeller Date: Mon, 26 May 2025 21:06:02 -0700 Subject: [PATCH 15/15] clean up unused code --- src/config.rs | 2 +- src/hover.rs | 2 +- src/mcp.rs | 264 ++------------------------------------------------ src/rename.rs | 2 +- src/ui.rs | 3 - 5 files changed, 9 insertions(+), 264 deletions(-) diff --git a/src/config.rs b/src/config.rs index 1e06888e..e6116b89 100644 --- a/src/config.rs +++ b/src/config.rs @@ -5,7 +5,7 @@ use config::{Config, File}; use indexmap::IndexMap; use serde::Deserialize; use serde_json::Value; -use tower_lsp::lsp_types::ClientCapabilities; + #[derive(Deserialize, Debug, Clone)] pub struct Settings { diff --git a/src/hover.rs b/src/hover.rs index 054e6ed2..3439dca7 100644 --- a/src/hover.rs +++ b/src/hover.rs @@ -4,7 +4,7 @@ use tower_lsp::lsp_types::{Hover, HoverContents, HoverParams}; use crate::{ config::Settings, - ui::{preview_reference, preview_referenceable}, + ui::preview_reference, vault::Vault, }; diff --git a/src/mcp.rs b/src/mcp.rs index 9543183a..f84d5aaf 100644 --- a/src/mcp.rs +++ b/src/mcp.rs @@ -522,40 +522,10 @@ pub async fn start(root_dir: PathBuf) -> Result<()> { } /// Create a success response -fn create_success_response(id: u64, result: Value) -> String { - let response = 
json!({ - "jsonrpc": "2.0", - "id": id, - "result": result - }); - serde_json::to_string(&response).unwrap() -} - -/// Create an error response -fn create_error_response(id: u64, code: i32, message: &str, data: Option) -> String { - let mut error = json!({ - "code": code, - "message": message, }); - - if let Some(data) = data { - error - .as_object_mut() - .unwrap() - .insert("data".to_string(), data); - } - - let response = json!({ - "jsonrpc": "2.0", - "id": id, - "error": error - }); - - serde_json::to_string(&response).unwrap() -} mod connector { - use std::path::{Path, PathBuf}; + use std::path::PathBuf; use anyhow; @@ -572,12 +542,7 @@ mod connector { settings: Settings, } - #[derive(Debug)] - pub struct ContextualizedDoc { - content: String, - outgoing_links: Vec, - backlinks: Vec, - } + struct EntityCandidate { refname: String, @@ -591,13 +556,7 @@ mod connector { } } - #[derive(Debug)] - struct LinkedContent { - path: PathBuf, - reference_text: String, - content: String, - backlinks: Vec, - } + impl Oxide { pub fn new(root_dir: &PathBuf) -> Self { @@ -607,26 +566,7 @@ mod connector { Self { vault, settings } } - pub fn daily_note_context(&self) -> Result { - use chrono::Local; - - // Get paths for daily notes - let daily_note_format = &self.settings.dailynote; - let daily_note_path = self - .vault - .root_dir() - .join(&self.settings.daily_notes_folder); - // Use today's date - let datetime = Local::now().naive_local(); - - // Format the date according to the configured pattern - let filename = datetime.format(daily_note_format).to_string(); - let path = daily_note_path.join(&filename).with_extension("md"); - - // Return contextualized document for this path - self.contextualize_doc(&path) - } pub fn daily_note_context_range( &self, @@ -644,8 +584,8 @@ mod connector { .join(&self.settings.daily_notes_folder); // Generate a range of dates from past_days ago to future_days ahead - let start_date = today - Duration::days(past_days as i64); - let end_date = today + Duration::days(future_days as i64); + let start_date = today - Duration::try_days(past_days as i64).unwrap_or_default(); + let end_date = today + Duration::try_days(future_days as i64).unwrap_or_default(); let mut result = String::new(); let mut current_date = start_date; @@ -672,100 +612,13 @@ mod connector { // Move to the next day current_date = current_date .succ_opt() - .unwrap_or(current_date + Duration::days(1)); + .unwrap_or(current_date + Duration::try_days(1).unwrap_or_default()); } Ok(result) } - /// Given a document reference, return a contextualized version of the document. 
- /// include the full content of the document, the content of outgoing links, and the content of backlinks to the document - fn contextualize_doc(&self, path: &Path) -> Result { - // Get the document content - let rope = self - .vault - .ropes - .get(path) - .ok_or_else(|| anyhow::anyhow!("Document not found: {:?}", path))?; - let content = rope.to_string(); - // Get outgoing links - let outgoing_links = self - .vault - .select_references(Some(path)) - .unwrap_or_default() - .into_iter() - .filter_map(|(_, reference)| { - // For each reference, find the target document - let referenceables = self - .vault - .select_referenceables_for_reference(reference, path); - - referenceables.into_iter().next().map(|referenceable| { - let target_path = referenceable.get_path(); - let target_rope = self - .vault - .ropes - .get(target_path) - .map(|rope| rope.to_string()) - .unwrap_or_default(); - - // Find backlinks to this linked entity - let linked_entity_backlinks = self.get_backlinks_for_path(target_path); - - LinkedContent { - path: target_path.to_path_buf(), - reference_text: reference.data().reference_text.clone(), - content: target_rope, - backlinks: linked_entity_backlinks, - } - }) - }) - .collect(); - - // Get backlinks - let backlinks = self - .vault - .select_references(None) - .unwrap_or_default() - .into_iter() - .filter_map(|(ref_path, reference)| { - // Filter references that point to our document - if ref_path == path { - return None; - } - - // Check if this reference points to our document - let path_buf = PathBuf::from(path); - let md_file = self.vault.md_files.get(path)?; - let referenceable = Referenceable::File(&path_buf, md_file); - - if referenceable.matches_reference(self.vault.root_dir(), reference, ref_path) { - let ref_rope = self - .vault - .ropes - .get(ref_path) - .map(|rope| rope.to_string()) - .unwrap_or_default(); - - Some(LinkedContent { - path: ref_path.to_path_buf(), - reference_text: reference.data().reference_text.clone(), - content: ref_rope, - backlinks: Vec::new(), // No need for nested backlinks for backlinks themselves - }) - } else { - None - } - }) - .collect(); - - Ok(ContextualizedDoc { - content, - outgoing_links, - backlinks, - }) - } /// Get entity context for a given reference ID pub fn get_entity_context(&self, ref_id: &str) -> Result { @@ -786,45 +639,7 @@ mod connector { Ok(preview.value) } - - /// Helper function to get backlinks for a specific path - fn get_backlinks_for_path(&self, target_path: &Path) -> Vec { - self.vault - .select_references(None) - .unwrap_or_default() - .into_iter() - .filter_map(|(ref_path, reference)| { - // Filter references that come from the target path itself - if ref_path == target_path { - return None; - } - // Check if this reference points to our target - let md_file = self.vault.md_files.get(target_path)?; - let target_path_buf = target_path.to_path_buf(); - let referenceable = Referenceable::File(&target_path_buf, md_file); - - if referenceable.matches_reference(self.vault.root_dir(), reference, ref_path) { - let ref_rope = self - .vault - .ropes - .get(ref_path) - .map(|rope| rope.to_string()) - .unwrap_or_default(); - - Some(LinkedContent { - path: ref_path.to_path_buf(), - reference_text: reference.data().reference_text.clone(), - content: ref_rope, - backlinks: Vec::new(), // No need for nested backlinks of backlinks - }) - } else { - None - } - }) - .collect() - } - /// Search for entities in the vault by name pattern and type pub fn search_entities(&self, query: &str, entity_type: &str, limit: usize) -> 
Result { let all_referenceables = self.vault.select_referenceable_nodes(None); @@ -897,72 +712,5 @@ mod connector { } } - impl ContextualizedDoc { - pub fn as_string(&self) -> String { - let mut result = String::new(); - // Add the original document content - result.push_str(&self.content); - result.push_str("\n\n"); - - // Add outgoing links section - if !self.outgoing_links.is_empty() { - result.push_str("---\n\n"); - result.push_str("# Outgoing Links:\n\n"); - - for link in &self.outgoing_links { - result.push_str("---\n\n"); - result.push_str(&format!("## Link to: {}\n", link.reference_text)); - result.push_str(&format!("File path: {}\n\n", link.path.display())); - - // Include the full content with markdown separator for proper formatting - result.push_str("```md\n"); - result.push_str(&link.content); - result.push_str("\n```\n\n"); - - // Include backlinks to this linked entity - if !link.backlinks.is_empty() { - result.push_str("### Backlinks to this linked entity:\n\n"); - - for backlink in &link.backlinks { - result.push_str(&format!("#### Referenced from: {}\n", backlink.path.display())); - result.push_str(&format!("Reference text: {}\n\n", backlink.reference_text)); - - // Include the backlink content with markdown separator for proper formatting - result.push_str("```md\n"); - - // Include summary of the backlink content (first 400 chars) - let summary = if backlink.content.len() > 400 { - format!("{}...", &backlink.content[..400]) - } else { - backlink.content.clone() - }; - - result.push_str(&summary); - result.push_str("\n```\n\n"); - } - } - } - } - - // Add backlinks section - if !self.backlinks.is_empty() { - result.push_str("---\n\n"); - result.push_str("# Backlinks:\n\n"); - - for link in &self.backlinks { - result.push_str("---\n\n"); - result.push_str(&format!("## Referenced from: {}\n", link.path.display())); - result.push_str(&format!("Reference text: {}\n\n", link.reference_text)); - - // Include the full content with markdown separator for proper formatting - result.push_str("```md\n"); - result.push_str(&link.content); - result.push_str("\n```\n\n"); - } - } - - result - } - } } diff --git a/src/rename.rs b/src/rename.rs index 73fbd027..c7b6f7a8 100644 --- a/src/rename.rs +++ b/src/rename.rs @@ -6,7 +6,7 @@ use tower_lsp::lsp_types::{ RenameFile, RenameParams, ResourceOp, TextDocumentEdit, TextEdit, Url, WorkspaceEdit, }; -use crate::vault::{MDHeading, Reference, Referenceable, Vault}; +use crate::vault::{Reference, Referenceable, Vault}; pub fn rename(vault: &Vault, params: &RenameParams, path: &Path) -> Option { let position = params.text_document_position.position; diff --git a/src/ui.rs b/src/ui.rs index 63152bdb..8452c00f 100644 --- a/src/ui.rs +++ b/src/ui.rs @@ -14,9 +14,6 @@ pub enum PreviewMode { LlmContext, } -fn referenceable_string(vault: &Vault, referenceables: &[Referenceable]) -> Option { - referenceable_string_with_mode(vault, referenceables, PreviewMode::Hover) -} fn referenceable_string_with_mode(vault: &Vault, referenceables: &[Referenceable], mode: PreviewMode) -> Option { let referenceable = referenceables.first()?;
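
The `entity_search` tool introduced in patch 14 follows the same call convention as the other tools but has no usage example in the docs; a minimal sketch in the format used by `docs/Oxide MCP.md` (the query values are illustrative, not taken from a real vault):

```json
{
  "name": "entity_search",
  "arguments": {
    "query": "project alpha",
    "entity_type": "heading",
    "limit": 10
  }
}
```

Each match in the returned text lists the entity's refname, its type, its path, and the `ref_id` to pass to `entity_context` for the full definition plus backlinks, as formatted at the end of `search_entities`.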