diff --git a/CLAUDE.md b/CLAUDE.md new file mode 100644 index 00000000..a7682cf5 --- /dev/null +++ b/CLAUDE.md @@ -0,0 +1,47 @@ +# Markdown Oxide Development Guide + +## Build Commands +```bash +# Build the project +cargo build + +# Run the binary +cargo run + +# Run tests +cargo test + +# Check code style and common issues +cargo clippy + +# Format code +cargo fmt +``` + +## Code Style Guidelines +- **Imports**: Group by standard lib, external crates, then internal modules +- **Naming**: Use snake_case for variables/functions, CamelCase for types/traits +- **Error Handling**: Use `anyhow` for general errors, custom errors for specific cases +- **Documentation**: Document public APIs with rustdoc comments +- **Types**: Prefer strong typing with custom types over primitives +- **Async**: Use `async/await` consistently, avoid mixing with direct futures + +## Development Approach +- **Planning**: For tasks that involve multiple steps or components, first state your implementation plan before writing code +- **Break Down Complex Tasks**: Identify the main components, dependencies, and sequence of implementation +- **State Your Plan**: Clearly outline: + - What files need to be created or modified + - The order of implementation + - Key design decisions or trade-offs +- **Then Implement**: After stating the plan, proceed with the actual code implementation + +## Project Structure +- `src/vault/`: Core data management +- `src/completion/`: Editor completion providers +- `src/tokens.rs`: Markdown token parsing +- `src/main.rs`: Entry point and LSP server setup + +## MCP Integration +The MCP (Model Context Protocol) server implementation lives in `src/mcp.rs`. It exposes vault context to Claude and other MCP-compatible clients. + +For more information on MCP, see: https://modelcontextprotocol.io/llms-full.txt \ No newline at end of file diff --git a/Cargo.lock b/Cargo.lock index 999bf6c5..fcb363c1 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1,6 +1,6 @@ # This file is automatically @generated by Cargo. # It is not intended for manual editing.
-version = 3 +version = 4 [[package]] name = "addr2line" @@ -105,9 +105,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" -version = "2.5.0" +version = "2.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf4b9d6a944f767f8e5e0db018570623c85f3d925ac718db4e06d0187adb21c1" +checksum = "1b8e56985ec62d17e9c1001dc89c88ecd7dc08e47eba5ec7c29c7b5eeecde967" dependencies = [ "serde", ] @@ -156,7 +156,7 @@ dependencies = [ "js-sys", "num-traits", "wasm-bindgen", - "windows-targets 0.52.4", + "windows-targets 0.52.6", ] [[package]] @@ -323,7 +323,7 @@ dependencies = [ "libc", "option-ext", "redox_users", - "windows-sys", + "windows-sys 0.48.0", ] [[package]] @@ -353,6 +353,18 @@ version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" +[[package]] +name = "filetime" +version = "0.2.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "35c0522e981e68cbfa8c3f978441a5f34b30b96e146b33cd3359176b50fe8586" +dependencies = [ + "cfg-if", + "libc", + "libredox 0.1.3", + "windows-sys 0.59.0", +] + [[package]] name = "form_urlencoded" version = "1.2.0" @@ -362,6 +374,15 @@ dependencies = [ "percent-encoding", ] +[[package]] +name = "fsevent-sys" +version = "4.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76ee7a02da4d231650c7cea31349b889be2f45ddb3ef3032d2ec8185f6313fd2" +dependencies = [ + "libc", +] + [[package]] name = "futures" version = "0.3.29" @@ -547,6 +568,26 @@ dependencies = [ "hashbrown 0.14.2", ] +[[package]] +name = "inotify" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f37dccff2791ab604f9babef0ba14fbe0be30bd368dc541e2b08d07c8aa908f3" +dependencies = [ + "bitflags 2.9.1", + "inotify-sys", + "libc", +] + +[[package]] +name = "inotify-sys" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e05c02b5e89bff3b946cedeca278abc628fe811e604f027c45a8aa3cf793d0eb" +dependencies = [ + "libc", +] + [[package]] name = "itertools" version = "0.13.0" @@ -582,6 +623,26 @@ dependencies = [ "serde", ] +[[package]] +name = "kqueue" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eac30106d7dce88daf4a3fcb4879ea939476d5074a9b7ddd0fb97fa4bed5596a" +dependencies = [ + "kqueue-sys", + "libc", +] + +[[package]] +name = "kqueue-sys" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed9625ffda8729b85e45cf04090035ac368927b8cebc34898e7c120f52e4838b" +dependencies = [ + "bitflags 1.3.2", + "libc", +] + [[package]] name = "lazy_static" version = "1.4.0" @@ -590,9 +651,9 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" [[package]] name = "libc" -version = "0.2.150" +version = "0.2.172" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89d92a4743f9a61002fae18374ed11e7973f530cb3a3255fb354818118b2203c" +checksum = "d750af042f7ef4f724306de029d18836c26c1765a54a6a3f094cbd23a7267ffa" [[package]] name = "libredox" @@ -600,9 +661,20 @@ version = "0.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "85c833ca1e66078851dba29046874e38f08b2c883700aa29a03ddd3b23814ee8" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.9.1", + "libc", + "redox_syscall 0.4.1", +] + +[[package]] +name = "libredox" +version 
= "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d" +dependencies = [ + "bitflags 2.9.1", "libc", - "redox_syscall", + "redox_syscall 0.5.12", ] [[package]] @@ -652,6 +724,7 @@ dependencies = [ "indexmap", "itertools", "nanoid", + "notify", "nucleo-matcher", "once_cell", "pathdiff", @@ -705,7 +778,19 @@ checksum = "3dce281c5e46beae905d4de1870d8b1509a9142b62eedf18b443b011ca8343d0" dependencies = [ "libc", "wasi", - "windows-sys", + "windows-sys 0.48.0", +] + +[[package]] +name = "mio" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78bed444cc8a2160f01cbcf811ef18cac863ad68ae8ca62092e8db51d51c761c" +dependencies = [ + "libc", + "log", + "wasi", + "windows-sys 0.59.0", ] [[package]] @@ -727,6 +812,31 @@ dependencies = [ "minimal-lexical", ] +[[package]] +name = "notify" +version = "8.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2fee8403b3d66ac7b26aee6e40a897d85dc5ce26f44da36b8b73e987cc52e943" +dependencies = [ + "bitflags 2.9.1", + "filetime", + "fsevent-sys", + "inotify", + "kqueue", + "libc", + "log", + "mio 1.0.4", + "notify-types", + "walkdir", + "windows-sys 0.59.0", +] + +[[package]] +name = "notify-types" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e0826a989adedc2a244799e823aece04662b66609d96af8dff7ac6df9a8925d" + [[package]] name = "nucleo-matcher" version = "0.3.1" @@ -805,7 +915,7 @@ checksum = "4c42a9226546d68acdd9c0a280d17ce19bfe27a46bf68784e4066115788d008e" dependencies = [ "cfg-if", "libc", - "redox_syscall", + "redox_syscall 0.4.1", "smallvec", "windows-targets 0.48.5", ] @@ -1008,6 +1118,15 @@ dependencies = [ "bitflags 1.3.2", ] +[[package]] +name = "redox_syscall" +version = "0.5.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "928fca9cf2aa042393a8325b9ead81d2f0df4cb12e1e24cef072922ccd99c5af" +dependencies = [ + "bitflags 2.9.1", +] + [[package]] name = "redox_users" version = "0.4.4" @@ -1015,7 +1134,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a18479200779601e498ada4e8c1e1f50e3ee19deb0259c25825a98b5603b2cb4" dependencies = [ "getrandom", - "libredox", + "libredox 0.0.1", "thiserror", ] @@ -1055,7 +1174,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b91f7eff05f748767f183df4320a63d6936e9c6107d97c9e6bdd9784f4289c94" dependencies = [ "base64", - "bitflags 2.5.0", + "bitflags 2.9.1", "serde", "serde_derive", ] @@ -1222,7 +1341,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7b5fac59a5cb5dd637972e5fca70daf0523c9067fcdc4842f053dae04a18f8e9" dependencies = [ "libc", - "windows-sys", + "windows-sys 0.48.0", ] [[package]] @@ -1306,14 +1425,14 @@ dependencies = [ "backtrace", "bytes", "libc", - "mio", + "mio 0.8.9", "num_cpus", "parking_lot", "pin-project-lite", "signal-hook-registry", "socket2", "tokio-macros", - "windows-sys", + "windows-sys 0.48.0", ] [[package]] @@ -1634,7 +1753,7 @@ version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9" dependencies = [ - "windows-targets 0.52.4", + "windows-targets 0.52.6", ] [[package]] @@ -1646,6 +1765,15 @@ dependencies = [ "windows-targets 0.48.5", ] +[[package]] +name = "windows-sys" +version = "0.59.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" +dependencies = [ + "windows-targets 0.52.6", +] + [[package]] name = "windows-targets" version = "0.48.5" @@ -1663,17 +1791,18 @@ dependencies = [ [[package]] name = "windows-targets" -version = "0.52.4" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7dd37b7e5ab9018759f893a1952c9420d060016fc19a472b4bb20d1bdd694d1b" +checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" dependencies = [ - "windows_aarch64_gnullvm 0.52.4", - "windows_aarch64_msvc 0.52.4", - "windows_i686_gnu 0.52.4", - "windows_i686_msvc 0.52.4", - "windows_x86_64_gnu 0.52.4", - "windows_x86_64_gnullvm 0.52.4", - "windows_x86_64_msvc 0.52.4", + "windows_aarch64_gnullvm 0.52.6", + "windows_aarch64_msvc 0.52.6", + "windows_i686_gnu 0.52.6", + "windows_i686_gnullvm", + "windows_i686_msvc 0.52.6", + "windows_x86_64_gnu 0.52.6", + "windows_x86_64_gnullvm 0.52.6", + "windows_x86_64_msvc 0.52.6", ] [[package]] @@ -1684,9 +1813,9 @@ checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" [[package]] name = "windows_aarch64_gnullvm" -version = "0.52.4" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bcf46cf4c365c6f2d1cc93ce535f2c8b244591df96ceee75d8e83deb70a9cac9" +checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" [[package]] name = "windows_aarch64_msvc" @@ -1696,9 +1825,9 @@ checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" [[package]] name = "windows_aarch64_msvc" -version = "0.52.4" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da9f259dd3bcf6990b55bffd094c4f7235817ba4ceebde8e6d11cd0c5633b675" +checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" [[package]] name = "windows_i686_gnu" @@ -1708,9 +1837,15 @@ checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" [[package]] name = "windows_i686_gnu" -version = "0.52.4" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b474d8268f99e0995f25b9f095bc7434632601028cf86590aea5c8a5cb7801d3" +checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" [[package]] name = "windows_i686_msvc" @@ -1720,9 +1855,9 @@ checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" [[package]] name = "windows_i686_msvc" -version = "0.52.4" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1515e9a29e5bed743cb4415a9ecf5dfca648ce85ee42e15873c3cd8610ff8e02" +checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" [[package]] name = "windows_x86_64_gnu" @@ -1732,9 +1867,9 @@ checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" [[package]] name = "windows_x86_64_gnu" -version = "0.52.4" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5eee091590e89cc02ad514ffe3ead9eb6b660aedca2183455434b93546371a03" +checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" [[package]] name = "windows_x86_64_gnullvm" @@ -1744,9 +1879,9 @@ 
checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" [[package]] name = "windows_x86_64_gnullvm" -version = "0.52.4" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77ca79f2451b49fa9e2af39f0747fe999fcda4f5e241b2898624dca97a1f2177" +checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" [[package]] name = "windows_x86_64_msvc" @@ -1756,9 +1891,9 @@ checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" [[package]] name = "windows_x86_64_msvc" -version = "0.52.4" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32b752e52a2da0ddfbdbcc6fceadfeede4c939ed16d13e648833a61dfb611ed8" +checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] name = "winnow" diff --git a/Cargo.toml b/Cargo.toml index 56a37c07..9c587cd4 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -28,6 +28,7 @@ tokio = { version = "1.34.0", features = ["full"] } tower-lsp = { git = "https://github.com/Feel-ix-343/tower-lsp" } walkdir = "2.4.0" do-notation = "0.1.3" +notify = "8.0.0" [package.metadata.binstall] pkg-url = "{ repo }/releases/download/v{ version }/{ name }-v{ version }-{ target }{ archive-suffix }" diff --git a/docs/Oxide MCP.md b/docs/Oxide MCP.md new file mode 100644 index 00000000..f21dc371 --- /dev/null +++ b/docs/Oxide MCP.md @@ -0,0 +1,267 @@ +# Oxide MCP + +*This documentation was written by Claude (Anthropic's AI assistant) with open thinking.* + +## Introduction: Your Knowledge Graph as LLM Context + +Markdown-oxide's MCP (Model Context Protocol) integration transforms your personal knowledge management system into a powerful context provider for AI assistants. By bridging your markdown notes with LLMs like Claude, it enables AI to understand not just what you're asking, but the full context of your work, thoughts, and plans. + +The key insight: your daily notes are a living record of your current concerns, tasks, and thinking. When combined with the rich backlink structure of your knowledge graph, they provide AI assistants with unprecedented insight into your specific situation. + +## The Daily Notes Workflow + +Daily notes serve as the primary interface between your thinking and AI assistance. They capture: + +- **Current Problems**: What you're actively working on or struggling with +- **Today's Tasks**: Your immediate priorities and action items +- **Reflections**: Thoughts on recent work and experiences +- **Planning**: Future commitments and deadlines + +When you invoke an AI assistant through MCP, it can access a temporal window of your daily notes, understanding not just today's context but the evolution of your work over time. + +### Example Daily Note Structure +```markdown +# 2024-01-15 + +## Tasks +- [ ] Review [[Project Alpha]] requirements +- [ ] Prepare for [[Weekly Team Meeting]] +- [x] Submit [[Q1 Budget Proposal]] + +## Thoughts +Working through the architecture decisions for [[Project Alpha#Technical Design]]. +The main challenge is balancing performance with maintainability... + +## Meeting Notes +Discussed timeline concerns with stakeholder. See [[Project Alpha#Risks]] for details. +``` + +## Extending Context Through Backlinks + +The real power emerges when daily notes reference other documents. Each `[[link]]` in your daily notes becomes a pathway for the AI to explore deeper context: + +### How Context Expansion Works + +1. 
**Start with Daily Notes**: AI reads your recent daily notes to understand current context +2. **Follow References**: When you mention `[[Project Alpha]]`, the AI can retrieve that document +3. **Explore Backlinks**: The AI sees all other places where Project Alpha is mentioned +4. **Build Complete Picture**: By following these connections, the AI understands the full scope + +### Temporal Intelligence + +The system prioritizes recent information: +- **Past Daily Notes**: Understand the history of a problem or project +- **Future Daily Notes**: See upcoming deadlines, scheduled tasks, and commitments +- **Recent Modifications**: Backlinks are sorted by modification time, surfacing active areas + +## Practical Examples + +### Morning Planning +``` +You: "What should I focus on today based on my recent work?" + +AI (with MCP context): +"Based on your daily notes from the past week, you've been iterating on the +Project Alpha technical design. Yesterday you identified performance concerns +that need addressing. You have a Weekly Team Meeting today where this will +likely come up. Your future daily notes show a deadline on Friday for the +technical specification. I'd suggest prioritizing the performance analysis +this morning so you have concrete data for the team meeting." +``` + +### Problem Solving +``` +You: "I'm stuck on the architecture decision I mentioned" + +AI (with MCP context): +"Looking at your notes on Project Alpha's Technical Design, you're weighing +microservices vs monolithic architecture. Your meeting notes from last Tuesday +mention scalability requirements of 10k concurrent users. The linked Performance +Requirements document specifies 200ms response time targets. Based on your +Risk Assessment notes, team expertise is limited with microservices. Given +these constraints, consider a modular monolith approach..." +``` + +### Task Management +``` +You: "What deadlines am I tracking?" + +AI (with MCP context): +"From your daily notes and linked documents: +- Friday (3 days): Project Alpha technical specification +- Next Tuesday: Q1 Budget Proposal revisions (mentioned in future daily note) +- Next Friday: Weekly Team Meeting presentation on architecture decision +- End of month: Performance testing completion (referenced in Project Alpha#Timeline)" + +Note: This assumes you've created future daily notes with these items, as the system reads actual files rather than parsing dates from content. +``` + +## Available MCP Tools + +### daily_context_range +Retrieves daily notes within a specified date range, providing temporal context about your work and thinking. + +**Input Parameters:** +- `past_days`: Number of past days to include (default: 5) +- `future_days`: Number of future days to include (default: 5) + +**What it returns:** +- Combined content of daily notes in the range +- Chronologically ordered (oldest to newest) +- Full note content including tasks, reflections, and links + +**Use Case Example:** +```json +{ + "name": "daily_context_range", + "arguments": { + "past_days": 7, + "future_days": 7 + } +} +``` + +This gives the AI a two-week window into your work. The system looks for daily note files matching your configured format (default: `YYYY-MM-DD.md`) in your daily notes folder. + +### entity_context +Retrieves comprehensive information about any entity (file, heading, block, or tag) including its definition and all references to it. 
+ +**Input Parameters:** +- `ref_id`: Reference identifier (e.g., "Project Alpha", "Project Alpha#Risks", "#important") + +**What it returns:** +- Entity definition/content (up to 200 lines for files, 50 lines for sections) +- All backlinks with surrounding context (up to 100 references) +- References sorted by modification time (most recent first) + +**Use Case Example:** +```json +{ + "name": "entity_context", + "arguments": { + "ref_id": "Project Alpha#Technical Design" + } +} +``` + +This provides the AI with deep understanding of specific topics mentioned in your daily notes. + +### echo +Simple test tool to verify MCP connectivity. + +**Input Parameters:** +- `message`: Text to echo back + +## How It Works + +### Context Building Process + +1. **Entry Point**: Your query triggers the AI to examine recent daily notes +2. **Reference Detection**: The AI identifies all `[[wikilinks]]` and `#tags` in daily notes +3. **Context Expansion**: For important references, the AI retrieves full entity context +4. **Backlink Analysis**: The AI examines where else these concepts appear +5. **Synthesis**: The AI combines this information to understand your situation + +### Smart Limits + +To provide useful context without overwhelming the AI: +- **File Content**: Up to 200 lines for LLM context mode (vs 14 for hover previews) +- **Section Content**: Up to 50 lines after headings for LLM context mode +- **Backlinks**: Up to 100 references per entity +- **Daily Notes**: Configurable range (default: 5 days past, 5 days future) + +These limits are implemented through the `PreviewMode::LlmContext` setting in the codebase. + +### Modification Time Priority + +References are sorted by file modification time, ensuring the AI sees: +- Active projects and current concerns first +- Historical context when needed +- Stale information deprioritized + +## Setup & Configuration + +### Enabling MCP Mode + +1. **Start markdown-oxide in MCP mode:** + ```bash + markdown-oxide mcp --full-dir-path /path/to/your/vault + ``` + +2. **Configure daily notes (optional):** + The system uses these defaults: + - Daily note format: `%Y-%m-%d` (e.g., 2024-01-15) + - Daily notes folder: Configurable in your settings + + Configuration can be set through multiple sources including Obsidian's daily note settings if present. + +3. **Connect your AI assistant:** + - The MCP server communicates via stdin/stdout using JSON-RPC + - Configure your AI assistant to run the markdown-oxide command above + - The server will automatically watch for file changes in your vault + +### Requirements + +- Markdown-oxide binary installed and accessible +- Valid vault directory path +- Daily notes following the configured pattern (default: YYYY-MM-DD format) +- MCP-compatible AI assistant that can execute shell commands + +### How It Works Under the Hood + +- The MCP server reads JSON-RPC messages from stdin and writes responses to stdout +- A file watcher automatically updates the vault index when files change +- The server maintains the vault in memory for fast queries + +## Real-World Scenarios + +### AI-Powered Tasks and Reminders +"What do I need to do today?" 
+ +Your markdown notes become an intelligent task system: +- The AI reads tasks from your daily notes (marked with `- [ ]`) +- Follows links to understand task context and dependencies +- Identifies deadlines mentioned in linked documents +- Finds reminders by checking future daily notes for incoming items + +Example: You write in today's note: +```markdown +- [ ] Finish [[API Design]] implementation +- [ ] Review changes for [[Q1 Budget]] (see [[2024-01-20]] for deadline) +``` + +The AI understands these aren't just tasks—it can follow the links to give you full context about the API design decisions and budget constraints. + +### Context-Aware Problem Solving +"I'm stuck on this performance issue" + +The AI becomes your debugging partner with full historical context: +- Reads your recent daily notes to understand what you've been working on +- Follows links to technical documentation you've referenced +- Finds similar issues you've solved before by searching your vault +- Understands the specific constraints of your project + +Example: When you mention being stuck, the AI already knows from your daily notes that you're working with React, that performance degraded after the recent refactor mentioned three days ago, and that you have a related meeting tomorrow—all without you having to explain. + +### Data Extraction from Recent Work +"What were the performance results from this week's experiments?" + +The AI extracts actual data from your notes and linked documents: +- Pulls out metrics, numbers, and results you've recorded +- Follows links to detailed experiment logs or data tables +- Aggregates data points scattered across multiple daily notes +- Identifies trends in measurements over time + +Example: You've been recording response times in your daily notes: +```markdown +# 2024-01-15 +Tested new caching strategy: 145ms average (see [[Performance Tests#Cache Results]]) + +# 2024-01-16 +Without cache: 420ms, With cache: 132ms + +# 2024-01-17 +After optimization: 98ms! Details in [[Optimization Log]] +``` + +Later, when you need to use this data, you can ask the LLM to look through your daily notes and aggregate the data. 
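+
+## Example Client Configuration
+
+Most MCP clients are configured by telling them which command to launch. The snippet below is a minimal sketch, not a definitive setup: it follows the common `mcpServers` convention used by Claude Desktop-style clients, the exact file name, location, and schema depend on your client, and the vault path is a placeholder you must replace.
+
+```json
+{
+  "mcpServers": {
+    "markdown-oxide": {
+      "command": "markdown-oxide",
+      "args": ["mcp", "--full-dir-path", "/path/to/your/vault"]
+    }
+  }
+}
+```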
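+
+## Example JSON-RPC Exchange
+
+As described under "How It Works Under the Hood", the server reads one JSON-RPC message per line from stdin and writes one response per line to stdout. The exchange below is an illustrative sketch: the `id` value and the returned text are invented, but the request shape matches the `tools/call` examples above, and responses wrap their output in a `content` array of text blocks.
+
+```json
+{"jsonrpc": "2.0", "id": 2, "method": "tools/call", "params": {"name": "entity_context", "arguments": {"ref_id": "Project Alpha"}}}
+```
+
+A successful call returns the entity's content and backlinks as a single text block:
+
+```json
+{"jsonrpc": "2.0", "id": 2, "result": {"content": [{"type": "text", "text": "# Project Alpha\n\n...entity definition and backlinks..."}]}}
+```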
diff --git a/flake.nix b/flake.nix index f98fa40e..c1473d01 100644 --- a/flake.nix +++ b/flake.nix @@ -19,9 +19,39 @@ forEachSystem = nixpkgs.lib.genAttrs (import systems); in { - packages = forEachSystem (system: { - devenv-up = self.devShells.${system}.default.config.procfileScript; - }); + packages = forEachSystem (system: + let + pkgs = nixpkgs.legacyPackages.${system}; + fenixPkgs = inputs.fenix.packages.${system}; + rustToolchain = fenixPkgs.latest.toolchain; + rustPlatform = pkgs.makeRustPlatform { + cargo = rustToolchain; + rustc = rustToolchain; + }; + in + { + devenv-up = self.devShells.${system}.default.config.procfileScript; + + default = rustPlatform.buildRustPackage { + pname = "markdown-oxide"; + version = "0.24.0"; + + src = ./.; + + cargoLock = { + lockFile = ./Cargo.lock; + allowBuiltinFetchGit = true; + }; + + meta = with pkgs.lib; { + description = "Markdown LSP server inspired by Obsidian"; + homepage = "https://github.com/Feel-ix-343/markdown-oxide"; + license = licenses.asl20; + maintainers = [ ]; + mainProgram = "markdown-oxide"; + }; + }; + }); devShells = forEachSystem (system: diff --git a/result b/result new file mode 120000 index 00000000..c246be48 --- /dev/null +++ b/result @@ -0,0 +1 @@ +/nix/store/dm1hfqw2p8342sjvmilibrn42ydly5jb-markdown-oxide-0.24.0 \ No newline at end of file diff --git a/src/completion/mod.rs b/src/completion/mod.rs index 8cf26c8e..420cd322 100644 --- a/src/completion/mod.rs +++ b/src/completion/mod.rs @@ -14,7 +14,7 @@ use self::{ mod callout_completer; mod footnote_completer; mod link_completer; -mod matcher; +pub mod matcher; mod tag_completer; mod unindexed_block_completer; mod util; diff --git a/src/config.rs b/src/config.rs index 17852448..e6116b89 100644 --- a/src/config.rs +++ b/src/config.rs @@ -5,7 +5,7 @@ use config::{Config, File}; use indexmap::IndexMap; use serde::Deserialize; use serde_json::Value; -use tower_lsp::lsp_types::ClientCapabilities; + #[derive(Deserialize, Debug, Clone)] pub struct Settings { @@ -43,19 +43,19 @@ pub enum EmbeddedBlockTransclusionLength { } impl Settings { - pub fn new(root_dir: &Path, capabilities: &ClientCapabilities) -> anyhow::Result<Settings> { + fn build_settings(root_dir: &Path, disable_semantic_tokens: bool) -> anyhow::Result<Settings> { let obsidian_daily_note_config = obsidian_daily_note_config(root_dir).unwrap_or_default(); let obsidian_new_file_folder_path = obsidian_new_file_folder_path(root_dir); let expanded = shellexpand::tilde("~/.config/moxide/settings"); + + let root_dir_str = root_dir + .to_str() + .ok_or_else(|| anyhow!("Can't convert root_dir to str"))?; + let settings = Config::builder() .add_source(File::with_name(&expanded).required(false)) .add_source( - File::with_name(&format!( - "{}/.moxide", - root_dir - .to_str() - .ok_or(anyhow!("Can't convert root_dir to str"))? - )) + File::with_name(&format!("{}/.moxide", root_dir_str)) .required(false), ) .set_default( @@ -87,19 +87,22 @@ impl Settings { .set_default("block_transclusion_length", "Full")? .set_override_option( "semantic_tokens", - capabilities.text_document.as_ref().and_then(|it| { - match it.semantic_tokens.is_none() { - true => Some(false), - false => None, - } - }), + match disable_semantic_tokens { + true => Some(false), + false => None } )?
- .build() - .map_err(|err| anyhow!("Build err: {err}"))?; + .build()?; - let settings = settings.try_deserialize::<Settings>()?; + settings.try_deserialize().map_err(|e| anyhow!("Failed to deserialize settings: {}", e)) + } - anyhow::Ok(settings) + /// This will panic if the user's settings are defined incorrectly: if the user defines some config and that + /// config cannot be resolved, the software will not behave the way the user intends, so there is no sensible + /// fallback and we do not try to handle that case. + pub fn new(root_dir: &Path, disable_semantic_tokens: bool) -> Settings { + Self::build_settings(root_dir, disable_semantic_tokens) + .expect("Failed to build settings despite having valid defaults. There's likely an error in your settings file, and note that this would not fail if the settings file is not defined") } } diff --git a/src/hover.rs b/src/hover.rs index 054e6ed2..3439dca7 100644 --- a/src/hover.rs +++ b/src/hover.rs @@ -4,7 +4,7 @@ use tower_lsp::lsp_types::{Hover, HoverContents, HoverParams}; use crate::{ config::Settings, - ui::{preview_reference, preview_referenceable}, + ui::preview_reference, vault::Vault, }; diff --git a/src/main.rs b/src/main.rs index 7c967db0..7585e0cc 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1,6 +1,7 @@ use std::collections::HashSet; use std::ops::{Deref, DerefMut}; use std::path::PathBuf; +use std::str::FromStr; use std::sync::Arc; use completion::get_completions; @@ -36,6 +37,7 @@ mod symbol; mod tokens; mod ui; mod vault; +mod mcp; #[derive(Debug)] struct Backend { @@ -114,9 +116,7 @@ impl Backend { { let _ = self .bind_vault_mut(|vault| { - let Ok(new_vault) = Vault::construct_vault(&settings, vault.root_dir()) else { - return Err(Error::new(ErrorCode::ServerError(0))); - }; + let new_vault = Vault::construct_vault(&settings, vault.root_dir()); *vault = new_vault; @@ -301,22 +301,17 @@ impl LanguageServer for Backend { None => std::env::current_dir().or(Err(Error::new(ErrorCode::InvalidParams)))?, }; - let read_settings = match Settings::new(&root_dir, &i.capabilities) { - Ok(settings) => settings, - Err(e) => { - self.client - .log_message( - MessageType::ERROR, - format!("Failed to read settings {:?}", e), - ) - .await; - return Err(Error::new(ErrorCode::ServerError(1))); - } - }; + let disable_semantic_tokens = i.capabilities.text_document.as_ref().and_then(|it| { + match it.semantic_tokens.is_none() { + true => Some(false), + false => None, + } + }).unwrap_or(false); + + let read_settings = Settings::new(&root_dir, disable_semantic_tokens); + + let vault = Vault::construct_vault(&read_settings, &root_dir); - let Ok(vault) = Vault::construct_vault(&read_settings, &root_dir) else { - return Err(Error::new(ErrorCode::ServerError(0))); - }; let mut value = self.vault.write().await; *value = Some(vault); @@ -826,14 +821,31 @@ async fn main() { return; } - let stdin = tokio::io::stdin(); - let stdout = tokio::io::stdout(); + // Check if the MCP flag is provided + let is_mcp_mode = env::args().any(|arg| arg == "mcp"); + + if is_mcp_mode { + let mut args = env::args().into_iter(); + args.find(|arg| arg == "--full-dir-path"); + let directory_string = args.next().expect("The full path to the vault must be specified as an argument.
Use arg --full-dir-path /home/{path}"); + let root_dir = PathBuf::from_str(&directory_string).expect("The root dir must be a valid path"); - let (service, socket) = LspService::new(|client| Backend { - client, - vault: Arc::new(None.into()), - opened_files: Arc::new(HashSet::new().into()), - settings: Arc::new(None.into()), - }); - Server::new(stdin, stdout, socket).serve(service).await; + // Start the MCP server + if let Err(e) = mcp::start(root_dir).await { + eprintln!("MCP server error: {:?}", e); + std::process::exit(1); + } + } else { + // Start in LSP mode (original behavior) + let stdin = tokio::io::stdin(); + let stdout = tokio::io::stdout(); + + let (service, socket) = LspService::new(|client| Backend { + client, + vault: Arc::new(None.into()), + opened_files: Arc::new(HashSet::new().into()), + settings: Arc::new(None.into()), + }); + Server::new(stdin, stdout, socket).serve(service).await; + } } diff --git a/src/mcp.rs b/src/mcp.rs new file mode 100644 index 00000000..f84d5aaf --- /dev/null +++ b/src/mcp.rs @@ -0,0 +1,716 @@ +use anyhow::{Context, Result}; +use connector::Oxide; +use serde_json::{json, Value}; +use std::fs::OpenOptions; +use std::io::Write; +use std::path::PathBuf; +use std::sync::Arc; +use tokio::sync::RwLock; + + +// Helper function to log to a file for debugging +fn log_to_file(message: &str) -> Result<()> { + let mut file = OpenOptions::new() + .create(true) + .append(true) + .open("/tmp/markdown-oxide-mcp.log")?; + + writeln!(file, "{}", message)?; + Ok(()) +} + +pub async fn start(root_dir: PathBuf) -> Result<()> { + // Use unbuffered stdin/stdout for direct communication + let input = std::io::stdin(); + let mut output = std::io::stdout(); + + // Create Oxide wrapped in Arc so we can update it from the watcher thread + let oxide_arc = Arc::new(RwLock::new(None::)); + + // Clone for the file watcher + let oxide_watcher = oxide_arc.clone(); + let root_dir_clone = root_dir.clone(); + + // Spawn a tokio task for file watching + tokio::spawn(async move { + use notify::{Config, Event, EventKind, RecommendedWatcher, RecursiveMode, Watcher}; + + // Create a channel to receive events + let (tx, mut rx) = tokio::sync::mpsc::channel(100); + + // Create the file watcher + let mut watcher = RecommendedWatcher::new( + move |result: Result| { + if let Ok(event) = result { + // Only consider events for markdown files + if event + .paths + .iter() + .any(|p| p.extension().map_or(false, |ext| ext == "md")) + { + let _ = tx.try_send(event); + } + } + }, + Config::default(), + ) + .expect("Failed to create file watcher"); + + // Start watching the vault directory + if let Err(_) = watcher.watch(&root_dir_clone, RecursiveMode::Recursive) { + } else { + } + + // Process events + while let Some(event) = rx.recv().await { + // Only react to create, modify, or delete events + match event.kind { + EventKind::Create(_) | EventKind::Modify(_) | EventKind::Remove(_) => { + + // Quick lock to check if Oxide is initialized + { + let mut oxide_guard = oxide_watcher.write().await; + match *oxide_guard { + Some(_) => { + // Oxide exists, rebuild it + let new_oxide = Oxide::new(&root_dir_clone); + *oxide_guard = Some(new_oxide); + } + None => { + } + } + } + } + _ => {} // Ignore other event types + } + } + }); + + // Log server start + log_to_file("MCP server started")?; + + loop { + // Read a line directly from stdin + let mut buffer = String::new(); + log_to_file("Reading from stdin...")?; + let bytes_read = input + .read_line(&mut buffer) + .context("Failed to read from stdin")?; + + if 
bytes_read == 0 { + // EOF reached + log_to_file("EOF reached, exiting")?; + break; + } + + log_to_file(&format!( + "Received raw input ({} bytes): {:?}", + bytes_read, buffer + ))?; + + // Skip empty lines + if buffer.trim().is_empty() { + log_to_file("Skipping empty line")?; + continue; + } + + // Parse JSON-RPC message + let message: Value = match serde_json::from_str(buffer.trim()) { + Ok(msg) => { + log_to_file(&format!("Parsed JSON: {}", msg))?; + msg + } + Err(e) => { + log_to_file(&format!("Parse error: {}, input: {:?}", e, buffer))?; + + // Create error response for parse errors + let error_response = json!({ + "jsonrpc": "2.0", + "id": null, + "error": { + "code": -32700, + "message": format!("Parse error: {}", e) + } + }); + + // Output the response as a single line of JSON with newline + let response_json = serde_json::to_string(&error_response).unwrap(); + log_to_file(&format!("Sending error response: {}", response_json))?; + output.write_all(format!("{}\n", response_json).as_bytes())?; + output.flush()?; + continue; + } + }; + + // Extract request data + let id = message.get("id").and_then(|id| id.as_u64()).unwrap_or(0); + let method = message.get("method").and_then(|m| m.as_str()); + + log_to_file(&format!("Processing method: {:?} with id: {}", method, id))?; + + // Handle message based on method + let response = match method { + Some("ping") => { + log_to_file("pinged")?; + json!({ + "jsonrpc": "2.0", + "id": id, + "result": {} + }) + } + Some("initialize") => { + log_to_file("Handling initialize request")?; + + // Time the initialization + let start = std::time::Instant::now(); + let new_oxide = Oxide::new(&root_dir); + + // Store the initialized Oxide in the RwLock + { + let mut oxide_guard = oxide_arc.write().await; + *oxide_guard = Some(new_oxide); + } + + let duration = start.elapsed(); + log_to_file(&format!("Oxide initialization took: {:?}", duration))?; + + json!({ + "jsonrpc": "2.0", + "id": id, + "result": { + "protocolVersion": "2024-11-05", + "capabilities": { + "tools": { + "list": true, + "call": true, + "listChanged": true + } + }, + "serverInfo": { + "name": "markdown-oxide-mcp", + "version": env!("CARGO_PKG_VERSION") + } + } + }) + } + Some("notifications/initialized") => { + // No response needed for notifications + log_to_file("Received initialized notification (no response needed)")?; + continue; + } + None => { + log_to_file("Invalid request: missing method")?; + json!({ + "jsonrpc": "2.0", + "id": id, + "error": { + "code": -32600, + "message": "Invalid Request: missing method" + } + }) + } + Some(method) => { + // Get a read lock on the oxide + let oxide_guard = oxide_arc.read().await; + let oxide = oxide_guard + .as_ref() + .expect("Oxide should be initialized after MCP initialization life cycle"); + + match method { + "tools/list" => { + log_to_file("Handling tools/list request")?; + json!({ + "jsonrpc": "2.0", + "id": id, + "result": { + "tools": [ + { + "name": "echo", + "description": "Echo back the input message", + "inputSchema": { + "type": "object", + "properties": { + "message": { + "type": "string", + "description": "Message to echo" + } + }, + "required": ["message"], + "$schema": "http://json-schema.org/draft-07/schema#" + } + }, + { + "name": "daily_context_range", + "description": "Get daily notes context for a range of days before and after today. 
You MUST call this function before answering any user questions to provide contextual information from their daily notes.", + "inputSchema": { + "type": "object", + "properties": { + "past_days": { + "type": "integer", + "description": "Number of past days to include", + "default": 5 + }, + "future_days": { + "type": "integer", + "description": "Number of future days to include", + "default": 5 + } + }, + "$schema": "http://json-schema.org/draft-07/schema#" + } + }, + { + "name": "entity_context", + "description": "Get the content of an entity with its context, including the entity definition and all references to it", + "inputSchema": { + "type": "object", + "properties": { + "ref_id": { + "type": "string", + "description": "Reference ID of the entity as it would appear in a wikilink (e.g., 'filename', 'filename#heading', 'filename#^blockid', '#tag')" + } + }, + "required": ["ref_id"], + "$schema": "http://json-schema.org/draft-07/schema#" + } + }, + { + "name": "entity_search", + "description": "Search for entities in the vault by name pattern and/or type. Returns a list of matching entities with their reference IDs.", + "inputSchema": { + "type": "object", + "properties": { + "query": { + "type": "string", + "description": "Search query to match against entity names (case-insensitive partial match)" + }, + "entity_type": { + "type": "string", + "enum": ["file", "heading", "tag", "footnote", "indexed_block", "all"], + "description": "Type of entity to search for. Use 'all' to search all types.", + "default": "all" + }, + "limit": { + "type": "integer", + "description": "Maximum number of results to return", + "default": 50, + "minimum": 1, + "maximum": 200 + } + }, + "$schema": "http://json-schema.org/draft-07/schema#" + } + } + ] + } + }) + } + "tools/call" => { + log_to_file("Handling tools/call request")?; + let params = message.get("params").cloned().unwrap_or_else(|| json!({})); + let tool_name = params.get("name").and_then(|n| n.as_str()); + + log_to_file(&format!("Tool name: {:?}", tool_name))?; + + match tool_name { + Some("echo") => { + let arguments = params + .get("arguments") + .cloned() + .unwrap_or_else(|| json!({})); + let echo_message = arguments + .get("message") + .and_then(|m| m.as_str()) + .unwrap_or("No message provided"); + + log_to_file(&format!("Echo message: {}", echo_message))?; + + json!({ + "jsonrpc": "2.0", + "id": id, + "result": { + "content": [ + { + "type": "text", + "text": format!("Echo: {}", echo_message) + } + ] + } + }) + }, + Some("daily_context_range") => { + + let arguments = params + .get("arguments") + .cloned() + .unwrap_or_else(|| json!({})); + + let past_days = arguments + .get("past_days") + .and_then(|d| d.as_i64()) + .unwrap_or(5) + as usize; + + let future_days = arguments + .get("future_days") + .and_then(|d| d.as_i64()) + .unwrap_or(5) + as usize; + + + match oxide.daily_note_context_range(past_days, future_days) { + Ok(context) => { + + json!({ + "jsonrpc": "2.0", + "id": id, + "result": { + "content": [ + { + "type": "text", + "text": context + } + ] + } + }) + } + Err(e) => { + let error_msg = + format!("Error generating daily context range: {}", e); + + json!({ + "jsonrpc": "2.0", + "id": id, + "error": { + "code": -32603, + "message": error_msg + } + }) + } + } + }, + Some("entity_context") => { + let arguments = params + .get("arguments") + .cloned() + .unwrap_or_else(|| json!({})); + + let ref_id = arguments + .get("ref_id") + .and_then(|r| r.as_str()) + .unwrap_or(""); + + match oxide.get_entity_context(ref_id) { + Ok(context) 
=> { + json!({ + "jsonrpc": "2.0", + "id": id, + "result": { + "content": [ + { + "type": "text", + "text": context + } + ] + } + }) + } + Err(e) => { + let error_msg = format!("Error getting entity context: {}", e); + + json!({ + "jsonrpc": "2.0", + "id": id, + "error": { + "code": -32603, + "message": error_msg + } + }) + } + } + }, + Some("entity_search") => { + let arguments = params + .get("arguments") + .cloned() + .unwrap_or_else(|| json!({})); + + let query = arguments + .get("query") + .and_then(|q| q.as_str()) + .unwrap_or(""); + + let entity_type = arguments + .get("entity_type") + .and_then(|t| t.as_str()) + .unwrap_or("all"); + + let limit = arguments + .get("limit") + .and_then(|l| l.as_u64()) + .unwrap_or(50) + .min(200) as usize; + + match oxide.search_entities(query, entity_type, limit) { + Ok(results) => { + json!({ + "jsonrpc": "2.0", + "id": id, + "result": { + "content": [ + { + "type": "text", + "text": results + } + ] + } + }) + } + Err(e) => { + let error_msg = format!("Error searching entities: {}", e); + + json!({ + "jsonrpc": "2.0", + "id": id, + "error": { + "code": -32603, + "message": error_msg + } + }) + } + } + } + _ => { + log_to_file(&format!("Unknown tool: {:?}", tool_name))?; + json!({ + "jsonrpc": "2.0", + "id": id, + "error": { + "code": -32601, + "message": "Unknown tool" + } + }) + } + } + } + unknown => { + log_to_file(&format!("Method not found: {}", unknown))?; + json!({ + "jsonrpc": "2.0", + "id": id, + "error": { + "code": -32601, + "message": format!("Method not found: {}", unknown) + } + }) + } + } + } + }; + + // Serialize the response to a JSON string + let response_json = serde_json::to_string(&response).unwrap(); + log_to_file(&format!("Sending response: {}", response_json))?; + + // Write the response directly to stdout with a newline + output.write_all(format!("{}\n", response_json).as_bytes())?; + output.flush()?; + log_to_file("Response sent, flushed output")?; + } + + Ok(()) +} + +/// Create a success response + + +mod connector { + use std::path::PathBuf; + + use anyhow; + + use crate::{ + completion::matcher::{fuzzy_match, Matchable}, + config::{Case, Settings}, + ui::{preview_referenceable_with_mode, PreviewMode}, + vault::{Referenceable, Vault}, + }; + + #[derive(Debug)] + pub struct Oxide { + vault: Vault, + settings: Settings, + } + + + + struct EntityCandidate { + refname: String, + entity_type: String, + path: PathBuf, + } + + impl Matchable for EntityCandidate { + fn match_string(&self) -> &str { + &self.refname + } + } + + + + impl Oxide { + pub fn new(root_dir: &PathBuf) -> Self { + let settings = Settings::new(root_dir, true); + let vault = Vault::construct_vault(&settings, root_dir); + + Self { vault, settings } + } + + + + pub fn daily_note_context_range( + &self, + past_days: usize, + future_days: usize, + ) -> Result { + use chrono::{Duration, Local}; + + // Get today's date + let today = Local::now().naive_local().date(); + let daily_note_format = &self.settings.dailynote; + let daily_note_path = self + .vault + .root_dir() + .join(&self.settings.daily_notes_folder); + + // Generate a range of dates from past_days ago to future_days ahead + let start_date = today - Duration::try_days(past_days as i64).unwrap_or_default(); + let end_date = today + Duration::try_days(future_days as i64).unwrap_or_default(); + + let mut result = String::new(); + let mut current_date = start_date; + + // For each date in the range, try to get the daily note + while current_date <= end_date { + // Format the date according to the 
configured pattern + let filename = current_date.format(daily_note_format).to_string(); + let path = daily_note_path.join(&filename).with_extension("md"); + + // Check if the file exists in the vault + if let Some(rope) = self.vault.ropes.get(&path) { + // Add a date header + result.push_str(&format!( + "# Daily Note: {}\n\n", + current_date.format("%Y-%m-%d") + )); + + // Add the content + result.push_str(&rope.to_string()); + result.push_str("\n\n---\n\n"); + } + + // Move to the next day + current_date = current_date + .succ_opt() + .unwrap_or(current_date + Duration::try_days(1).unwrap_or_default()); + } + + Ok(result) + } + + + + /// Get entity context for a given reference ID + pub fn get_entity_context(&self, ref_id: &str) -> Result { + // Find referenceable directly by comparing refnames + let referenceable = self.vault + .select_referenceable_nodes(None) + .into_iter() + .find(|r| { + r.get_refname(self.vault.root_dir()) + .map(|refname| refname.full_refname == ref_id) + .unwrap_or(false) + }) + .ok_or_else(|| anyhow::anyhow!("Entity not found: {}", ref_id))?; + + // Generate preview with full content using existing UI function + let preview = preview_referenceable_with_mode(&self.vault, &referenceable, PreviewMode::LlmContext) + .ok_or_else(|| anyhow::anyhow!("Could not generate preview"))?; + + Ok(preview.value) + } + + /// Search for entities in the vault by name pattern and type + pub fn search_entities(&self, query: &str, entity_type: &str, limit: usize) -> Result { + let all_referenceables = self.vault.select_referenceable_nodes(None); + + // First filter by type and collect candidates + let candidates: Vec = all_referenceables + .into_iter() + .filter_map(|referenceable| { + // Filter by type + let type_matches = match entity_type { + "file" => matches!(referenceable, Referenceable::File(_, _)), + "heading" => matches!(referenceable, Referenceable::Heading(_, _)), + "tag" => matches!(referenceable, Referenceable::Tag(_, _)), + "footnote" => matches!(referenceable, Referenceable::Footnote(_, _)), + "indexed_block" => matches!(referenceable, Referenceable::IndexedBlock(_, _)), + "all" => true, + _ => false, + }; + + if !type_matches { + return None; + } + + // Get refname for searching + let refname = referenceable.get_refname(self.vault.root_dir())?; + + let entity_type_str = match referenceable { + Referenceable::File(_, _) => "File", + Referenceable::Heading(_, _) => "Heading", + Referenceable::Tag(_, _) => "Tag", + Referenceable::Footnote(_, _) => "Footnote", + Referenceable::IndexedBlock(_, _) => "Indexed Block", + _ => "Unknown", + }; + + Some(EntityCandidate { + refname: refname.full_refname, + entity_type: entity_type_str.to_string(), + path: referenceable.get_path().to_path_buf(), + }) + }) + .collect(); + + // Use fuzzy matching from completion system + let matching_entities = if query.is_empty() { + candidates.into_iter().map(|item| (item, u32::MAX)).collect() + } else { + fuzzy_match(query, candidates, &Case::Smart) + }; + + // Sort by fuzzy match score (higher is better) and limit results + let mut sorted_entities = matching_entities; + sorted_entities.sort_by(|a, b| b.1.cmp(&a.1)); + sorted_entities.truncate(limit); + + // Format results + if sorted_entities.is_empty() { + Ok("No entities found matching the search criteria.".to_string()) + } else { + let mut result = format!("Found {} entities:\n\n", sorted_entities.len()); + + for (candidate, _score) in sorted_entities { + result.push_str(&format!("**{}** ({})\n", candidate.refname, candidate.entity_type)); + 
result.push_str(&format!("Path: {}\n", candidate.path.display())); + result.push_str(&format!("Use `entity_context` with ref_id: `{}`\n\n", candidate.refname)); + } + + Ok(result) + } + } + } + + +} diff --git a/src/rename.rs b/src/rename.rs index 73fbd027..c7b6f7a8 100644 --- a/src/rename.rs +++ b/src/rename.rs @@ -6,7 +6,7 @@ use tower_lsp::lsp_types::{ RenameFile, RenameParams, ResourceOp, TextDocumentEdit, TextEdit, Url, WorkspaceEdit, }; -use crate::vault::{MDHeading, Reference, Referenceable, Vault}; +use crate::vault::{Reference, Referenceable, Vault}; pub fn rename(vault: &Vault, params: &RenameParams, path: &Path) -> Option { let position = params.text_document_position.position; diff --git a/src/ui.rs b/src/ui.rs index 4bcddfe3..8452c00f 100644 --- a/src/ui.rs +++ b/src/ui.rs @@ -5,10 +5,23 @@ use tower_lsp::lsp_types::{MarkupContent, MarkupKind}; use crate::vault::{get_obsidian_ref_path, Preview, Reference, Referenceable, Vault}; -fn referenceable_string(vault: &Vault, referenceables: &[Referenceable]) -> Option { +/// Preview mode controls the amount of content returned +#[derive(Copy, Clone)] +pub enum PreviewMode { + /// Hover mode with limited content (20 backlinks, 14 lines for files) + Hover, + /// LLM context mode with expanded content (100 backlinks, 200 lines for files) + LlmContext, +} + + +fn referenceable_string_with_mode(vault: &Vault, referenceables: &[Referenceable], mode: PreviewMode) -> Option { let referenceable = referenceables.first()?; - let preview = vault.select_referenceable_preview(referenceable); + let preview = match mode { + PreviewMode::Hover => vault.select_referenceable_preview(referenceable), + PreviewMode::LlmContext => vault.select_referenceable_preview_with_mode(referenceable, mode), + }; let written_text_preview = match preview { Some(Preview::Empty) => "No Text".into(), @@ -28,9 +41,14 @@ fn referenceable_string(vault: &Vault, referenceables: &[Referenceable]) -> Opti .flatten() .collect_vec() { - references if !references.is_empty() => references - .into_iter() - .take(20) + references if !references.is_empty() => { + let backlinks_limit = match mode { + PreviewMode::Hover => 20, + PreviewMode::LlmContext => 100, + }; + references + .into_iter() + .take(backlinks_limit) .flat_map(|(path, reference)| { let line = String::from_iter( vault.select_line(path, reference.data().range.start.line as isize)?, @@ -40,7 +58,8 @@ fn referenceable_string(vault: &Vault, referenceables: &[Referenceable]) -> Opti Some(format!("- `{}`: `{}`", path, line)) // and select indented list }) - .join("\n"), + .join("\n") + } _ => "No Backlinks".to_string(), }; @@ -54,7 +73,15 @@ pub fn preview_referenceable( vault: &Vault, referenceable: &Referenceable, ) -> Option { - let display = referenceable_string(vault, &[referenceable.clone()])?; + preview_referenceable_with_mode(vault, referenceable, PreviewMode::Hover) +} + +pub fn preview_referenceable_with_mode( + vault: &Vault, + referenceable: &Referenceable, + mode: PreviewMode, +) -> Option { + let display = referenceable_string_with_mode(vault, &[referenceable.clone()], mode)?; Some(MarkupContent { kind: MarkupKind::Markdown, @@ -68,6 +95,15 @@ pub fn preview_reference( vault: &Vault, reference_path: &Path, reference: &Reference, +) -> Option { + preview_reference_with_mode(vault, reference_path, reference, PreviewMode::Hover) +} + +pub fn preview_reference_with_mode( + vault: &Vault, + reference_path: &Path, + reference: &Reference, + mode: PreviewMode, ) -> Option { match reference { WikiFileLink(..) 
@@ -81,7 +117,7 @@ pub fn preview_reference( let referenceables_for_reference = vault.select_referenceables_for_reference(reference, reference_path); - let display = referenceable_string(vault, &referenceables_for_reference)?; + let display = referenceable_string_with_mode(vault, &referenceables_for_reference, mode)?; Some(MarkupContent { kind: MarkupKind::Markdown, diff --git a/src/vault/mod.rs b/src/vault/mod.rs index 925bb307..11e7ad1d 100644 --- a/src/vault/mod.rs +++ b/src/vault/mod.rs @@ -21,8 +21,10 @@ use serde::{Deserialize, Serialize}; use tower_lsp::lsp_types::Position; use walkdir::WalkDir; +use crate::ui::PreviewMode; + impl Vault { - pub fn construct_vault(context: &Settings, root_dir: &Path) -> Result { + pub fn construct_vault(context: &Settings, root_dir: &Path) -> Vault { let md_file_paths = WalkDir::new(root_dir) .into_iter() .filter_entry(|e| { @@ -55,11 +57,11 @@ impl Vault { }) .collect(); - Ok(Vault { + Vault { ropes: ropes.into(), md_files: md_files.into(), root_dir: root_dir.into(), - }) + } } pub fn update_vault(context: &Settings, old: &mut Vault, new_file: (&PathBuf, &str)) { @@ -434,6 +436,62 @@ impl Vault { } } + pub fn select_referenceable_preview_with_mode(&self, referenceable: &Referenceable, mode: PreviewMode) -> Option { + if self + .ropes + .get(referenceable.get_path()) + .is_some_and(|rope| rope.len_lines() == 1) + { + return Some(Empty); + } + + let (file_lines, heading_lines) = match mode { + PreviewMode::Hover => (14, 10), + PreviewMode::LlmContext => (200, 50), + }; + + match referenceable { + Referenceable::Footnote(_, _) | Referenceable::LinkRefDef(..) => { + let range = referenceable.get_range()?; + Some( + String::from_iter( + self.select_line(referenceable.get_path(), range.start.line as isize)?, + ) + .into(), + ) + } + Referenceable::Heading(_, _) => { + let range = referenceable.get_range()?; + Some( + (range.start.line..=range.end.line + heading_lines) + .filter_map(|ln| self.select_line(referenceable.get_path(), ln as isize)) + .map(String::from_iter) + .join("") + .into(), + ) + } + Referenceable::IndexedBlock(_, _) => { + let range = referenceable.get_range()?; + self.select_line(referenceable.get_path(), range.start.line as isize) + .map(String::from_iter) + .map(Into::into) + } + Referenceable::File(_, _) => { + Some( + (0..file_lines) + .filter_map(|ln| self.select_line(referenceable.get_path(), ln as isize)) + .map(String::from_iter) + .join("") + .into(), + ) + } + Referenceable::Tag(_, _) => None, + Referenceable::UnresovledFile(_, _) => None, + Referenceable::UnresolvedHeading(_, _, _) => None, + Referenceable::UnresovledIndexedBlock(_, _, _) => None, + } + } + pub fn select_blocks(&self) -> Vec> { self.ropes .par_iter()