From ff7e31488ed2bfc5db7fe606f0970f700f86ff49 Mon Sep 17 00:00:00 2001 From: Oleksandr Ostrovskyi Date: Tue, 31 Mar 2026 19:10:12 +0300 Subject: [PATCH] v1.5.3: Fix write pipeline, WAL concurrency, --read-only mode Write pipeline fixes: - Auto-link resolver now skips code blocks, inline code, frontmatter, and filenames with extensions (prevents content mangling) - Frontmatter merge: user-provided FM fields merged into auto-generated block instead of creating duplicate FM sections - mtime synced to DB after edit/rewrite operations (prevents false conflict errors on successive writes) Concurrency: - SQLite WAL mode + 5s busy_timeout for concurrent MCP + CLI access Read-only mode: - --read-only flag for engraph serve disables all write MCP and HTTP tools while keeping search/read/context tools accessible 450 tests (up from 426), all passing. --- Cargo.lock | 2 +- Cargo.toml | 2 +- src/http.rs | 41 ++++ src/links.rs | 338 ++++++++++++++++++++++++++++++- src/main.rs | 6 +- src/serve.rs | 30 ++- src/store.rs | 78 +++++++ src/writer.rs | 552 +++++++++++++++++++++++++++++++++++++++++++++++--- 8 files changed, 1011 insertions(+), 38 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index f075c5f..9ed999b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -674,7 +674,7 @@ dependencies = [ [[package]] name = "engraph" -version = "1.5.0" +version = "1.5.3" dependencies = [ "anyhow", "axum", diff --git a/Cargo.toml b/Cargo.toml index b4274fd..b1e4d1e 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "engraph" -version = "1.5.0" +version = "1.5.3" edition = "2024" description = "Local knowledge graph for AI agents. Hybrid search + MCP server for Obsidian vaults." 
license = "MIT" diff --git a/src/http.rs b/src/http.rs index 7eabe7f..36720ad 100644 --- a/src/http.rs +++ b/src/http.rs @@ -43,6 +43,7 @@ pub struct ApiState { pub no_auth: bool, pub recent_writes: RecentWrites, pub rate_limiter: Arc, + pub read_only: bool, } // --------------------------------------------------------------------------- @@ -697,6 +698,9 @@ async fn handle_create( Json(body): Json, ) -> Result { authorize(&headers, &state, true)?; + if state.read_only { + return Err(ApiError::forbidden("Write operations disabled in read-only mode")); + } let store = state.store.lock().await; let mut embedder = state.embedder.lock().await; let input = CreateNoteInput { @@ -726,6 +730,9 @@ async fn handle_append( Json(body): Json, ) -> Result { authorize(&headers, &state, true)?; + if state.read_only { + return Err(ApiError::forbidden("Write operations disabled in read-only mode")); + } let store = state.store.lock().await; let mut embedder = state.embedder.lock().await; let input = AppendInput { @@ -746,6 +753,9 @@ async fn handle_edit( Json(body): Json, ) -> Result { authorize(&headers, &state, true)?; + if state.read_only { + return Err(ApiError::forbidden("Write operations disabled in read-only mode")); + } let store = state.store.lock().await; let mode = match body.mode.as_deref().unwrap_or("append") { "replace" => EditMode::Replace, @@ -772,6 +782,9 @@ async fn handle_rewrite( Json(body): Json, ) -> Result { authorize(&headers, &state, true)?; + if state.read_only { + return Err(ApiError::forbidden("Write operations disabled in read-only mode")); + } let store = state.store.lock().await; let input = RewriteInput { file: body.file, @@ -792,6 +805,9 @@ async fn handle_edit_frontmatter( Json(body): Json, ) -> Result { authorize(&headers, &state, true)?; + if state.read_only { + return Err(ApiError::forbidden("Write operations disabled in read-only mode")); + } let ops = parse_frontmatter_ops(&body.operations)?; let store = state.store.lock().await; let input = 
EditFrontmatterInput { @@ -812,6 +828,9 @@ async fn handle_move( Json(body): Json, ) -> Result { authorize(&headers, &state, true)?; + if state.read_only { + return Err(ApiError::forbidden("Write operations disabled in read-only mode")); + } let store = state.store.lock().await; let result = writer::move_note(&body.file, &body.new_folder, &store, &state.vault_path) .map_err(|e| ApiError::internal(&format!("{e:#}")))?; @@ -826,6 +845,9 @@ async fn handle_archive( Json(body): Json, ) -> Result { authorize(&headers, &state, true)?; + if state.read_only { + return Err(ApiError::forbidden("Write operations disabled in read-only mode")); + } let store = state.store.lock().await; let result = writer::archive_note( &body.file, @@ -845,6 +867,9 @@ async fn handle_unarchive( Json(body): Json, ) -> Result { authorize(&headers, &state, true)?; + if state.read_only { + return Err(ApiError::forbidden("Write operations disabled in read-only mode")); + } let store = state.store.lock().await; let mut embedder = state.embedder.lock().await; let result = writer::unarchive_note(&body.file, &store, &mut *embedder, &state.vault_path) @@ -860,6 +885,9 @@ async fn handle_update_metadata( Json(body): Json, ) -> Result { authorize(&headers, &state, true)?; + if state.read_only { + return Err(ApiError::forbidden("Write operations disabled in read-only mode")); + } let store = state.store.lock().await; let input = UpdateMetadataInput { file: body.file, @@ -883,6 +911,9 @@ async fn handle_migrate_preview( headers: HeaderMap, ) -> Result { authorize(&headers, &state, true)?; + if state.read_only { + return Err(ApiError::forbidden("Write operations disabled in read-only mode")); + } let store = state.store.lock().await; let profile_ref = state.profile.as_ref().as_ref(); let preview = crate::migrate::generate_preview(&store, &state.vault_path, profile_ref) @@ -901,6 +932,9 @@ async fn handle_migrate_apply( Json(body): Json, ) -> Result { authorize(&headers, &state, true)?; + if state.read_only { 
+ return Err(ApiError::forbidden("Write operations disabled in read-only mode")); + } let store = state.store.lock().await; let preview: crate::migrate::MigrationPreview = serde_json::from_value(body.preview) .map_err(|e| ApiError::bad_request(&format!("Invalid preview: {e}")))?; @@ -914,6 +948,9 @@ async fn handle_migrate_undo( headers: HeaderMap, ) -> Result { authorize(&headers, &state, true)?; + if state.read_only { + return Err(ApiError::forbidden("Write operations disabled in read-only mode")); + } let store = state.store.lock().await; let result = crate::migrate::undo_last(&store, &state.vault_path) .map_err(|e| ApiError::internal(&format!("{e:#}")))?; @@ -926,6 +963,9 @@ async fn handle_delete( Json(body): Json, ) -> Result { authorize(&headers, &state, true)?; + if state.read_only { + return Err(ApiError::forbidden("Write operations disabled in read-only mode")); + } let store = state.store.lock().await; let mode = match body.mode.as_deref().unwrap_or("soft") { "hard" => DeleteMode::Hard, @@ -1013,6 +1053,7 @@ mod tests { no_auth: false, recent_writes: Arc::new(Mutex::new(HashMap::::new())), rate_limiter, + read_only: false, } } diff --git a/src/links.rs b/src/links.rs index 9453c5a..956fe3d 100644 --- a/src/links.rs +++ b/src/links.rs @@ -99,6 +99,148 @@ pub(crate) fn build_name_index(store: &Store, vault_path: &Path) -> Result Vec<(usize, usize)> { + let bytes = content.as_bytes(); + let len = bytes.len(); + let mut regions = Vec::new(); + + // --- Frontmatter: must start at byte 0 with "---" followed by newline --- + let mut body_start = 0; + if content.starts_with("---\n") || content.starts_with("---\r\n") { + let after_open = if bytes[3] == b'\n' { 4 } else { 5 }; // skip past first ---\n or ---\r\n + if let Some(close_rel) = content[after_open..].find("\n---\n") { + let close_end = after_open + close_rel + 5; // include the closing ---\n + regions.push((0, close_end)); + body_start = close_end; + } else if let Some(close_rel) = 
content[after_open..].find("\n---\r\n") { + let close_end = after_open + close_rel + 6; + regions.push((0, close_end)); + body_start = close_end; + } else if content[after_open..].ends_with("\n---") { + // frontmatter at very end of content (no trailing newline) + regions.push((0, len)); + body_start = len; + } + } + + // --- Fenced code blocks and inline code in the body --- + let mut i = body_start; + while i < len { + // Check for fenced code block (``` or ~~~) at start of line + let at_line_start = i == body_start || (i > 0 && bytes[i - 1] == b'\n'); + if at_line_start && i + 2 < len { + let fence_char = bytes[i]; + if (fence_char == b'`' || fence_char == b'~') + && bytes[i + 1] == fence_char + && bytes[i + 2] == fence_char + { + let fence_start = i; + // Skip past the opening fence line + let line_end = content[i..].find('\n').map(|p| i + p + 1).unwrap_or(len); + let mut j = line_end; + // Scan for matching closing fence + let mut found_close = false; + while j < len { + let j_at_line_start = j == 0 || bytes[j - 1] == b'\n'; + if j_at_line_start + && j + 2 < len + && bytes[j] == fence_char + && bytes[j + 1] == fence_char + && bytes[j + 2] == fence_char + { + // Found closing fence — find end of this line + let close_line_end = + content[j..].find('\n').map(|p| j + p + 1).unwrap_or(len); + regions.push((fence_start, close_line_end)); + i = close_line_end; + found_close = true; + break; + } + // Advance to next line + j = content[j..].find('\n').map(|p| j + p + 1).unwrap_or(len); + if j == len && !found_close { + break; + } + } + if !found_close { + // Unclosed fence — protect to end of content + regions.push((fence_start, len)); + i = len; + } + continue; + } + } + + // Check for inline code (backticks) + if bytes[i] == b'`' { + let start = i; + i += 1; + // Find matching closing backtick (single backtick inline code) + while i < len { + if bytes[i] == b'`' { + regions.push((start, i + 1)); + i += 1; + break; + } + if bytes[i] == b'\n' { + // Inline code 
doesn't span lines — abandon + break; + } + i += 1; + } + continue; + } + + i += 1; + } + + regions +} + +/// Check if a match followed by a `.` + 1-4 alphanumeric chars looks like a file extension. +fn followed_by_file_extension(content: &[u8], end: usize) -> bool { + if end >= content.len() || content[end] != b'.' { + return false; + } + let after_dot = end + 1; + if after_dot >= content.len() { + return false; + } + let mut ext_len = 0; + let mut j = after_dot; + while j < content.len() && content[j].is_ascii_alphanumeric() { + ext_len += 1; + j += 1; + } + // Valid extension: 1-6 alphanumeric chars, followed by non-alphanumeric or end + ext_len >= 1 && ext_len <= 6 +} + +/// Check if the matched text looks like a bare date (YYYY-MM-DD). +/// Daily notes have filenames like `2026-03-31.md` so their basename matches date patterns. +/// We skip these to avoid linking plain dates in body text. +fn is_date_pattern(text: &str) -> bool { + let t = text.trim(); + if t.len() != 10 { + return false; + } + let bytes = t.as_bytes(); + // YYYY-MM-DD: digits at [0-3], dash at [4], digits at [5-6], dash at [7], digits at [8-9] + bytes[0..4].iter().all(|b| b.is_ascii_digit()) + && bytes[4] == b'-' + && bytes[5..7].iter().all(|b| b.is_ascii_digit()) + && bytes[7] == b'-' + && bytes[8..10].iter().all(|b| b.is_ascii_digit()) +} + /// Find byte ranges of existing `[[...]]` wikilinks in content. 
/// /// Returns `(start, end)` pairs where start is the index of the first `[` @@ -344,6 +486,7 @@ pub fn discover_links( ) -> Result> { let name_index = build_name_index(store, vault_path)?; let wikilink_regions = find_wikilink_regions(content); + let protected_regions = find_protected_regions(content); let content_lower = content.to_lowercase(); let content_bytes = content.as_bytes(); @@ -366,6 +509,11 @@ pub fn discover_links( continue; } + // Skip if inside a protected region (frontmatter, code block, inline code) + if inside_region(pos, end, &protected_regions) { + continue; + } + // Skip if overlapping with an already-claimed (longer) match if overlaps_claimed(pos, end, &claimed) { continue; @@ -377,6 +525,16 @@ pub fn discover_links( continue; } + // Skip if match is followed by a file extension (e.g., image-url.ts) + if followed_by_file_extension(content_bytes, end) { + continue; + } + + // Skip bare date patterns (e.g., 2026-03-31 matching a daily note) + if is_date_pattern(&content[pos..end]) { + continue; + } + let matched_text = content[pos..end].to_string(); let display = match entry.match_type { @@ -418,9 +576,10 @@ pub fn discover_links( .cloned() .collect(); - // Combine exact match regions and wikilink regions for fuzzy exclusion + // Combine exact match regions, wikilink regions, and protected regions for fuzzy exclusion let mut fuzzy_excluded = claimed.clone(); fuzzy_excluded.extend_from_slice(&wikilink_regions); + fuzzy_excluded.extend_from_slice(&protected_regions); let fuzzy_matches = find_fuzzy_matches(content, &eligible, &fuzzy_excluded, people_folder); // Track fuzzy match regions for first-name exclusion @@ -488,6 +647,7 @@ pub fn apply_links(content: &str, links: &[DiscoveredLink]) -> String { let content_lower = content.to_lowercase(); let content_bytes = content.as_bytes(); let wikilink_regions = find_wikilink_regions(content); + let protected_regions = find_protected_regions(content); // Find the position of each link in the content 
let mut replacements: Vec<(usize, usize, String)> = Vec::new(); @@ -505,6 +665,9 @@ pub fn apply_links(content: &str, links: &[DiscoveredLink]) -> String { if inside_region(pos, end, &wikilink_regions) { continue; } + if inside_region(pos, end, &protected_regions) { + continue; + } if overlaps_claimed(pos, end, &claimed) { continue; } @@ -512,6 +675,12 @@ pub fn apply_links(content: &str, links: &[DiscoveredLink]) -> String { { continue; } + if followed_by_file_extension(content_bytes, end) { + continue; + } + if is_date_pattern(&content[pos..end]) { + continue; + } let target_name = link .target_path @@ -823,4 +992,171 @@ mod tests { let matches = find_first_name_matches("I talked to Steve about it.", &people, &[]); assert_eq!(matches.len(), 0); // "steve" doesn't start with "steve " (no space after) } + + // --- Protected region tests --- + + #[test] + fn test_find_protected_regions_frontmatter() { + let content = "---\ntitle: My Note\ntags: [drift]\n---\nSome body text"; + let regions = find_protected_regions(content); + // Frontmatter region should cover from 0 to end of closing ---\n + assert!(!regions.is_empty()); + let fm = regions[0]; + assert_eq!(fm.0, 0); + // The closing "---\n" ends at byte 36 + let fm_text = &content[fm.0..fm.1]; + assert!(fm_text.contains("tags: [drift]")); + assert!(fm_text.ends_with("---\n")); + } + + #[test] + fn test_find_protected_regions_fenced_code_block() { + let content = "Some text\n```rust\nlet drift = 42;\n```\nMore text"; + let regions = find_protected_regions(content); + assert_eq!(regions.len(), 1); + let block = &content[regions[0].0..regions[0].1]; + assert!(block.contains("let drift = 42;")); + } + + #[test] + fn test_find_protected_regions_tilde_fence() { + let content = "Some text\n~~~\nlet drift = 42;\n~~~\nMore text"; + let regions = find_protected_regions(content); + assert_eq!(regions.len(), 1); + let block = &content[regions[0].0..regions[0].1]; + assert!(block.contains("let drift = 42;")); + } + + #[test] + 
fn test_find_protected_regions_inline_code() { + let content = "Use the `drift` command here"; + let regions = find_protected_regions(content); + assert_eq!(regions.len(), 1); + assert_eq!(&content[regions[0].0..regions[0].1], "`drift`"); + } + + #[test] + fn test_skip_fenced_code_block_in_discover() { + let (store, vault_dir) = setup_store_and_vault(); + // Add a note named "Drift" so it can be matched + store + .insert_file("01-Projects/Drift.md", "h", 0, &[], "ccc333", None, None) + .unwrap(); + let projects = vault_dir.path().join("01-Projects"); + std::fs::create_dir_all(&projects).unwrap(); + std::fs::write(projects.join("Drift.md"), "# Drift\n").unwrap(); + + let content = "Some text\n```\nDrift config here\n```\nMore text about Drift"; + let links = discover_links(&store, content, vault_dir.path(), None).unwrap(); + // Should only match the "Drift" outside the code block + assert_eq!(links.len(), 1); + // The matched position should be in the "More text about Drift" part + assert_eq!(links[0].matched_text, "Drift"); + } + + #[test] + fn test_skip_inline_code_in_discover() { + let (store, vault_dir) = setup_store_and_vault(); + store + .insert_file("01-Projects/Drift.md", "h", 0, &[], "ccc333", None, None) + .unwrap(); + let projects = vault_dir.path().join("01-Projects"); + std::fs::create_dir_all(&projects).unwrap(); + std::fs::write(projects.join("Drift.md"), "# Drift\n").unwrap(); + + let content = "Use `Drift` in code but Drift in text"; + let links = discover_links(&store, content, vault_dir.path(), None).unwrap(); + // Should only match the Drift outside backticks + assert_eq!(links.len(), 1); + assert_eq!(links[0].matched_text, "Drift"); + } + + #[test] + fn test_skip_frontmatter_in_discover() { + let (store, vault_dir) = setup_store_and_vault(); + store + .insert_file("01-Projects/Drift.md", "h", 0, &[], "ccc333", None, None) + .unwrap(); + let projects = vault_dir.path().join("01-Projects"); + std::fs::create_dir_all(&projects).unwrap(); + 
std::fs::write(projects.join("Drift.md"), "# Drift\n").unwrap(); + + let content = "---\ntags: [drift]\ndate: 2026-03-27\n---\nTalked about Drift today"; + let links = discover_links(&store, content, vault_dir.path(), None).unwrap(); + // Should only match "Drift" in the body, not "drift" in frontmatter tags + assert_eq!(links.len(), 1); + assert_eq!(links[0].matched_text, "Drift"); + } + + #[test] + fn test_skip_file_extension_in_discover() { + let (store, vault_dir) = setup_store_and_vault(); + // Add a note called "image-url" that could match + store + .insert_file( + "03-Resources/image-url.md", + "h", + 0, + &[], + "ddd444", + None, + None, + ) + .unwrap(); + let resources = vault_dir.path().join("03-Resources"); + std::fs::create_dir_all(&resources).unwrap(); + std::fs::write(resources.join("image-url.md"), "# image-url\n").unwrap(); + + let content = "Edit image-url.ts for the fix, then check image-url docs"; + let links = discover_links(&store, content, vault_dir.path(), None).unwrap(); + // "image-url.ts" should NOT match (file extension), but "image-url" at end should + assert_eq!(links.len(), 1); + assert_eq!(links[0].matched_text, "image-url"); + } + + #[test] + fn test_apply_links_skips_protected_regions() { + let (store, vault_dir) = setup_store_and_vault(); + store + .insert_file("01-Projects/Drift.md", "h", 0, &[], "ccc333", None, None) + .unwrap(); + let projects = vault_dir.path().join("01-Projects"); + std::fs::create_dir_all(&projects).unwrap(); + std::fs::write(projects.join("Drift.md"), "# Drift\n").unwrap(); + + let content = "---\ntags: [drift]\n---\n`Drift` config\n```\nDrift code\n```\nReal Drift ref"; + let links = discover_links(&store, content, vault_dir.path(), None).unwrap(); + let result = apply_links(content, &links); + + // Frontmatter, inline code, and fenced code block should be untouched + assert!(result.contains("tags: [drift]")); + assert!(result.contains("`Drift`")); + assert!(result.contains("```\nDrift code\n```")); + // 
But the body reference should be linked + assert!(result.contains("[[Drift]]")); + } + + #[test] + fn test_normal_text_still_linked() { + let (store, vault_dir) = setup_store_and_vault(); + let content = "Steve Barbera and Reciprocal Rank Fusion are great"; + let links = discover_links(&store, content, vault_dir.path(), None).unwrap(); + let result = apply_links(content, &links); + assert!(result.contains("[[Steve Barbera]]")); + assert!(result.contains("[[Reciprocal Rank Fusion]]")); + } + + #[test] + fn test_followed_by_file_extension() { + assert!(followed_by_file_extension(b"image-url.ts rest", 9)); + assert!(followed_by_file_extension(b"drift.js", 5)); + assert!(followed_by_file_extension(b"note.md", 4)); + assert!(followed_by_file_extension(b"file.liquid", 4)); + // Not a file extension: dot followed by 7+ chars + assert!(!followed_by_file_extension(b"word.abcdefg", 4)); + // Not a file extension: no dot + assert!(!followed_by_file_extension(b"word rest", 4)); + // Not a file extension: dot at end + assert!(!followed_by_file_extension(b"word.", 4)); + } } diff --git a/src/main.rs b/src/main.rs index 82f564f..9b66a37 100644 --- a/src/main.rs +++ b/src/main.rs @@ -143,6 +143,9 @@ enum Command { /// Disable API key authentication (local development only, 127.0.0.1 only). #[arg(long)] no_auth: bool, + /// Read-only mode: only expose search and read MCP tools, disable all write operations. + #[arg(long)] + read_only: bool, }, /// Inspect vault graph connections. @@ -1220,6 +1223,7 @@ async fn main() -> Result<()> { port, host, no_auth, + read_only, } => { if !index_exists(&data_dir) { eprintln!("No index found. 
Run 'engraph index ' first."); @@ -1235,7 +1239,7 @@ async fn main() -> Result<()> { } else { None }; - engraph::serve::run_serve(&data_dir, http_opts).await?; + engraph::serve::run_serve(&data_dir, http_opts, read_only).await?; } Command::Write { action } => { diff --git a/src/serve.rs b/src/serve.rs index f82c344..831a620 100644 --- a/src/serve.rs +++ b/src/serve.rs @@ -205,6 +205,16 @@ pub struct EngraphServer { reranker: Option>>>, /// Tracks files recently written by MCP tools so the watcher can skip re-indexing them. recent_writes: RecentWrites, + /// When true, write/edit/delete MCP tools return an error instead of executing. + read_only: bool, +} + +fn read_only_err() -> McpError { + McpError::new( + rmcp::model::ErrorCode::INVALID_REQUEST, + "Write operations disabled in read-only mode. Start server without --read-only to enable writes.".to_string(), + None::, + ) } fn mcp_err(e: &anyhow::Error) -> McpError { @@ -490,6 +500,7 @@ impl EngraphServer { description = "Create a new note with automatic tag resolution, link discovery, and folder placement. Returns the created file's path, docid, and what was auto-resolved." )] async fn create(&self, params: Parameters) -> Result { + if self.read_only { return Err(read_only_err()); } let store = self.store.lock().await; let mut embedder = self.embedder.lock().await; let input = crate::writer::CreateNoteInput { @@ -516,6 +527,7 @@ impl EngraphServer { description = "Append content to an existing note. Safe: only adds content, never overwrites. Detects conflicts via mtime checking." 
)] async fn append(&self, params: Parameters) -> Result { + if self.read_only { return Err(read_only_err()); } let store = self.store.lock().await; let mut embedder = self.embedder.lock().await; let input = crate::writer::AppendInput { @@ -536,6 +548,7 @@ impl EngraphServer { &self, params: Parameters, ) -> Result { + if self.read_only { return Err(read_only_err()); } let store = self.store.lock().await; let input = crate::writer::UpdateMetadataInput { file: params.0.file, @@ -556,6 +569,7 @@ impl EngraphServer { &self, params: Parameters, ) -> Result { + if self.read_only { return Err(read_only_err()); } let store = self.store.lock().await; let result = crate::writer::move_note( ¶ms.0.file, @@ -572,6 +586,7 @@ impl EngraphServer { description = "Archive a note: moves it to the archive folder, removes from search index. The note is preserved on disk but invisible to search/context. Use unarchive to restore." )] async fn archive(&self, params: Parameters) -> Result { + if self.read_only { return Err(read_only_err()); } let store = self.store.lock().await; let result = crate::writer::archive_note( ¶ms.0.file, @@ -591,6 +606,7 @@ impl EngraphServer { &self, params: Parameters, ) -> Result { + if self.read_only { return Err(read_only_err()); } let store = self.store.lock().await; let mut embedder = self.embedder.lock().await; let result = @@ -635,6 +651,7 @@ impl EngraphServer { description = "Edit a specific section of a note. Supports replace, prepend, or append modes. Targets sections by heading name." )] async fn edit(&self, params: Parameters) -> Result { + if self.read_only { return Err(read_only_err()); } let store = self.store.lock().await; let mode = match params.0.mode.as_deref().unwrap_or("append") { "replace" => crate::writer::EditMode::Replace, @@ -661,6 +678,7 @@ impl EngraphServer { description = "Replace the entire body of a note. Optionally preserves existing frontmatter. Use for major content overhauls." 
)] async fn rewrite(&self, params: Parameters) -> Result { + if self.read_only { return Err(read_only_err()); } let store = self.store.lock().await; let input = crate::writer::RewriteInput { file: params.0.file, @@ -683,6 +701,7 @@ impl EngraphServer { &self, params: Parameters, ) -> Result { + if self.read_only { return Err(read_only_err()); } let ops = parse_frontmatter_ops(¶ms.0.operations)?; let store = self.store.lock().await; let input = crate::writer::EditFrontmatterInput { @@ -720,6 +739,7 @@ impl EngraphServer { &self, params: Parameters, ) -> Result { + if self.read_only { return Err(read_only_err()); } let store = self.store.lock().await; let preview: crate::migrate::MigrationPreview = serde_json::from_value(params.0.preview) .map_err(|e| mcp_err(&anyhow::anyhow!("Invalid preview JSON: {e}")))?; @@ -736,6 +756,7 @@ impl EngraphServer { &self, _params: Parameters, ) -> Result { + if self.read_only { return Err(read_only_err()); } let store = self.store.lock().await; let result = crate::migrate::undo_last(&store, &self.vault_path).map_err(|e| mcp_err(&e))?; @@ -747,6 +768,7 @@ impl EngraphServer { description = "Delete a note. Soft mode (default) moves it to the archive folder. Hard mode permanently removes it from disk and index." 
)] async fn delete(&self, params: Parameters) -> Result { + if self.read_only { return Err(read_only_err()); } let store = self.store.lock().await; let mode = match params.0.mode.as_deref().unwrap_or("soft") { "hard" => crate::writer::DeleteMode::Hard, @@ -802,7 +824,7 @@ pub struct HttpServeOpts { // Entry point // --------------------------------------------------------------------------- -pub async fn run_serve(data_dir: &Path, http_opts: Option) -> Result<()> { +pub async fn run_serve(data_dir: &Path, http_opts: Option, read_only: bool) -> Result<()> { if let Some(ref opts) = http_opts && opts.no_auth && opts.host != "127.0.0.1" @@ -907,6 +929,10 @@ pub async fn run_serve(data_dir: &Path, http_opts: Option) -> Res recent_writes.clone(), )?; + if read_only { + eprintln!("Read-only mode: write tools disabled"); + } + let server = EngraphServer { store: store_arc, embedder: embedder_arc, @@ -916,6 +942,7 @@ pub async fn run_serve(data_dir: &Path, http_opts: Option) -> Res orchestrator, reranker, recent_writes, + read_only, }; // Cancellation token for coordinated shutdown of HTTP + MCP @@ -935,6 +962,7 @@ pub async fn run_serve(data_dir: &Path, http_opts: Option) -> Res no_auth: opts.no_auth, recent_writes: http_recent_writes, rate_limiter: Arc::new(crate::http::RateLimiter::new(config.http.rate_limit)), + read_only, }; let router = crate::http::build_router(api_state); let addr = format!("{}:{}", opts.host, opts.port); diff --git a/src/store.rs b/src/store.rs index 42423ab..fbc3aaf 100644 --- a/src/store.rs +++ b/src/store.rs @@ -159,6 +159,15 @@ impl Store { } fn init(&self) -> Result<()> { + // Enable WAL mode for concurrent reads during writes (fixes "database is locked" + // errors with rapid MCP calls and parallel CLI + server access). + // busy_timeout makes SQLite retry for up to 5 seconds instead of failing immediately. 
+ self.conn + .execute_batch( + "PRAGMA journal_mode = WAL; + PRAGMA busy_timeout = 5000;", + ) + .context("failed to set WAL pragmas")?; self.conn .execute_batch(SCHEMA) .context("failed to initialize schema")?; @@ -1291,6 +1300,19 @@ impl Store { Ok(()) } + /// Update only the mtime (and optionally content_hash) for a file in the store. + /// Used after write operations to keep the stored mtime in sync with disk. + pub fn update_file_mtime(&self, path: &str, mtime: i64) -> Result<()> { + let rows_affected = self.conn.execute( + "UPDATE files SET mtime = ?1 WHERE path = ?2", + params![mtime, path], + )?; + if rows_affected == 0 { + anyhow::bail!("file not found in store: {}", path); + } + Ok(()) + } + // ── Vec (sqlite-vec) ──────────────────────────────────────── pub fn insert_vec(&self, vector_id: u64, embedding: &[f32]) -> Result<()> { @@ -3447,4 +3469,60 @@ mod tests { store.delete_migration("mig-001").unwrap(); assert!(store.get_migration("mig-001").unwrap().is_empty()); } + + #[test] + fn test_wal_mode_enabled() { + // In-memory databases report "memory" for journal_mode, but busy_timeout should still apply. 
+ let store = Store::open_memory().unwrap(); + let mode: String = store + .conn + .query_row("PRAGMA journal_mode", [], |row| row.get(0)) + .unwrap(); + assert!( + mode == "wal" || mode == "memory", + "expected 'wal' or 'memory', got '{mode}'" + ); + let timeout: i64 = store + .conn + .query_row("PRAGMA busy_timeout", [], |row| row.get(0)) + .unwrap(); + assert_eq!(timeout, 5000); + } + + #[test] + fn test_wal_mode_file_backed() { + let dir = tempfile::tempdir().unwrap(); + let db_path = dir.path().join("test_wal.db"); + let store = Store::open(&db_path).unwrap(); + let mode: String = store + .conn + .query_row("PRAGMA journal_mode", [], |row| row.get(0)) + .unwrap(); + assert_eq!(mode, "wal"); + let timeout: i64 = store + .conn + .query_row("PRAGMA busy_timeout", [], |row| row.get(0)) + .unwrap(); + assert_eq!(timeout, 5000); + } + + #[test] + fn test_concurrent_file_backed_access() { + // Two Store instances can open the same DB file simultaneously with WAL mode. + let dir = tempfile::tempdir().unwrap(); + let db_path = dir.path().join("test_concurrent.db"); + + let store1 = Store::open(&db_path).unwrap(); + let store2 = Store::open(&db_path).unwrap(); + + // Write with store1 + store1 + .insert_file("concurrent.md", "hash1", 1000, &[], "doc-1", None, None) + .unwrap(); + + // Read with store2 while store1 has been writing + let record = store2.get_file("concurrent.md").unwrap(); + assert!(record.is_some()); + assert_eq!(record.unwrap().content_hash, "hash1"); + } } diff --git a/src/writer.rs b/src/writer.rs index 8229661..f284566 100644 --- a/src/writer.rs +++ b/src/writer.rs @@ -1,3 +1,4 @@ +use std::collections::BTreeMap; use std::path::Path; use anyhow::{Result, bail}; @@ -114,7 +115,25 @@ pub fn generate_filename(title: &str) -> String { /// Extract a title from content: first `# heading` or first non-empty line, truncated to 50 chars. 
pub fn extract_title(content: &str) -> String { - for line in content.lines() { + // If content has frontmatter, check for a title field and skip FM for heading search + let (fm, body) = split_frontmatter(content); + if !fm.is_empty() { + // Check for title: field in frontmatter + let (scalars, _, _) = parse_frontmatter_fields(&fm); + if let Some(title) = scalars.get("title") { + let title = title.trim(); + if !title.is_empty() { + if title.len() > 50 { + return title[..50].to_string(); + } + return title.to_string(); + } + } + } + + // Search body (or full content if no FM) for heading or first non-empty line + let search_content = if fm.is_empty() { content } else { body.as_str() }; + for line in search_content.lines() { let trimmed = line.trim(); if trimmed.is_empty() { continue; @@ -213,6 +232,170 @@ pub fn split_frontmatter(content: &str) -> (String, String) { } } +/// Parse frontmatter YAML string (without the --- delimiters) into a map of +/// scalar fields plus separate lists for `tags` and `aliases`. +/// +/// Returns (scalars, tags, aliases). 
+fn parse_frontmatter_fields( + fm_block: &str, +) -> (BTreeMap, Vec, Vec) { + let mut scalars: BTreeMap = BTreeMap::new(); + let mut tags: Vec = Vec::new(); + let mut aliases: Vec = Vec::new(); + + // Strip the --- delimiters + let inner = fm_block + .trim() + .strip_prefix("---") + .unwrap_or(fm_block) + .trim_start_matches('-') + .trim(); + let inner = inner.strip_suffix("---").unwrap_or(inner).trim(); + + if inner.is_empty() { + return (scalars, tags, aliases); + } + + // Try to parse as YAML via serde_yaml + if let Ok(yaml) = serde_yaml::from_str::(inner) { + if let Some(map) = yaml.as_mapping() { + for (k, v) in map { + let key = match k.as_str() { + Some(s) => s.to_string(), + None => continue, + }; + match key.as_str() { + "tags" => { + if let Some(seq) = v.as_sequence() { + for item in seq { + if let Some(s) = item.as_str() { + tags.push(s.to_string()); + } + } + } else if let Some(s) = v.as_str() { + // Handle inline `tags: foo` or `tags: [a, b]` parsed as string + for t in s.split(',') { + let t = t.trim(); + if !t.is_empty() { + tags.push(t.to_string()); + } + } + } + } + "aliases" => { + if let Some(seq) = v.as_sequence() { + for item in seq { + if let Some(s) = item.as_str() { + aliases.push(s.to_string()); + } + } + } else if let Some(s) = v.as_str() { + for a in s.split(',') { + let a = a.trim(); + if !a.is_empty() { + aliases.push(a.to_string()); + } + } + } + } + _ => { + // Serialize value back to a string representation + let val_str = match v { + serde_yaml::Value::String(s) => s.clone(), + serde_yaml::Value::Number(n) => n.to_string(), + serde_yaml::Value::Bool(b) => b.to_string(), + serde_yaml::Value::Null => String::new(), + other => { + // serde_yaml may parse dates/timestamps as tagged + // values. Serialize and clean up the output. 
+ let raw = serde_yaml::to_string(other) + .unwrap_or_default() + .trim_start_matches("---") + .trim() + .to_string(); + // Strip YAML sequence prefix artifacts (e.g., "- - 2026-03-31" → "2026-03-31") + let cleaned = raw.trim_start_matches("- ").trim().to_string(); + cleaned + } + }; + if !val_str.is_empty() { + scalars.insert(key, val_str); + } + } + } + } + } + } + + (scalars, tags, aliases) +} + +/// Build a merged frontmatter block from auto-generated fields + user-provided fields. +/// +/// - `tags` and `aliases` are merged (deduplicated), user values included +/// - `created` and `created_by` always use auto-generated values +/// - All other user fields are passed through +fn build_merged_frontmatter( + auto_tags: &[String], + created_by: Option<&str>, + suggestion: Option<&PlacementSuggestion>, + user_scalars: &BTreeMap, + user_tags: &[String], + user_aliases: &[String], +) -> String { + // Merge tags: auto first, then user, deduplicated + let mut merged_tags: Vec = auto_tags.to_vec(); + for t in user_tags { + if !merged_tags.iter().any(|existing| existing == t) { + merged_tags.push(t.clone()); + } + } + + // Merge aliases: just user aliases (auto has none by default from create_note) + let merged_aliases: Vec = user_aliases.to_vec(); + + let mut fm = String::from("---\n"); + + if !merged_tags.is_empty() { + fm.push_str("tags:\n"); + for tag in &merged_tags { + fm.push_str(&format!(" - {}\n", tag)); + } + } + + if !merged_aliases.is_empty() { + fm.push_str("aliases:\n"); + for alias in &merged_aliases { + fm.push_str(&format!(" - {}\n", alias)); + } + } + + // Always auto-generated + fm.push_str(&format!("created: {}\n", today_date())); + + if let Some(by) = created_by { + fm.push_str(&format!("created_by: {}\n", by)); + } + + // User scalar fields (skip created/created_by — always auto-generated) + for (key, val) in user_scalars { + match key.as_str() { + "created" | "created_by" => continue, + _ => fm.push_str(&format!("{}: {}\n", key, val)), + } + } + + 
// Placement suggestion for inbox notes + if let Some(s) = suggestion { + fm.push_str(&format!("suggested_folder: {}\n", s.suggested_folder)); + fm.push_str(&format!("confidence: {:.2}\n", s.confidence)); + fm.push_str(&format!("reason: \"{}\"\n", s.reason)); + } + + fm.push_str("---\n\n"); + fm +} + /// Returns today's date as "YYYY-MM-DD". pub fn today_date() -> String { let now = OffsetDateTime::now_utc(); @@ -356,33 +539,8 @@ pub fn create_note( }) .collect(); - // Apply auto-apply links to content — wrap matched text in [[wikilinks]] - let mut content_with_links = input.content.clone(); - // Apply in reverse order of position to preserve offsets - let mut replacements: Vec<(usize, usize, String)> = Vec::new(); - let content_lower = content_with_links.to_lowercase(); - for link in &auto_apply { - let search_lower = link.matched_text.to_lowercase(); - if let Some(pos) = content_lower.find(&search_lower) { - let end = pos + link.matched_text.len(); - let original_text = &content_with_links[pos..end]; - let wikilink = if let Some(ref display) = link.display { - format!( - "[[{}|{}]]", - link.target_path.trim_end_matches(".md"), - display - ) - } else { - format!("[[{}]]", original_text) - }; - replacements.push((pos, end, wikilink)); - } - } - // Sort by position descending so replacements don't shift offsets - replacements.sort_by(|a, b| b.0.cmp(&a.0)); - for (start, end, replacement) in replacements { - content_with_links.replace_range(start..end, &replacement); - } + // Apply auto-apply links to content via apply_links (respects protected regions) + let content_with_links = links::apply_links(&input.content, &auto_apply); // Step 4: Determine folder placement let placement_result = if let Some(ref folder) = input.folder { @@ -402,6 +560,14 @@ pub fn create_note( }; // Step 5: Build frontmatter and assemble content + // Split user frontmatter from body so we can merge instead of duplicate + let (user_fm, body) = split_frontmatter(&content_with_links); + let 
(user_scalars, user_tags, user_aliases) = if !user_fm.is_empty() { + parse_frontmatter_fields(&user_fm) + } else { + (BTreeMap::new(), Vec::new(), Vec::new()) + }; + // If placement fell back to inbox with a suggestion, inject suggested_folder metadata let suggestion = if placement_result.strategy == placement::PlacementStrategy::InboxFallback { placement_result @@ -415,13 +581,15 @@ pub fn create_note( } else { None }; - let frontmatter = build_frontmatter( + let frontmatter = build_merged_frontmatter( &resolved_tags, Some(&input.created_by), - None, suggestion.as_ref(), + &user_scalars, + &user_tags, + &user_aliases, ); - let full_content = format!("{}{}", frontmatter, content_with_links); + let full_content = format!("{}{}", frontmatter, body); let rel_path = format!("{}/{}", placement_result.folder, filename); let final_path = vault_path.join(&rel_path); @@ -825,7 +993,11 @@ pub fn edit_note( // Step 6: Write atomically (overwrite = true) atomic_write(&full_path, &new_content, true)?; - // Step 7: Return EditResult + // Step 7: Update stored mtime to match actual file after write + let actual_mtime = file_mtime(&full_path).unwrap_or(0); + store.update_file_mtime(&file_record.path, actual_mtime)?; + + // Step 8: Return EditResult Ok(EditResult { path: file_record.path, heading: input.heading.clone(), @@ -869,7 +1041,11 @@ pub fn rewrite_note(store: &Store, vault_path: &Path, input: &RewriteInput) -> R // Step 5: Write atomically (overwrite = true) atomic_write(&full_path, &new_content, true)?; - // Step 6: Return EditResult (reusing existing result type) + // Step 6: Update stored mtime to match actual file after write + let actual_mtime = file_mtime(&full_path).unwrap_or(0); + store.update_file_mtime(&file_record.path, actual_mtime)?; + + // Step 7: Return EditResult (reusing existing result type) Ok(EditResult { path: file_record.path, heading: String::new(), @@ -2003,4 +2179,314 @@ mod tests { assert!(store.get_file("gone.md").unwrap().is_none()); drop(tmp); 
    }

    // ── Frontmatter merge tests ────────────────────────────────────

    // Merging user FM into the auto-generated block must produce exactly one
    // `---` pair, with the user body preserved after it.
    #[test]
    fn test_merge_user_frontmatter_produces_single_block() {
        let user_content = "---\ntitle: My Note\ntags:\n - project\n - work\n---\n\n# My Note content\n";
        let (user_fm, body) = split_frontmatter(user_content);
        assert!(!user_fm.is_empty());

        let (user_scalars, user_tags, user_aliases) = parse_frontmatter_fields(&user_fm);
        let auto_tags = vec!["project".to_string()];

        let merged = build_merged_frontmatter(
            &auto_tags,
            Some("mcp"),
            None,
            &user_scalars,
            &user_tags,
            &user_aliases,
        );
        let full = format!("{}{}", merged, body);

        // Count frontmatter blocks: should be exactly one
        let fm_count = full.matches("\n---\n").count();
        // The opening ---\n at the start + the closing \n---\n = pattern appears once for closing
        assert!(full.starts_with("---\n"));
        assert_eq!(fm_count, 1, "Should have exactly one closing --- delimiter");
        assert!(full.contains("# My Note content"));
    }

    // Tags present in both auto-generated and user lists must appear once.
    #[test]
    fn test_merge_tags_deduplicated() {
        let user_fm = "---\ntags:\n - project\n - work\n - rust\n---\n";
        let (user_scalars, user_tags, user_aliases) = parse_frontmatter_fields(user_fm);
        let auto_tags = vec!["project".to_string(), "engraph".to_string()];

        let merged = build_merged_frontmatter(
            &auto_tags,
            Some("mcp"),
            None,
            &user_scalars,
            &user_tags,
            &user_aliases,
        );

        // "project" should appear once, "engraph" from auto, "work" and "rust" from user
        let tag_lines: Vec<&str> = merged.lines().filter(|l| l.starts_with(" - ")).collect();
        assert_eq!(tag_lines.len(), 4);
        assert!(merged.contains(" - project\n"));
        assert!(merged.contains(" - engraph\n"));
        assert!(merged.contains(" - work\n"));
        assert!(merged.contains(" - rust\n"));

        // "project" tag line should appear only once
        let project_count = merged.matches(" - project\n").count();
        assert_eq!(project_count, 1, "Duplicate tag 'project' should be deduplicated");
    }

    // Arbitrary user scalar fields (title/status/priority) pass through merge.
    #[test]
    fn test_merge_preserves_user_custom_fields() {
        let user_fm = "---\ntitle: My Project\nstatus: active\npriority: high\ntags:\n - work\n---\n";
        let (user_scalars, user_tags, user_aliases) = parse_frontmatter_fields(user_fm);
        let auto_tags = vec!["project".to_string()];

        let merged = build_merged_frontmatter(
            &auto_tags,
            Some("mcp"),
            None,
            &user_scalars,
            &user_tags,
            &user_aliases,
        );

        assert!(merged.contains("title: My Project"));
        assert!(merged.contains("status: active"));
        assert!(merged.contains("priority: high"));
        assert!(merged.contains(" - work"));
        assert!(merged.contains(" - project"));
    }

    // `created` / `created_by` are authoritative auto fields: user-supplied
    // values must be dropped, while other user scalars survive.
    #[test]
    fn test_merge_created_always_auto_generated() {
        let user_fm = "---\ncreated: 2020-01-01\ncreated_by: user\ntitle: Test\n---\n";
        let (user_scalars, user_tags, user_aliases) = parse_frontmatter_fields(user_fm);
        let auto_tags = vec![];

        let merged = build_merged_frontmatter(
            &auto_tags,
            Some("mcp"),
            None,
            &user_scalars,
            &user_tags,
            &user_aliases,
        );

        // created should be today's date, not 2020-01-01
        assert!(!merged.contains("2020-01-01"));
        assert!(merged.contains(&format!("created: {}", today_date())));
        // created_by should be "mcp", not "user"
        assert!(merged.contains("created_by: mcp"));
        assert!(!merged.contains("created_by: user"));
        // But title should still be preserved
        assert!(merged.contains("title: Test"));
    }

    // Content with no FM block gets only the auto-generated frontmatter;
    // the body must be untouched.
    #[test]
    fn test_merge_content_without_frontmatter_unchanged() {
        let content = "# Just a heading\n\nSome body text.\n";
        let (user_fm, body) = split_frontmatter(content);
        assert!(user_fm.is_empty());

        let (user_scalars, user_tags, user_aliases) = parse_frontmatter_fields(&user_fm);
        let auto_tags = vec!["inbox".to_string()];

        let merged = build_merged_frontmatter(
            &auto_tags,
            Some("mcp"),
            None,
            &user_scalars,
            &user_tags,
            &user_aliases,
        );
        let full = format!("{}{}", merged, body);

        // Should have frontmatter from auto-gen only
        assert!(full.starts_with("---\n"));
        assert!(full.contains(" - inbox"));
        assert!(full.contains("created_by: mcp"));
        // Body should be intact
        assert!(full.contains("# Just a heading"));
        assert!(full.contains("Some body text."));
    }

    // User aliases survive the merge alongside merged tags.
    #[test]
    fn test_merge_user_aliases_preserved() {
        let user_fm = "---\naliases:\n - My Alias\n - Another Name\ntags:\n - test\n---\n";
        let (user_scalars, user_tags, user_aliases) = parse_frontmatter_fields(user_fm);
        let auto_tags = vec!["auto".to_string()];

        let merged = build_merged_frontmatter(
            &auto_tags,
            Some("mcp"),
            None,
            &user_scalars,
            &user_tags,
            &user_aliases,
        );

        assert!(merged.contains("aliases:"));
        assert!(merged.contains(" - My Alias"));
        assert!(merged.contains(" - Another Name"));
        assert!(merged.contains(" - test"));
        assert!(merged.contains(" - auto"));
    }

    // Empty input must yield empty scalars/tags/aliases, not an error.
    #[test]
    fn test_parse_frontmatter_fields_empty() {
        let (scalars, tags, aliases) = parse_frontmatter_fields("");
        assert!(scalars.is_empty());
        assert!(tags.is_empty());
        assert!(aliases.is_empty());
    }

    // Regression: edit_note must sync the stored mtime after writing, so a
    // follow-up append does not see a false "file changed on disk" conflict.
    #[test]
    fn test_edit_then_append_no_mtime_conflict() {
        use crate::llm::MockLlm;

        let (_tmp, store, root) = setup_vault();
        let mut embedder = MockLlm::new(256);

        // Create a note on disk
        let content = "# Test Note\n\n## Section\n\nOriginal content\n";
        let file_path = root.join("mtime-test.md");
        std::fs::write(&file_path, content).unwrap();

        // Register in store with the ACTUAL mtime from disk
        let mtime = file_mtime(&file_path).unwrap();
        store
            .insert_file("mtime-test.md", "hash", mtime, &[], "mt123", None, None)
            .unwrap();

        // Step 1: edit_note modifies the file
        let edit_input = EditInput {
            file: "mtime-test.md".into(),
            heading: "Section".into(),
            content: "Edited content".into(),
            mode: EditMode::Replace,
            modified_by: "test".into(),
        };
        edit_note(&store, &root, &edit_input, None).unwrap();

        // Step 2: append_to_note immediately after — should NOT fail with mtime conflict
        let append_input = AppendInput {
            file: "mtime-test.md".into(),
            content: "\n## Appended\n\nAppended content\n".into(),
            modified_by: "test".into(),
        };
        let result = append_to_note(append_input, &store, &mut embedder, &root);
        assert!(
            result.is_ok(),
            "append after edit should not fail with mtime conflict, got: {:?}",
            result.err()
        );

        // Verify both edits are present
        let final_content = std::fs::read_to_string(&file_path).unwrap();
        assert!(final_content.contains("Edited content"));
        assert!(final_content.contains("Appended content"));
    }

    // Regression: rewrite_note must sync the stored mtime after writing
    // (same false-conflict scenario as the edit_note test above).
    #[test]
    fn test_rewrite_then_append_no_mtime_conflict() {
        use crate::llm::MockLlm;

        let (_tmp, store, root) = setup_vault();
        let mut embedder = MockLlm::new(256);

        // Create a note on disk with frontmatter
        let content = "---\ntags:\n - test\n---\n\n# Rewrite Test\n\nOriginal body\n";
        let file_path = root.join("rewrite-mtime.md");
        std::fs::write(&file_path, content).unwrap();

        // Register with actual mtime
        let mtime = file_mtime(&file_path).unwrap();
        store
            .insert_file(
                "rewrite-mtime.md",
                "hash",
                mtime,
                &["test".to_string()],
                "rwmt1",
                None,
                None,
            )
            .unwrap();

        // Step 1: rewrite_note modifies the file
        let rewrite_input = RewriteInput {
            file: "rewrite-mtime.md".into(),
            content: "# Rewritten\n\nNew body\n".into(),
            preserve_frontmatter: true,
            modified_by: "test".into(),
        };
        rewrite_note(&store, &root, &rewrite_input).unwrap();

        // Step 2: append_to_note immediately after — should NOT fail with mtime conflict
        let append_input = AppendInput {
            file: "rewrite-mtime.md".into(),
            content: "\n## Extra\n\nMore content\n".into(),
            modified_by: "test".into(),
        };
        let result = append_to_note(append_input, &store, &mut embedder, &root);
        assert!(
            result.is_ok(),
            "append after rewrite should not fail with mtime conflict, got: {:?}",
            result.err()
        );

        let final_content = std::fs::read_to_string(&file_path).unwrap();
        assert!(final_content.contains("New body"));
        assert!(final_content.contains("More content"));
    }

    // Regression: edit_frontmatter must also keep the stored mtime in sync,
    // so chained writes through different code paths don't conflict.
    #[test]
    fn test_edit_frontmatter_then_append_no_mtime_conflict() {
        use crate::llm::MockLlm;

        let (_tmp, store, root) = setup_vault();
        let mut embedder = MockLlm::new(256);

        // Create a note on disk
        let content = "---\ntags:\n - original\n---\n\n# FM Test\n\nBody\n";
        let file_path = root.join("fm-mtime.md");
        std::fs::write(&file_path, content).unwrap();

        // Register with actual mtime
        let mtime = file_mtime(&file_path).unwrap();
        store
            .insert_file(
                "fm-mtime.md",
                "hash",
                mtime,
                &["original".to_string()],
                "fmmt1",
                None,
                None,
            )
            .unwrap();

        // Step 1: edit_frontmatter modifies the file
        let fm_input = EditFrontmatterInput {
            file: "fm-mtime.md".into(),
            operations: vec![FrontmatterOp::AddTag("added".into())],
            modified_by: "test".into(),
        };
        edit_frontmatter(&store, &root, &fm_input).unwrap();

        // Step 2: append_to_note immediately after — should NOT fail with mtime conflict
        let append_input = AppendInput {
            file: "fm-mtime.md".into(),
            content: "\n## Appended\n\nMore\n".into(),
            modified_by: "test".into(),
        };
        let result = append_to_note(append_input, &store, &mut embedder, &root);
        assert!(
            result.is_ok(),
            "append after edit_frontmatter should not fail with mtime conflict, got: {:?}",
            result.err()
        );
    }
}