Skip to content

Commit 84429e6

Browse files
authored
Merge main into docs/claude-code-architecture-analysis
2 parents 14e3339 + 13665d4 commit 84429e6

5 files changed

Lines changed: 69 additions & 48 deletions

File tree

Cargo.lock

Lines changed: 1 addition & 1 deletion
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

crates/codegraph-core/src/build_pipeline.rs

Lines changed: 16 additions & 5 deletions
Original file line number | Diff line number | Diff line change
@@ -117,8 +117,16 @@ pub fn run_pipeline(
117117

118118
// ── Stage 2: Collect files ─────────────────────────────────────────
119119
let t0 = Instant::now();
120+
// For scoped builds, track all scoped relative paths (including deleted files)
121+
// so detect_removed_files only flags scoped files as removed, not everything.
122+
let scoped_rel_paths: Option<HashSet<String>> = opts.scope.as_ref().map(|scope| {
123+
scope
124+
.iter()
125+
.map(|f| normalize_path(f))
126+
.collect()
127+
});
120128
let collect_result = if let Some(ref scope) = opts.scope {
121-
// Scoped rebuild
129+
// Scoped rebuild — only collect files that exist on disk
122130
let files: Vec<String> = scope
123131
.iter()
124132
.map(|f| {
@@ -179,6 +187,7 @@ pub fn run_pipeline(
179187
root_dir,
180188
incremental,
181189
force_full_rebuild,
190+
scoped_rel_paths.as_ref(),
182191
);
183192
timing.detect_ms = t0.elapsed().as_secs_f64() * 1000.0;
184193

@@ -292,7 +301,8 @@ pub fn run_pipeline(
292301
let mut batch_inputs: Vec<ImportResolutionInput> = Vec::new();
293302
for (rel_path, symbols) in &file_symbols {
294303
let abs_file = Path::new(root_dir).join(rel_path);
295-
let abs_str = abs_file.to_str().unwrap_or("").to_string();
304+
// Normalize to forward slashes so batch_resolved keys match Stage 6b lookups on Windows.
305+
let abs_str = abs_file.to_str().unwrap_or("").replace('\\', "/");
296306
for imp in &symbols.imports {
297307
batch_inputs.push(ImportResolutionInput {
298308
from_file: abs_str.clone(),
@@ -416,8 +426,9 @@ pub fn run_pipeline(
416426
rusqlite::params![&rel],
417427
);
418428
// Re-resolve imports for the barrel file
429+
// Normalize to forward slashes so batch_resolved keys match get_resolved lookups on Windows.
419430
let abs_str =
420-
Path::new(root_dir).join(&rel).to_str().unwrap_or("").to_string();
431+
Path::new(root_dir).join(&rel).to_str().unwrap_or("").replace('\\', "/");
421432
for imp in &sym.imports {
422433
let input = ImportResolutionInput {
423434
from_file: abs_str.clone(),
@@ -549,7 +560,7 @@ pub fn run_pipeline(
549560

550561
// Persist build metadata
551562
let version = env!("CARGO_PKG_VERSION");
552-
let meta_sql = "INSERT OR REPLACE INTO metadata (key, value) VALUES (?, ?)";
563+
let meta_sql = "INSERT OR REPLACE INTO build_meta (key, value) VALUES (?, ?)";
553564
if let Ok(mut stmt) = conn.prepare(meta_sql) {
554565
let _ = stmt.execute(["engine", "native"]);
555566
let _ = stmt.execute(["engine_version", version]);
@@ -593,7 +604,7 @@ pub fn run_pipeline(
593604
/// Check if engine/schema/version changed since last build (forces full rebuild).
594605
fn check_version_mismatch(conn: &Connection) -> bool {
595606
let get_meta = |key: &str| -> Option<String> {
596-
conn.query_row("SELECT value FROM metadata WHERE key = ?", [key], |row| {
607+
conn.query_row("SELECT value FROM build_meta WHERE key = ?", [key], |row| {
597608
row.get(0)
598609
})
599610
.ok()

crates/codegraph-core/src/change_detection.rs

Lines changed: 48 additions & 40 deletions
Original file line number | Diff line number | Diff line change
@@ -125,10 +125,15 @@ fn load_file_hashes(conn: &Connection) -> Option<HashMap<String, FileHashRow>> {
125125
}
126126

127127
/// Detect removed files: files in DB but not in current file list.
128+
///
129+
/// When `scoped_rel_paths` is provided (scoped rebuild), only files within that
130+
/// scope are considered candidates for removal. Without it, all DB files not
131+
/// found on disk are treated as removed.
128132
fn detect_removed_files(
129133
existing: &HashMap<String, FileHashRow>,
130134
all_files: &[String],
131135
root_dir: &str,
136+
scoped_rel_paths: Option<&HashSet<String>>,
132137
) -> Vec<String> {
133138
let current: HashSet<String> = all_files
134139
.iter()
@@ -137,7 +142,14 @@ fn detect_removed_files(
137142

138143
existing
139144
.keys()
140-
.filter(|f| !current.contains(*f))
145+
.filter(|f| {
146+
// When scope is set, only consider files within scope as candidates.
147+
if let Some(scope) = scoped_rel_paths {
148+
scope.contains(*f) && !current.contains(*f)
149+
} else {
150+
!current.contains(*f)
151+
}
152+
})
141153
.cloned()
142154
.collect()
143155
}
@@ -371,6 +383,10 @@ pub fn find_reverse_dependencies(
371383
}
372384

373385
/// Purge graph data for changed/removed files and delete outgoing edges for reverse deps.
386+
///
387+
/// Deletion order: analysis dependents → edges → nodes (matches `native_db::purge_files_data`).
388+
/// Analysis tables use join-based queries (node_id IN SELECT id FROM nodes) because they
389+
/// reference nodes by ID, not by file path directly.
374390
pub fn purge_changed_files(
375391
conn: &Connection,
376392
files_to_purge: &[String],
@@ -385,48 +401,36 @@ pub fn purge_changed_files(
385401
Err(_) => return,
386402
};
387403

388-
// Purge nodes and edges for changed/removed files
389-
if !files_to_purge.is_empty() {
390-
// Delete edges where source or target is in the purged files
391-
if let Ok(mut stmt) =
392-
tx.prepare("DELETE FROM edges WHERE source_id IN (SELECT id FROM nodes WHERE file = ?)")
393-
{
394-
for f in files_to_purge {
395-
let _ = stmt.execute([f]);
396-
}
397-
}
398-
if let Ok(mut stmt) =
399-
tx.prepare("DELETE FROM edges WHERE target_id IN (SELECT id FROM nodes WHERE file = ?)")
400-
{
401-
for f in files_to_purge {
402-
let _ = stmt.execute([f]);
403-
}
404-
}
405-
// Delete nodes
406-
if let Ok(mut stmt) = tx.prepare("DELETE FROM nodes WHERE file = ?") {
407-
for f in files_to_purge {
408-
let _ = stmt.execute([f]);
409-
}
410-
}
411-
// Delete analysis data
412-
for table in &[
413-
"function_complexity",
414-
"cfg_blocks",
415-
"cfg_edges",
416-
"dataflow",
417-
"ast_nodes",
418-
"node_metrics",
419-
] {
420-
let sql = format!("DELETE FROM {table} WHERE file = ?");
421-
if let Ok(mut stmt) = tx.prepare(&sql) {
422-
for f in files_to_purge {
423-
let _ = stmt.execute([f]);
404+
// Purge each file across all tables. Optional tables are silently skipped
405+
// if they don't exist. Order: analysis dependents → edges → nodes.
406+
let purge_sql: &[(&str, bool)] = &[
407+
// Analysis tables (optional — may not exist)
408+
("DELETE FROM embeddings WHERE node_id IN (SELECT id FROM nodes WHERE file = ?1)", false),
409+
("DELETE FROM cfg_edges WHERE function_node_id IN (SELECT id FROM nodes WHERE file = ?1)", false),
410+
("DELETE FROM cfg_blocks WHERE function_node_id IN (SELECT id FROM nodes WHERE file = ?1)", false),
411+
("DELETE FROM dataflow WHERE source_id IN (SELECT id FROM nodes WHERE file = ?1) OR target_id IN (SELECT id FROM nodes WHERE file = ?1)", false),
412+
("DELETE FROM function_complexity WHERE node_id IN (SELECT id FROM nodes WHERE file = ?1)", false),
413+
("DELETE FROM node_metrics WHERE node_id IN (SELECT id FROM nodes WHERE file = ?1)", false),
414+
("DELETE FROM ast_nodes WHERE file = ?1", false),
415+
// Core tables (errors logged)
416+
("DELETE FROM edges WHERE source_id IN (SELECT id FROM nodes WHERE file = ?1) OR target_id IN (SELECT id FROM nodes WHERE file = ?1)", true),
417+
("DELETE FROM nodes WHERE file = ?1", true),
418+
];
419+
420+
for file in files_to_purge {
421+
for &(sql, required) in purge_sql {
422+
match tx.execute(sql, rusqlite::params![file]) {
423+
Ok(_) => {}
424+
Err(e) if required => {
425+
eprintln!("[codegraph] purge failed for \"{file}\": {e}");
424426
}
427+
Err(_) => {} // optional table missing — skip
425428
}
426429
}
427430
}
428431

429-
// Delete outgoing edges for reverse-dep files (they'll be re-built)
432+
// Delete outgoing edges for reverse-dep files (they'll be re-built).
433+
// These files keep their nodes but need outgoing edges rebuilt.
430434
if !reverse_dep_files.is_empty() {
431435
if let Ok(mut stmt) =
432436
tx.prepare("DELETE FROM edges WHERE source_id IN (SELECT id FROM nodes WHERE file = ?)")
@@ -487,12 +491,16 @@ pub fn heal_metadata(conn: &Connection, updates: &[MetadataUpdate]) {
487491
/// Main entry point: detect changes using the tiered strategy.
488492
///
489493
/// Returns `None` for full builds (no file_hashes table or force flag).
494+
///
495+
/// When `scoped_rel_paths` is provided, removal detection is limited to files
496+
/// within that scope — non-scoped files in the DB are left untouched.
490497
pub fn detect_changes(
491498
conn: &Connection,
492499
all_files: &[String],
493500
root_dir: &str,
494501
incremental: bool,
495502
force_full_rebuild: bool,
503+
scoped_rel_paths: Option<&HashSet<String>>,
496504
) -> ChangeResult {
497505
if !incremental || force_full_rebuild {
498506
return ChangeResult {
@@ -539,7 +547,7 @@ pub fn detect_changes(
539547
}
540548
};
541549

542-
let removed = detect_removed_files(&existing, all_files, root_dir);
550+
let removed = detect_removed_files(&existing, all_files, root_dir, scoped_rel_paths);
543551

544552
// Try Tier 0 (journal) first
545553
if let Some(result) = try_journal_tier(conn, &existing, root_dir, &removed) {
@@ -597,7 +605,7 @@ mod tests {
597605
);
598606

599607
let all_files = vec!["/project/src/a.ts".to_string()];
600-
let removed = detect_removed_files(&existing, &all_files, "/project");
608+
let removed = detect_removed_files(&existing, &all_files, "/project", None);
601609
assert_eq!(removed, vec!["src/b.ts"]);
602610
}
603611
}

crates/codegraph-core/src/import_edges.rs

Lines changed: 3 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -50,7 +50,9 @@ pub struct EdgeRow {
5050
impl ImportEdgeContext {
5151
/// Resolve an import source to a relative path, using batch cache first.
5252
pub fn get_resolved(&self, abs_file: &str, import_source: &str) -> String {
53-
let key = format!("{abs_file}|{import_source}");
53+
// Normalize to forward slashes so cache keys match across platforms (#826).
54+
let normalized = abs_file.replace('\\', "/");
55+
let key = format!("{normalized}|{import_source}");
5456
if let Some(hit) = self.batch_resolved.get(&key) {
5557
return hit.clone();
5658
}

src/domain/graph/builder/pipeline.ts

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -271,7 +271,7 @@ interface NativeOrchestratorResult {
271271
function shouldSkipNativeOrchestrator(ctx: PipelineContext): string | null {
272272
if (process.env.CODEGRAPH_FORCE_JS_PIPELINE === '1') return 'CODEGRAPH_FORCE_JS_PIPELINE=1';
273273
if (ctx.forceFullRebuild) return 'forceFullRebuild';
274-
const orchestratorBuggy = !!ctx.engineVersion && semverCompare(ctx.engineVersion, '3.8.1') <= 0;
274+
const orchestratorBuggy = !!ctx.engineVersion && semverCompare(ctx.engineVersion, '3.9.0') <= 0;
275275
if (orchestratorBuggy) return `buggy addon ${ctx.engineVersion}`;
276276
if (ctx.engineName !== 'native') return `engine=${ctx.engineName}`;
277277
return null;

0 commit comments

Comments (0)