From 7122362e1d1c23d7f23735468c63f1dc808aff3c Mon Sep 17 00:00:00 2001 From: kgilpin Date: Fri, 1 May 2026 14:00:02 -0400 Subject: [PATCH 01/30] feat(cli): add query DB schema and open helpers MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Phase 1 of the V3 query plan: ports the 9-table APM schema from the appmap-apm prototype and adds openQueryDb / queryDbPath helpers with WAL, foreign keys, and user_version stamping. No CLI wiring yet — schema and lifecycle only, with unit tests. Co-Authored-By: Claude Opus 4.7 (1M context) --- packages/cli/src/cmds/query/db/index.ts | 3 + packages/cli/src/cmds/query/db/openQueryDb.ts | 55 ++++++ packages/cli/src/cmds/query/db/path.ts | 21 +++ packages/cli/src/cmds/query/db/schema.ts | 174 ++++++++++++++++++ .../unit/cmds/query/db/openQueryDb.spec.ts | 119 ++++++++++++ .../cli/tests/unit/cmds/query/db/path.spec.ts | 46 +++++ 6 files changed, 418 insertions(+) create mode 100644 packages/cli/src/cmds/query/db/index.ts create mode 100644 packages/cli/src/cmds/query/db/openQueryDb.ts create mode 100644 packages/cli/src/cmds/query/db/path.ts create mode 100644 packages/cli/src/cmds/query/db/schema.ts create mode 100644 packages/cli/tests/unit/cmds/query/db/openQueryDb.spec.ts create mode 100644 packages/cli/tests/unit/cmds/query/db/path.spec.ts diff --git a/packages/cli/src/cmds/query/db/index.ts b/packages/cli/src/cmds/query/db/index.ts new file mode 100644 index 0000000000..31386cd996 --- /dev/null +++ b/packages/cli/src/cmds/query/db/index.ts @@ -0,0 +1,3 @@ +export { SCHEMA, SCHEMA_VERSION, SCHEMA_TABLES } from './schema'; +export { queryDbPath, QUERY_DB_FILENAME, QUERY_DB_ENV } from './path'; +export { openQueryDb, OpenQueryDbResult } from './openQueryDb'; diff --git a/packages/cli/src/cmds/query/db/openQueryDb.ts b/packages/cli/src/cmds/query/db/openQueryDb.ts new file mode 100644 index 0000000000..f5c4117161 --- /dev/null +++ b/packages/cli/src/cmds/query/db/openQueryDb.ts @@ -0,0 
+1,55 @@ +import { mkdirSync } from 'fs'; +import { dirname } from 'path'; + +import sqlite3 from 'better-sqlite3'; + +import { SCHEMA, SCHEMA_TABLES, SCHEMA_VERSION } from './schema'; +import { queryDbPath } from './path'; + +export interface OpenQueryDbResult { + db: sqlite3.Database; + path: string; + version: number; + rebuilt: boolean; +} + +// Open the query DB for the given appmap directory, ensuring its schema is +// at SCHEMA_VERSION. Creates the parent directory and the file if missing. +// Drops and rebuilds all schema tables if the on-disk version doesn't match. +// +// `dbPath` overrides path derivation (used by tests). +export function openQueryDb(appmapDir: string, dbPath?: string): OpenQueryDbResult { + const path = dbPath ?? queryDbPath(appmapDir); + mkdirSync(dirname(path), { recursive: true }); + + const db = sqlite3(path); + db.pragma('journal_mode = WAL'); + db.pragma('foreign_keys = ON'); + db.pragma('busy_timeout = 5000'); + + const currentVersion = db.pragma('user_version', { simple: true }) as number; + let rebuilt = false; + + if (currentVersion === 0) { + db.exec(SCHEMA); + db.pragma(`user_version = ${SCHEMA_VERSION}`); + } else if (currentVersion !== SCHEMA_VERSION) { + rebuildSchema(db); + rebuilt = true; + } + + return { db, path, version: SCHEMA_VERSION, rebuilt }; +} + +function rebuildSchema(db: sqlite3.Database): void { + const tx = db.transaction(() => { + db.pragma('foreign_keys = OFF'); + for (const table of SCHEMA_TABLES) { + db.exec(`DROP TABLE IF EXISTS ${table}`); + } + db.exec(SCHEMA); + db.pragma(`user_version = ${SCHEMA_VERSION}`); + db.pragma('foreign_keys = ON'); + }); + tx(); +} diff --git a/packages/cli/src/cmds/query/db/path.ts b/packages/cli/src/cmds/query/db/path.ts new file mode 100644 index 0000000000..074ed7789d --- /dev/null +++ b/packages/cli/src/cmds/query/db/path.ts @@ -0,0 +1,21 @@ +import { createHash } from 'crypto'; +import { homedir } from 'os'; +import { join, resolve } from 'path'; + +export const 
QUERY_DB_FILENAME = 'query.db';
+export const QUERY_DB_ENV = 'APPMAP_QUERY_DB';
+
+// Derive the on-disk path for the query DB that corresponds to the given
+// appmap directory. The path is rooted at `~/.appmap/data/<id>/query.db`,
+// where `<id>` is the first 12 hex characters of the SHA-256 digest of
+// the resolved directory path. Honors APPMAP_QUERY_DB as a full-path
+// override.
+//
+// Pure: returns the path without creating any directories.
+export function queryDbPath(appmapDir: string): string {
+  const override = process.env[QUERY_DB_ENV];
+  if (override) return override;
+
+  const id = createHash('sha256').update(resolve(appmapDir)).digest('hex').slice(0, 12);
+  return join(homedir(), '.appmap', 'data', id, QUERY_DB_FILENAME);
+}
diff --git a/packages/cli/src/cmds/query/db/schema.ts b/packages/cli/src/cmds/query/db/schema.ts
new file mode 100644
index 0000000000..cb148c1007
--- /dev/null
+++ b/packages/cli/src/cmds/query/db/schema.ts
@@ -0,0 +1,174 @@
+// SQLite schema for AppMap APM data.
+//
+// Denormalizes AppMap events into APM-oriented tables optimized for the
+// queries an APM dashboard or LLM agent needs. Ported from appmap-apm
+// (server/db/schema.py); shape preserved unchanged.
+
+export const SCHEMA_VERSION = 1;
+
+export const SCHEMA = `
+CREATE TABLE IF NOT EXISTS appmaps (
+  id INTEGER PRIMARY KEY AUTOINCREMENT,
+  name TEXT NOT NULL,
+  source_path TEXT NOT NULL UNIQUE,
+  language TEXT,
+  framework TEXT,
+  recorder_type TEXT,
+  git_repository TEXT,
+  git_branch TEXT,
+  git_commit TEXT,
+  timestamp TEXT,
+  event_count INTEGER NOT NULL DEFAULT 0,
+  sql_query_count INTEGER NOT NULL DEFAULT 0,
+  http_request_count INTEGER NOT NULL DEFAULT 0,
+  elapsed_ms REAL,
+  metadata_labels TEXT -- JSON array of metadata-level labels
+);
+
+-- Code objects from classMap entries (one per unique instrumented function).
+-- This is a lookup table for stable fqids — it intentionally does NOT store +-- path, lineno, or location because those can vary across appmaps (e.g. when +-- the same function is recorded from different branches or revisions). +-- Per-recording location data lives on function_calls instead. +CREATE TABLE IF NOT EXISTS code_objects ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + fqid TEXT NOT NULL UNIQUE, -- stable ID: package/Class#method or package/Class.method + defined_class TEXT NOT NULL, + method_id TEXT NOT NULL +); + +CREATE TABLE IF NOT EXISTS http_requests ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + appmap_id INTEGER NOT NULL REFERENCES appmaps(id) ON DELETE CASCADE, + event_id INTEGER NOT NULL, + thread_id INTEGER, + parent_event_id INTEGER, + method TEXT NOT NULL, + path TEXT NOT NULL, + normalized_path TEXT, + protocol TEXT, + status_code INTEGER NOT NULL, + mime_type TEXT, + elapsed_ms REAL, + timestamp TEXT +); + +CREATE TABLE IF NOT EXISTS http_client_requests ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + appmap_id INTEGER NOT NULL REFERENCES appmaps(id) ON DELETE CASCADE, + event_id INTEGER NOT NULL, + thread_id INTEGER, + parent_event_id INTEGER, + method TEXT NOT NULL, + url TEXT NOT NULL, + status_code INTEGER, + elapsed_ms REAL +); + +CREATE TABLE IF NOT EXISTS sql_queries ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + appmap_id INTEGER NOT NULL REFERENCES appmaps(id) ON DELETE CASCADE, + event_id INTEGER NOT NULL, + thread_id INTEGER, + parent_event_id INTEGER, + sql_text TEXT NOT NULL, + database_type TEXT, + server_version TEXT, + caller_class TEXT, + caller_method TEXT, + elapsed_ms REAL +); + +CREATE TABLE IF NOT EXISTS function_calls ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + appmap_id INTEGER NOT NULL REFERENCES appmaps(id) ON DELETE CASCADE, + event_id INTEGER NOT NULL, + thread_id INTEGER, + parent_event_id INTEGER, + code_object_id INTEGER REFERENCES code_objects(id), + defined_class TEXT NOT NULL, + method_id TEXT NOT NULL, + path TEXT, + 
lineno INTEGER, + is_static INTEGER NOT NULL DEFAULT 0, + elapsed_ms REAL, + parameters_json TEXT, -- JSON of parameter values (for labeled/log functions) + return_value TEXT -- string repr of return value (for labeled/log functions) +); + +CREATE TABLE IF NOT EXISTS exceptions ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + appmap_id INTEGER NOT NULL REFERENCES appmaps(id) ON DELETE CASCADE, + event_id INTEGER, + thread_id INTEGER, + parent_event_id INTEGER, + exception_class TEXT NOT NULL, + message TEXT, + path TEXT, + lineno INTEGER +); + +-- Labels from classMap function entries (log, security.*, etc.) +CREATE TABLE IF NOT EXISTS labels ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + code_object_id INTEGER NOT NULL REFERENCES code_objects(id), + label TEXT NOT NULL, + UNIQUE(code_object_id, label) +); + +-- Scanner findings (security, performance, stability issues) +CREATE TABLE IF NOT EXISTS findings ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + appmap_id INTEGER NOT NULL REFERENCES appmaps(id) ON DELETE CASCADE, + rule_id TEXT NOT NULL, + rule_title TEXT NOT NULL, + impact_domain TEXT, -- Security, Performance, Stability, Maintainability + message TEXT, + hash TEXT NOT NULL, + event_class TEXT, -- defined_class from the triggering event + event_method TEXT, -- method_id from the triggering event + event_path TEXT, -- source file path + event_lineno INTEGER, + scope_method TEXT, -- HTTP method of the request scope + scope_path TEXT, -- HTTP path of the request scope + stack_json TEXT, -- JSON array of stack frames + UNIQUE(appmap_id, hash) +); + +-- Indexes for common APM queries +CREATE INDEX IF NOT EXISTS idx_http_requests_appmap ON http_requests(appmap_id); +CREATE INDEX IF NOT EXISTS idx_http_requests_path ON http_requests(normalized_path, method); +CREATE INDEX IF NOT EXISTS idx_http_requests_status ON http_requests(status_code); +CREATE INDEX IF NOT EXISTS idx_http_requests_timestamp ON http_requests(timestamp); +CREATE INDEX IF NOT EXISTS 
idx_http_client_requests_appmap ON http_client_requests(appmap_id); +CREATE INDEX IF NOT EXISTS idx_sql_queries_appmap ON sql_queries(appmap_id); +CREATE INDEX IF NOT EXISTS idx_sql_queries_elapsed ON sql_queries(elapsed_ms DESC); +CREATE INDEX IF NOT EXISTS idx_function_calls_appmap ON function_calls(appmap_id); +CREATE INDEX IF NOT EXISTS idx_function_calls_class_method ON function_calls(defined_class, method_id); +CREATE INDEX IF NOT EXISTS idx_function_calls_code_object ON function_calls(code_object_id); +CREATE INDEX IF NOT EXISTS idx_function_calls_parent ON function_calls(appmap_id, parent_event_id); +CREATE INDEX IF NOT EXISTS idx_exceptions_appmap ON exceptions(appmap_id); +CREATE INDEX IF NOT EXISTS idx_exceptions_class ON exceptions(exception_class); +CREATE INDEX IF NOT EXISTS idx_code_objects_fqid ON code_objects(fqid); +CREATE INDEX IF NOT EXISTS idx_code_objects_class_method ON code_objects(defined_class, method_id); +CREATE INDEX IF NOT EXISTS idx_labels_label ON labels(label); +CREATE INDEX IF NOT EXISTS idx_labels_code_object ON labels(code_object_id); +CREATE INDEX IF NOT EXISTS idx_appmaps_timestamp ON appmaps(timestamp); +CREATE INDEX IF NOT EXISTS idx_appmaps_branch ON appmaps(git_branch); +CREATE INDEX IF NOT EXISTS idx_findings_appmap ON findings(appmap_id); +CREATE INDEX IF NOT EXISTS idx_findings_rule ON findings(rule_id); +CREATE INDEX IF NOT EXISTS idx_findings_domain ON findings(impact_domain); +`; + +// Names of all schema tables (used by the version-mismatch teardown path). 
+export const SCHEMA_TABLES = [ + 'appmaps', + 'code_objects', + 'http_requests', + 'http_client_requests', + 'sql_queries', + 'function_calls', + 'exceptions', + 'labels', + 'findings', +]; diff --git a/packages/cli/tests/unit/cmds/query/db/openQueryDb.spec.ts b/packages/cli/tests/unit/cmds/query/db/openQueryDb.spec.ts new file mode 100644 index 0000000000..96ea05485f --- /dev/null +++ b/packages/cli/tests/unit/cmds/query/db/openQueryDb.spec.ts @@ -0,0 +1,119 @@ +import { mkdtempSync, rmSync } from 'fs'; +import { tmpdir } from 'os'; +import { join } from 'path'; + +import { openQueryDb } from '../../../../../src/cmds/query/db/openQueryDb'; +import { SCHEMA_TABLES, SCHEMA_VERSION } from '../../../../../src/cmds/query/db/schema'; + +describe('openQueryDb', () => { + let tmpDir: string; + let dbPath: string; + + beforeEach(() => { + tmpDir = mkdtempSync(join(tmpdir(), 'appmap-query-db-')); + dbPath = join(tmpDir, 'query.db'); + }); + + afterEach(() => { + rmSync(tmpDir, { recursive: true, force: true }); + }); + + it('creates all schema tables on a fresh DB and stamps user_version', () => { + const { db, version, rebuilt, path } = openQueryDb('/tmp/ignored', dbPath); + try { + expect(path).toBe(dbPath); + expect(version).toBe(SCHEMA_VERSION); + expect(rebuilt).toBe(false); + + const tables = db + .prepare("SELECT name FROM sqlite_master WHERE type='table' ORDER BY name") + .all() + .map((r: any) => r.name); + for (const t of SCHEMA_TABLES) expect(tables).toContain(t); + + expect(db.pragma('user_version', { simple: true })).toBe(SCHEMA_VERSION); + } finally { + db.close(); + } + }); + + it('reopens an existing DB at the same version without rebuilding', () => { + const first = openQueryDb('/tmp/ignored', dbPath); + first.db.exec("INSERT INTO appmaps (name, source_path) VALUES ('canary', '/tmp/canary')"); + first.db.close(); + + const second = openQueryDb('/tmp/ignored', dbPath); + try { + expect(second.rebuilt).toBe(false); + const row = second.db + .prepare("SELECT 
name FROM appmaps WHERE source_path = '/tmp/canary'") + .get() as { name: string } | undefined; + expect(row?.name).toBe('canary'); + } finally { + second.db.close(); + } + }); + + it('drops and rebuilds tables when on-disk user_version does not match', () => { + const first = openQueryDb('/tmp/ignored', dbPath); + first.db.exec("INSERT INTO appmaps (name, source_path) VALUES ('canary', '/tmp/canary')"); + first.db.pragma('user_version = 999'); + first.db.close(); + + const second = openQueryDb('/tmp/ignored', dbPath); + try { + expect(second.rebuilt).toBe(true); + expect(second.version).toBe(SCHEMA_VERSION); + const count = second.db.prepare('SELECT COUNT(*) AS n FROM appmaps').get() as { n: number }; + expect(count.n).toBe(0); + expect(second.db.pragma('user_version', { simple: true })).toBe(SCHEMA_VERSION); + } finally { + second.db.close(); + } + }); + + it('enables WAL and foreign_keys pragmas', () => { + const { db } = openQueryDb('/tmp/ignored', dbPath); + try { + expect(String(db.pragma('journal_mode', { simple: true })).toLowerCase()).toBe('wal'); + expect(db.pragma('foreign_keys', { simple: true })).toBe(1); + } finally { + db.close(); + } + }); + + it('cascades deletes from appmaps to dependent rows', () => { + const { db } = openQueryDb('/tmp/ignored', dbPath); + try { + const info = db + .prepare("INSERT INTO appmaps (name, source_path) VALUES (?, ?)") + .run('rec', '/tmp/rec.appmap.json'); + const appmapId = info.lastInsertRowid; + + db.prepare( + `INSERT INTO http_requests + (appmap_id, event_id, method, path, status_code) + VALUES (?, 1, 'GET', '/x', 200)` + ).run(appmapId); + + db.prepare('DELETE FROM appmaps WHERE id = ?').run(appmapId); + + const remaining = db + .prepare('SELECT COUNT(*) AS n FROM http_requests WHERE appmap_id = ?') + .get(appmapId) as { n: number }; + expect(remaining.n).toBe(0); + } finally { + db.close(); + } + }); + + it('creates the parent directory when it does not exist', () => { + const nested = join(tmpDir, 'a', 'b', 
'c', 'query.db');
+    const { db } = openQueryDb('/tmp/ignored', nested);
+    try {
+      expect(db.pragma('user_version', { simple: true })).toBe(SCHEMA_VERSION);
+    } finally {
+      db.close();
+    }
+  });
+});
diff --git a/packages/cli/tests/unit/cmds/query/db/path.spec.ts b/packages/cli/tests/unit/cmds/query/db/path.spec.ts
new file mode 100644
index 0000000000..46d57404f3
--- /dev/null
+++ b/packages/cli/tests/unit/cmds/query/db/path.spec.ts
@@ -0,0 +1,46 @@
+import { existsSync } from 'fs';
+import { dirname } from 'path';
+
+import { QUERY_DB_ENV, queryDbPath } from '../../../../../src/cmds/query/db/path';
+
+describe('queryDbPath', () => {
+  let envBefore: string | undefined;
+
+  beforeEach(() => {
+    envBefore = process.env[QUERY_DB_ENV];
+    delete process.env[QUERY_DB_ENV];
+  });
+
+  afterEach(() => {
+    if (envBefore === undefined) delete process.env[QUERY_DB_ENV];
+    else process.env[QUERY_DB_ENV] = envBefore;
+  });
+
+  it('returns the same path for equivalent directory inputs', () => {
+    expect(queryDbPath('/tmp/a')).toBe(queryDbPath('/tmp/a/'));
+    expect(queryDbPath('/tmp/a')).toBe(queryDbPath('/tmp/a/./'));
+  });
+
+  it('produces different paths for different directories', () => {
+    expect(queryDbPath('/tmp/a')).not.toBe(queryDbPath('/tmp/b'));
+  });
+
+  it('lands under ~/.appmap/data/<id>/query.db', () => {
+    const path = queryDbPath('/tmp/path-test-dir');
+    expect(path).toMatch(/[/\\]\.appmap[/\\]data[/\\][0-9a-f]{12}[/\\]query\.db$/);
+  });
+
+  it('does not create the parent directory', () => {
+    const path = queryDbPath('/tmp/never-created-dir-xyz');
+    // The parent may exist if a prior test created it, but queryDbPath
+    // itself must not create anything; assert it returns a path without I/O.
+    expect(typeof path).toBe('string');
+    // Sanity: returning the path is decoupled from existence checking.
+ void existsSync(dirname(path)); + }); + + it('honors APPMAP_QUERY_DB as a full-path override', () => { + process.env[QUERY_DB_ENV] = '/tmp/override-test/over.db'; + expect(queryDbPath('/tmp/whatever')).toBe('/tmp/override-test/over.db'); + }); +}); From 74ca2be4bd2cfd4b1fbea436ef504d2b29268e6a Mon Sep 17 00:00:00 2001 From: kgilpin Date: Fri, 1 May 2026 15:05:08 -0400 Subject: [PATCH 02/30] feat(cli): port query DB importer and hook into fingerprint pipeline MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Slice 2 of the query DB work. Ports the AppMap → SQLite importer from the appmap-apm Python prototype, wires it into FingerprintQueue/Watch/Directory commands, and adds a cross-validation script that diffs the TS importer's output against the Python prototype's on a shared fixture set. - Per-concern importer modules under cmds/query/db/import/: parseLocation, parentEventMap, returnEventMap, codeObjects, appmapRecord, httpRequests, httpClientRequests, sqlQueries, functionCalls, exceptions, importAppmap. - QueryDbIndexer subscribes to FingerprintQueue's index event for adds/ changes and to FingerprintWatchCommand's removed for unlinks; also runs syncDirectory to bootstrap when query.db is fresh but fingerprints exist. - fqid construction mirrors @appland/models' codeObjectId.js (slash between packages, :: between nested classes, . / # for functions). - Exceptions carried on return events use the call event id for event_id and parent_event_id, fixing the silent-NULL bug surfaced by the spec. - Type-only imports from @appland/models for HttpServerRequest/Response, HttpClientRequest/Response, SqlQuery, ExceptionObject, CodeObjectType, Label. Built JS contains no runtime require('@appland/models'). - Drop the findings table from schema.ts; out of scope for this port. - scripts/validate-against-python.ts: runs both importers on a fixture set and diffs row counts + key fields per table. 
Currently passes with all rows matching across appmaps, code_objects, http_requests, sql_queries, function_calls, and exceptions. Co-Authored-By: Claude Opus 4.7 (1M context) --- .../cli/scripts/validate-against-python.ts | 255 ++++++++++++++ packages/cli/src/cmds/index/index.ts | 15 +- .../cmds/query/db/import/QueryDbIndexer.ts | 69 ++++ .../src/cmds/query/db/import/appmapRecord.ts | 99 ++++++ .../src/cmds/query/db/import/codeObjects.ts | 111 ++++++ .../src/cmds/query/db/import/exceptions.ts | 81 +++++ .../src/cmds/query/db/import/functionCalls.ts | 84 +++++ .../query/db/import/httpClientRequests.ts | 42 +++ .../src/cmds/query/db/import/httpRequests.ts | 53 +++ .../src/cmds/query/db/import/importAppmap.ts | 67 ++++ .../cmds/query/db/import/parentEventMap.ts | 40 +++ .../src/cmds/query/db/import/parseLocation.ts | 26 ++ .../cmds/query/db/import/returnEventMap.ts | 26 ++ .../src/cmds/query/db/import/sqlQueries.ts | 62 ++++ packages/cli/src/cmds/query/db/schema.ts | 23 -- .../fingerprintDirectoryCommand.ts | 9 +- .../fingerprint/fingerprintWatchCommand.ts | 7 +- .../query/db/import/QueryDbIndexer.spec.ts | 104 ++++++ .../cmds/query/db/import/appmapRecord.spec.ts | 106 ++++++ .../cmds/query/db/import/codeObjects.spec.ts | 319 ++++++++++++++++++ .../cmds/query/db/import/exceptions.spec.ts | 180 ++++++++++ .../query/db/import/functionCalls.spec.ts | 206 +++++++++++ .../unit/cmds/query/db/import/helpers.ts | 8 + .../db/import/httpClientRequests.spec.ts | 72 ++++ .../cmds/query/db/import/httpRequests.spec.ts | 100 ++++++ .../cmds/query/db/import/importAppmap.spec.ts | 196 +++++++++++ .../query/db/import/parentEventMap.spec.ts | 74 ++++ .../query/db/import/parseLocation.spec.ts | 35 ++ .../query/db/import/returnEventMap.spec.ts | 26 ++ .../cmds/query/db/import/sqlQueries.spec.ts | 84 +++++ 30 files changed, 2552 insertions(+), 27 deletions(-) create mode 100644 packages/cli/scripts/validate-against-python.ts create mode 100644 
packages/cli/src/cmds/query/db/import/QueryDbIndexer.ts create mode 100644 packages/cli/src/cmds/query/db/import/appmapRecord.ts create mode 100644 packages/cli/src/cmds/query/db/import/codeObjects.ts create mode 100644 packages/cli/src/cmds/query/db/import/exceptions.ts create mode 100644 packages/cli/src/cmds/query/db/import/functionCalls.ts create mode 100644 packages/cli/src/cmds/query/db/import/httpClientRequests.ts create mode 100644 packages/cli/src/cmds/query/db/import/httpRequests.ts create mode 100644 packages/cli/src/cmds/query/db/import/importAppmap.ts create mode 100644 packages/cli/src/cmds/query/db/import/parentEventMap.ts create mode 100644 packages/cli/src/cmds/query/db/import/parseLocation.ts create mode 100644 packages/cli/src/cmds/query/db/import/returnEventMap.ts create mode 100644 packages/cli/src/cmds/query/db/import/sqlQueries.ts create mode 100644 packages/cli/tests/unit/cmds/query/db/import/QueryDbIndexer.spec.ts create mode 100644 packages/cli/tests/unit/cmds/query/db/import/appmapRecord.spec.ts create mode 100644 packages/cli/tests/unit/cmds/query/db/import/codeObjects.spec.ts create mode 100644 packages/cli/tests/unit/cmds/query/db/import/exceptions.spec.ts create mode 100644 packages/cli/tests/unit/cmds/query/db/import/functionCalls.spec.ts create mode 100644 packages/cli/tests/unit/cmds/query/db/import/helpers.ts create mode 100644 packages/cli/tests/unit/cmds/query/db/import/httpClientRequests.spec.ts create mode 100644 packages/cli/tests/unit/cmds/query/db/import/httpRequests.spec.ts create mode 100644 packages/cli/tests/unit/cmds/query/db/import/importAppmap.spec.ts create mode 100644 packages/cli/tests/unit/cmds/query/db/import/parentEventMap.spec.ts create mode 100644 packages/cli/tests/unit/cmds/query/db/import/parseLocation.spec.ts create mode 100644 packages/cli/tests/unit/cmds/query/db/import/returnEventMap.spec.ts create mode 100644 packages/cli/tests/unit/cmds/query/db/import/sqlQueries.spec.ts diff --git 
a/packages/cli/scripts/validate-against-python.ts b/packages/cli/scripts/validate-against-python.ts new file mode 100644 index 0000000000..7f3c53a162 --- /dev/null +++ b/packages/cli/scripts/validate-against-python.ts @@ -0,0 +1,255 @@ +/* eslint-disable no-console */ +// +// Cross-validation script: indexes a fixture set with both the Python +// prototype's importer and the TypeScript port, snapshots both query.db +// files, and diffs them. +// +// Usage: +// ts-node scripts/validate-against-python.ts +// APPMAP_APM_DIR=/path/to/appmap-apm ts-node scripts/validate-against-python.ts +// FIXTURE_DIR=/path/to/recordings ts-node scripts/validate-against-python.ts +// KEEP_TMP=1 ts-node scripts/validate-against-python.ts +// +// Findings are excluded — they are not in scope for the TS port. +// Time-sensitive columns (timestamp, elapsed_ms) are excluded from the +// row-level diff; row counts are still compared. + +import { execFileSync } from 'child_process'; +import { mkdtempSync, rmSync, existsSync } from 'fs'; +import { homedir, tmpdir } from 'os'; +import { join } from 'path'; + +import sqlite3 from 'better-sqlite3'; + +import { findFiles } from '../src/utils'; +import { openQueryDb } from '../src/cmds/query/db'; +import { importAppmap } from '../src/cmds/query/db/import/importAppmap'; + +const APPMAP_APM_DIR = + process.env.APPMAP_APM_DIR ?? join(homedir(), 'source', 'appland', 'appmap-apm'); +const FIXTURE_DIR = + process.env.FIXTURE_DIR ?? 
join(APPMAP_APM_DIR, 'tests', 'fixtures', 'tmp', 'appmap');
+const PYTHON = join(APPMAP_APM_DIR, '.venv', 'bin', 'python');
+
+const TABLES = [
+  'appmaps',
+  'code_objects',
+  'labels',
+  'http_requests',
+  'http_client_requests',
+  'sql_queries',
+  'function_calls',
+  'exceptions',
+] as const;
+
+interface Snapshot {
+  counts: Record<string, number>;
+  rows: Record<string, unknown[]>;
+}
+
+function buildPythonDb(fixtureDir: string, dbPath: string): void {
+  console.log(`[python] importing ${fixtureDir} → ${dbPath}`);
+  execFileSync(PYTHON, ['-m', 'server.cli', 'import', fixtureDir], {
+    cwd: APPMAP_APM_DIR,
+    env: { ...process.env, APM_DB_PATH: dbPath },
+    stdio: 'inherit',
+  });
+}
+
+async function buildTsDb(fixtureDir: string, dbPath: string): Promise<void> {
+  console.log(`[ts] importing ${fixtureDir} → ${dbPath}`);
+  const { db } = openQueryDb('/tmp/ignored', dbPath);
+  let imported = 0;
+  let failed = 0;
+  await findFiles(fixtureDir, '.appmap.json', (file: string) => {
+    try {
+      importAppmap(db, file);
+      imported += 1;
+    } catch (err) {
+      failed += 1;
+      console.warn(`  failed: ${file}: ${(err as Error).message}`);
+    }
+  });
+  console.log(`  imported ${imported} (failed=${failed})`);
+  db.close();
+}
+
+function snapshot(dbPath: string): Snapshot {
+  const db = sqlite3(dbPath, { readonly: true });
+  try {
+    const counts: Record<string, number> = {};
+    for (const t of TABLES) {
+      counts[t] = (db.prepare(`SELECT COUNT(*) AS n FROM ${t}`).get() as { n: number }).n;
+    }
+
+    const rows: Record<string, unknown[]> = {};
+
+    rows.appmaps = db
+      .prepare(
+        `SELECT source_path, language, framework, recorder_type,
+                git_repository, git_branch, git_commit,
+                event_count, sql_query_count, http_request_count
+         FROM appmaps
+         ORDER BY source_path`
+      )
+      .all();
+
+    rows.code_objects = db
+      .prepare(`SELECT fqid, defined_class, method_id FROM code_objects ORDER BY fqid`)
+      .all();
+
+    rows.labels = db
+      .prepare(
+        `SELECT co.fqid, l.label
+         FROM labels l JOIN code_objects co ON co.id = l.code_object_id
+         ORDER BY co.fqid, l.label`
+      )
+      .all();
+
+ rows.http_requests = db + .prepare( + `SELECT a.source_path, h.event_id, h.thread_id, h.parent_event_id, + h.method, h.path, h.normalized_path, h.protocol, + h.status_code, h.mime_type + FROM http_requests h JOIN appmaps a ON a.id = h.appmap_id + ORDER BY a.source_path, h.event_id` + ) + .all(); + + rows.http_client_requests = db + .prepare( + `SELECT a.source_path, h.event_id, h.thread_id, h.parent_event_id, + h.method, h.url, h.status_code + FROM http_client_requests h JOIN appmaps a ON a.id = h.appmap_id + ORDER BY a.source_path, h.event_id` + ) + .all(); + + rows.sql_queries = db + .prepare( + `SELECT a.source_path, q.event_id, q.thread_id, q.parent_event_id, + q.sql_text, q.database_type, q.server_version, + q.caller_class, q.caller_method + FROM sql_queries q JOIN appmaps a ON a.id = q.appmap_id + ORDER BY a.source_path, q.event_id` + ) + .all(); + + rows.function_calls = db + .prepare( + `SELECT a.source_path, f.event_id, f.thread_id, f.parent_event_id, + co.fqid AS code_object_fqid, + f.defined_class, f.method_id, f.path, f.lineno, f.is_static + FROM function_calls f + JOIN appmaps a ON a.id = f.appmap_id + LEFT JOIN code_objects co ON co.id = f.code_object_id + ORDER BY a.source_path, f.event_id` + ) + .all(); + + rows.exceptions = db + .prepare( + `SELECT a.source_path, e.event_id, e.thread_id, e.parent_event_id, + e.exception_class, e.message, e.path, e.lineno + FROM exceptions e JOIN appmaps a ON a.id = e.appmap_id + ORDER BY a.source_path, e.event_id, e.exception_class` + ) + .all(); + + return { counts, rows }; + } finally { + db.close(); + } +} + +interface Mismatch { + table: string; + reason: string; + details?: { index: number; py: unknown; ts: unknown }; +} + +function diff(py: Snapshot, ts: Snapshot): Mismatch[] { + const issues: Mismatch[] = []; + for (const t of TABLES) { + const pyRows = py.rows[t]; + const tsRows = ts.rows[t]; + if (pyRows.length !== tsRows.length) { + issues.push({ + table: t, + reason: `row count differs 
(python=${pyRows.length}, ts=${tsRows.length})`,
+      });
+      continue;
+    }
+    for (let i = 0; i < pyRows.length; i++) {
+      const a = JSON.stringify(pyRows[i]);
+      const b = JSON.stringify(tsRows[i]);
+      if (a !== b) {
+        issues.push({
+          table: t,
+          reason: `first row diff at index ${i}`,
+          details: { index: i, py: pyRows[i], ts: tsRows[i] },
+        });
+        break;
+      }
+    }
+  }
+  return issues;
+}
+
+async function main(): Promise<void> {
+  if (!existsSync(PYTHON)) {
+    console.error(`Python interpreter not found at ${PYTHON}`);
+    console.error(`Set APPMAP_APM_DIR or install the venv at ${APPMAP_APM_DIR}/.venv`);
+    process.exit(2);
+  }
+  if (!existsSync(FIXTURE_DIR)) {
+    console.error(`Fixture dir not found: ${FIXTURE_DIR}`);
+    process.exit(2);
+  }
+
+  const tmp = mkdtempSync(join(tmpdir(), 'cross-validate-'));
+  const pyDb = join(tmp, 'python.db');
+  const tsDb = join(tmp, 'ts.db');
+
+  console.log(`fixture dir: ${FIXTURE_DIR}`);
+  console.log(`tmp: ${tmp}\n`);
+
+  buildPythonDb(FIXTURE_DIR, pyDb);
+  await buildTsDb(FIXTURE_DIR, tsDb);
+
+  const py = snapshot(pyDb);
+  const ts = snapshot(tsDb);
+
+  console.log('\n--- counts ---');
+  console.log(`${'table'.padEnd(22)} ${'python'.padStart(8)} ${'ts'.padStart(8)} ok`);
+  let countsOk = true;
+  for (const t of TABLES) {
+    const match = py.counts[t] === ts.counts[t];
+    if (!match) countsOk = false;
+    console.log(
+      `${t.padEnd(22)} ${String(py.counts[t]).padStart(8)} ${String(ts.counts[t]).padStart(8)} ${
+        match ?
'OK' : 'MISMATCH' + }` + ); + } + + const issues = diff(py, ts); + console.log('\n--- diff ---'); + if (issues.length === 0 && countsOk) { + console.log('all tables match'); + } else { + for (const issue of issues) { + console.log(`\n${issue.table}: ${issue.reason}`); + if (issue.details) { + console.log(` python: ${JSON.stringify(issue.details.py)}`); + console.log(` ts: ${JSON.stringify(issue.details.ts)}`); + } + } + process.exitCode = 1; + } + + if (process.env.KEEP_TMP) console.log(`\ntmp dir kept: ${tmp}`); + else rmSync(tmp, { recursive: true, force: true }); +} + +void main(); diff --git a/packages/cli/src/cmds/index/index.ts b/packages/cli/src/cmds/index/index.ts index 4621bdf44a..551ef7f092 100644 --- a/packages/cli/src/cmds/index/index.ts +++ b/packages/cli/src/cmds/index/index.ts @@ -10,6 +10,8 @@ import { configureRpcDirectories, handleWorkingDirectory } from '../../lib/handl import { locateAppMapDir } from '../../lib/locateAppMapDir'; import { verbose } from '../../utils'; import { log, warn } from 'console'; +import { openQueryDb } from '../query/db'; +import { QueryDbIndexer } from '../query/db/import/QueryDbIndexer'; import { numProcessed } from '../../rpc/index/numProcessed'; import { search } from '../../rpc/search/search'; import appmapFilter from '../../rpc/appmap/filter'; @@ -78,11 +80,19 @@ export const handler = async (argv) => { const runServer = watch || port !== undefined; if (port && !watch) warn(`Note: --port option implies --watch`); + const queryDb = openQueryDb(appmapDir); + const indexer = new QueryDbIndexer(queryDb.db); + log( + `Query DB at ${queryDb.path} (schema v${queryDb.version}${ + queryDb.rebuilt ? 
', rebuilt' : '' + })` + ); + if (runServer) { void checkLicense(false); log(`Running indexer in watch mode`); - const cmd = new FingerprintWatchCommand(appmapDir); + const cmd = new FingerprintWatchCommand(appmapDir, indexer); await cmd.execute(); if (port !== undefined) { @@ -149,7 +159,8 @@ export const handler = async (argv) => { } } } else { - const cmd = new FingerprintDirectoryCommand(appmapDir); + const cmd = new FingerprintDirectoryCommand(appmapDir, indexer); await cmd.execute(); + indexer.close(); } }; diff --git a/packages/cli/src/cmds/query/db/import/QueryDbIndexer.ts b/packages/cli/src/cmds/query/db/import/QueryDbIndexer.ts new file mode 100644 index 0000000000..8977ee1f23 --- /dev/null +++ b/packages/cli/src/cmds/query/db/import/QueryDbIndexer.ts @@ -0,0 +1,69 @@ +import { resolve } from 'path'; + +import sqlite3 from 'better-sqlite3'; + +import { findFiles } from '../../../../utils'; +import { deleteAppmap, importAppmap } from './importAppmap'; + +// Subscribes to fingerprint pipeline events and routes per-file work into +// the query DB. Owns no policy beyond "import on index, delete on unlink"; +// callers wire it up to whichever queue/watcher fits the command shape. +// +// Failure handling: per-file errors (bad JSON, missing fields) are logged +// and skipped; the walk does not abort. DB-level errors still propagate — +// those indicate a real bug, not bad data. + +interface IndexEmitter { + on(event: 'index', listener: (ev: { path: string }) => void): unknown; +} + +export class QueryDbIndexer { + private imported = 0; + private failed = 0; + + constructor(private readonly db: sqlite3.Database) {} + + // Subscribe to a FingerprintQueue (or anything matching its 'index' event + // shape) so each successfully fingerprinted file is also imported. 
+ attach(queue: IndexEmitter): void { + queue.on('index', (ev) => this.onIndexed(ev.path)); + } + + // Walk a directory and import any .appmap.json that doesn't already have + // a row in the appmaps table. Bridges the gap when query.db is fresh but + // fingerprints already exist (so the fingerprinter skips them and never + // emits an 'index' event for the importer to catch). + async syncDirectory(directory: string): Promise { + const present = this.db.prepare('SELECT 1 FROM appmaps WHERE source_path = ?'); + await findFiles(directory, '.appmap.json', (file) => { + const absolutePath = resolve(file); + if (!present.get(absolutePath)) this.onIndexed(absolutePath); + }); + } + + onIndexed(file: string): void { + try { + importAppmap(this.db, file); + this.imported += 1; + } catch (err) { + this.failed += 1; + console.warn(`query db: failed to import ${file}: ${(err as Error).message}`); + } + } + + onRemoved(file: string): void { + try { + deleteAppmap(this.db, file); + } catch (err) { + console.warn(`query db: failed to delete ${file}: ${(err as Error).message}`); + } + } + + stats(): { imported: number; failed: number } { + return { imported: this.imported, failed: this.failed }; + } + + close(): void { + this.db.close(); + } +} diff --git a/packages/cli/src/cmds/query/db/import/appmapRecord.ts b/packages/cli/src/cmds/query/db/import/appmapRecord.ts new file mode 100644 index 0000000000..1d2210b2fb --- /dev/null +++ b/packages/cli/src/cmds/query/db/import/appmapRecord.ts @@ -0,0 +1,99 @@ +import { statSync } from 'fs'; +import { basename } from 'path'; + +import sqlite3 from 'better-sqlite3'; + +export interface AppmapMetadata { + name?: string; + language?: { name?: string }; + frameworks?: { name?: string }[]; + recorder?: { type?: string }; + git?: { repository?: string; branch?: string; commit?: string }; + timestamp?: number; + labels?: unknown; +} + +export interface ParsedAppmap { + events?: Record[]; + metadata?: AppmapMetadata; + classMap?: unknown; +} + 
+export interface AppmapRecordResult { + appmapId: number; + timestampIso: string; +} + +// Insert the top-level appmaps row and return its id + the resolved +// timestamp (used by http_requests for the per-row timestamp column). +// +// Total elapsed is taken from the first return event carrying an +// http_server_response. If metadata.timestamp is missing, falls back to the +// file's mtime so time-series queries still work. +export function insertAppmapRecord( + db: sqlite3.Database, + absolutePath: string, + appmap: ParsedAppmap +): AppmapRecordResult { + const events = appmap.events ?? []; + const metadata = appmap.metadata ?? {}; + + let totalElapsedMs: number | null = null; + for (const ev of events) { + if (ev.event === 'return' && 'http_server_response' in ev) { + const elapsed = ev.elapsed; + if (typeof elapsed === 'number') totalElapsedMs = elapsed * 1000; + break; + } + } + + let sqlQueryCount = 0; + let httpRequestCount = 0; + for (const ev of events) { + if ('sql_query' in ev) sqlQueryCount += 1; + if ('http_server_request' in ev) httpRequestCount += 1; + } + + const language = metadata.language?.name ?? null; + const framework = metadata.frameworks?.[0]?.name ?? null; + const recorderType = metadata.recorder?.type ?? null; + const git = metadata.git ?? {}; + + let timestampIso: string; + if (typeof metadata.timestamp === 'number') { + timestampIso = new Date(metadata.timestamp * 1000).toISOString(); + } else { + timestampIso = statSync(absolutePath).mtime.toISOString(); + } + + const labels = metadata.labels; + const metadataLabelsJson = labels ? JSON.stringify(labels) : null; + const name = metadata.name ?? 
basename(absolutePath); + + const info = db + .prepare( + `INSERT INTO appmaps (name, source_path, language, framework, recorder_type, + git_repository, git_branch, git_commit, timestamp, + event_count, sql_query_count, http_request_count, elapsed_ms, + metadata_labels) + VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)` + ) + .run( + name, + absolutePath, + language, + framework, + recorderType, + git.repository ?? null, + git.branch ?? null, + git.commit ?? null, + timestampIso, + events.length, + sqlQueryCount, + httpRequestCount, + totalElapsedMs, + metadataLabelsJson + ); + + return { appmapId: Number(info.lastInsertRowid), timestampIso }; +} diff --git a/packages/cli/src/cmds/query/db/import/codeObjects.ts b/packages/cli/src/cmds/query/db/import/codeObjects.ts new file mode 100644 index 0000000000..d9e74d6c01 --- /dev/null +++ b/packages/cli/src/cmds/query/db/import/codeObjects.ts @@ -0,0 +1,111 @@ +import sqlite3 from 'better-sqlite3'; + +import type { CodeObjectType, Label } from '@appland/models'; + +// Minimal classMap node shape used by the walk. Stays loose to avoid +// coupling to @appland/models per V3 ("no @appland/models for ingestion"); +// only leaf type names are imported (type-only). +export interface ClassMapNode { + type?: CodeObjectType; + name?: string; + static?: boolean; + location?: string; + labels?: Label[]; + children?: ClassMapNode[]; +} + +// Walk the classMap tree, insert one code_objects row per function node, +// insert its labels, and return a map of classMap location → code_object_id +// (used by function_calls to link events to code objects via path:lineno). +// +// fqid construction mirrors @appland/models' codeObjectId.js exactly: +// - between package and child: '/' +// - between class and child: '::' +// - between any node and a function child: '.' 
(static) or '#' (instance) +// +// The defined_class column keeps the prototype's dot-form (resets to bare +// package name on package descent, accumulates on nested classes) — it is +// independent of fqid and pinned by existing tests. +// +// Behavior preserved from the Python prototype: +// - Function node names with an auxtype suffix like " (get)" are trimmed. +// - Functions without a location are skipped (e.g., C-extensions). +export function importCodeObjects( + db: sqlite3.Database, + classMap: readonly ClassMapNode[] +): Map { + const lookup = new Map(); + + const insertCodeObject = db.prepare( + `INSERT OR IGNORE INTO code_objects (fqid, defined_class, method_id) VALUES (?, ?, ?)` + ); + const selectCodeObjectId = db.prepare(`SELECT id FROM code_objects WHERE fqid = ?`); + const insertLabel = db.prepare( + `INSERT OR IGNORE INTO labels (code_object_id, label) VALUES (?, ?)` + ); + + function appendToken( + parentTokens: readonly string[], + name: string, + parentType: CodeObjectType | undefined, + nodeType: CodeObjectType, + isStatic: boolean + ): readonly string[] { + if (parentTokens.length === 0) return [name]; + let separator = ''; + if (parentType === 'package') separator = '/'; + else if (parentType === 'class') separator = '::'; + if (nodeType === 'function') separator = isStatic ? '.' : '#'; + return [...parentTokens, separator, name]; + } + + function walk( + node: ClassMapNode, + classPath: string, + fqidTokens: readonly string[], + parentType: CodeObjectType | undefined + ): void { + const nodeType = node.type; + const name = node.name ?? ''; + + if (nodeType === 'function') { + const location = node.location; + if (!location) return; + + const parenIdx = name.indexOf(' ('); + const methodName = parenIdx >= 0 ? 
name.slice(0, parenIdx) : name; + const isStatic = !!node.static; + + const tokens = appendToken(fqidTokens, methodName, parentType, 'function', isStatic); + const fqid = tokens.join(''); + + insertCodeObject.run(fqid, classPath, methodName); + const row = selectCodeObjectId.get(fqid) as { id: number }; + lookup.set(location, row.id); + + const labels = node.labels ?? []; + for (const label of labels) insertLabel.run(row.id, label); + return; + } + + let nextClassPath: string; + let nextFqidTokens: readonly string[]; + if (nodeType === 'package') { + nextClassPath = name; + nextFqidTokens = appendToken(fqidTokens, name, parentType, 'package', false); + } else if (nodeType === 'class') { + nextClassPath = classPath ? `${classPath}.${name}` : name; + nextFqidTokens = appendToken(fqidTokens, name, parentType, 'class', false); + } else { + nextClassPath = classPath; + nextFqidTokens = fqidTokens; + } + + const children = node.children ?? []; + for (const child of children) walk(child, nextClassPath, nextFqidTokens, nodeType ?? parentType); + } + + for (const root of classMap) walk(root, '', [], undefined); + + return lookup; +} diff --git a/packages/cli/src/cmds/query/db/import/exceptions.ts b/packages/cli/src/cmds/query/db/import/exceptions.ts new file mode 100644 index 0000000000..6f1a7cc716 --- /dev/null +++ b/packages/cli/src/cmds/query/db/import/exceptions.ts @@ -0,0 +1,81 @@ +import sqlite3 from 'better-sqlite3'; + +import type { ExceptionObject } from '@appland/models'; + +// Import exceptions into the exceptions table. +// +// In the AppMap event stream, `exceptions` lives on **return** events: the +// return event terminates a call, and any thrown exception that propagated +// is attached there. 
The row's `event_id` and `parent_event_id` should +// describe the **call** the exception belongs to, not the return event: +// +// - event_id = call event id (= ev.parent_id on the return) +// - parent_event_id = parent of that call in the per-thread stack +// (= parentEventMap.get(callEventId)) +// +// Some recorders also place `exceptions` directly on the call event itself. +// We accept that legacy shape but de-dup against the canonical return-event +// source: if a call id was already covered via its return event, we skip +// the call-event source for the same id. +export function importExceptions( + db: sqlite3.Database, + appmapId: number, + events: readonly Record[], + parentEventMap: Map +): void { + const stmt = db.prepare( + `INSERT INTO exceptions (appmap_id, event_id, thread_id, parent_event_id, + exception_class, message, path, lineno) + VALUES (?, ?, ?, ?, ?, ?, ?, ?)` + ); + + const seenCallIds = new Set(); + + // Pass 1: return events carrying exceptions (canonical case). + for (const ev of events) { + if (ev.event !== 'return') continue; + const excs = ev.exceptions as ExceptionObject[] | undefined; + if (!Array.isArray(excs)) continue; + if (typeof ev.parent_id !== 'number') continue; + + const callEventId = ev.parent_id; + seenCallIds.add(callEventId); + const parentEventId = parentEventMap.get(callEventId) ?? null; + + for (const exc of excs) { + stmt.run( + appmapId, + callEventId, + ev.thread_id ?? null, + parentEventId, + exc.class, + exc.message ?? null, + exc.path ?? null, + exc.lineno ?? null + ); + } + } + + // Pass 2: legacy shape — exceptions on a call event we didn't already cover. + for (const ev of events) { + if (ev.event !== 'call') continue; + const excs = ev.exceptions as ExceptionObject[] | undefined; + if (!Array.isArray(excs)) continue; + if (typeof ev.id !== 'number' || seenCallIds.has(ev.id)) continue; + + const parentEventId = parentEventMap.get(ev.id) ?? 
null; + + for (const exc of excs) { + stmt.run( + appmapId, + ev.id, + ev.thread_id ?? null, + parentEventId, + exc.class, + exc.message ?? null, + exc.path ?? null, + exc.lineno ?? null + ); + } + } +} diff --git a/packages/cli/src/cmds/query/db/import/functionCalls.ts b/packages/cli/src/cmds/query/db/import/functionCalls.ts new file mode 100644 index 0000000000..0c97111ca8 --- /dev/null +++ b/packages/cli/src/cmds/query/db/import/functionCalls.ts @@ -0,0 +1,84 @@ +import sqlite3 from 'better-sqlite3'; + +import type { ReturnEventLike } from './returnEventMap'; + +// Import function call events. Each event is linked to a code_object via +// (path, lineno) → classMap location, and gains parameter / return-value +// capture iff the linked code_object has any labels. +export function importFunctionCalls( + db: sqlite3.Database, + appmapId: number, + events: readonly Record[], + returnEvents: Map, + parentEventMap: Map, + codeObjectLookup: Map +): void { + // Set of code_object_ids that have labels — narrows param capture to the + // functions an investigator cares about (log, security.*, dao.*, …). 
+ const labeledCoIds = new Set(); + if (codeObjectLookup.size > 0) { + const placeholders = new Array(codeObjectLookup.size).fill('?').join(','); + const ids = [...codeObjectLookup.values()]; + const rows = db + .prepare( + `SELECT DISTINCT code_object_id FROM labels WHERE code_object_id IN (${placeholders})` + ) + .all(...ids) as { code_object_id: number }[]; + for (const r of rows) labeledCoIds.add(r.code_object_id); + } + + const stmt = db.prepare( + `INSERT INTO function_calls (appmap_id, event_id, thread_id, parent_event_id, + code_object_id, defined_class, method_id, path, lineno, is_static, + elapsed_ms, parameters_json, return_value) + VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)` + ); + + for (const ev of events) { + if (ev.event !== 'call') continue; + if (!ev.defined_class || !ev.method_id) continue; + if ('http_server_request' in ev || 'sql_query' in ev) continue; + + const ret = returnEvents.get(ev.id) ?? {}; + const elapsed = ret.elapsed; + + let coId: number | null = null; + const evPath = ev.path; + const evLineno = ev.lineno; + if (evPath != null && evLineno != null) { + coId = codeObjectLookup.get(`${evPath}:${evLineno}`) ?? null; + } + + let paramsJson: string | null = null; + let returnVal: string | null = null; + if (coId !== null && labeledCoIds.has(coId)) { + const params = ev.parameters; + if (Array.isArray(params) && params.length > 0) { + paramsJson = JSON.stringify( + params.map((p: any) => ({ name: p?.name, class: p?.class, value: p?.value })) + ); + } + const rv = (ret as any).return_value; + if (rv && typeof rv === 'object') { + const value = rv.value; + returnVal = value == null ? null : String(value); + } + } + + stmt.run( + appmapId, + ev.id, + ev.thread_id ?? null, + parentEventMap.get(ev.id) ?? null, + coId, + ev.defined_class, + ev.method_id, + evPath ?? null, + evLineno ?? null, + ev.static ? 1 : 0, + typeof elapsed === 'number' ? 
elapsed * 1000 : null, + paramsJson, + returnVal + ); + } +} diff --git a/packages/cli/src/cmds/query/db/import/httpClientRequests.ts b/packages/cli/src/cmds/query/db/import/httpClientRequests.ts new file mode 100644 index 0000000000..4f338f00b3 --- /dev/null +++ b/packages/cli/src/cmds/query/db/import/httpClientRequests.ts @@ -0,0 +1,42 @@ +import sqlite3 from 'better-sqlite3'; + +import type { HttpClientRequest, HttpClientResponse } from '@appland/models'; + +import type { ReturnEventLike } from './returnEventMap'; + +// Mirror httpRequests.ts: recordings carry a status_code field alongside +// what @appland/models declares, and an empty {} when no return was emitted. +type RawHttpClientResponse = Partial & { status_code?: number }; + +export function importHttpClientRequests( + db: sqlite3.Database, + appmapId: number, + events: readonly Record[], + returnEvents: Map, + parentEventMap: Map +): void { + const stmt = db.prepare( + `INSERT INTO http_client_requests (appmap_id, event_id, thread_id, parent_event_id, + method, url, status_code, elapsed_ms) + VALUES (?, ?, ?, ?, ?, ?, ?, ?)` + ); + + for (const ev of events) { + const req = ev.http_client_request as HttpClientRequest | undefined; + if (!req) continue; + const ret = returnEvents.get(ev.id) ?? {}; + const resp = (ret.http_client_response ?? {}) as RawHttpClientResponse; + const elapsed = ret.elapsed; + + stmt.run( + appmapId, + ev.id, + ev.thread_id ?? null, + parentEventMap.get(ev.id) ?? null, + req.request_method ?? 'GET', + req.url ?? '', + resp.status_code ?? null, + typeof elapsed === 'number' ? 
elapsed * 1000 : null + ); + } +} diff --git a/packages/cli/src/cmds/query/db/import/httpRequests.ts b/packages/cli/src/cmds/query/db/import/httpRequests.ts new file mode 100644 index 0000000000..c9d43f9272 --- /dev/null +++ b/packages/cli/src/cmds/query/db/import/httpRequests.ts @@ -0,0 +1,53 @@ +import sqlite3 from 'better-sqlite3'; + +import type { HttpServerRequest, HttpServerResponse } from '@appland/models'; + +import type { ReturnEventLike } from './returnEventMap'; + +// Some recorders carry status_code/mime_type alongside the @appland/models +// HttpServerResponse fields, and we may also see an empty object when no +// return event was emitted. Capture the JSON-as-found here without changing +// the upstream type definitions. +type RawHttpServerResponse = Partial & { + status_code?: number; + mime_type?: string; +}; + +export function importHttpRequests( + db: sqlite3.Database, + appmapId: number, + events: readonly Record[], + returnEvents: Map, + parentEventMap: Map, + timestampIso: string +): void { + const stmt = db.prepare( + `INSERT INTO http_requests (appmap_id, event_id, thread_id, parent_event_id, + method, path, normalized_path, protocol, status_code, mime_type, + elapsed_ms, timestamp) + VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)` + ); + + for (const ev of events) { + const req = ev.http_server_request as HttpServerRequest | undefined; + if (!req) continue; + const ret = returnEvents.get(ev.id) ?? {}; + const resp = (ret.http_server_response ?? {}) as RawHttpServerResponse; + const elapsed = ret.elapsed; + + stmt.run( + appmapId, + ev.id, + ev.thread_id ?? null, + parentEventMap.get(ev.id) ?? null, + req.request_method, + req.path_info, + req.normalized_path_info ?? null, + req.protocol ?? null, + resp.status_code ?? 0, + resp.mime_type ?? null, + typeof elapsed === 'number' ? 
elapsed * 1000 : null, + timestampIso + ); + } +} diff --git a/packages/cli/src/cmds/query/db/import/importAppmap.ts b/packages/cli/src/cmds/query/db/import/importAppmap.ts new file mode 100644 index 0000000000..b32294de88 --- /dev/null +++ b/packages/cli/src/cmds/query/db/import/importAppmap.ts @@ -0,0 +1,67 @@ +import { readFileSync } from 'fs'; +import { resolve } from 'path'; + +import sqlite3 from 'better-sqlite3'; + +import { insertAppmapRecord, ParsedAppmap } from './appmapRecord'; +import { importCodeObjects, ClassMapNode } from './codeObjects'; +import { importHttpRequests } from './httpRequests'; +import { importHttpClientRequests } from './httpClientRequests'; +import { importSqlQueries } from './sqlQueries'; +import { importFunctionCalls } from './functionCalls'; +import { importExceptions } from './exceptions'; +import { buildParentEventMap } from './parentEventMap'; +import { buildReturnEventMap } from './returnEventMap'; + +export interface ImportResult { + appmapId: number; + eventCount: number; + sqlCount: number; + httpCount: number; +} + +// Idempotency: existing rows for this source_path are dropped (FK cascade +// clears child rows) before re-inserting. The whole import runs in one +// transaction — partial state is never visible to readers. +export function importAppmap(db: sqlite3.Database, filePath: string): ImportResult { + const absolutePath = resolve(filePath); + const raw = readFileSync(absolutePath, 'utf8'); + const parsed = JSON.parse(raw) as ParsedAppmap & { classMap?: ClassMapNode[] }; + const events = parsed.events ?? []; + const classMap = parsed.classMap ?? 
[]; + + const tx = db.transaction((): ImportResult => { + db.prepare('DELETE FROM appmaps WHERE source_path = ?').run(absolutePath); + + const { appmapId, timestampIso } = insertAppmapRecord(db, absolutePath, parsed); + const codeObjectLookup = importCodeObjects(db, classMap); + + const returnEvents = buildReturnEventMap(events); + const parentEventMap = buildParentEventMap(events); + + importHttpRequests(db, appmapId, events, returnEvents, parentEventMap, timestampIso); + importHttpClientRequests(db, appmapId, events, returnEvents, parentEventMap); + importSqlQueries(db, appmapId, events, returnEvents, parentEventMap); + importFunctionCalls(db, appmapId, events, returnEvents, parentEventMap, codeObjectLookup); + importExceptions(db, appmapId, events, parentEventMap); + + let sqlCount = 0; + let httpCount = 0; + for (const ev of events) { + if ('sql_query' in ev) sqlCount += 1; + if ('http_server_request' in ev) httpCount += 1; + } + + return { appmapId, eventCount: events.length, sqlCount, httpCount }; + }); + + return tx(); +} + +// Drop all rows for the given recording. ON DELETE CASCADE removes child +// rows from http_requests, sql_queries, function_calls, exceptions, etc. +export function deleteAppmap(db: sqlite3.Database, filePath: string): boolean { + const absolutePath = resolve(filePath); + const info = db.prepare('DELETE FROM appmaps WHERE source_path = ?').run(absolutePath); + return info.changes > 0; +} diff --git a/packages/cli/src/cmds/query/db/import/parentEventMap.ts b/packages/cli/src/cmds/query/db/import/parentEventMap.ts new file mode 100644 index 0000000000..07c81c6d94 --- /dev/null +++ b/packages/cli/src/cmds/query/db/import/parentEventMap.ts @@ -0,0 +1,40 @@ +// Minimal shape of an AppMap event used during parent-link reconstruction. +// We only inspect a few fields, so we keep this loose rather than coupling +// to @appland/models. 
+export interface AppMapEventLike { + id?: number; + thread_id?: number; + event?: string; +} + +// Walk the event stream once and return a map of event_id → parent_event_id +// using per-thread call stacks. Each 'call' event's parent is the top of its +// thread's stack at the moment of the call; each 'return' pops the stack. +// +// Events with no thread_id or no id are skipped. Threads are independent: +// events on different threads never become each other's parents. +export function buildParentEventMap(events: readonly AppMapEventLike[]): Map { + const parentMap = new Map(); + const threadStacks = new Map(); + + for (const ev of events) { + const tid = ev.thread_id; + const eid = ev.id; + if (tid === undefined || eid === undefined) continue; + + if (ev.event === 'call') { + let stack = threadStacks.get(tid); + if (!stack) { + stack = []; + threadStacks.set(tid, stack); + } + if (stack.length > 0) parentMap.set(eid, stack[stack.length - 1]); + stack.push(eid); + } else if (ev.event === 'return') { + const stack = threadStacks.get(tid); + if (stack && stack.length > 0) stack.pop(); + } + } + + return parentMap; +} diff --git a/packages/cli/src/cmds/query/db/import/parseLocation.ts b/packages/cli/src/cmds/query/db/import/parseLocation.ts new file mode 100644 index 0000000000..0348ea9130 --- /dev/null +++ b/packages/cli/src/cmds/query/db/import/parseLocation.ts @@ -0,0 +1,26 @@ +// Parse a classMap location string into [path, lineno]. +// +// Handles: +// "app/views.py:10" → ["app/views.py", 10] +// "/abs/path/file.rb:511" → ["/abs/path/file.rb", 511] +// "File.java:-1" → ["File.java", -1] +// "OpenSSL::Cipher#decrypt" → [null, null] (C-extension; no file) +// "" → [null, null] +// +// Splits on the rightmost ':' so paths containing colons (Windows drive +// letters, namespaced classes) are handled correctly. 
+export function parseLocation(location: string | undefined | null): [string | null, number | null] { + if (!location) return [null, null]; + + const idx = location.lastIndexOf(':'); + if (idx <= 0) return [null, null]; + + const pathPart = location.slice(0, idx); + const linenoPart = location.slice(idx + 1); + + if (linenoPart.length === 0) return [null, null]; + // Integer parse — accept leading minus, reject anything non-numeric. + if (!/^-?\d+$/.test(linenoPart)) return [null, null]; + + return [pathPart, Number.parseInt(linenoPart, 10)]; +} diff --git a/packages/cli/src/cmds/query/db/import/returnEventMap.ts b/packages/cli/src/cmds/query/db/import/returnEventMap.ts new file mode 100644 index 0000000000..fa9ca7c8db --- /dev/null +++ b/packages/cli/src/cmds/query/db/import/returnEventMap.ts @@ -0,0 +1,26 @@ +// Index return events by their associated call event id, so call events can +// look up their elapsed time + return-specific payload (http_server_response, +// http_client_response, return_value). +// +// In the AppMap event stream, each return event carries `parent_id` pointing +// at the id of the call event it terminates. 
+export interface ReturnEventLike { + event?: string; + parent_id?: number; + elapsed?: number; + http_server_response?: Record; + http_client_response?: Record; + return_value?: Record; +} + +export function buildReturnEventMap( + events: readonly ReturnEventLike[] +): Map { + const map = new Map(); + for (const ev of events) { + if (ev.event === 'return' && typeof ev.parent_id === 'number') { + map.set(ev.parent_id, ev); + } + } + return map; +} diff --git a/packages/cli/src/cmds/query/db/import/sqlQueries.ts b/packages/cli/src/cmds/query/db/import/sqlQueries.ts new file mode 100644 index 0000000000..22492012c9 --- /dev/null +++ b/packages/cli/src/cmds/query/db/import/sqlQueries.ts @@ -0,0 +1,62 @@ +import sqlite3 from 'better-sqlite3'; + +import type { SqlQuery } from '@appland/models'; + +import type { ReturnEventLike } from './returnEventMap'; + +// Import sql_query events. The caller class/method is taken from the event +// itself when present; otherwise it's derived from the parent call event in +// the per-thread call stack (matches the Python prototype). +export function importSqlQueries( + db: sqlite3.Database, + appmapId: number, + events: readonly Record[], + returnEvents: Map, + parentEventMap: Map +): void { + // event_id → call event, for parent-callsite lookup. + const callEvents = new Map>(); + for (const ev of events) { + if (ev.event === 'call' && typeof ev.id === 'number') callEvents.set(ev.id, ev); + } + + const stmt = db.prepare( + `INSERT INTO sql_queries (appmap_id, event_id, thread_id, parent_event_id, + sql_text, database_type, server_version, caller_class, caller_method, + elapsed_ms) + VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)` + ); + + for (const ev of events) { + const sq = ev.sql_query as SqlQuery | undefined; + if (!sq) continue; + const ret = returnEvents.get(ev.id) ?? {}; + const elapsed = ret.elapsed; + + let callerClass: string | null = ev.defined_class ?? null; + let callerMethod: string | null = ev.method_id ?? 
null; + if (!callerClass) { + const parentEid = parentEventMap.get(ev.id); + if (parentEid !== undefined) { + const parent = callEvents.get(parentEid); + if (parent) { + callerClass = parent.defined_class ?? null; + callerMethod = parent.method_id ?? null; + } + } + } + + stmt.run( + appmapId, + ev.id, + ev.thread_id ?? null, + parentEventMap.get(ev.id) ?? null, + sq.sql, + sq.database_type ?? null, + sq.server_version ?? null, + callerClass, + callerMethod, + typeof elapsed === 'number' ? elapsed * 1000 : null + ); + } +} diff --git a/packages/cli/src/cmds/query/db/schema.ts b/packages/cli/src/cmds/query/db/schema.ts index cb148c1007..4d1298526b 100644 --- a/packages/cli/src/cmds/query/db/schema.ts +++ b/packages/cli/src/cmds/query/db/schema.ts @@ -116,25 +116,6 @@ CREATE TABLE IF NOT EXISTS labels ( UNIQUE(code_object_id, label) ); --- Scanner findings (security, performance, stability issues) -CREATE TABLE IF NOT EXISTS findings ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - appmap_id INTEGER NOT NULL REFERENCES appmaps(id) ON DELETE CASCADE, - rule_id TEXT NOT NULL, - rule_title TEXT NOT NULL, - impact_domain TEXT, -- Security, Performance, Stability, Maintainability - message TEXT, - hash TEXT NOT NULL, - event_class TEXT, -- defined_class from the triggering event - event_method TEXT, -- method_id from the triggering event - event_path TEXT, -- source file path - event_lineno INTEGER, - scope_method TEXT, -- HTTP method of the request scope - scope_path TEXT, -- HTTP path of the request scope - stack_json TEXT, -- JSON array of stack frames - UNIQUE(appmap_id, hash) -); - -- Indexes for common APM queries CREATE INDEX IF NOT EXISTS idx_http_requests_appmap ON http_requests(appmap_id); CREATE INDEX IF NOT EXISTS idx_http_requests_path ON http_requests(normalized_path, method); @@ -155,9 +136,6 @@ CREATE INDEX IF NOT EXISTS idx_labels_label ON labels(label); CREATE INDEX IF NOT EXISTS idx_labels_code_object ON labels(code_object_id); CREATE INDEX IF NOT EXISTS 
idx_appmaps_timestamp ON appmaps(timestamp); CREATE INDEX IF NOT EXISTS idx_appmaps_branch ON appmaps(git_branch); -CREATE INDEX IF NOT EXISTS idx_findings_appmap ON findings(appmap_id); -CREATE INDEX IF NOT EXISTS idx_findings_rule ON findings(rule_id); -CREATE INDEX IF NOT EXISTS idx_findings_domain ON findings(impact_domain); `; // Names of all schema tables (used by the version-mismatch teardown path). @@ -170,5 +148,4 @@ export const SCHEMA_TABLES = [ 'function_calls', 'exceptions', 'labels', - 'findings', ]; diff --git a/packages/cli/src/fingerprint/fingerprintDirectoryCommand.ts b/packages/cli/src/fingerprint/fingerprintDirectoryCommand.ts index a7a172b0a6..46ea8fd113 100644 --- a/packages/cli/src/fingerprint/fingerprintDirectoryCommand.ts +++ b/packages/cli/src/fingerprint/fingerprintDirectoryCommand.ts @@ -2,13 +2,17 @@ import type { Metadata } from '@appland/models'; import { findFiles, verbose } from '../utils'; import FingerprintQueue from './fingerprintQueue'; import writeUsage, { collectUsageData } from '../lib/emitUsage'; +import type { QueryDbIndexer } from '../cmds/query/db/import/QueryDbIndexer'; class FingerprintDirectoryCommand { private appmaps = 0; private events = 0; private metadata?: Metadata; - constructor(private readonly directory: string) {} + constructor( + private readonly directory: string, + private readonly indexer?: QueryDbIndexer + ) {} async execute() { if (verbose()) { @@ -21,6 +25,7 @@ class FingerprintDirectoryCommand { this.events += numEvents; this.metadata = metadata; }); + if (this.indexer) this.indexer.attach(fpQueue); let count = 0; await this.files((file) => { @@ -29,6 +34,8 @@ class FingerprintDirectoryCommand { }); if (count > 0) await fpQueue.process(); + if (this.indexer) await this.indexer.syncDirectory(this.directory); + const usageData = await collectUsageData( this.directory, this.events, diff --git a/packages/cli/src/fingerprint/fingerprintWatchCommand.ts 
b/packages/cli/src/fingerprint/fingerprintWatchCommand.ts index 25b01d598b..a2868cfb15 100644 --- a/packages/cli/src/fingerprint/fingerprintWatchCommand.ts +++ b/packages/cli/src/fingerprint/fingerprintWatchCommand.ts @@ -11,6 +11,7 @@ import { FingerprintEvent } from './fingerprinter'; import { Metadata } from '@appland/models'; import { rm } from 'fs/promises'; import AppMapIndex from './appmapIndex'; +import type { QueryDbIndexer } from '../cmds/query/db/import/QueryDbIndexer'; export default class FingerprintWatchCommand { private pidfilePath: string | undefined; @@ -31,7 +32,7 @@ export default class FingerprintWatchCommand { this._numProcessed = value; } - constructor(private directory: string) { + constructor(private directory: string, private readonly indexer?: QueryDbIndexer) { this.pidfilePath = process.env.APPMAP_WRITE_PIDFILE && join(this.directory, 'index.pid'); this.fpQueue = new FingerprintQueue(); this.eventAggregator = new EventAggregator(async (events) => { @@ -39,6 +40,7 @@ export default class FingerprintWatchCommand { this.numProcessed += events.length; }); this.eventAggregator.attach(this.fpQueue, 'index'); + if (this.indexer) this.indexer.attach(this.fpQueue); } removePidfile() { @@ -198,6 +200,8 @@ export default class FingerprintWatchCommand { this.poller.start(); await pollReady; + if (this.indexer) await this.indexer.syncDirectory(this.directory); + this.ready(); } @@ -229,6 +233,7 @@ export default class FingerprintWatchCommand { const { indexDir } = new AppMapIndex(file); rm(indexDir, { force: true, recursive: true }); + if (this.indexer) this.indexer.onRemoved(file); } ready() { diff --git a/packages/cli/tests/unit/cmds/query/db/import/QueryDbIndexer.spec.ts b/packages/cli/tests/unit/cmds/query/db/import/QueryDbIndexer.spec.ts new file mode 100644 index 0000000000..6c381fe098 --- /dev/null +++ b/packages/cli/tests/unit/cmds/query/db/import/QueryDbIndexer.spec.ts @@ -0,0 +1,104 @@ +import { EventEmitter } from 'events'; +import { 
mkdtempSync, rmSync, writeFileSync } from 'fs'; +import { tmpdir } from 'os'; +import { join, resolve } from 'path'; + +import { QueryDbIndexer } from '../../../../../../src/cmds/query/db/import/QueryDbIndexer'; +import { freshDb } from './helpers'; + +function writeAppmap(dir: string, name: string, body: object): string { + const p = join(dir, name); + writeFileSync(p, JSON.stringify(body)); + return p; +} + +function minimalAppmap(): object { + return { + metadata: { timestamp: 1700000000 }, + events: [ + { id: 1, event: 'call', http_server_request: { request_method: 'GET', path_info: '/x' } }, + { id: 2, event: 'return', parent_id: 1, http_server_response: { status_code: 200 } }, + ], + }; +} + +describe('QueryDbIndexer', () => { + let tmp: string; + + beforeEach(() => { + tmp = mkdtempSync(join(tmpdir(), 'qdb-indexer-')); + }); + + afterEach(() => { + rmSync(tmp, { recursive: true, force: true }); + }); + + it('attach() routes index events into importAppmap', () => { + const db = freshDb(); + const indexer = new QueryDbIndexer(db); + try { + const queue = new EventEmitter(); + indexer.attach(queue as any); + const path = writeAppmap(tmp, 'a.appmap.json', minimalAppmap()); + queue.emit('index', { path }); + expect((db.prepare('SELECT COUNT(*) AS n FROM appmaps').get() as any).n).toBe(1); + expect(indexer.stats()).toEqual({ imported: 1, failed: 0 }); + } finally { + // not closing db here — the indexer owns close + indexer.close(); + } + }); + + it('logs and counts failures without throwing on per-file errors', () => { + const db = freshDb(); + const indexer = new QueryDbIndexer(db); + const warn = jest.spyOn(console, 'warn').mockImplementation(() => undefined); + try { + const path = join(tmp, 'broken.appmap.json'); + writeFileSync(path, '{not json'); + indexer.onIndexed(path); + expect(indexer.stats()).toEqual({ imported: 0, failed: 1 }); + expect(warn).toHaveBeenCalledWith(expect.stringContaining('failed to import')); + } finally { + indexer.close(); + 
warn.mockRestore(); + } + }); + + it('syncDirectory imports only files not yet in the DB', async () => { + const db = freshDb(); + const indexer = new QueryDbIndexer(db); + try { + const a = writeAppmap(tmp, 'a.appmap.json', minimalAppmap()); + const b = writeAppmap(tmp, 'b.appmap.json', minimalAppmap()); + indexer.onIndexed(a); // pre-import a + const beforeFailed = indexer.stats().failed; + const beforeImported = indexer.stats().imported; + await indexer.syncDirectory(tmp); + // a was already in DB (skip); b was new (imported). + expect(indexer.stats().imported).toBe(beforeImported + 1); + expect(indexer.stats().failed).toBe(beforeFailed); + const rows = db + .prepare('SELECT source_path FROM appmaps ORDER BY source_path') + .all() + .map((r: any) => r.source_path); + expect(rows).toEqual([resolve(a), resolve(b)].sort()); + } finally { + indexer.close(); + } + }); + + it('onRemoved deletes by source_path', () => { + const db = freshDb(); + const indexer = new QueryDbIndexer(db); + try { + const path = writeAppmap(tmp, 'a.appmap.json', minimalAppmap()); + indexer.onIndexed(path); + expect((db.prepare('SELECT COUNT(*) AS n FROM appmaps').get() as any).n).toBe(1); + indexer.onRemoved(path); + expect((db.prepare('SELECT COUNT(*) AS n FROM appmaps').get() as any).n).toBe(0); + } finally { + indexer.close(); + } + }); +}); diff --git a/packages/cli/tests/unit/cmds/query/db/import/appmapRecord.spec.ts b/packages/cli/tests/unit/cmds/query/db/import/appmapRecord.spec.ts new file mode 100644 index 0000000000..d160ac163c --- /dev/null +++ b/packages/cli/tests/unit/cmds/query/db/import/appmapRecord.spec.ts @@ -0,0 +1,106 @@ +import { mkdtempSync, rmSync, writeFileSync } from 'fs'; +import { tmpdir } from 'os'; +import { join } from 'path'; + +import { insertAppmapRecord } from '../../../../../../src/cmds/query/db/import/appmapRecord'; +import { freshDb } from './helpers'; + +describe('insertAppmapRecord', () => { + let tmp: string; + + beforeEach(() => { + tmp = 
mkdtempSync(join(tmpdir(), 'appmap-record-')); + }); + + afterEach(() => { + rmSync(tmp, { recursive: true, force: true }); + }); + + it('inserts a row with derived metadata, counts, and elapsed', () => { + const db = freshDb(); + try { + const path = join(tmp, 'test.appmap.json'); + writeFileSync(path, '{}'); + + const result = insertAppmapRecord(db, path, { + events: [ + { id: 1, event: 'call', http_server_request: { request_method: 'GET' } }, + { id: 2, event: 'return', parent_id: 1, http_server_response: { status_code: 200 }, elapsed: 0.42 }, + { id: 3, event: 'call', sql_query: { sql: 'SELECT 1' } }, + { id: 4, event: 'return', parent_id: 3 }, + ], + metadata: { + name: 'demo', + language: { name: 'ruby' }, + frameworks: [{ name: 'rails' }], + recorder: { type: 'rspec' }, + git: { repository: 'r', branch: 'main', commit: 'abc' }, + timestamp: 1700000000, + labels: ['lab1', 'lab2'], + }, + }); + + expect(result.appmapId).toBe(1); + expect(result.timestampIso).toBe(new Date(1700000000 * 1000).toISOString()); + + const row = db.prepare('SELECT * FROM appmaps WHERE id = ?').get(result.appmapId) as any; + expect(row.name).toBe('demo'); + expect(row.source_path).toBe(path); + expect(row.language).toBe('ruby'); + expect(row.framework).toBe('rails'); + expect(row.recorder_type).toBe('rspec'); + expect(row.git_repository).toBe('r'); + expect(row.git_branch).toBe('main'); + expect(row.git_commit).toBe('abc'); + expect(row.event_count).toBe(4); + expect(row.sql_query_count).toBe(1); + expect(row.http_request_count).toBe(1); + expect(row.elapsed_ms).toBeCloseTo(420); + expect(JSON.parse(row.metadata_labels)).toEqual(['lab1', 'lab2']); + } finally { + db.close(); + } + }); + + it('falls back to file mtime when metadata has no timestamp', () => { + const db = freshDb(); + try { + const path = join(tmp, 'no-ts.appmap.json'); + writeFileSync(path, '{}'); + const result = insertAppmapRecord(db, path, { events: [], metadata: {} }); + // Just assert it parses as a valid date. 
+ expect(Number.isNaN(Date.parse(result.timestampIso))).toBe(false); + } finally { + db.close(); + } + }); + + it('uses the file basename as the name when metadata.name is missing', () => { + const db = freshDb(); + try { + const path = join(tmp, 'unnamed.appmap.json'); + writeFileSync(path, '{}'); + insertAppmapRecord(db, path, { events: [], metadata: {} }); + const row = db.prepare('SELECT name FROM appmaps').get() as any; + expect(row.name).toBe('unnamed.appmap.json'); + } finally { + db.close(); + } + }); + + it('leaves elapsed_ms null when there is no http_server_response return', () => { + const db = freshDb(); + try { + const path = join(tmp, 'no-http.appmap.json'); + writeFileSync(path, '{}'); + insertAppmapRecord(db, path, { + events: [{ id: 1, event: 'call' }], + metadata: { timestamp: 1700000000 }, + }); + const row = db.prepare('SELECT elapsed_ms FROM appmaps').get() as any; + expect(row.elapsed_ms).toBeNull(); + } finally { + db.close(); + } + }); +}); diff --git a/packages/cli/tests/unit/cmds/query/db/import/codeObjects.spec.ts b/packages/cli/tests/unit/cmds/query/db/import/codeObjects.spec.ts new file mode 100644 index 0000000000..b4a195b856 --- /dev/null +++ b/packages/cli/tests/unit/cmds/query/db/import/codeObjects.spec.ts @@ -0,0 +1,319 @@ +import sqlite3 from 'better-sqlite3'; + +import { openQueryDb } from '../../../../../../src/cmds/query/db/openQueryDb'; +import { + ClassMapNode, + importCodeObjects, +} from '../../../../../../src/cmds/query/db/import/codeObjects'; + +function freshDb(): sqlite3.Database { + return openQueryDb('/tmp/ignored', ':memory:').db; +} + +describe('importCodeObjects', () => { + it('returns an empty map and writes nothing for an empty classMap', () => { + const db = freshDb(); + try { + const lookup = importCodeObjects(db, []); + expect(lookup.size).toBe(0); + expect((db.prepare('SELECT COUNT(*) AS n FROM code_objects').get() as any).n).toBe(0); + } finally { + db.close(); + } + }); + + it('inserts a single instance 
method with the canonical fqid', () => { + const db = freshDb(); + try { + const tree: ClassMapNode[] = [ + { + type: 'package', + name: 'app', + children: [ + { + type: 'class', + name: 'User', + children: [ + { + type: 'function', + name: 'save', + static: false, + location: 'app/models/user.rb:10', + }, + ], + }, + ], + }, + ]; + const lookup = importCodeObjects(db, tree); + + const row = db + .prepare('SELECT fqid, defined_class, method_id FROM code_objects') + .get() as any; + expect(row.fqid).toBe('app/User#save'); + expect(row.method_id).toBe('save'); + expect(row.defined_class).toBe('app.User'); + expect(lookup.get('app/models/user.rb:10')).toBe(1); + } finally { + db.close(); + } + }); + + it('uses "." for static methods', () => { + const db = freshDb(); + try { + const tree: ClassMapNode[] = [ + { + type: 'package', + name: 'core', + children: [ + { + type: 'class', + name: 'Date', + children: [ + { + type: 'function', + name: 'parse', + static: true, + location: 'core/date.rb:1', + }, + ], + }, + ], + }, + ]; + importCodeObjects(db, tree); + const fqid = (db.prepare('SELECT fqid FROM code_objects').get() as any).fqid; + expect(fqid).toBe('core/Date.parse'); + } finally { + db.close(); + } + }); + + it('strips an auxtype suffix from the method name', () => { + const db = freshDb(); + try { + const tree: ClassMapNode[] = [ + { + type: 'package', + name: 'app', + children: [ + { + type: 'class', + name: 'User', + children: [ + { + type: 'function', + name: 'is_authenticated (get)', + location: 'app/models/user.rb:1', + }, + ], + }, + ], + }, + ]; + importCodeObjects(db, tree); + const row = db.prepare('SELECT fqid, method_id FROM code_objects').get() as any; + expect(row.method_id).toBe('is_authenticated'); + expect(row.fqid).toBe('app/User#is_authenticated'); + } finally { + db.close(); + } + }); + + it('skips function nodes without a location', () => { + const db = freshDb(); + try { + const tree: ClassMapNode[] = [ + { + type: 'package', + name: 'lib', + 
children: [ + { + type: 'class', + name: 'Cipher', + children: [{ type: 'function', name: 'decrypt' /* no location */ }], + }, + ], + }, + ]; + const lookup = importCodeObjects(db, tree); + expect(lookup.size).toBe(0); + expect((db.prepare('SELECT COUNT(*) AS n FROM code_objects').get() as any).n).toBe(0); + } finally { + db.close(); + } + }); + + it('inserts labels for the function and dedups them', () => { + const db = freshDb(); + try { + const tree: ClassMapNode[] = [ + { + type: 'package', + name: 'app', + children: [ + { + type: 'class', + name: 'Logger', + children: [ + { + type: 'function', + name: 'error', + location: 'app/lib/logger.rb:5', + labels: ['log', 'log'], // duplicate in source + }, + ], + }, + ], + }, + ]; + importCodeObjects(db, tree); + const labels = db + .prepare('SELECT label FROM labels ORDER BY label') + .all() + .map((r: any) => r.label); + expect(labels).toEqual(['log']); + } finally { + db.close(); + } + }); + + it('builds nested-package fqids correctly', () => { + const db = freshDb(); + try { + const tree: ClassMapNode[] = [ + { + type: 'package', + name: 'app', + children: [ + { + type: 'package', + name: 'controllers', + children: [ + { + type: 'package', + name: 'orders', + children: [ + { + type: 'class', + name: 'OrdersController', + children: [ + { + type: 'function', + name: 'create', + location: 'app/controllers/orders/orders_controller.rb:42', + }, + ], + }, + ], + }, + ], + }, + ], + }, + ]; + importCodeObjects(db, tree); + const fqid = (db.prepare('SELECT fqid FROM code_objects').get() as any).fqid; + expect(fqid).toBe('app/controllers/orders/OrdersController#create'); + } finally { + db.close(); + } + }); + + it('uses :: between nested class names (matches @appland/models codeObjectId)', () => { + const db = freshDb(); + try { + const tree: ClassMapNode[] = [ + { + type: 'package', + name: 'app', + children: [ + { + type: 'class', + name: 'Outer', + children: [ + { + type: 'class', + name: 'Inner', + children: [ + { 
type: 'function', name: 'foo', location: 'app/outer.rb:1' }, + ], + }, + ], + }, + ], + }, + ]; + importCodeObjects(db, tree); + const fqid = (db.prepare('SELECT fqid FROM code_objects').get() as any).fqid; + expect(fqid).toBe('app/Outer::Inner#foo'); + } finally { + db.close(); + } + }); + + it('uses :: when a class is the immediate child of another class with a static method', () => { + const db = freshDb(); + try { + const tree: ClassMapNode[] = [ + { + type: 'package', + name: 'lib', + children: [ + { + type: 'class', + name: 'Outer', + children: [ + { + type: 'class', + name: 'Inner', + children: [ + { type: 'function', name: 'parse', static: true, location: 'lib/x.rb:1' }, + ], + }, + ], + }, + ], + }, + ]; + importCodeObjects(db, tree); + const fqid = (db.prepare('SELECT fqid FROM code_objects').get() as any).fqid; + expect(fqid).toBe('lib/Outer::Inner.parse'); + } finally { + db.close(); + } + }); + + it('is idempotent on re-import (INSERT OR IGNORE)', () => { + const db = freshDb(); + try { + const tree: ClassMapNode[] = [ + { + type: 'package', + name: 'app', + children: [ + { + type: 'class', + name: 'User', + children: [ + { + type: 'function', + name: 'save', + location: 'app/models/user.rb:10', + labels: ['dao'], + }, + ], + }, + ], + }, + ]; + importCodeObjects(db, tree); + importCodeObjects(db, tree); + expect((db.prepare('SELECT COUNT(*) AS n FROM code_objects').get() as any).n).toBe(1); + expect((db.prepare('SELECT COUNT(*) AS n FROM labels').get() as any).n).toBe(1); + } finally { + db.close(); + } + }); +}); diff --git a/packages/cli/tests/unit/cmds/query/db/import/exceptions.spec.ts b/packages/cli/tests/unit/cmds/query/db/import/exceptions.spec.ts new file mode 100644 index 0000000000..572119b80e --- /dev/null +++ b/packages/cli/tests/unit/cmds/query/db/import/exceptions.spec.ts @@ -0,0 +1,180 @@ +import { importExceptions } from '../../../../../../src/cmds/query/db/import/exceptions'; +import { buildParentEventMap } from 
'../../../../../../src/cmds/query/db/import/parentEventMap'; +import { freshDb } from './helpers'; + +function seedAppmap(db: any): number { + const info = db + .prepare("INSERT INTO appmaps (name, source_path) VALUES ('rec', '/tmp/rec.appmap.json')") + .run(); + return Number(info.lastInsertRowid); +} + +describe('importExceptions', () => { + it('inserts one row per exception entry, preserving class/message/path/lineno', () => { + const db = freshDb(); + try { + const appmapId = seedAppmap(db); + const events = [ + { id: 1, event: 'call', thread_id: 7 }, + { + id: 2, + event: 'return', + parent_id: 1, + thread_id: 7, + exceptions: [ + { + class: 'IntegrityError', + message: 'duplicate key', + path: 'app/models/order.rb', + lineno: 42, + }, + ], + }, + ]; + importExceptions(db, appmapId, events, buildParentEventMap(events)); + const row = db.prepare('SELECT * FROM exceptions').get() as any; + expect(row.exception_class).toBe('IntegrityError'); + expect(row.message).toBe('duplicate key'); + expect(row.path).toBe('app/models/order.rb'); + expect(row.lineno).toBe(42); + expect(row.thread_id).toBe(7); + } finally { + db.close(); + } + }); + + it('expands multiple exceptions on the same return event into multiple rows', () => { + const db = freshDb(); + try { + const appmapId = seedAppmap(db); + const events = [ + { id: 1, event: 'call', thread_id: 1 }, + { + id: 2, + event: 'return', + parent_id: 1, + exceptions: [{ class: 'A' }, { class: 'B' }], + }, + ]; + importExceptions(db, appmapId, events, buildParentEventMap(events)); + const rows = db + .prepare('SELECT exception_class FROM exceptions ORDER BY id') + .all() + .map((r: any) => r.exception_class); + expect(rows).toEqual(['A', 'B']); + } finally { + db.close(); + } + }); + + it('skips events without an exceptions array', () => { + const db = freshDb(); + try { + const appmapId = seedAppmap(db); + importExceptions(db, appmapId, [{ id: 1, event: 'call' }], new Map()); + expect((db.prepare('SELECT COUNT(*) AS n FROM 
exceptions').get() as any).n).toBe(0); + } finally { + db.close(); + } + }); + + it('uses the call event id (not the return event id) for event_id and derives parent_event_id from the call', () => { + const db = freshDb(); + try { + const appmapId = seedAppmap(db); + const events = [ + { id: 10, event: 'call', thread_id: 1 }, // outer + { id: 11, event: 'call', thread_id: 1 }, // inner — parent is 10 + { + id: 12, + event: 'return', + parent_id: 11, + thread_id: 1, + exceptions: [{ class: 'BoomError' }], + }, + ]; + importExceptions(db, appmapId, events, buildParentEventMap(events)); + const row = db.prepare('SELECT event_id, parent_event_id FROM exceptions').get() as any; + expect(row.event_id).toBe(11); // call id, not return id + expect(row.parent_event_id).toBe(10); // parent of the call + } finally { + db.close(); + } + }); + + it('leaves parent_event_id NULL when the failing call is at the top of its thread', () => { + const db = freshDb(); + try { + const appmapId = seedAppmap(db); + const events = [ + { id: 1, event: 'call', thread_id: 1 }, // top-level call + { + id: 2, + event: 'return', + parent_id: 1, + thread_id: 1, + exceptions: [{ class: 'TopLevelError' }], + }, + ]; + importExceptions(db, appmapId, events, buildParentEventMap(events)); + const row = db.prepare('SELECT event_id, parent_event_id FROM exceptions').get() as any; + expect(row.event_id).toBe(1); + expect(row.parent_event_id).toBeNull(); + } finally { + db.close(); + } + }); + + it('still imports legacy recordings that place exceptions on the call event', () => { + const db = freshDb(); + try { + const appmapId = seedAppmap(db); + const events = [ + { id: 1, event: 'call', thread_id: 1 }, // outer + { + id: 2, + event: 'call', + thread_id: 1, // inner — exceptions attached here directly + exceptions: [{ class: 'LegacyError' }], + }, + { id: 3, event: 'return', parent_id: 2, thread_id: 1 }, + ]; + importExceptions(db, appmapId, events, buildParentEventMap(events)); + const row = 
db.prepare('SELECT event_id, parent_event_id FROM exceptions').get() as any; + expect(row.event_id).toBe(2); + expect(row.parent_event_id).toBe(1); + } finally { + db.close(); + } + }); + + it('de-dups when the same call has exceptions on both call and return events (return wins)', () => { + const db = freshDb(); + try { + const appmapId = seedAppmap(db); + const events = [ + { + id: 1, + event: 'call', + thread_id: 1, + exceptions: [{ class: 'OnCall' }], + }, + { + id: 2, + event: 'return', + parent_id: 1, + thread_id: 1, + exceptions: [{ class: 'OnReturn' }], + }, + ]; + importExceptions(db, appmapId, events, buildParentEventMap(events)); + const rows = db + .prepare('SELECT exception_class FROM exceptions ORDER BY id') + .all() + .map((r: any) => r.exception_class); + expect(rows).toEqual(['OnReturn']); + } finally { + db.close(); + } + }); +}); diff --git a/packages/cli/tests/unit/cmds/query/db/import/functionCalls.spec.ts b/packages/cli/tests/unit/cmds/query/db/import/functionCalls.spec.ts new file mode 100644 index 0000000000..9b4c295e9e --- /dev/null +++ b/packages/cli/tests/unit/cmds/query/db/import/functionCalls.spec.ts @@ -0,0 +1,206 @@ +import { + importCodeObjects, + ClassMapNode, +} from '../../../../../../src/cmds/query/db/import/codeObjects'; +import { importFunctionCalls } from '../../../../../../src/cmds/query/db/import/functionCalls'; +import { buildReturnEventMap } from '../../../../../../src/cmds/query/db/import/returnEventMap'; +import { buildParentEventMap } from '../../../../../../src/cmds/query/db/import/parentEventMap'; +import { freshDb } from './helpers'; + +function seedAppmap(db: any): number { + const info = db + .prepare("INSERT INTO appmaps (name, source_path) VALUES ('rec', '/tmp/rec.appmap.json')") + .run(); + return Number(info.lastInsertRowid); +} + +describe('importFunctionCalls', () => { + it('inserts call events, links code_object via path:lineno, and records elapsed', () => { + const db = freshDb(); + try { + const appmapId = 
seedAppmap(db); + const classMap: ClassMapNode[] = [ + { + type: 'package', + name: 'app', + children: [ + { + type: 'class', + name: 'User', + children: [ + { type: 'function', name: 'save', location: 'app/models/user.rb:10' }, + ], + }, + ], + }, + ]; + const lookup = importCodeObjects(db, classMap); + const events = [ + { + id: 1, + event: 'call', + thread_id: 1, + defined_class: 'User', + method_id: 'save', + path: 'app/models/user.rb', + lineno: 10, + }, + { id: 2, event: 'return', parent_id: 1, elapsed: 0.001 }, + ]; + importFunctionCalls( + db, + appmapId, + events, + buildReturnEventMap(events), + buildParentEventMap(events), + lookup + ); + const row = db.prepare('SELECT * FROM function_calls').get() as any; + expect(row.code_object_id).toBe(1); + expect(row.defined_class).toBe('User'); + expect(row.method_id).toBe('save'); + expect(row.path).toBe('app/models/user.rb'); + expect(row.lineno).toBe(10); + expect(row.elapsed_ms).toBeCloseTo(1); + expect(row.parameters_json).toBeNull(); + expect(row.return_value).toBeNull(); + } finally { + db.close(); + } + }); + + it('captures parameters and return value for labeled functions', () => { + const db = freshDb(); + try { + const appmapId = seedAppmap(db); + const classMap: ClassMapNode[] = [ + { + type: 'package', + name: 'app', + children: [ + { + type: 'class', + name: 'IdempotencyKey', + children: [ + { + type: 'function', + name: 'generate', + static: true, + location: 'app/services/idempotency.rb:12', + labels: ['security.idempotency'], + }, + ], + }, + ], + }, + ]; + const lookup = importCodeObjects(db, classMap); + const events = [ + { + id: 1, + event: 'call', + thread_id: 1, + defined_class: 'IdempotencyKey', + method_id: 'generate', + path: 'app/services/idempotency.rb', + lineno: 12, + static: true, + parameters: [{ name: 'request_id', class: 'String', value: "'req-9281'" }], + }, + { + id: 2, + event: 'return', + parent_id: 1, + return_value: { class: 'String', value: "'k-9281'" }, + }, + ]; + 
importFunctionCalls( + db, + appmapId, + events, + buildReturnEventMap(events), + buildParentEventMap(events), + lookup + ); + const row = db.prepare('SELECT * FROM function_calls').get() as any; + expect(JSON.parse(row.parameters_json)).toEqual([ + { name: 'request_id', class: 'String', value: "'req-9281'" }, + ]); + expect(row.return_value).toBe("'k-9281'"); + expect(row.is_static).toBe(1); + } finally { + db.close(); + } + }); + + it('skips calls that are http_server_request or sql_query carriers', () => { + const db = freshDb(); + try { + const appmapId = seedAppmap(db); + const events = [ + { + id: 1, + event: 'call', + defined_class: 'Foo', + method_id: 'bar', + http_server_request: { request_method: 'GET', path_info: '/x' }, + }, + { + id: 2, + event: 'call', + defined_class: 'Foo', + method_id: 'bar', + sql_query: { sql: 'SELECT 1' }, + }, + { + id: 3, + event: 'call', + defined_class: 'Foo', + method_id: 'bar', + }, + ]; + importFunctionCalls( + db, + appmapId, + events, + buildReturnEventMap(events), + buildParentEventMap(events), + new Map() + ); + expect((db.prepare('SELECT COUNT(*) AS n FROM function_calls').get() as any).n).toBe(1); + const row = db.prepare('SELECT event_id FROM function_calls').get() as any; + expect(row.event_id).toBe(3); + } finally { + db.close(); + } + }); + + it('leaves code_object_id null when path:lineno does not match any classMap location', () => { + const db = freshDb(); + try { + const appmapId = seedAppmap(db); + const events = [ + { + id: 1, + event: 'call', + defined_class: 'Foo', + method_id: 'bar', + path: 'unknown.rb', + lineno: 1, + }, + ]; + importFunctionCalls( + db, + appmapId, + events, + buildReturnEventMap(events), + buildParentEventMap(events), + new Map() + ); + const row = db.prepare('SELECT code_object_id FROM function_calls').get() as any; + expect(row.code_object_id).toBeNull(); + } finally { + db.close(); + } + }); +}); diff --git a/packages/cli/tests/unit/cmds/query/db/import/helpers.ts 
b/packages/cli/tests/unit/cmds/query/db/import/helpers.ts new file mode 100644 index 0000000000..3617e9ae96 --- /dev/null +++ b/packages/cli/tests/unit/cmds/query/db/import/helpers.ts @@ -0,0 +1,8 @@ +import sqlite3 from 'better-sqlite3'; + +import { openQueryDb } from '../../../../../../src/cmds/query/db/openQueryDb'; + +// Open an in-memory query DB with the schema applied. +export function freshDb(): sqlite3.Database { + return openQueryDb('/tmp/ignored', ':memory:').db; +} diff --git a/packages/cli/tests/unit/cmds/query/db/import/httpClientRequests.spec.ts b/packages/cli/tests/unit/cmds/query/db/import/httpClientRequests.spec.ts new file mode 100644 index 0000000000..d2b05faaee --- /dev/null +++ b/packages/cli/tests/unit/cmds/query/db/import/httpClientRequests.spec.ts @@ -0,0 +1,72 @@ +import { importHttpClientRequests } from '../../../../../../src/cmds/query/db/import/httpClientRequests'; +import { buildReturnEventMap } from '../../../../../../src/cmds/query/db/import/returnEventMap'; +import { buildParentEventMap } from '../../../../../../src/cmds/query/db/import/parentEventMap'; +import { freshDb } from './helpers'; + +function seedAppmap(db: any): number { + const info = db + .prepare("INSERT INTO appmaps (name, source_path) VALUES ('rec', '/tmp/rec.appmap.json')") + .run(); + return Number(info.lastInsertRowid); +} + +describe('importHttpClientRequests', () => { + it('inserts one row per http_client_request', () => { + const db = freshDb(); + try { + const appmapId = seedAppmap(db); + const events = [ + { + id: 1, + event: 'call', + thread_id: 12, + http_client_request: { request_method: 'GET', url: 'https://api.example/v1' }, + }, + { + id: 2, + event: 'return', + parent_id: 1, + http_client_response: { status_code: 503 }, + elapsed: 0.04, + }, + ]; + importHttpClientRequests( + db, + appmapId, + events, + buildReturnEventMap(events), + buildParentEventMap(events) + ); + const row = db.prepare('SELECT * FROM http_client_requests').get() as any; + 
expect(row.method).toBe('GET'); + expect(row.url).toBe('https://api.example/v1'); + expect(row.status_code).toBe(503); + expect(row.elapsed_ms).toBeCloseTo(40); + expect(row.thread_id).toBe(12); + } finally { + db.close(); + } + }); + + it('defaults missing method to GET and missing url to ""', () => { + const db = freshDb(); + try { + const appmapId = seedAppmap(db); + const events = [ + { id: 1, event: 'call', http_client_request: {} }, + ]; + importHttpClientRequests( + db, + appmapId, + events, + buildReturnEventMap(events), + buildParentEventMap(events) + ); + const row = db.prepare('SELECT method, url FROM http_client_requests').get() as any; + expect(row.method).toBe('GET'); + expect(row.url).toBe(''); + } finally { + db.close(); + } + }); +}); diff --git a/packages/cli/tests/unit/cmds/query/db/import/httpRequests.spec.ts b/packages/cli/tests/unit/cmds/query/db/import/httpRequests.spec.ts new file mode 100644 index 0000000000..c87ff284f0 --- /dev/null +++ b/packages/cli/tests/unit/cmds/query/db/import/httpRequests.spec.ts @@ -0,0 +1,100 @@ +import { importHttpRequests } from '../../../../../../src/cmds/query/db/import/httpRequests'; +import { buildReturnEventMap } from '../../../../../../src/cmds/query/db/import/returnEventMap'; +import { buildParentEventMap } from '../../../../../../src/cmds/query/db/import/parentEventMap'; +import { freshDb } from './helpers'; + +function seedAppmap(db: any): number { + const info = db + .prepare("INSERT INTO appmaps (name, source_path) VALUES ('rec', '/tmp/rec.appmap.json')") + .run(); + return Number(info.lastInsertRowid); +} + +describe('importHttpRequests', () => { + it('inserts one row per http_server_request, joining the matching return for status + elapsed', () => { + const db = freshDb(); + try { + const appmapId = seedAppmap(db); + const events = [ + { + id: 1, + event: 'call', + thread_id: 99, + http_server_request: { + request_method: 'POST', + path_info: '/orders', + normalized_path_info: '/orders', + protocol: 
'HTTP/1.1', + }, + }, + { + id: 2, + event: 'call', + thread_id: 99, + }, + { + id: 3, + event: 'return', + parent_id: 2, + }, + { + id: 4, + event: 'return', + parent_id: 1, + http_server_response: { status_code: 500, mime_type: 'application/json' }, + elapsed: 0.52, + }, + ]; + + importHttpRequests( + db, + appmapId, + events, + buildReturnEventMap(events), + buildParentEventMap(events), + '2026-04-29T14:21:08.000Z' + ); + + const row = db.prepare('SELECT * FROM http_requests').get() as any; + expect(row.method).toBe('POST'); + expect(row.path).toBe('/orders'); + expect(row.normalized_path).toBe('/orders'); + expect(row.protocol).toBe('HTTP/1.1'); + expect(row.status_code).toBe(500); + expect(row.mime_type).toBe('application/json'); + expect(row.elapsed_ms).toBeCloseTo(520); + expect(row.thread_id).toBe(99); + expect(row.timestamp).toBe('2026-04-29T14:21:08.000Z'); + expect(row.parent_event_id).toBeNull(); + } finally { + db.close(); + } + }); + + it('records status_code 0 when no return event was emitted', () => { + const db = freshDb(); + try { + const appmapId = seedAppmap(db); + const events = [ + { + id: 1, + event: 'call', + http_server_request: { request_method: 'GET', path_info: '/x' }, + }, + ]; + importHttpRequests( + db, + appmapId, + events, + buildReturnEventMap(events), + buildParentEventMap(events), + '2026-04-29T14:21:08.000Z' + ); + const row = db.prepare('SELECT status_code, elapsed_ms FROM http_requests').get() as any; + expect(row.status_code).toBe(0); + expect(row.elapsed_ms).toBeNull(); + } finally { + db.close(); + } + }); +}); diff --git a/packages/cli/tests/unit/cmds/query/db/import/importAppmap.spec.ts b/packages/cli/tests/unit/cmds/query/db/import/importAppmap.spec.ts new file mode 100644 index 0000000000..4d25c85ece --- /dev/null +++ b/packages/cli/tests/unit/cmds/query/db/import/importAppmap.spec.ts @@ -0,0 +1,196 @@ +import { mkdtempSync, rmSync, writeFileSync } from 'fs'; +import { tmpdir } from 'os'; +import { join, resolve } from 
'path'; + +import { + deleteAppmap, + importAppmap, +} from '../../../../../../src/cmds/query/db/import/importAppmap'; +import { freshDb } from './helpers'; + +function writeAppmap(dir: string, name: string, body: object): string { + const p = join(dir, name); + writeFileSync(p, JSON.stringify(body)); + return p; +} + +describe('importAppmap', () => { + let tmp: string; + + beforeEach(() => { + tmp = mkdtempSync(join(tmpdir(), 'import-appmap-')); + }); + + afterEach(() => { + rmSync(tmp, { recursive: true, force: true }); + }); + + it('imports an end-to-end recording into all tables', () => { + const db = freshDb(); + try { + const path = writeAppmap(tmp, 'rec.appmap.json', { + metadata: { + name: 'orders_create_42', + language: { name: 'ruby' }, + frameworks: [{ name: 'rails' }], + recorder: { type: 'rspec' }, + git: { branch: 'feature/foo', commit: 'abc' }, + timestamp: 1700000000, + }, + classMap: [ + { + type: 'package', + name: 'app', + children: [ + { + type: 'class', + name: 'OrdersController', + children: [ + { + type: 'function', + name: 'create', + location: 'app/controllers/orders_controller.rb:42', + }, + ], + }, + ], + }, + ], + events: [ + { + id: 1, + event: 'call', + thread_id: 1, + http_server_request: { request_method: 'POST', path_info: '/orders' }, + }, + { + id: 2, + event: 'call', + thread_id: 1, + defined_class: 'OrdersController', + method_id: 'create', + path: 'app/controllers/orders_controller.rb', + lineno: 42, + }, + { + id: 3, + event: 'call', + thread_id: 1, + sql_query: { sql: 'INSERT INTO orders (...)' }, + }, + { id: 4, event: 'return', parent_id: 3, elapsed: 0.014 }, + { + id: 5, + event: 'return', + parent_id: 2, + exceptions: [{ class: 'IntegrityError', message: 'duplicate key' }], + }, + { + id: 6, + event: 'return', + parent_id: 1, + http_server_response: { status_code: 500 }, + elapsed: 0.52, + }, + ], + }); + + const result = importAppmap(db, path); + expect(result.eventCount).toBe(6); + expect(result.sqlCount).toBe(1); + 
expect(result.httpCount).toBe(1); + + const am = db.prepare('SELECT * FROM appmaps').get() as any; + expect(am.source_path).toBe(resolve(path)); + expect(am.git_branch).toBe('feature/foo'); + expect(am.elapsed_ms).toBeCloseTo(520); + + expect((db.prepare('SELECT COUNT(*) AS n FROM http_requests').get() as any).n).toBe(1); + expect((db.prepare('SELECT COUNT(*) AS n FROM sql_queries').get() as any).n).toBe(1); + expect((db.prepare('SELECT COUNT(*) AS n FROM function_calls').get() as any).n).toBe(1); + expect((db.prepare('SELECT COUNT(*) AS n FROM exceptions').get() as any).n).toBe(1); + expect((db.prepare('SELECT COUNT(*) AS n FROM code_objects').get() as any).n).toBe(1); + } finally { + db.close(); + } + }); + + it('is idempotent on re-import — rows are replaced, not duplicated', () => { + const db = freshDb(); + try { + const path = writeAppmap(tmp, 'rec.appmap.json', { + metadata: { timestamp: 1700000000 }, + events: [ + { + id: 1, + event: 'call', + http_server_request: { request_method: 'GET', path_info: '/x' }, + }, + { id: 2, event: 'return', parent_id: 1, http_server_response: { status_code: 200 } }, + ], + }); + importAppmap(db, path); + importAppmap(db, path); + expect((db.prepare('SELECT COUNT(*) AS n FROM appmaps').get() as any).n).toBe(1); + expect((db.prepare('SELECT COUNT(*) AS n FROM http_requests').get() as any).n).toBe(1); + } finally { + db.close(); + } + }); + + it('rolls back on a parse error, leaving no partial rows', () => { + const db = freshDb(); + try { + const path = join(tmp, 'broken.appmap.json'); + writeFileSync(path, '{not valid json'); + expect(() => importAppmap(db, path)).toThrow(); + expect((db.prepare('SELECT COUNT(*) AS n FROM appmaps').get() as any).n).toBe(0); + } finally { + db.close(); + } + }); +}); + +describe('deleteAppmap', () => { + let tmp: string; + + beforeEach(() => { + tmp = mkdtempSync(join(tmpdir(), 'delete-appmap-')); + }); + + afterEach(() => { + rmSync(tmp, { recursive: true, force: true }); + }); + + 
it('removes the recording and cascades to child rows', () => { + const db = freshDb(); + try { + const path = writeAppmap(tmp, 'rec.appmap.json', { + metadata: { timestamp: 1700000000 }, + events: [ + { + id: 1, + event: 'call', + http_server_request: { request_method: 'GET', path_info: '/x' }, + }, + { id: 2, event: 'return', parent_id: 1, http_server_response: { status_code: 200 } }, + ], + }); + importAppmap(db, path); + expect(deleteAppmap(db, path)).toBe(true); + expect((db.prepare('SELECT COUNT(*) AS n FROM appmaps').get() as any).n).toBe(0); + expect((db.prepare('SELECT COUNT(*) AS n FROM http_requests').get() as any).n).toBe(0); + } finally { + db.close(); + } + }); + + it('returns false when no matching row exists', () => { + const db = freshDb(); + try { + expect(deleteAppmap(db, '/tmp/nonexistent.appmap.json')).toBe(false); + } finally { + db.close(); + } + }); +}); diff --git a/packages/cli/tests/unit/cmds/query/db/import/parentEventMap.spec.ts b/packages/cli/tests/unit/cmds/query/db/import/parentEventMap.spec.ts new file mode 100644 index 0000000000..f2b4a536f7 --- /dev/null +++ b/packages/cli/tests/unit/cmds/query/db/import/parentEventMap.spec.ts @@ -0,0 +1,74 @@ +import { + buildParentEventMap, + AppMapEventLike, +} from '../../../../../../src/cmds/query/db/import/parentEventMap'; + +describe('buildParentEventMap', () => { + it('returns an empty map for an empty event stream', () => { + expect(buildParentEventMap([])).toEqual(new Map()); + }); + + it('assigns the call directly above as the parent on a single thread', () => { + const events: AppMapEventLike[] = [ + { id: 1, thread_id: 1, event: 'call' }, // root + { id: 2, thread_id: 1, event: 'call' }, // child of 1 + { id: 3, thread_id: 1, event: 'call' }, // child of 2 + { id: 4, thread_id: 1, event: 'return' }, // returns from 3 + { id: 5, thread_id: 1, event: 'call' }, // child of 2 again + { id: 6, thread_id: 1, event: 'return' }, // returns from 5 + { id: 7, thread_id: 1, event: 'return' }, // 
returns from 2 + { id: 8, thread_id: 1, event: 'return' }, // returns from 1 + ]; + const map = buildParentEventMap(events); + expect(map.get(1)).toBeUndefined(); + expect(map.get(2)).toBe(1); + expect(map.get(3)).toBe(2); + expect(map.get(5)).toBe(2); + }); + + it('keeps threads independent', () => { + const events: AppMapEventLike[] = [ + { id: 1, thread_id: 1, event: 'call' }, + { id: 2, thread_id: 2, event: 'call' }, // different thread; no parent + { id: 3, thread_id: 1, event: 'call' }, // child of 1 + { id: 4, thread_id: 2, event: 'call' }, // child of 2 + ]; + const map = buildParentEventMap(events); + expect(map.get(1)).toBeUndefined(); + expect(map.get(2)).toBeUndefined(); + expect(map.get(3)).toBe(1); + expect(map.get(4)).toBe(2); + }); + + it('skips events missing id or thread_id', () => { + const events: AppMapEventLike[] = [ + { id: 1, thread_id: 1, event: 'call' }, + { thread_id: 1, event: 'call' }, // missing id + { id: 3, event: 'call' }, // missing thread_id + { id: 4, thread_id: 1, event: 'call' }, // child of 1 (the malformed events were skipped) + ]; + const map = buildParentEventMap(events); + expect(map.get(4)).toBe(1); + }); + + it('tolerates extra returns past an empty stack', () => { + const events: AppMapEventLike[] = [ + { id: 1, thread_id: 1, event: 'return' }, // no matching call + { id: 2, thread_id: 1, event: 'call' }, // root after a stray return + { id: 3, thread_id: 1, event: 'call' }, // child of 2 + ]; + const map = buildParentEventMap(events); + expect(map.get(2)).toBeUndefined(); + expect(map.get(3)).toBe(2); + }); + + it('ignores events with neither call nor return', () => { + const events: AppMapEventLike[] = [ + { id: 1, thread_id: 1, event: 'call' }, + { id: 2, thread_id: 1, event: 'log' }, // synthetic; not a call/return + { id: 3, thread_id: 1, event: 'call' }, // still child of 1 + ]; + const map = buildParentEventMap(events); + expect(map.get(3)).toBe(1); + }); +}); diff --git 
a/packages/cli/tests/unit/cmds/query/db/import/parseLocation.spec.ts b/packages/cli/tests/unit/cmds/query/db/import/parseLocation.spec.ts new file mode 100644 index 0000000000..e2aa48242a --- /dev/null +++ b/packages/cli/tests/unit/cmds/query/db/import/parseLocation.spec.ts @@ -0,0 +1,35 @@ +import { parseLocation } from '../../../../../../src/cmds/query/db/import/parseLocation'; + +describe('parseLocation', () => { + it('parses a simple relative path with a line number', () => { + expect(parseLocation('app/views.py:10')).toEqual(['app/views.py', 10]); + }); + + it('parses an absolute path with a line number', () => { + expect(parseLocation('/abs/path/file.rb:511')).toEqual(['/abs/path/file.rb', 511]); + }); + + it('parses a negative line number (synthetic locations)', () => { + expect(parseLocation('File.java:-1')).toEqual(['File.java', -1]); + }); + + it('returns nulls for locations without a colon', () => { + expect(parseLocation('OpenSSL::Cipher#decrypt')).toEqual([null, null]); + }); + + it('returns nulls for empty input', () => { + expect(parseLocation('')).toEqual([null, null]); + expect(parseLocation(undefined)).toEqual([null, null]); + expect(parseLocation(null)).toEqual([null, null]); + }); + + it('returns nulls when the suffix after the last colon is not an integer', () => { + expect(parseLocation('file.rb:abc')).toEqual([null, null]); + expect(parseLocation('file.rb:')).toEqual([null, null]); + expect(parseLocation('file.rb:1a')).toEqual([null, null]); + }); + + it('uses the rightmost colon so namespaced paths are preserved', () => { + expect(parseLocation('OpenSSL::Cipher:42')).toEqual(['OpenSSL::Cipher', 42]); + }); +}); diff --git a/packages/cli/tests/unit/cmds/query/db/import/returnEventMap.spec.ts b/packages/cli/tests/unit/cmds/query/db/import/returnEventMap.spec.ts new file mode 100644 index 0000000000..c8124c1f3d --- /dev/null +++ b/packages/cli/tests/unit/cmds/query/db/import/returnEventMap.spec.ts @@ -0,0 +1,26 @@ +import { buildReturnEventMap 
} from '../../../../../../src/cmds/query/db/import/returnEventMap'; + +describe('buildReturnEventMap', () => { + it('maps call event id → return event via parent_id', () => { + const events = [ + { id: 1, event: 'call' }, + { id: 2, event: 'return', parent_id: 1, elapsed: 0.5 }, + { id: 3, event: 'call' }, + { id: 4, event: 'return', parent_id: 3, http_server_response: { status_code: 200 } }, + ]; + const map = buildReturnEventMap(events); + expect(map.get(1)?.elapsed).toBe(0.5); + expect(map.get(3)?.http_server_response).toEqual({ status_code: 200 }); + expect(map.size).toBe(2); + }); + + it('ignores returns without parent_id', () => { + const events = [{ id: 1, event: 'return' /* no parent_id */ }]; + expect(buildReturnEventMap(events).size).toBe(0); + }); + + it('ignores non-return events', () => { + const events = [{ id: 1, event: 'call', parent_id: 999 }]; + expect(buildReturnEventMap(events).size).toBe(0); + }); +}); diff --git a/packages/cli/tests/unit/cmds/query/db/import/sqlQueries.spec.ts b/packages/cli/tests/unit/cmds/query/db/import/sqlQueries.spec.ts new file mode 100644 index 0000000000..f93ac8fdf7 --- /dev/null +++ b/packages/cli/tests/unit/cmds/query/db/import/sqlQueries.spec.ts @@ -0,0 +1,84 @@ +import { importSqlQueries } from '../../../../../../src/cmds/query/db/import/sqlQueries'; +import { buildReturnEventMap } from '../../../../../../src/cmds/query/db/import/returnEventMap'; +import { buildParentEventMap } from '../../../../../../src/cmds/query/db/import/parentEventMap'; +import { freshDb } from './helpers'; + +function seedAppmap(db: any): number { + const info = db + .prepare("INSERT INTO appmaps (name, source_path) VALUES ('rec', '/tmp/rec.appmap.json')") + .run(); + return Number(info.lastInsertRowid); +} + +describe('importSqlQueries', () => { + it('inserts one row per sql_query event with caller from the event', () => { + const db = freshDb(); + try { + const appmapId = seedAppmap(db); + const events = [ + { + id: 1, + event: 'call', + 
thread_id: 1, + defined_class: 'OrdersController', + method_id: 'create', + sql_query: { sql: 'INSERT INTO orders (...)', database_type: 'postgres', server_version: '14.5' }, + }, + { id: 2, event: 'return', parent_id: 1, elapsed: 0.014 }, + ]; + importSqlQueries( + db, + appmapId, + events, + buildReturnEventMap(events), + buildParentEventMap(events) + ); + const row = db.prepare('SELECT * FROM sql_queries').get() as any; + expect(row.sql_text).toBe('INSERT INTO orders (...)'); + expect(row.database_type).toBe('postgres'); + expect(row.server_version).toBe('14.5'); + expect(row.caller_class).toBe('OrdersController'); + expect(row.caller_method).toBe('create'); + expect(row.elapsed_ms).toBeCloseTo(14); + } finally { + db.close(); + } + }); + + it('derives caller from the parent call event when the sql event lacks defined_class', () => { + const db = freshDb(); + try { + const appmapId = seedAppmap(db); + const events = [ + { + id: 1, + event: 'call', + thread_id: 1, + defined_class: 'OrdersController', + method_id: 'create', + }, + { + id: 2, + event: 'call', + thread_id: 1, + // no defined_class on the SQL event itself + sql_query: { sql: 'SELECT 1' }, + }, + { id: 3, event: 'return', parent_id: 2 }, + { id: 4, event: 'return', parent_id: 1 }, + ]; + importSqlQueries( + db, + appmapId, + events, + buildReturnEventMap(events), + buildParentEventMap(events) + ); + const row = db.prepare('SELECT caller_class, caller_method FROM sql_queries').get() as any; + expect(row.caller_class).toBe('OrdersController'); + expect(row.caller_method).toBe('create'); + } finally { + db.close(); + } + }); +}); From 94b72ee387876360c2dcd5d4699ebe537ebffe7b Mon Sep 17 00:00:00 2001 From: kgilpin Date: Fri, 1 May 2026 16:51:20 -0400 Subject: [PATCH 03/30] feat(cli): add query command with endpoints, find, and tree verbs Adds a `query` subcommand that reads the query DB built during fingerprinting and exposes endpoints, find, and tree verbs, with shared formatting/filter helpers and unit 
tests. Extends the Python validation script to compare the TS query layer against `server.services.queries.get_endpoints` on the same DB. Co-Authored-By: Claude Opus 4.7 (1M context) --- .../cli/scripts/validate-against-python.ts | 81 ++++ packages/cli/src/cli.ts | 2 + packages/cli/src/cmds/query/lib/format.ts | 37 ++ .../cli/src/cmds/query/lib/openReadOnly.ts | 18 + .../cli/src/cmds/query/lib/parseFilter.ts | 76 ++++ packages/cli/src/cmds/query/lib/treeRender.ts | 129 ++++++ .../cli/src/cmds/query/queries/endpoints.ts | 140 ++++++ packages/cli/src/cmds/query/queries/find.ts | 424 ++++++++++++++++++ packages/cli/src/cmds/query/queries/tree.ts | 305 +++++++++++++ packages/cli/src/cmds/query/query.ts | 20 + .../cli/src/cmds/query/verbs/endpoints.ts | 76 ++++ packages/cli/src/cmds/query/verbs/find.ts | 160 +++++++ packages/cli/src/cmds/query/verbs/tree.ts | 70 +++ .../unit/cmds/query/lib/parseFilter.spec.ts | 77 ++++ .../unit/cmds/query/queries/endpoints.spec.ts | 227 ++++++++++ .../unit/cmds/query/queries/find.spec.ts | 354 +++++++++++++++ .../unit/cmds/query/queries/tree.spec.ts | 196 ++++++++ 17 files changed, 2392 insertions(+) create mode 100644 packages/cli/src/cmds/query/lib/format.ts create mode 100644 packages/cli/src/cmds/query/lib/openReadOnly.ts create mode 100644 packages/cli/src/cmds/query/lib/parseFilter.ts create mode 100644 packages/cli/src/cmds/query/lib/treeRender.ts create mode 100644 packages/cli/src/cmds/query/queries/endpoints.ts create mode 100644 packages/cli/src/cmds/query/queries/find.ts create mode 100644 packages/cli/src/cmds/query/queries/tree.ts create mode 100644 packages/cli/src/cmds/query/query.ts create mode 100644 packages/cli/src/cmds/query/verbs/endpoints.ts create mode 100644 packages/cli/src/cmds/query/verbs/find.ts create mode 100644 packages/cli/src/cmds/query/verbs/tree.ts create mode 100644 packages/cli/tests/unit/cmds/query/lib/parseFilter.spec.ts create mode 100644 
packages/cli/tests/unit/cmds/query/queries/endpoints.spec.ts create mode 100644 packages/cli/tests/unit/cmds/query/queries/find.spec.ts create mode 100644 packages/cli/tests/unit/cmds/query/queries/tree.spec.ts diff --git a/packages/cli/scripts/validate-against-python.ts b/packages/cli/scripts/validate-against-python.ts index 7f3c53a162..51f8530a37 100644 --- a/packages/cli/scripts/validate-against-python.ts +++ b/packages/cli/scripts/validate-against-python.ts @@ -24,6 +24,7 @@ import sqlite3 from 'better-sqlite3'; import { findFiles } from '../src/utils'; import { openQueryDb } from '../src/cmds/query/db'; import { importAppmap } from '../src/cmds/query/db/import/importAppmap'; +import { endpoints, EndpointRow } from '../src/cmds/query/queries/endpoints'; const APPMAP_APM_DIR = process.env.APPMAP_APM_DIR ?? join(homedir(), 'source', 'appland', 'appmap-apm'); @@ -168,6 +169,81 @@ interface Mismatch { details?: { index: number; py: unknown; ts: unknown }; } +// Python equivalent for endpoints uses get_endpoints (count, avg, max, min, +// error_count). We emit it sorted on stable keys so output is deterministic. +const PY_ENDPOINTS_SCRIPT = ` +import json, os, sys +from server.services.queries import get_endpoints +rows = get_endpoints(limit=10000) +rows.sort(key=lambda r: (r['method'], r['endpoint'])) +print(json.dumps(rows, sort_keys=True)) +`; + +function pythonEndpoints(dbPath: string): unknown[] { + const out = execFileSync(PYTHON, ['-c', PY_ENDPOINTS_SCRIPT], { + cwd: APPMAP_APM_DIR, + env: { ...process.env, APM_DB_PATH: dbPath }, + encoding: 'utf8', + }); + return JSON.parse(out) as unknown[]; +} + +// Round and project a TS EndpointRow to the same shape as Python's +// get_endpoints output, so we can diff them directly. min/max aren't tracked +// by V3's endpoints() — drop them from comparison; they don't affect what +// the verb shows. 
+function tsEndpointsLikePython(rows: readonly EndpointRow[]) { + return [...rows] + .map((r) => ({ + method: r.method, + endpoint: r.route, + request_count: r.count, + avg_elapsed_ms: r.avg_ms == null ? null : Math.round(r.avg_ms * 100) / 100, + // err_pct (TS) and error_count (Python) are different shapes; reproject: + error_count: Math.round((r.err_pct / 100) * r.count), + })) + .sort((a, b) => (a.method + a.endpoint).localeCompare(b.method + b.endpoint)); +} + +function diffQueries(pyDb: string, tsDb: string): void { + console.log('\n--- query layer ---'); + + // 1) Python get_endpoints on both DBs — proves query layer is portable. + const pyOnPy = pythonEndpoints(pyDb); + const pyOnTs = pythonEndpoints(tsDb); + const portable = + JSON.stringify(pyOnPy) === JSON.stringify(pyOnTs) ? 'OK' : 'MISMATCH'; + console.log(`python.get_endpoints(py.db) vs python.get_endpoints(ts.db): ${portable}`); + + // 2) TS endpoints() on TS DB, projected to Python's shape, against Python's + // get_endpoints on the same DB. Validates that the V3 verb produces results + // consistent with the Python query layer for fields they share. + const db = sqlite3(tsDb, { readonly: true }); + const tsOut = endpoints(db); + db.close(); + const tsProjected = tsEndpointsLikePython(tsOut); + const pyOnTsAsArray = pyOnTs as Array>; + // Drop fields Python returns that we don't compare (max/min). + const pyTrimmed = pyOnTsAsArray + .map((r) => ({ + method: r.method, + endpoint: r.endpoint, + request_count: r.request_count, + avg_elapsed_ms: r.avg_elapsed_ms, + error_count: r.error_count, + })) + .sort((a, b) => + String(a.method + a.endpoint).localeCompare(String(b.method + b.endpoint)) + ); + const verbMatch = + JSON.stringify(tsProjected) === JSON.stringify(pyTrimmed) ? 
'OK' : 'MISMATCH'; + console.log(`ts endpoints() vs python.get_endpoints (shared fields): ${verbMatch}`); + if (verbMatch !== 'OK') { + console.log(` python: ${JSON.stringify(pyTrimmed[0])}`); + console.log(` ts: ${JSON.stringify(tsProjected[0])}`); + } +} + function diff(py: Snapshot, ts: Snapshot): Mismatch[] { const issues: Mismatch[] = []; for (const t of TABLES) { @@ -237,6 +313,11 @@ async function main(): Promise { console.log('\n--- diff ---'); if (issues.length === 0 && countsOk) { console.log('all tables match'); + } else { + // fall through into the report below + } + if (issues.length === 0 && countsOk) { + diffQueries(pyDb, tsDb); } else { for (const issue of issues) { console.log(`\n${issue.table}: ${issue.reason}`); diff --git a/packages/cli/src/cli.ts b/packages/cli/src/cli.ts index 630ad5edb1..fe4a3b69fe 100755 --- a/packages/cli/src/cli.ts +++ b/packages/cli/src/cli.ts @@ -40,6 +40,7 @@ import * as RpcCommand from './cmds/index/rpc'; import * as RpcClientCommand from './cmds/rpcClient'; import * as NavieCommand from './cmds/navie'; import * as ApplyCommand from './cmds/apply'; +import * as QueryCommand from './cmds/query/query'; import * as RunTestCommand from './cmds/runTest'; import TelemetryTestCommand from './cmds/testTelemetry'; import { default as sqlErrorLog } from './lib/sqlErrorLog'; @@ -156,6 +157,7 @@ yargs(process.argv.slice(2)) .command(RpcClientCommand) .command(NavieCommand) .command(ApplyCommand) + .command(QueryCommand) .command(RunTestCommand) .command(TelemetryTestCommand) .option('verbose', { diff --git a/packages/cli/src/cmds/query/lib/format.ts b/packages/cli/src/cmds/query/lib/format.ts new file mode 100644 index 0000000000..d8effde2ba --- /dev/null +++ b/packages/cli/src/cmds/query/lib/format.ts @@ -0,0 +1,37 @@ +// Render a row of cells as a tab-aligned line. Per V3, tabular output never +// wraps — long fqids extend past terminal width rather than break (so grep +// over output stays usable). 
Pipe through `less -S` or use --json instead. +// +// `widths` is a per-column minimum width; cells longer than the minimum +// extend the column. +export function formatTable( + headers: readonly string[], + rows: readonly (readonly string[])[] +): string { + const widths = headers.map((h, i) => + Math.max(h.length, ...rows.map((r) => (r[i] ?? '').length)) + ); + const lines: string[] = []; + lines.push(headers.map((h, i) => h.padEnd(widths[i])).join(' ')); + for (const row of rows) { + lines.push(row.map((c, i) => (c ?? '').padEnd(widths[i])).join(' ')); + } + return lines.join('\n'); +} + +// Format a duration in ms as "12ms" / "480ms" / "3.4s" / "1.2s". +export function formatMs(ms: number | null | undefined): string { + if (ms == null) return '—'; + if (ms < 1000) return `${Math.round(ms)}ms`; + return `${(ms / 1000).toFixed(1)}s`; +} + +// Format a non-negative integer with thousands separators ("1,891"). +export function formatCount(n: number): string { + return n.toLocaleString('en-US'); +} + +// Format a percentage like "4.6%". +export function formatPct(pct: number): string { + return `${pct.toFixed(1)}%`; +} diff --git a/packages/cli/src/cmds/query/lib/openReadOnly.ts b/packages/cli/src/cmds/query/lib/openReadOnly.ts new file mode 100644 index 0000000000..38543bd635 --- /dev/null +++ b/packages/cli/src/cmds/query/lib/openReadOnly.ts @@ -0,0 +1,18 @@ +import { existsSync } from 'fs'; + +import sqlite3 from 'better-sqlite3'; + +import { queryDbPath } from '../db/path'; + +// Open the query DB read-only for the given appmap directory. +// Errors if the DB doesn't exist, prompting the user to run `appmap index`. +// `dbPath` overrides path derivation (used by tests and the --db flag). +export function openReadOnly(appmapDir: string, dbPath?: string): sqlite3.Database { + const path = dbPath ?? 
queryDbPath(appmapDir); + if (!existsSync(path)) { + throw new Error( + `query DB not found at ${path}\nRun \`appmap index\` first to build it.` + ); + } + return sqlite3(path, { readonly: true }); +} diff --git a/packages/cli/src/cmds/query/lib/parseFilter.ts b/packages/cli/src/cmds/query/lib/parseFilter.ts new file mode 100644 index 0000000000..00ea5eb5d8 --- /dev/null +++ b/packages/cli/src/cmds/query/lib/parseFilter.ts @@ -0,0 +1,76 @@ +// Shared parsers for CLI filter flags. +// +// These convert user-typed strings into canonical structured forms that +// query functions can apply uniformly. Verbs share these so a flag like +// --since "7d ago" means the same thing everywhere. + +export type Comparator = '=' | '>=' | '<=' | '>' | '<'; + +export interface NumberFilter { + op: Comparator; + value: number; +} + +// Parse a status filter: "500", "=500", ">=500", ">500", "<400", "<=399". +// Whitespace around the operator is tolerated. +export function parseStatus(input: string): NumberFilter { + const m = input.trim().match(/^(>=|<=|>|<|=)?\s*(\d+)$/); + if (!m) throw new Error(`invalid --status filter: ${input}`); + const op = (m[1] ?? '=') as Comparator; + return { op, value: Number.parseInt(m[2], 10) }; +} + +// Parse a duration filter: ">1s", ">=500ms", "<2m". The numeric form is +// returned in milliseconds for direct comparison against elapsed_ms. +export function parseDuration(input: string): NumberFilter { + const m = input.trim().match(/^(>=|<=|>|<|=)?\s*(\d+(?:\.\d+)?)\s*(ms|s|m|h)?$/); + if (!m) throw new Error(`invalid --duration filter: ${input}`); + const op = (m[1] ?? '=') as Comparator; + const n = Number.parseFloat(m[2]); + const unit = m[3] ?? 'ms'; + const ms = + unit === 'ms' + ? n + : unit === 's' + ? n * 1000 + : unit === 'm' + ? n * 60_000 + : n * 3_600_000; + return { op, value: ms }; +} + +// Parse a time spec: ISO date/timestamp ("2026-04-29", "2026-04-29T14:21:08Z") +// or a relative offset ("7d ago", "30m ago", "2h ago", "45s ago"). 
Returns +// an ISO 8601 string suitable for direct text comparison against the +// timestamp column (which is also ISO 8601). +export function parseTime(input: string, now: Date = new Date()): string { + const trimmed = input.trim(); + + const rel = trimmed.match(/^(\d+)\s*([smhd])\s+ago$/); + if (rel) { + const n = Number.parseInt(rel[1], 10); + const unit = rel[2]; + const ms = + unit === 's' + ? n * 1_000 + : unit === 'm' + ? n * 60_000 + : unit === 'h' + ? n * 3_600_000 + : n * 86_400_000; + return new Date(now.getTime() - ms).toISOString(); + } + + const ms = Date.parse(trimmed); + if (Number.isNaN(ms)) throw new Error(`invalid time filter: ${input}`); + return new Date(ms).toISOString(); +} + +// Apply a NumberFilter as a WHERE-clause fragment. Returns the SQL fragment +// (with a `?` placeholder) and the value to bind. Throws on unknown op. +export function numberFilterSql( + column: string, + filter: NumberFilter +): { sql: string; value: number } { + return { sql: `${column} ${filter.op} ?`, value: filter.value }; +} diff --git a/packages/cli/src/cmds/query/lib/treeRender.ts b/packages/cli/src/cmds/query/lib/treeRender.ts new file mode 100644 index 0000000000..c087e1661d --- /dev/null +++ b/packages/cli/src/cmds/query/lib/treeRender.ts @@ -0,0 +1,129 @@ +import { + FunctionNode, + HttpClientNode, + HttpServerNode, + SqlNode, + TreeNode, + TreeSummary, +} from '../queries/tree'; +import { formatCount, formatMs, formatTable } from './format'; + +const INDENT = ' '; + +// Render the full tree. Each event is one line; depth maps to indentation. +// Format mirrors V3: +// HTTP→ POST /orders → HTTP 500 [520ms] +// CALL app/.../OrdersController#create [519ms] +// SQL INSERT INTO orders (...) 
[14ms] +// EXC IntegrityError: duplicate key +// HTTP← GET https://api.example/v1 → 200 [40ms] +export function renderTree(nodes: readonly TreeNode[]): string { + return nodes.map(renderTreeLine).join('\n'); +} + +function renderTreeLine(node: TreeNode): string { + const indent = INDENT.repeat(node.depth); + switch (node.kind) { + case 'http_server': + return `${indent}HTTP→ ${renderHttpServer(node)}`; + case 'http_client': + return `${indent}HTTP← ${renderHttpClient(node)}`; + case 'function': + return `${indent}CALL ${renderFunction(node)}`; + case 'sql': + return `${indent}SQL ${renderSql(node)}`; + case 'exception': + return `${indent}EXC ${node.exception_class}${ + node.message ? `: ${node.message}` : '' + }`; + } +} + +function renderHttpServer(n: HttpServerNode): string { + return `${n.method} ${n.route} → HTTP ${n.status_code} ${bracket(n.elapsed_ms)}`.trim(); +} + +function renderHttpClient(n: HttpClientNode): string { + const status = n.status_code != null ? ` → ${n.status_code}` : ''; + return `${n.method} ${n.url}${status} ${bracket(n.elapsed_ms)}`.trim(); +} + +function renderFunction(n: FunctionNode): string { + const id = n.fqid ?? `${n.defined_class}${n.is_static ? '.' : '#'}${n.method_id}`; + const ret = n.return_value != null ? ` → ${n.return_value}` : ''; + return `${id} ${bracket(n.elapsed_ms)}${ret}`.trim(); +} + +function renderSql(n: SqlNode): string { + return `${truncate(n.sql_text, 120)} ${bracket(n.elapsed_ms)}`.trim(); +} + +function bracket(ms: number | null): string { + return ms == null ? '' : `[${formatMs(ms)}]`; +} + +function truncate(s: string, n: number): string { + return s.length <= n ? s : s.slice(0, n - 1) + '…'; +} + +// Flat rendering — used by --filter=sql and --filter=http (no indentation, +// just the matching events in order). 
+export function renderFlat(nodes: readonly TreeNode[]): string { + return nodes + .map((n) => { + switch (n.kind) { + case 'http_server': + return `HTTP→ ${renderHttpServer(n)}`; + case 'http_client': + return `HTTP← ${renderHttpClient(n)}`; + case 'sql': + return `SQL ${renderSql(n)}`; + case 'function': + return `CALL ${renderFunction(n)}`; + case 'exception': + return `EXC ${n.exception_class}${n.message ? `: ${n.message}` : ''}`; + } + }) + .join('\n'); +} + +// Summary format: per V3, a one-screen overview without the tree. +// ENTRY POST /orders → 500 [520ms] +// SQL 3 queries, 19ms total +// EXCEPTION IntegrityError +// LABELS log×2, dao×3, security.idempotency×1 +export function renderSummary(s: TreeSummary): string { + const rows: [string, string][] = []; + + if (s.entry) { + rows.push([ + 'ENTRY', + `${s.entry.method} ${s.entry.route} → ${s.entry.status_code} ${bracket(s.entry.elapsed_ms)}`, + ]); + } + + if (s.sql.count > 0) { + rows.push([ + 'SQL', + `${formatCount(s.sql.count)} quer${s.sql.count === 1 ? 'y' : 'ies'}, ${formatMs(s.sql.total_ms)} total`, + ]); + } + + if (s.http_client.count > 0) { + rows.push([ + 'HTTP→OUT', + `${formatCount(s.http_client.count)} request${s.http_client.count === 1 ? '' : 's'}, ${formatMs(s.http_client.total_ms)} total`, + ]); + } + + for (const e of s.exceptions) { + rows.push(['EXCEPTION', e.exception_class + (e.message ? 
`: ${e.message}` : '')]); + } + + if (s.labels.length > 0) { + const text = s.labels.map((l) => `${l.label}×${l.count}`).join(', '); + rows.push(['LABELS', text]); + } + + return formatTable(['', ''], rows); +} diff --git a/packages/cli/src/cmds/query/queries/endpoints.ts b/packages/cli/src/cmds/query/queries/endpoints.ts new file mode 100644 index 0000000000..6959f7e6a7 --- /dev/null +++ b/packages/cli/src/cmds/query/queries/endpoints.ts @@ -0,0 +1,140 @@ +import sqlite3 from 'better-sqlite3'; + +import type { NumberFilter } from '../lib/parseFilter'; + +export interface EndpointRow { + method: string; + route: string; + count: number; + avg_ms: number | null; + p95_ms: number | null; + err_pct: number; +} + +export type EndpointSort = 'count' | 'avg' | 'p95' | 'err'; + +export interface EndpointsFilter { + // ISO timestamp (use parseTime to build). + since?: string; + until?: string; + branch?: string; + // --status N (or comparator). Acts as a HAVING-style filter on routes: + // a route is shown iff at least one of its requests matches. Counts / + // averages / p95 / err_pct remain over all of that route's requests. + status?: NumberFilter; + sort?: EndpointSort; + limit?: number; +} + +// err_pct is fixed at "% of requests with status >= 500" (server errors), +// independent of any --status filter. +const ERR_THRESHOLD = 500; + +interface RawRow { + method: string; + route: string; + elapsed_ms: number | null; + status_code: number; +} + +export function endpoints( + db: sqlite3.Database, + filter: EndpointsFilter = {} +): EndpointRow[] { + const where: string[] = []; + const params: (string | number)[] = []; + + if (filter.branch) { + where.push('a.git_branch = ?'); + params.push(filter.branch); + } + if (filter.since) { + where.push('hr.timestamp >= ?'); + params.push(filter.since); + } + if (filter.until) { + where.push('hr.timestamp <= ?'); + params.push(filter.until); + } + + const whereSql = where.length > 0 ? 
`WHERE ${where.join(' AND ')}` : ''; + + const rows = db + .prepare( + `SELECT + hr.method AS method, + COALESCE(hr.normalized_path, hr.path) AS route, + hr.elapsed_ms AS elapsed_ms, + hr.status_code AS status_code + FROM http_requests hr + JOIN appmaps a ON a.id = hr.appmap_id + ${whereSql}` + ) + .all(...params) as RawRow[]; + + interface Group { + method: string; + route: string; + elapsed: number[]; + err: number; + matched: number; + total: number; + } + const groups = new Map(); + const matchPredicate = (s: number): boolean => { + if (!filter.status) return true; + const { op, value } = filter.status; + return ( + (op === '=' && s === value) || + (op === '>=' && s >= value) || + (op === '<=' && s <= value) || + (op === '>' && s > value) || + (op === '<' && s < value) + ); + }; + + for (const r of rows) { + const key = `${r.method}\t${r.route}`; + let g = groups.get(key); + if (!g) { + g = { method: r.method, route: r.route, elapsed: [], err: 0, matched: 0, total: 0 }; + groups.set(key, g); + } + g.total += 1; + if (typeof r.elapsed_ms === 'number') g.elapsed.push(r.elapsed_ms); + if (r.status_code >= ERR_THRESHOLD) g.err += 1; + if (matchPredicate(r.status_code)) g.matched += 1; + } + + const result: EndpointRow[] = []; + for (const g of groups.values()) { + if (filter.status && g.matched === 0) continue; + const sorted = [...g.elapsed].sort((a, b) => a - b); + result.push({ + method: g.method, + route: g.route, + count: g.total, + avg_ms: sorted.length === 0 ? null : sorted.reduce((s, v) => s + v, 0) / sorted.length, + p95_ms: percentile(sorted, 0.95), + err_pct: g.total > 0 ? (g.err / g.total) * 100 : 0, + }); + } + + const sortKey: EndpointSort = filter.sort ?? 'count'; + result.sort(comparators[sortKey]); + + return filter.limit !== undefined ? 
result.slice(0, filter.limit) : result; +} + +function percentile(sorted: readonly number[], p: number): number | null { + if (sorted.length === 0) return null; + const idx = Math.max(0, Math.ceil(sorted.length * p) - 1); + return sorted[idx]; +} + +const comparators: Record number> = { + count: (a, b) => b.count - a.count, + avg: (a, b) => (b.avg_ms ?? 0) - (a.avg_ms ?? 0), + p95: (a, b) => (b.p95_ms ?? 0) - (a.p95_ms ?? 0), + err: (a, b) => b.err_pct - a.err_pct, +}; diff --git a/packages/cli/src/cmds/query/queries/find.ts b/packages/cli/src/cmds/query/queries/find.ts new file mode 100644 index 0000000000..eebe7e1a1d --- /dev/null +++ b/packages/cli/src/cmds/query/queries/find.ts @@ -0,0 +1,424 @@ +import sqlite3 from 'better-sqlite3'; + +import type { NumberFilter } from '../lib/parseFilter'; + +export type FindType = 'appmaps' | 'requests' | 'queries' | 'calls' | 'exceptions'; + +export interface FindFilter { + route?: string; // "POST /orders" or "/orders" + className?: string; // --class (TS reserved word workaround) + method?: string; // --method (method_id, not HTTP method) + label?: string; // --label + branch?: string; + commit?: string; + status?: NumberFilter; // --status N / >=N + duration?: NumberFilter; // --duration ">1s" → ms + since?: string; + until?: string; + appmap?: string; // appmap name (or basename of source_path) + table?: string; // SQL table name (find queries) + exception?: string; // exception class (find exceptions) + limit?: number; + offset?: number; +} + +export interface FindAppmapRow { + appmap_name: string; + route: string | null; + status_code: number | null; + elapsed_ms: number | null; + sql_count: number; + branch: string | null; + timestamp: string | null; +} + +export interface FindRequestRow { + appmap_name: string; + event_id: number; + method: string; + route: string; + status_code: number; + elapsed_ms: number | null; + branch: string | null; +} + +export interface FindQueryRow { + appmap_name: string; + event_id: 
number; + elapsed_ms: number | null; + caller_class: string | null; + caller_method: string | null; + sql_text: string; +} + +export interface FindCallRow { + appmap_name: string; + event_id: number; + fqid: string | null; + defined_class: string; + method_id: string; + elapsed_ms: number | null; + parameters_json: string | null; + return_value: string | null; +} + +export interface FindExceptionRow { + appmap_name: string; + event_id: number; + exception_class: string; + message: string | null; + path: string | null; + lineno: number | null; +} + +// --- internal helpers --- + +interface RouteSpec { + method?: string; + path: string; +} + +function parseRoute(s: string): RouteSpec { + const m = s.match(/^(GET|POST|PUT|PATCH|DELETE|HEAD|OPTIONS)\s+(.+)$/); + if (m) return { method: m[1], path: m[2] }; + return { path: s }; +} + +interface Clauses { + where: string[]; + params: (string | number)[]; +} + +// Recording-level filters that go on the appmaps row directly. +function appmapWhere(filter: FindFilter, alias: string): Clauses { + const where: string[] = []; + const params: (string | number)[] = []; + if (filter.branch) { + where.push(`${alias}.git_branch = ?`); + params.push(filter.branch); + } + if (filter.commit) { + where.push(`${alias}.git_commit = ?`); + params.push(filter.commit); + } + if (filter.since) { + where.push(`${alias}.timestamp >= ?`); + params.push(filter.since); + } + if (filter.until) { + where.push(`${alias}.timestamp <= ?`); + params.push(filter.until); + } + if (filter.appmap) { + where.push(`(${alias}.name = ? OR ${alias}.source_path LIKE ?)`); + params.push(filter.appmap, `%/${filter.appmap}.appmap.json`); + } + return { where, params }; +} + +// HTTP-level filters that scope to "the recording must contain ≥1 matching +// request." Used as a subquery for non-request finds. 
+function httpScopeClauses(filter: FindFilter): Clauses { + const where: string[] = []; + const params: (string | number)[] = []; + if (filter.route) { + const route = parseRoute(filter.route); + where.push(`COALESCE(h.normalized_path, h.path) = ?`); + params.push(route.path); + if (route.method) { + where.push(`h.method = ?`); + params.push(route.method); + } + } + if (filter.status) { + where.push(`h.status_code ${filter.status.op} ?`); + params.push(filter.status.value); + } + return { where, params }; +} + +// Build ".appmap_id IN (SELECT a.id ...)" for non-appmap finds. +// Returns null if no recording-level filtering is needed. +function appmapIdScope( + filter: FindFilter, + rowAlias: string +): { sql: string; params: (string | number)[] } | null { + const a = appmapWhere(filter, 'a'); + const h = httpScopeClauses(filter); + if (a.where.length === 0 && h.where.length === 0) return null; + + if (h.where.length > 0) { + const all = [...a.where, ...h.where].join(' AND '); + return { + sql: `${rowAlias}.appmap_id IN ( + SELECT DISTINCT a.id FROM appmaps a + JOIN http_requests h ON h.appmap_id = a.id + WHERE ${all} + )`, + params: [...a.params, ...h.params], + }; + } + return { + sql: `${rowAlias}.appmap_id IN ( + SELECT a.id FROM appmaps a WHERE ${a.where.join(' AND ')} + )`, + params: a.params, + }; +} + +function durationClause(filter: FindFilter, column: string): Clauses { + const where: string[] = []; + const params: (string | number)[] = []; + if (filter.duration) { + where.push(`${column} ${filter.duration.op} ?`); + params.push(filter.duration.value); + } + return { where, params }; +} + +function appendLimitOffset(sql: string, filter: FindFilter, params: (string | number)[]): string { + let result = sql; + if (filter.limit !== undefined) { + result += ' LIMIT ?'; + params.push(filter.limit); + if (filter.offset !== undefined) { + result += ' OFFSET ?'; + params.push(filter.offset); + } + } else if (filter.offset !== undefined) { + // OFFSET without 
LIMIT: SQLite requires a LIMIT; use -1 (unbounded). + result += ' LIMIT -1 OFFSET ?'; + params.push(filter.offset); + } + return result; +} + +// --- per-type queries --- + +export function findAppmaps(db: sqlite3.Database, filter: FindFilter): FindAppmapRow[] { + const a = appmapWhere(filter, 'a'); + const h = httpScopeClauses(filter); + + let sql: string; + const params: (string | number)[] = []; + + // Show one row per appmap, joining its first matching (or any) HTTP request. + if (h.where.length > 0) { + const where = [...a.where, ...h.where].filter(Boolean).join(' AND '); + sql = ` + SELECT a.name AS appmap_name, + COALESCE(h.normalized_path, h.path) AS route, + h.status_code AS status_code, + h.elapsed_ms AS elapsed_ms, + a.sql_query_count AS sql_count, + a.git_branch AS branch, + a.timestamp AS timestamp + FROM appmaps a + JOIN http_requests h ON h.appmap_id = a.id + ${where ? `WHERE ${where}` : ''} + GROUP BY a.id + ORDER BY a.timestamp, a.name + `; + params.push(...a.params, ...h.params); + } else { + const where = a.where.join(' AND '); + sql = ` + SELECT a.name AS appmap_name, + (SELECT COALESCE(h.normalized_path, h.path) + FROM http_requests h WHERE h.appmap_id = a.id + ORDER BY h.event_id LIMIT 1) AS route, + (SELECT h.status_code FROM http_requests h + WHERE h.appmap_id = a.id ORDER BY h.event_id LIMIT 1) AS status_code, + a.elapsed_ms, + a.sql_query_count AS sql_count, + a.git_branch AS branch, + a.timestamp AS timestamp + FROM appmaps a + ${where ? 
`WHERE ${where}` : ''} + ORDER BY a.timestamp, a.name + `; + params.push(...a.params); + } + + sql = appendLimitOffset(sql, filter, params); + return db.prepare(sql).all(...params) as FindAppmapRow[]; +} + +export function findRequests(db: sqlite3.Database, filter: FindFilter): FindRequestRow[] { + const a = appmapWhere(filter, 'a'); + const where: string[] = [...a.where]; + const params: (string | number)[] = [...a.params]; + + if (filter.route) { + const route = parseRoute(filter.route); + where.push(`COALESCE(h.normalized_path, h.path) = ?`); + params.push(route.path); + if (route.method) { + where.push(`h.method = ?`); + params.push(route.method); + } + } + if (filter.status) { + where.push(`h.status_code ${filter.status.op} ?`); + params.push(filter.status.value); + } + const dur = durationClause(filter, 'h.elapsed_ms'); + where.push(...dur.where); + params.push(...dur.params); + + let sql = ` + SELECT a.name AS appmap_name, + h.event_id AS event_id, + h.method AS method, + COALESCE(h.normalized_path, h.path) AS route, + h.status_code AS status_code, + h.elapsed_ms AS elapsed_ms, + a.git_branch AS branch + FROM http_requests h + JOIN appmaps a ON a.id = h.appmap_id + ${where.length > 0 ? 
`WHERE ${where.join(' AND ')}` : ''} + ORDER BY a.source_path, h.event_id + `; + sql = appendLimitOffset(sql, filter, params); + return db.prepare(sql).all(...params) as FindRequestRow[]; +} + +export function findQueries(db: sqlite3.Database, filter: FindFilter): FindQueryRow[] { + const where: string[] = []; + const params: (string | number)[] = []; + + const scope = appmapIdScope(filter, 'q'); + if (scope) { + where.push(scope.sql); + params.push(...scope.params); + } + + if (filter.table) { + where.push(`q.sql_text LIKE ?`); + params.push(`%${filter.table}%`); + } + if (filter.className) { + where.push(`q.caller_class = ?`); + params.push(filter.className); + } + if (filter.method) { + where.push(`q.caller_method = ?`); + params.push(filter.method); + } + const dur = durationClause(filter, 'q.elapsed_ms'); + where.push(...dur.where); + params.push(...dur.params); + + let sql = ` + SELECT a.name AS appmap_name, + q.event_id AS event_id, + q.elapsed_ms AS elapsed_ms, + q.caller_class AS caller_class, + q.caller_method AS caller_method, + q.sql_text AS sql_text + FROM sql_queries q + JOIN appmaps a ON a.id = q.appmap_id + ${where.length > 0 ? 
`WHERE ${where.join(' AND ')}` : ''} + ORDER BY a.source_path, q.event_id + `; + sql = appendLimitOffset(sql, filter, params); + return db.prepare(sql).all(...params) as FindQueryRow[]; +} + +export function findCalls(db: sqlite3.Database, filter: FindFilter): FindCallRow[] { + const where: string[] = []; + const params: (string | number)[] = []; + + const scope = appmapIdScope(filter, 'fc'); + if (scope) { + where.push(scope.sql); + params.push(...scope.params); + } + + if (filter.className) { + where.push(`fc.defined_class = ?`); + params.push(filter.className); + } + if (filter.method) { + where.push(`fc.method_id = ?`); + params.push(filter.method); + } + if (filter.label) { + where.push( + `fc.code_object_id IN (SELECT l.code_object_id FROM labels l WHERE l.label = ?)` + ); + params.push(filter.label); + } + const dur = durationClause(filter, 'fc.elapsed_ms'); + where.push(...dur.where); + params.push(...dur.params); + + let sql = ` + SELECT a.name AS appmap_name, + fc.event_id AS event_id, + co.fqid AS fqid, + fc.defined_class AS defined_class, + fc.method_id AS method_id, + fc.elapsed_ms AS elapsed_ms, + fc.parameters_json AS parameters_json, + fc.return_value AS return_value + FROM function_calls fc + JOIN appmaps a ON a.id = fc.appmap_id + LEFT JOIN code_objects co ON co.id = fc.code_object_id + ${where.length > 0 ? 
`WHERE ${where.join(' AND ')}` : ''} + ORDER BY a.source_path, fc.event_id + `; + sql = appendLimitOffset(sql, filter, params); + return db.prepare(sql).all(...params) as FindCallRow[]; +} + +export function findExceptions(db: sqlite3.Database, filter: FindFilter): FindExceptionRow[] { + const where: string[] = []; + const params: (string | number)[] = []; + + const scope = appmapIdScope(filter, 'e'); + if (scope) { + where.push(scope.sql); + params.push(...scope.params); + } + + if (filter.exception) { + where.push(`e.exception_class = ?`); + params.push(filter.exception); + } + + let sql = ` + SELECT a.name AS appmap_name, + e.event_id AS event_id, + e.exception_class AS exception_class, + e.message AS message, + e.path AS path, + e.lineno AS lineno + FROM exceptions e + JOIN appmaps a ON a.id = e.appmap_id + ${where.length > 0 ? `WHERE ${where.join(' AND ')}` : ''} + ORDER BY a.source_path, e.event_id, e.exception_class + `; + sql = appendLimitOffset(sql, filter, params); + return db.prepare(sql).all(...params) as FindExceptionRow[]; +} + +// Dispatcher. +export function find( + db: sqlite3.Database, + type: FindType, + filter: FindFilter +): unknown[] { + switch (type) { + case 'appmaps': + return findAppmaps(db, filter); + case 'requests': + return findRequests(db, filter); + case 'queries': + return findQueries(db, filter); + case 'calls': + return findCalls(db, filter); + case 'exceptions': + return findExceptions(db, filter); + } +} diff --git a/packages/cli/src/cmds/query/queries/tree.ts b/packages/cli/src/cmds/query/queries/tree.ts new file mode 100644 index 0000000000..537abbe80e --- /dev/null +++ b/packages/cli/src/cmds/query/queries/tree.ts @@ -0,0 +1,305 @@ +import sqlite3 from 'better-sqlite3'; + +// Discriminated union of tree nodes. Each node corresponds to one row in +// one of the per-event tables; `depth` is computed from parent_event_id +// chains within the same recording. 
+ +interface BaseNode { + event_id: number; + parent_event_id: number | null; + thread_id: number | null; + depth: number; +} + +export interface HttpServerNode extends BaseNode { + kind: 'http_server'; + method: string; + route: string; + status_code: number; + elapsed_ms: number | null; +} + +export interface HttpClientNode extends BaseNode { + kind: 'http_client'; + method: string; + url: string; + status_code: number | null; + elapsed_ms: number | null; +} + +export interface SqlNode extends BaseNode { + kind: 'sql'; + sql_text: string; + database_type: string | null; + elapsed_ms: number | null; +} + +export interface FunctionNode extends BaseNode { + kind: 'function'; + fqid: string | null; + defined_class: string; + method_id: string; + is_static: boolean; + elapsed_ms: number | null; + parameters_json: string | null; + return_value: string | null; +} + +export interface ExceptionNode extends BaseNode { + kind: 'exception'; + exception_class: string; + message: string | null; +} + +export type TreeNode = + | HttpServerNode + | HttpClientNode + | SqlNode + | FunctionNode + | ExceptionNode; + +export interface AppmapInfo { + id: number; + name: string; + source_path: string; +} + +// Resolve a user-supplied appmap reference (name or source-path basename) to +// the row in `appmaps`. Throws on miss or ambiguity (returns candidates in +// the message so the user can disambiguate). +export function resolveAppmap(db: sqlite3.Database, ref: string): AppmapInfo { + const rows = db + .prepare( + `SELECT id, name, source_path FROM appmaps + WHERE name = ? OR source_path LIKE ? 
+ ORDER BY source_path` + ) + .all(ref, `%/${ref}.appmap.json`) as AppmapInfo[]; + if (rows.length === 0) throw new Error(`appmap not found: ${ref}`); + if (rows.length > 1) { + const list = rows.map((r) => ` - ${r.source_path}`).join('\n'); + throw new Error(`appmap "${ref}" is ambiguous; matches:\n${list}`); + } + return rows[0]; +} + +// Build the flat-but-depth-annotated tree for a recording. Events are +// returned in event_id order; consumers can render with indentation. +export function tree(db: sqlite3.Database, appmapRef: string): TreeNode[] { + const am = resolveAppmap(db, appmapRef); + const events: TreeNode[] = []; + + for (const r of db + .prepare( + `SELECT event_id, parent_event_id, thread_id, method, + COALESCE(normalized_path, path) AS route, status_code, elapsed_ms + FROM http_requests WHERE appmap_id = ?` + ) + .all(am.id) as Array<{ + event_id: number; + parent_event_id: number | null; + thread_id: number | null; + method: string; + route: string; + status_code: number; + elapsed_ms: number | null; + }>) { + events.push({ + kind: 'http_server', + event_id: r.event_id, + parent_event_id: r.parent_event_id, + thread_id: r.thread_id, + depth: 0, + method: r.method, + route: r.route, + status_code: r.status_code, + elapsed_ms: r.elapsed_ms, + }); + } + + for (const r of db + .prepare( + `SELECT event_id, parent_event_id, thread_id, method, url, status_code, elapsed_ms + FROM http_client_requests WHERE appmap_id = ?` + ) + .all(am.id) as Array<{ + event_id: number; + parent_event_id: number | null; + thread_id: number | null; + method: string; + url: string; + status_code: number | null; + elapsed_ms: number | null; + }>) { + events.push({ + kind: 'http_client', + event_id: r.event_id, + parent_event_id: r.parent_event_id, + thread_id: r.thread_id, + depth: 0, + method: r.method, + url: r.url, + status_code: r.status_code, + elapsed_ms: r.elapsed_ms, + }); + } + + for (const r of db + .prepare( + `SELECT event_id, parent_event_id, thread_id, 
sql_text, database_type, elapsed_ms + FROM sql_queries WHERE appmap_id = ?` + ) + .all(am.id) as Array<{ + event_id: number; + parent_event_id: number | null; + thread_id: number | null; + sql_text: string; + database_type: string | null; + elapsed_ms: number | null; + }>) { + events.push({ + kind: 'sql', + event_id: r.event_id, + parent_event_id: r.parent_event_id, + thread_id: r.thread_id, + depth: 0, + sql_text: r.sql_text, + database_type: r.database_type, + elapsed_ms: r.elapsed_ms, + }); + } + + for (const r of db + .prepare( + `SELECT fc.event_id, fc.parent_event_id, fc.thread_id, + co.fqid AS fqid, fc.defined_class, fc.method_id, + fc.is_static, fc.elapsed_ms, fc.parameters_json, fc.return_value + FROM function_calls fc + LEFT JOIN code_objects co ON co.id = fc.code_object_id + WHERE fc.appmap_id = ?` + ) + .all(am.id) as Array<{ + event_id: number; + parent_event_id: number | null; + thread_id: number | null; + fqid: string | null; + defined_class: string; + method_id: string; + is_static: number; + elapsed_ms: number | null; + parameters_json: string | null; + return_value: string | null; + }>) { + events.push({ + kind: 'function', + event_id: r.event_id, + parent_event_id: r.parent_event_id, + thread_id: r.thread_id, + depth: 0, + fqid: r.fqid, + defined_class: r.defined_class, + method_id: r.method_id, + is_static: r.is_static === 1, + elapsed_ms: r.elapsed_ms, + parameters_json: r.parameters_json, + return_value: r.return_value, + }); + } + + for (const r of db + .prepare( + `SELECT event_id, parent_event_id, thread_id, exception_class, message + FROM exceptions WHERE appmap_id = ?` + ) + .all(am.id) as Array<{ + event_id: number; + parent_event_id: number | null; + thread_id: number | null; + exception_class: string; + message: string | null; + }>) { + events.push({ + kind: 'exception', + event_id: r.event_id, + parent_event_id: r.parent_event_id, + thread_id: r.thread_id, + depth: 0, + exception_class: r.exception_class, + message: r.message, + }); + 
} + + events.sort((a, b) => a.event_id - b.event_id); + + // Compute depths in event_id order. Parents come before children, so + // each node's depth is parent's depth + 1 (or 0 if no parent / orphan). + const depthByEventId = new Map(); + for (const ev of events) { + let depth = 0; + if (ev.parent_event_id !== null) { + const p = depthByEventId.get(ev.parent_event_id); + if (p !== undefined) depth = p + 1; + } + ev.depth = depth; + depthByEventId.set(ev.event_id, depth); + } + + return events; +} + +export interface TreeSummary { + appmap_name: string; + source_path: string; + entry: { method: string; route: string; status_code: number; elapsed_ms: number | null } | null; + sql: { count: number; total_ms: number }; + http_client: { count: number; total_ms: number }; + exceptions: Array<{ exception_class: string; message: string | null }>; + labels: Array<{ label: string; count: number }>; +} + +export function treeSummary(db: sqlite3.Database, appmapRef: string): TreeSummary { + const am = resolveAppmap(db, appmapRef); + const nodes = tree(db, appmapRef); + + const httpServer = nodes.find((n): n is HttpServerNode => n.kind === 'http_server'); + const sqls = nodes.filter((n): n is SqlNode => n.kind === 'sql'); + const httpClients = nodes.filter((n): n is HttpClientNode => n.kind === 'http_client'); + const excs = nodes.filter((n): n is ExceptionNode => n.kind === 'exception'); + + const labelRows = db + .prepare( + `SELECT l.label, COUNT(*) AS n + FROM function_calls fc + JOIN labels l ON l.code_object_id = fc.code_object_id + WHERE fc.appmap_id = ? + GROUP BY l.label + ORDER BY n DESC, l.label` + ) + .all(am.id) as Array<{ label: string; n: number }>; + + return { + appmap_name: am.name, + source_path: am.source_path, + entry: httpServer + ? 
{ + method: httpServer.method, + route: httpServer.route, + status_code: httpServer.status_code, + elapsed_ms: httpServer.elapsed_ms, + } + : null, + sql: { + count: sqls.length, + total_ms: sqls.reduce((s, q) => s + (q.elapsed_ms ?? 0), 0), + }, + http_client: { + count: httpClients.length, + total_ms: httpClients.reduce((s, c) => s + (c.elapsed_ms ?? 0), 0), + }, + exceptions: excs.map((e) => ({ + exception_class: e.exception_class, + message: e.message, + })), + labels: labelRows.map((r) => ({ label: r.label, count: r.n })), + }; +} diff --git a/packages/cli/src/cmds/query/query.ts b/packages/cli/src/cmds/query/query.ts new file mode 100644 index 0000000000..c8418e19cd --- /dev/null +++ b/packages/cli/src/cmds/query/query.ts @@ -0,0 +1,20 @@ +import yargs from 'yargs'; + +import * as EndpointsVerb from './verbs/endpoints'; +import * as FindVerb from './verbs/find'; +import * as TreeVerb from './verbs/tree'; + +export const command = 'query'; +export const describe = 'Query AppMap recordings (endpoints, find, tree, related, hotspots, compare)'; + +export const builder = (args: yargs.Argv) => + args + .command(EndpointsVerb) + .command(FindVerb) + .command(TreeVerb) + .demandCommand(1, 'specify a query verb') + .strict(); + +export const handler = (): void => { + // Dispatched by subcommand. Yargs will print help if no verb is given. 
+}; diff --git a/packages/cli/src/cmds/query/verbs/endpoints.ts b/packages/cli/src/cmds/query/verbs/endpoints.ts new file mode 100644 index 0000000000..b86d457d30 --- /dev/null +++ b/packages/cli/src/cmds/query/verbs/endpoints.ts @@ -0,0 +1,76 @@ +import yargs from 'yargs'; +import { log } from 'console'; + +import { handleWorkingDirectory } from '../../../lib/handleWorkingDirectory'; +import { locateAppMapDir } from '../../../lib/locateAppMapDir'; +import { verbose } from '../../../utils'; +import { openReadOnly } from '../lib/openReadOnly'; +import { parseStatus, parseTime } from '../lib/parseFilter'; +import { + endpoints, + EndpointSort, + EndpointsFilter, +} from '../queries/endpoints'; +import { formatCount, formatMs, formatPct, formatTable } from '../lib/format'; + +export const command = 'endpoints'; +export const describe = 'Per-route summary table (orient verb)'; + +export const builder = (args: yargs.Argv) => { + return args + .option('directory', { type: 'string', alias: 'd', describe: 'program working directory' }) + .option('appmap-dir', { type: 'string', describe: 'directory of recordings' }) + .option('db', { type: 'string', describe: 'path to query.db (overrides default)' }) + .option('since', { type: 'string', describe: 'ISO timestamp or "Nd ago"' }) + .option('until', { type: 'string', describe: 'ISO timestamp or "Nd ago"' }) + .option('branch', { type: 'string' }) + .option('status', { type: 'string', describe: 'e.g. 500, ">=500"' }) + .option('sort', { + type: 'string', + choices: ['count', 'avg', 'p95', 'err'] as const, + default: 'count', + }) + .option('limit', { type: 'number' }) + .option('json', { type: 'boolean', default: false }); +}; + +type Argv = ReturnType extends yargs.Argv ? 
T : never; + +export const handler = async (argv: yargs.ArgumentsCamelCase): Promise => { + verbose(argv.verbose as boolean | undefined); + handleWorkingDirectory(argv.directory); + // When --db is supplied, the appmap dir is irrelevant — the user has + // already named a query.db. Otherwise, derive it from the appmap dir. + const appmapDir = argv.db ? '' : await locateAppMapDir(argv.appmapDir); + + const filter: EndpointsFilter = { sort: argv.sort as EndpointSort }; + if (argv.since) filter.since = parseTime(argv.since); + if (argv.until) filter.until = parseTime(argv.until); + if (argv.branch) filter.branch = argv.branch; + if (argv.status) filter.status = parseStatus(argv.status); + if (argv.limit !== undefined) filter.limit = argv.limit; + + const db = openReadOnly(appmapDir, argv.db); + try { + const rows = endpoints(db, filter); + if (argv.json) { + log(JSON.stringify(rows, null, 2)); + return; + } + log( + formatTable( + ['METHOD', 'ROUTE', 'COUNT', 'AVG', 'P95', 'ERR%'], + rows.map((r) => [ + r.method, + r.route, + formatCount(r.count), + formatMs(r.avg_ms), + formatMs(r.p95_ms), + formatPct(r.err_pct), + ]) + ) + ); + } finally { + db.close(); + } +}; diff --git a/packages/cli/src/cmds/query/verbs/find.ts b/packages/cli/src/cmds/query/verbs/find.ts new file mode 100644 index 0000000000..2717f3a397 --- /dev/null +++ b/packages/cli/src/cmds/query/verbs/find.ts @@ -0,0 +1,160 @@ +import yargs from 'yargs'; +import { log } from 'console'; + +import { handleWorkingDirectory } from '../../../lib/handleWorkingDirectory'; +import { locateAppMapDir } from '../../../lib/locateAppMapDir'; +import { verbose } from '../../../utils'; +import { openReadOnly } from '../lib/openReadOnly'; +import { parseDuration, parseStatus, parseTime } from '../lib/parseFilter'; +import { + find, + FindFilter, + FindType, + FindAppmapRow, + FindCallRow, + FindExceptionRow, + FindQueryRow, + FindRequestRow, +} from '../queries/find'; +import { formatMs, formatTable } from '../lib/format'; 
+ +const TYPES: readonly FindType[] = ['appmaps', 'requests', 'queries', 'calls', 'exceptions']; + +export const command = 'find '; +export const describe = 'Row-level search across recordings'; + +export const builder = (args: yargs.Argv) => { + return args + .positional('type', { type: 'string', choices: TYPES }) + .option('directory', { type: 'string', alias: 'd' }) + .option('appmap-dir', { type: 'string' }) + .option('db', { type: 'string', describe: 'path to query.db (overrides default)' }) + .option('route', { type: 'string', describe: 'e.g. "POST /orders" or "/orders"' }) + .option('class', { type: 'string', describe: 'defined_class or fqid Class part' }) + .option('method', { type: 'string', describe: 'method_id (not HTTP method)' }) + .option('label', { type: 'string' }) + .option('branch', { type: 'string' }) + .option('commit', { type: 'string' }) + .option('status', { type: 'string', describe: 'e.g. 500, ">=500"' }) + .option('duration', { type: 'string', describe: 'e.g. ">1s", ">=500ms"' }) + .option('since', { type: 'string' }) + .option('until', { type: 'string' }) + .option('appmap', { type: 'string', describe: 'appmap name' }) + .option('table', { type: 'string', describe: 'SQL table name (queries)' }) + .option('exception', { type: 'string', describe: 'exception class (exceptions)' }) + .option('limit', { type: 'number' }) + .option('offset', { type: 'number' }) + .option('json', { type: 'boolean', default: false }); +}; + +type Argv = ReturnType extends yargs.Argv ? T : never; + +export const handler = async (argv: yargs.ArgumentsCamelCase): Promise => { + verbose(argv.verbose as boolean | undefined); + handleWorkingDirectory(argv.directory); + const appmapDir = argv.db ? 
'' : await locateAppMapDir(argv.appmapDir); + + const filter: FindFilter = {}; + if (argv.route) filter.route = argv.route; + if (argv.class) filter.className = argv.class; + if (argv.method) filter.method = argv.method; + if (argv.label) filter.label = argv.label; + if (argv.branch) filter.branch = argv.branch; + if (argv.commit) filter.commit = argv.commit; + if (argv.status) filter.status = parseStatus(argv.status); + if (argv.duration) filter.duration = parseDuration(argv.duration); + if (argv.since) filter.since = parseTime(argv.since); + if (argv.until) filter.until = parseTime(argv.until); + if (argv.appmap) filter.appmap = argv.appmap; + if (argv.table) filter.table = argv.table; + if (argv.exception) filter.exception = argv.exception; + if (argv.limit !== undefined) filter.limit = argv.limit; + if (argv.offset !== undefined) filter.offset = argv.offset; + + const type = argv.type as FindType; + + const db = openReadOnly(appmapDir, argv.db); + try { + const rows = find(db, type, filter); + if (argv.json) { + log(JSON.stringify(rows, null, 2)); + return; + } + log(renderTable(type, rows)); + } finally { + db.close(); + } +}; + +function renderTable(type: FindType, rows: unknown[]): string { + switch (type) { + case 'appmaps': + return formatTable( + ['APPMAP', 'ROUTE', 'STATUS', 'ELAPSED', 'SQL', 'BRANCH', 'TIMESTAMP'], + (rows as FindAppmapRow[]).map((r) => [ + r.appmap_name, + r.route ?? '', + r.status_code != null ? String(r.status_code) : '', + formatMs(r.elapsed_ms), + String(r.sql_count), + r.branch ?? '', + r.timestamp ?? '', + ]) + ); + case 'requests': + return formatTable( + ['APPMAP', 'METHOD', 'ROUTE', 'STATUS', 'ELAPSED', 'BRANCH'], + (rows as FindRequestRow[]).map((r) => [ + r.appmap_name, + r.method, + r.route, + String(r.status_code), + formatMs(r.elapsed_ms), + r.branch ?? 
'', + ]) + ); + case 'queries': + return formatTable( + ['APPMAP', 'ELAPSED', 'CALLER', 'SQL'], + (rows as FindQueryRow[]).map((r) => [ + r.appmap_name, + formatMs(r.elapsed_ms), + r.caller_class && r.caller_method ? `${r.caller_class}#${r.caller_method}` : '', + r.sql_text, + ]) + ); + case 'calls': + return formatTable( + ['APPMAP', 'FQID', 'ELAPSED', 'PARAMS', 'RETURN'], + (rows as FindCallRow[]).map((r) => [ + r.appmap_name, + r.fqid ?? `${r.defined_class}#${r.method_id}`, + formatMs(r.elapsed_ms), + formatParams(r.parameters_json), + r.return_value ?? '', + ]) + ); + case 'exceptions': + return formatTable( + ['APPMAP', 'CLASS', 'MESSAGE', 'EVENT'], + (rows as FindExceptionRow[]).map((r) => [ + r.appmap_name, + r.exception_class, + r.message ?? '', + String(r.event_id), + ]) + ); + } +} + +function formatParams(json: string | null): string { + if (!json) return ''; + try { + const parsed = JSON.parse(json) as Array<{ name?: string; value?: unknown }>; + return parsed + .map((p) => `${p.name ?? '?'}=${typeof p.value === 'string' ? 
p.value : JSON.stringify(p.value)}`) + .join(', '); + } catch { + return json; + } +} diff --git a/packages/cli/src/cmds/query/verbs/tree.ts b/packages/cli/src/cmds/query/verbs/tree.ts new file mode 100644 index 0000000000..dc4c41d92a --- /dev/null +++ b/packages/cli/src/cmds/query/verbs/tree.ts @@ -0,0 +1,70 @@ +import yargs from 'yargs'; +import { log } from 'console'; + +import { handleWorkingDirectory } from '../../../lib/handleWorkingDirectory'; +import { locateAppMapDir } from '../../../lib/locateAppMapDir'; +import { verbose } from '../../../utils'; +import { openReadOnly } from '../lib/openReadOnly'; +import { tree, treeSummary, TreeNode } from '../queries/tree'; +import { renderFlat, renderSummary, renderTree } from '../lib/treeRender'; + +export const command = 'tree '; +export const describe = 'Render the call tree of one recording'; + +export const builder = (args: yargs.Argv) => { + return args + .positional('appmap', { type: 'string', describe: 'appmap name (or basename of source path)' }) + .option('directory', { type: 'string', alias: 'd' }) + .option('appmap-dir', { type: 'string' }) + .option('db', { type: 'string', describe: 'path to query.db' }) + .option('format', { + type: 'string', + choices: ['tree', 'summary'] as const, + default: 'tree', + }) + .option('filter', { + type: 'string', + choices: ['all', 'http', 'sql'] as const, + default: 'all', + }) + .option('json', { type: 'boolean', default: false }); +}; + +type Argv = ReturnType extends yargs.Argv ? T : never; + +export const handler = async (argv: yargs.ArgumentsCamelCase): Promise => { + verbose(argv.verbose as boolean | undefined); + handleWorkingDirectory(argv.directory); + const appmapDir = argv.db ? 
'' : await locateAppMapDir(argv.appmapDir); + + const ref = argv.appmap; + if (!ref) throw new Error(' is required'); + + const db = openReadOnly(appmapDir, argv.db); + try { + if (argv.format === 'summary') { + const s = treeSummary(db, ref); + if (argv.json) log(JSON.stringify(s, null, 2)); + else log(renderSummary(s)); + return; + } + + const nodes = tree(db, ref); + const filtered = applyFilter(nodes, argv.filter as 'all' | 'http' | 'sql'); + if (argv.json) { + log(JSON.stringify(filtered, null, 2)); + } else { + const f = argv.filter as 'all' | 'http' | 'sql'; + log(f === 'all' ? renderTree(filtered) : renderFlat(filtered)); + } + } finally { + db.close(); + } +}; + +function applyFilter(nodes: readonly TreeNode[], filter: 'all' | 'http' | 'sql'): TreeNode[] { + if (filter === 'all') return [...nodes]; + if (filter === 'sql') return nodes.filter((n) => n.kind === 'sql'); + // 'http' — both inbound and outbound + return nodes.filter((n) => n.kind === 'http_server' || n.kind === 'http_client'); +} diff --git a/packages/cli/tests/unit/cmds/query/lib/parseFilter.spec.ts b/packages/cli/tests/unit/cmds/query/lib/parseFilter.spec.ts new file mode 100644 index 0000000000..b6a3de55fe --- /dev/null +++ b/packages/cli/tests/unit/cmds/query/lib/parseFilter.spec.ts @@ -0,0 +1,77 @@ +import { + numberFilterSql, + parseDuration, + parseStatus, + parseTime, +} from '../../../../../src/cmds/query/lib/parseFilter'; + +describe('parseStatus', () => { + it('parses bare integer as equality', () => { + expect(parseStatus('500')).toEqual({ op: '=', value: 500 }); + }); + + it('parses each comparator', () => { + expect(parseStatus('>=500')).toEqual({ op: '>=', value: 500 }); + expect(parseStatus('<=399')).toEqual({ op: '<=', value: 399 }); + expect(parseStatus('>200')).toEqual({ op: '>', value: 200 }); + expect(parseStatus('<400')).toEqual({ op: '<', value: 400 }); + expect(parseStatus('=500')).toEqual({ op: '=', value: 500 }); + }); + + it('tolerates whitespace around the operator', 
() => { + expect(parseStatus('>= 500')).toEqual({ op: '>=', value: 500 }); + expect(parseStatus(' >=500 ')).toEqual({ op: '>=', value: 500 }); + }); + + it('throws on garbage input', () => { + expect(() => parseStatus('5xx')).toThrow(/invalid/); + expect(() => parseStatus('')).toThrow(/invalid/); + }); +}); + +describe('parseDuration', () => { + it('defaults to ms when no unit is given', () => { + expect(parseDuration('>500')).toEqual({ op: '>', value: 500 }); + }); + + it('converts s/m/h to ms', () => { + expect(parseDuration('>1s')).toEqual({ op: '>', value: 1000 }); + expect(parseDuration('>=2m')).toEqual({ op: '>=', value: 120_000 }); + expect(parseDuration('<1h')).toEqual({ op: '<', value: 3_600_000 }); + }); + + it('accepts decimals', () => { + expect(parseDuration('>1.5s')).toEqual({ op: '>', value: 1500 }); + }); +}); + +describe('parseTime', () => { + it('parses ISO timestamps', () => { + expect(parseTime('2026-04-29T14:21:08Z')).toBe('2026-04-29T14:21:08.000Z'); + }); + + it('parses ISO dates', () => { + expect(parseTime('2026-04-29')).toBe('2026-04-29T00:00:00.000Z'); + }); + + it('parses relative offsets', () => { + const now = new Date('2026-05-01T12:00:00Z'); + expect(parseTime('7d ago', now)).toBe('2026-04-24T12:00:00.000Z'); + expect(parseTime('30m ago', now)).toBe('2026-05-01T11:30:00.000Z'); + expect(parseTime('2h ago', now)).toBe('2026-05-01T10:00:00.000Z'); + expect(parseTime('45s ago', now)).toBe('2026-05-01T11:59:15.000Z'); + }); + + it('throws on garbage input', () => { + expect(() => parseTime('not a time')).toThrow(/invalid/); + }); +}); + +describe('numberFilterSql', () => { + it('builds a sql fragment + value', () => { + expect(numberFilterSql('status_code', { op: '>=', value: 500 })).toEqual({ + sql: 'status_code >= ?', + value: 500, + }); + }); +}); diff --git a/packages/cli/tests/unit/cmds/query/queries/endpoints.spec.ts b/packages/cli/tests/unit/cmds/query/queries/endpoints.spec.ts new file mode 100644 index 0000000000..327c9b0664 --- 
/dev/null +++ b/packages/cli/tests/unit/cmds/query/queries/endpoints.spec.ts @@ -0,0 +1,227 @@ +import sqlite3 from 'better-sqlite3'; + +import { openQueryDb } from '../../../../../src/cmds/query/db/openQueryDb'; +import { endpoints } from '../../../../../src/cmds/query/queries/endpoints'; + +function freshDb(): sqlite3.Database { + return openQueryDb('/tmp/ignored', ':memory:').db; +} + +interface SeedReq { + method: string; + path: string; + normalized_path?: string; + status: number; + elapsed_ms: number | null; + timestamp?: string; + branch?: string; +} + +let nextEvent = 1; +function seed(db: sqlite3.Database, reqs: SeedReq[]): void { + const insertAppmap = db.prepare( + `INSERT INTO appmaps (name, source_path, git_branch) VALUES (?, ?, ?)` + ); + const insertReq = db.prepare( + `INSERT INTO http_requests (appmap_id, event_id, method, path, normalized_path, + status_code, elapsed_ms, timestamp) + VALUES (?, ?, ?, ?, ?, ?, ?, ?)` + ); + for (let i = 0; i < reqs.length; i++) { + const r = reqs[i]; + const am = insertAppmap.run( + `rec-${i}`, + `/tmp/rec-${i}.appmap.json`, + r.branch ?? null + ); + insertReq.run( + am.lastInsertRowid, + nextEvent++, + r.method, + r.path, + r.normalized_path ?? null, + r.status, + r.elapsed_ms, + r.timestamp ?? 
'2026-04-29T12:00:00.000Z' + ); + } +} + +describe('endpoints', () => { + beforeEach(() => { + nextEvent = 1; + }); + + it('returns an empty array when there are no requests', () => { + const db = freshDb(); + try { + expect(endpoints(db)).toEqual([]); + } finally { + db.close(); + } + }); + + it('groups by (method, route) and counts', () => { + const db = freshDb(); + try { + seed(db, [ + { method: 'GET', path: '/x', status: 200, elapsed_ms: 100 }, + { method: 'GET', path: '/x', status: 200, elapsed_ms: 200 }, + { method: 'POST', path: '/x', status: 201, elapsed_ms: 150 }, + ]); + const out = endpoints(db); + const get = out.find((r) => r.method === 'GET'); + const post = out.find((r) => r.method === 'POST'); + expect(get?.count).toBe(2); + expect(post?.count).toBe(1); + expect(out).toHaveLength(2); + } finally { + db.close(); + } + }); + + it('uses normalized_path when present, otherwise raw path', () => { + const db = freshDb(); + try { + seed(db, [ + { method: 'GET', path: '/orders/42', normalized_path: '/orders/:id', status: 200, elapsed_ms: 100 }, + { method: 'GET', path: '/orders/99', normalized_path: '/orders/:id', status: 200, elapsed_ms: 200 }, + { method: 'GET', path: '/raw-only', status: 200, elapsed_ms: 50 }, + ]); + const out = endpoints(db); + expect(out.find((r) => r.route === '/orders/:id')?.count).toBe(2); + expect(out.find((r) => r.route === '/raw-only')?.count).toBe(1); + } finally { + db.close(); + } + }); + + it('computes avg, p95, and err_pct', () => { + const db = freshDb(); + try { + // 10 requests on /x: 9 with status 200 / elapsed [10,20,...,90], 1 with status 500 / elapsed 1000. 
+ const reqs: SeedReq[] = []; + for (let i = 1; i <= 9; i++) { + reqs.push({ method: 'GET', path: '/x', status: 200, elapsed_ms: i * 10 }); + } + reqs.push({ method: 'GET', path: '/x', status: 500, elapsed_ms: 1000 }); + seed(db, reqs); + + const row = endpoints(db).find((r) => r.route === '/x')!; + expect(row.count).toBe(10); + expect(row.err_pct).toBeCloseTo(10); + expect(row.avg_ms).toBeCloseTo((10 + 20 + 30 + 40 + 50 + 60 + 70 + 80 + 90 + 1000) / 10); + // sorted [10,20,...,90,1000]; p95 = ceil(0.95 * 10) - 1 = 9 → idx 9 → 1000 + expect(row.p95_ms).toBe(1000); + } finally { + db.close(); + } + }); + + it('--status filter shows only routes with at least one matching response, but counts remain over all', () => { + const db = freshDb(); + try { + seed(db, [ + { method: 'GET', path: '/quiet', status: 200, elapsed_ms: 50 }, + { method: 'GET', path: '/quiet', status: 200, elapsed_ms: 60 }, + { method: 'POST', path: '/orders', status: 201, elapsed_ms: 100 }, + { method: 'POST', path: '/orders', status: 201, elapsed_ms: 110 }, + { method: 'POST', path: '/orders', status: 500, elapsed_ms: 520 }, + ]); + const out = endpoints(db, { status: { op: '>=', value: 500 } }); + // /quiet has no 5xx → excluded. + // /orders has one 5xx → included; count=3, err_pct=33%. 
+ expect(out).toHaveLength(1); + const row = out[0]; + expect(row.route).toBe('/orders'); + expect(row.count).toBe(3); + expect(row.err_pct).toBeCloseTo((1 / 3) * 100); + } finally { + db.close(); + } + }); + + it('filters by branch', () => { + const db = freshDb(); + try { + seed(db, [ + { method: 'GET', path: '/x', status: 200, elapsed_ms: 100, branch: 'main' }, + { method: 'GET', path: '/y', status: 200, elapsed_ms: 100, branch: 'feature' }, + ]); + expect(endpoints(db, { branch: 'main' })).toHaveLength(1); + expect(endpoints(db, { branch: 'main' })[0].route).toBe('/x'); + } finally { + db.close(); + } + }); + + it('filters by since/until', () => { + const db = freshDb(); + try { + seed(db, [ + { method: 'GET', path: '/x', status: 200, elapsed_ms: 100, timestamp: '2026-04-01T00:00:00.000Z' }, + { method: 'GET', path: '/x', status: 200, elapsed_ms: 100, timestamp: '2026-04-15T00:00:00.000Z' }, + { method: 'GET', path: '/x', status: 200, elapsed_ms: 100, timestamp: '2026-04-30T00:00:00.000Z' }, + ]); + expect(endpoints(db, { since: '2026-04-10T00:00:00.000Z' })[0].count).toBe(2); + expect( + endpoints(db, { + since: '2026-04-10T00:00:00.000Z', + until: '2026-04-20T00:00:00.000Z', + })[0].count + ).toBe(1); + } finally { + db.close(); + } + }); + + it('sorts by the requested key', () => { + const db = freshDb(); + try { + seed(db, [ + { method: 'GET', path: '/a', status: 200, elapsed_ms: 100 }, + { method: 'GET', path: '/b', status: 200, elapsed_ms: 50 }, + { method: 'GET', path: '/b', status: 200, elapsed_ms: 50 }, + { method: 'GET', path: '/c', status: 500, elapsed_ms: 20 }, + ]); + const byCount = endpoints(db, { sort: 'count' }).map((r) => r.route); + expect(byCount[0]).toBe('/b'); // count 2 + const byErr = endpoints(db, { sort: 'err' }).map((r) => r.route); + expect(byErr[0]).toBe('/c'); // 100% err + const byAvg = endpoints(db, { sort: 'avg' }).map((r) => r.route); + expect(byAvg[0]).toBe('/a'); // 100ms avg + } finally { + db.close(); + } + }); + + 
it('limits the result set', () => { + const db = freshDb(); + try { + seed(db, [ + { method: 'GET', path: '/a', status: 200, elapsed_ms: 100 }, + { method: 'GET', path: '/b', status: 200, elapsed_ms: 100 }, + { method: 'GET', path: '/c', status: 200, elapsed_ms: 100 }, + ]); + expect(endpoints(db, { limit: 2 })).toHaveLength(2); + } finally { + db.close(); + } + }); + + it('handles null elapsed_ms (avg and p95 derived from non-null values only)', () => { + const db = freshDb(); + try { + seed(db, [ + { method: 'GET', path: '/x', status: 200, elapsed_ms: 100 }, + { method: 'GET', path: '/x', status: 200, elapsed_ms: null }, + ]); + const row = endpoints(db)[0]; + expect(row.count).toBe(2); + expect(row.avg_ms).toBe(100); + expect(row.p95_ms).toBe(100); + } finally { + db.close(); + } + }); +}); diff --git a/packages/cli/tests/unit/cmds/query/queries/find.spec.ts b/packages/cli/tests/unit/cmds/query/queries/find.spec.ts new file mode 100644 index 0000000000..96f44a2c24 --- /dev/null +++ b/packages/cli/tests/unit/cmds/query/queries/find.spec.ts @@ -0,0 +1,354 @@ +import sqlite3 from 'better-sqlite3'; + +import { openQueryDb } from '../../../../../src/cmds/query/db/openQueryDb'; +import { + findAppmaps, + findCalls, + findExceptions, + findQueries, + findRequests, +} from '../../../../../src/cmds/query/queries/find'; + +function freshDb(): sqlite3.Database { + return openQueryDb('/tmp/ignored', ':memory:').db; +} + +interface Recording { + name: string; + branch?: string; + commit?: string; + timestamp?: string; + requests?: Array<{ + event_id: number; + method: string; + path: string; + normalized_path?: string; + status: number; + elapsed_ms?: number; + }>; + queries?: Array<{ + event_id: number; + sql: string; + caller_class?: string; + caller_method?: string; + elapsed_ms?: number; + }>; + calls?: Array<{ + event_id: number; + defined_class: string; + method_id: string; + elapsed_ms?: number; + fqid?: string; + labels?: string[]; + }>; + exceptions?: Array<{ + 
event_id: number; + exception_class: string; + message?: string; + }>; +} + +function seed(db: sqlite3.Database, recs: Recording[]): void { + const insAm = db.prepare( + `INSERT INTO appmaps (name, source_path, git_branch, git_commit, timestamp, sql_query_count, elapsed_ms) + VALUES (?, ?, ?, ?, ?, ?, ?)` + ); + const insReq = db.prepare( + `INSERT INTO http_requests (appmap_id, event_id, method, path, normalized_path, status_code, elapsed_ms, timestamp) + VALUES (?, ?, ?, ?, ?, ?, ?, ?)` + ); + const insQ = db.prepare( + `INSERT INTO sql_queries (appmap_id, event_id, sql_text, caller_class, caller_method, elapsed_ms) + VALUES (?, ?, ?, ?, ?, ?)` + ); + const insCo = db.prepare( + `INSERT OR IGNORE INTO code_objects (fqid, defined_class, method_id) VALUES (?, ?, ?)` + ); + const selCoId = db.prepare(`SELECT id FROM code_objects WHERE fqid = ?`); + const insLabel = db.prepare( + `INSERT OR IGNORE INTO labels (code_object_id, label) VALUES (?, ?)` + ); + const insCall = db.prepare( + `INSERT INTO function_calls (appmap_id, event_id, defined_class, method_id, code_object_id, elapsed_ms) + VALUES (?, ?, ?, ?, ?, ?)` + ); + const insExc = db.prepare( + `INSERT INTO exceptions (appmap_id, event_id, exception_class, message) + VALUES (?, ?, ?, ?)` + ); + + for (const r of recs) { + const sqlCount = r.queries?.length ?? 0; + const am = insAm.run( + r.name, + `/tmp/${r.name}.appmap.json`, + r.branch ?? null, + r.commit ?? null, + r.timestamp ?? '2026-04-29T12:00:00.000Z', + sqlCount, + r.requests?.[0]?.elapsed_ms ?? null + ); + const aid = am.lastInsertRowid; + for (const req of r.requests ?? []) { + insReq.run( + aid, + req.event_id, + req.method, + req.path, + req.normalized_path ?? null, + req.status, + req.elapsed_ms ?? null, + r.timestamp ?? '2026-04-29T12:00:00.000Z' + ); + } + for (const q of r.queries ?? []) { + insQ.run( + aid, + q.event_id, + q.sql, + q.caller_class ?? null, + q.caller_method ?? null, + q.elapsed_ms ?? null + ); + } + for (const c of r.calls ?? 
[]) { + const fqid = c.fqid ?? `${c.defined_class}#${c.method_id}`; + insCo.run(fqid, c.defined_class, c.method_id); + const coId = (selCoId.get(fqid) as { id: number }).id; + for (const label of c.labels ?? []) insLabel.run(coId, label); + insCall.run( + aid, + c.event_id, + c.defined_class, + c.method_id, + coId, + c.elapsed_ms ?? null + ); + } + for (const e of r.exceptions ?? []) { + insExc.run(aid, e.event_id, e.exception_class, e.message ?? null); + } + } +} + +describe('findRequests', () => { + it('filters by route, method, status, duration, branch', () => { + const db = freshDb(); + try { + seed(db, [ + { + name: 'a', + branch: 'main', + requests: [ + { event_id: 1, method: 'GET', path: '/orders/42', normalized_path: '/orders/:id', status: 200, elapsed_ms: 50 }, + { event_id: 2, method: 'POST', path: '/orders', status: 500, elapsed_ms: 520 }, + ], + }, + { + name: 'b', + branch: 'feature', + requests: [{ event_id: 1, method: 'POST', path: '/orders', status: 500, elapsed_ms: 600 }], + }, + ]); + + // Method-prefixed route + const r1 = findRequests(db, { route: 'POST /orders' }); + expect(r1).toHaveLength(2); + expect(r1.every((r) => r.method === 'POST' && r.route === '/orders')).toBe(true); + + // Status filter + const r2 = findRequests(db, { status: { op: '>=', value: 500 } }); + expect(r2).toHaveLength(2); + + // Duration filter + const r3 = findRequests(db, { duration: { op: '>', value: 550 } }); + expect(r3).toHaveLength(1); + expect(r3[0].appmap_name).toBe('b'); + + // Branch filter + const r4 = findRequests(db, { branch: 'feature' }); + expect(r4).toHaveLength(1); + expect(r4[0].appmap_name).toBe('b'); + } finally { + db.close(); + } + }); + + it('--limit/--offset trims results', () => { + const db = freshDb(); + try { + seed(db, [ + { + name: 'a', + requests: [ + { event_id: 1, method: 'GET', path: '/x', status: 200 }, + { event_id: 2, method: 'GET', path: '/x', status: 200 }, + { event_id: 3, method: 'GET', path: '/x', status: 200 }, + ], + }, + ]); 
+ expect(findRequests(db, { limit: 2 })).toHaveLength(2); + expect(findRequests(db, { limit: 2, offset: 1 })[0].event_id).toBe(2); + } finally { + db.close(); + } + }); +}); + +describe('findAppmaps', () => { + it('returns one row per recording, sample request fields populated', () => { + const db = freshDb(); + try { + seed(db, [ + { + name: 'a', + branch: 'main', + requests: [ + { event_id: 1, method: 'GET', path: '/x', status: 200, elapsed_ms: 100 }, + { event_id: 2, method: 'POST', path: '/y', status: 500, elapsed_ms: 200 }, + ], + }, + { name: 'b', branch: 'feature' }, + ]); + const rows = findAppmaps(db, {}); + expect(rows).toHaveLength(2); + const a = rows.find((r) => r.appmap_name === 'a')!; + expect(a.route).toBe('/x'); // first request by event_id + expect(a.branch).toBe('main'); + expect(rows.find((r) => r.appmap_name === 'b')?.route).toBeNull(); + } finally { + db.close(); + } + }); + + it('--route narrows to recordings with a matching request and reports that request', () => { + const db = freshDb(); + try { + seed(db, [ + { + name: 'a', + requests: [ + { event_id: 1, method: 'GET', path: '/x', status: 200 }, + { event_id: 2, method: 'POST', path: '/orders', status: 500, elapsed_ms: 520 }, + ], + }, + { name: 'b', requests: [{ event_id: 1, method: 'GET', path: '/x', status: 200 }] }, + ]); + const rows = findAppmaps(db, { route: 'POST /orders', status: { op: '>=', value: 500 } }); + expect(rows).toHaveLength(1); + expect(rows[0].appmap_name).toBe('a'); + expect(rows[0].route).toBe('/orders'); + expect(rows[0].status_code).toBe(500); + expect(rows[0].elapsed_ms).toBe(520); + } finally { + db.close(); + } + }); +}); + +describe('findQueries', () => { + it('--table filters via SQL text LIKE; --status scopes via owning request', () => { + const db = freshDb(); + try { + seed(db, [ + { + name: 'a', + requests: [{ event_id: 1, method: 'POST', path: '/orders', status: 500 }], + queries: [ + { event_id: 2, sql: 'INSERT INTO orders (...) 
VALUES (...)', elapsed_ms: 14 }, + { event_id: 3, sql: 'SELECT * FROM users WHERE id = ?' }, + ], + }, + { + name: 'b', + requests: [{ event_id: 1, method: 'GET', path: '/x', status: 200 }], + queries: [{ event_id: 2, sql: 'INSERT INTO orders (...) VALUES (...)' }], + }, + ]); + const rows = findQueries(db, { table: 'orders', status: { op: '>=', value: 500 } }); + expect(rows).toHaveLength(1); + expect(rows[0].appmap_name).toBe('a'); + expect(rows[0].sql_text).toContain('INSERT INTO orders'); + } finally { + db.close(); + } + }); +}); + +describe('findCalls', () => { + it('--class and --method filter directly; --route scopes by recording', () => { + const db = freshDb(); + try { + seed(db, [ + { + name: 'a', + requests: [{ event_id: 1, method: 'POST', path: '/orders', status: 500 }], + calls: [ + { event_id: 2, defined_class: 'IdempotencyKey', method_id: 'generate', fqid: 'app/IdempotencyKey.generate' }, + { event_id: 3, defined_class: 'OrdersController', method_id: 'create' }, + ], + }, + { + name: 'b', + requests: [{ event_id: 1, method: 'GET', path: '/x', status: 200 }], + calls: [{ event_id: 2, defined_class: 'IdempotencyKey', method_id: 'generate' }], + }, + ]); + const rows = findCalls(db, { + className: 'IdempotencyKey', + route: 'POST /orders', + status: { op: '>=', value: 500 }, + }); + expect(rows).toHaveLength(1); + expect(rows[0].appmap_name).toBe('a'); + expect(rows[0].fqid).toBe('app/IdempotencyKey.generate'); + } finally { + db.close(); + } + }); + + it('--label filters via the labels table', () => { + const db = freshDb(); + try { + seed(db, [ + { + name: 'a', + calls: [ + { event_id: 1, defined_class: 'Logger', method_id: 'error', labels: ['log'] }, + { event_id: 2, defined_class: 'OrdersController', method_id: 'create' }, + ], + }, + ]); + const rows = findCalls(db, { label: 'log' }); + expect(rows).toHaveLength(1); + expect(rows[0].defined_class).toBe('Logger'); + } finally { + db.close(); + } + }); +}); + +describe('findExceptions', () => { + 
it('--exception filters by class; --route scopes by recording', () => { + const db = freshDb(); + try { + seed(db, [ + { + name: 'a', + requests: [{ event_id: 1, method: 'POST', path: '/orders', status: 500 }], + exceptions: [{ event_id: 2, exception_class: 'IntegrityError', message: 'duplicate key' }], + }, + { + name: 'b', + requests: [{ event_id: 1, method: 'GET', path: '/x', status: 404 }], + exceptions: [{ event_id: 2, exception_class: 'RecordNotFound' }], + }, + ]); + expect(findExceptions(db, { exception: 'IntegrityError' })).toHaveLength(1); + expect(findExceptions(db, { route: 'POST /orders' })).toHaveLength(1); + expect(findExceptions(db, { route: 'POST /orders' })[0].appmap_name).toBe('a'); + } finally { + db.close(); + } + }); +}); diff --git a/packages/cli/tests/unit/cmds/query/queries/tree.spec.ts b/packages/cli/tests/unit/cmds/query/queries/tree.spec.ts new file mode 100644 index 0000000000..dd96048ea8 --- /dev/null +++ b/packages/cli/tests/unit/cmds/query/queries/tree.spec.ts @@ -0,0 +1,196 @@ +import sqlite3 from 'better-sqlite3'; + +import { openQueryDb } from '../../../../../src/cmds/query/db/openQueryDb'; +import { + resolveAppmap, + tree, + treeSummary, +} from '../../../../../src/cmds/query/queries/tree'; + +function freshDb(): sqlite3.Database { + return openQueryDb('/tmp/ignored', ':memory:').db; +} + +// Seed a recording shaped like the V3 worked-session example: an HTTP request +// at depth 0, a controller call beneath it, an SQL query beneath the +// controller, and an exception on the same call. +function seed( + db: sqlite3.Database, + opts: { + name?: string; + branch?: string; + addLabel?: boolean; + addOutbound?: boolean; + } = {} +): number { + const am = db + .prepare( + `INSERT INTO appmaps (name, source_path, git_branch, sql_query_count) + VALUES (?, ?, ?, ?)` + ) + .run( + opts.name ?? 'orders_create_42', + `/tmp/${opts.name ?? 'orders_create_42'}.appmap.json`, + opts.branch ?? 
'main', + 1 + ); + const id = am.lastInsertRowid; + + // event_id 1: HTTP server request, parent_event_id null (root) + db.prepare( + `INSERT INTO http_requests (appmap_id, event_id, parent_event_id, thread_id, + method, path, status_code, elapsed_ms) + VALUES (?, 1, NULL, 1, 'POST', '/orders', 500, 520.0)` + ).run(id); + + // event_id 2: controller call, parent = 1 + let coId = 1; + db.prepare( + `INSERT OR IGNORE INTO code_objects (fqid, defined_class, method_id) + VALUES ('app/OrdersController#create', 'OrdersController', 'create')` + ).run(); + coId = (db + .prepare(`SELECT id FROM code_objects WHERE fqid = 'app/OrdersController#create'`) + .get() as { id: number }).id; + + if (opts.addLabel) { + db.prepare(`INSERT OR IGNORE INTO labels (code_object_id, label) VALUES (?, 'log')`).run(coId); + } + + db.prepare( + `INSERT INTO function_calls (appmap_id, event_id, parent_event_id, thread_id, + code_object_id, defined_class, method_id, elapsed_ms) + VALUES (?, 2, 1, 1, ?, 'OrdersController', 'create', 519.0)` + ).run(id, coId); + + // event_id 3: sql_query, parent = 2 + db.prepare( + `INSERT INTO sql_queries (appmap_id, event_id, parent_event_id, thread_id, + sql_text, database_type, elapsed_ms) + VALUES (?, 3, 2, 1, 'INSERT INTO orders (...)', 'postgres', 14.0)` + ).run(id); + + // event_id 4: exception, owned by call 2 (carried by its return event) + db.prepare( + `INSERT INTO exceptions (appmap_id, event_id, parent_event_id, thread_id, + exception_class, message) + VALUES (?, 2, 1, 1, 'IntegrityError', 'duplicate key')` + ).run(id); + + if (opts.addOutbound) { + db.prepare( + `INSERT INTO http_client_requests (appmap_id, event_id, parent_event_id, thread_id, + method, url, status_code, elapsed_ms) + VALUES (?, 5, 2, 1, 'GET', 'https://api.example/v1', 200, 40.0)` + ).run(id); + } + + return Number(id); +} + +describe('resolveAppmap', () => { + it('resolves by exact name match', () => { + const db = freshDb(); + try { + seed(db); + expect(resolveAppmap(db, 
'orders_create_42').name).toBe('orders_create_42'); + } finally { + db.close(); + } + }); + + it('resolves by source-path basename', () => { + const db = freshDb(); + try { + // Insert an appmap whose name doesn't match the basename. + db.prepare( + `INSERT INTO appmaps (name, source_path) VALUES ('Friendly Name', '/x/foo.appmap.json')` + ).run(); + expect(resolveAppmap(db, 'foo').source_path).toBe('/x/foo.appmap.json'); + } finally { + db.close(); + } + }); + + it('throws on miss', () => { + const db = freshDb(); + try { + expect(() => resolveAppmap(db, 'nope')).toThrow(/not found/); + } finally { + db.close(); + } + }); + + it('throws on ambiguous match', () => { + const db = freshDb(); + try { + seed(db, { name: 'a' }); + db.prepare( + `INSERT INTO appmaps (name, source_path) VALUES ('a', '/y/a.appmap.json')` + ).run(); + expect(() => resolveAppmap(db, 'a')).toThrow(/ambiguous/); + } finally { + db.close(); + } + }); +}); + +describe('tree', () => { + it('returns nodes in event_id order with computed depths', () => { + const db = freshDb(); + try { + seed(db); + const nodes = tree(db, 'orders_create_42'); + + expect(nodes.map((n) => n.event_id)).toEqual([1, 2, 2, 3]); + // event 1: HTTP server (root). + expect(nodes[0].kind).toBe('http_server'); + expect(nodes[0].depth).toBe(0); + // event 2: function call (under request). + const fn = nodes.find((n) => n.kind === 'function')!; + expect(fn.depth).toBe(1); + // event 2 also has an exception attached (same event_id, separate row). + const exc = nodes.find((n) => n.kind === 'exception')!; + expect(exc.depth).toBe(1); + // event 3: SQL under the function call. 
+ const sql = nodes.find((n) => n.kind === 'sql')!; + expect(sql.depth).toBe(2); + } finally { + db.close(); + } + }); + + it('joins fqid into function nodes', () => { + const db = freshDb(); + try { + seed(db); + const nodes = tree(db, 'orders_create_42'); + const fn = nodes.find((n) => n.kind === 'function')!; + // @ts-expect-error narrowing not visible here without further check + expect(fn.fqid).toBe('app/OrdersController#create'); + } finally { + db.close(); + } + }); +}); + +describe('treeSummary', () => { + it('counts SQL, surfaces entry/exception, and tallies labels', () => { + const db = freshDb(); + try { + seed(db, { addLabel: true, addOutbound: true }); + const s = treeSummary(db, 'orders_create_42'); + expect(s.entry?.method).toBe('POST'); + expect(s.entry?.route).toBe('/orders'); + expect(s.entry?.status_code).toBe(500); + expect(s.sql.count).toBe(1); + expect(s.sql.total_ms).toBeCloseTo(14); + expect(s.http_client.count).toBe(1); + expect(s.http_client.total_ms).toBeCloseTo(40); + expect(s.exceptions[0].exception_class).toBe('IntegrityError'); + expect(s.labels).toEqual([{ label: 'log', count: 1 }]); + } finally { + db.close(); + } + }); +}); From 0819a8d7b14c999b3f61b68217fd8b3d8c72390e Mon Sep 17 00:00:00 2001 From: kgilpin Date: Fri, 1 May 2026 16:51:50 -0400 Subject: [PATCH 04/30] chore(cli): drop validate-against-python script Removes the one-off Python parity script; not needed going forward. 
Co-Authored-By: Claude Opus 4.7 (1M context) --- .../cli/scripts/validate-against-python.ts | 336 ------------------ 1 file changed, 336 deletions(-) delete mode 100644 packages/cli/scripts/validate-against-python.ts diff --git a/packages/cli/scripts/validate-against-python.ts b/packages/cli/scripts/validate-against-python.ts deleted file mode 100644 index 51f8530a37..0000000000 --- a/packages/cli/scripts/validate-against-python.ts +++ /dev/null @@ -1,336 +0,0 @@ -/* eslint-disable no-console */ -// -// Cross-validation script: indexes a fixture set with both the Python -// prototype's importer and the TypeScript port, snapshots both query.db -// files, and diffs them. -// -// Usage: -// ts-node scripts/validate-against-python.ts -// APPMAP_APM_DIR=/path/to/appmap-apm ts-node scripts/validate-against-python.ts -// FIXTURE_DIR=/path/to/recordings ts-node scripts/validate-against-python.ts -// KEEP_TMP=1 ts-node scripts/validate-against-python.ts -// -// Findings are excluded — they are not in scope for the TS port. -// Time-sensitive columns (timestamp, elapsed_ms) are excluded from the -// row-level diff; row counts are still compared. - -import { execFileSync } from 'child_process'; -import { mkdtempSync, rmSync, existsSync } from 'fs'; -import { homedir, tmpdir } from 'os'; -import { join } from 'path'; - -import sqlite3 from 'better-sqlite3'; - -import { findFiles } from '../src/utils'; -import { openQueryDb } from '../src/cmds/query/db'; -import { importAppmap } from '../src/cmds/query/db/import/importAppmap'; -import { endpoints, EndpointRow } from '../src/cmds/query/queries/endpoints'; - -const APPMAP_APM_DIR = - process.env.APPMAP_APM_DIR ?? join(homedir(), 'source', 'appland', 'appmap-apm'); -const FIXTURE_DIR = - process.env.FIXTURE_DIR ?? 
join(APPMAP_APM_DIR, 'tests', 'fixtures', 'tmp', 'appmap'); -const PYTHON = join(APPMAP_APM_DIR, '.venv', 'bin', 'python'); - -const TABLES = [ - 'appmaps', - 'code_objects', - 'labels', - 'http_requests', - 'http_client_requests', - 'sql_queries', - 'function_calls', - 'exceptions', -] as const; - -interface Snapshot { - counts: Record; - rows: Record; -} - -function buildPythonDb(fixtureDir: string, dbPath: string): void { - console.log(`[python] importing ${fixtureDir} → ${dbPath}`); - execFileSync(PYTHON, ['-m', 'server.cli', 'import', fixtureDir], { - cwd: APPMAP_APM_DIR, - env: { ...process.env, APM_DB_PATH: dbPath }, - stdio: 'inherit', - }); -} - -async function buildTsDb(fixtureDir: string, dbPath: string): Promise { - console.log(`[ts] importing ${fixtureDir} → ${dbPath}`); - const { db } = openQueryDb('/tmp/ignored', dbPath); - let imported = 0; - let failed = 0; - await findFiles(fixtureDir, '.appmap.json', (file: string) => { - try { - importAppmap(db, file); - imported += 1; - } catch (err) { - failed += 1; - console.warn(` failed: ${file}: ${(err as Error).message}`); - } - }); - console.log(` imported ${imported} (failed=${failed})`); - db.close(); -} - -function snapshot(dbPath: string): Snapshot { - const db = sqlite3(dbPath, { readonly: true }); - try { - const counts: Record = {}; - for (const t of TABLES) { - counts[t] = (db.prepare(`SELECT COUNT(*) AS n FROM ${t}`).get() as { n: number }).n; - } - - const rows: Record = {}; - - rows.appmaps = db - .prepare( - `SELECT source_path, language, framework, recorder_type, - git_repository, git_branch, git_commit, - event_count, sql_query_count, http_request_count - FROM appmaps - ORDER BY source_path` - ) - .all(); - - rows.code_objects = db - .prepare(`SELECT fqid, defined_class, method_id FROM code_objects ORDER BY fqid`) - .all(); - - rows.labels = db - .prepare( - `SELECT co.fqid, l.label - FROM labels l JOIN code_objects co ON co.id = l.code_object_id - ORDER BY co.fqid, l.label` - ) - .all(); - 
- rows.http_requests = db - .prepare( - `SELECT a.source_path, h.event_id, h.thread_id, h.parent_event_id, - h.method, h.path, h.normalized_path, h.protocol, - h.status_code, h.mime_type - FROM http_requests h JOIN appmaps a ON a.id = h.appmap_id - ORDER BY a.source_path, h.event_id` - ) - .all(); - - rows.http_client_requests = db - .prepare( - `SELECT a.source_path, h.event_id, h.thread_id, h.parent_event_id, - h.method, h.url, h.status_code - FROM http_client_requests h JOIN appmaps a ON a.id = h.appmap_id - ORDER BY a.source_path, h.event_id` - ) - .all(); - - rows.sql_queries = db - .prepare( - `SELECT a.source_path, q.event_id, q.thread_id, q.parent_event_id, - q.sql_text, q.database_type, q.server_version, - q.caller_class, q.caller_method - FROM sql_queries q JOIN appmaps a ON a.id = q.appmap_id - ORDER BY a.source_path, q.event_id` - ) - .all(); - - rows.function_calls = db - .prepare( - `SELECT a.source_path, f.event_id, f.thread_id, f.parent_event_id, - co.fqid AS code_object_fqid, - f.defined_class, f.method_id, f.path, f.lineno, f.is_static - FROM function_calls f - JOIN appmaps a ON a.id = f.appmap_id - LEFT JOIN code_objects co ON co.id = f.code_object_id - ORDER BY a.source_path, f.event_id` - ) - .all(); - - rows.exceptions = db - .prepare( - `SELECT a.source_path, e.event_id, e.thread_id, e.parent_event_id, - e.exception_class, e.message, e.path, e.lineno - FROM exceptions e JOIN appmaps a ON a.id = e.appmap_id - ORDER BY a.source_path, e.event_id, e.exception_class` - ) - .all(); - - return { counts, rows }; - } finally { - db.close(); - } -} - -interface Mismatch { - table: string; - reason: string; - details?: { index: number; py: unknown; ts: unknown }; -} - -// Python equivalent for endpoints uses get_endpoints (count, avg, max, min, -// error_count). We emit it sorted on stable keys so output is deterministic. 
-const PY_ENDPOINTS_SCRIPT = ` -import json, os, sys -from server.services.queries import get_endpoints -rows = get_endpoints(limit=10000) -rows.sort(key=lambda r: (r['method'], r['endpoint'])) -print(json.dumps(rows, sort_keys=True)) -`; - -function pythonEndpoints(dbPath: string): unknown[] { - const out = execFileSync(PYTHON, ['-c', PY_ENDPOINTS_SCRIPT], { - cwd: APPMAP_APM_DIR, - env: { ...process.env, APM_DB_PATH: dbPath }, - encoding: 'utf8', - }); - return JSON.parse(out) as unknown[]; -} - -// Round and project a TS EndpointRow to the same shape as Python's -// get_endpoints output, so we can diff them directly. min/max aren't tracked -// by V3's endpoints() — drop them from comparison; they don't affect what -// the verb shows. -function tsEndpointsLikePython(rows: readonly EndpointRow[]) { - return [...rows] - .map((r) => ({ - method: r.method, - endpoint: r.route, - request_count: r.count, - avg_elapsed_ms: r.avg_ms == null ? null : Math.round(r.avg_ms * 100) / 100, - // err_pct (TS) and error_count (Python) are different shapes; reproject: - error_count: Math.round((r.err_pct / 100) * r.count), - })) - .sort((a, b) => (a.method + a.endpoint).localeCompare(b.method + b.endpoint)); -} - -function diffQueries(pyDb: string, tsDb: string): void { - console.log('\n--- query layer ---'); - - // 1) Python get_endpoints on both DBs — proves query layer is portable. - const pyOnPy = pythonEndpoints(pyDb); - const pyOnTs = pythonEndpoints(tsDb); - const portable = - JSON.stringify(pyOnPy) === JSON.stringify(pyOnTs) ? 'OK' : 'MISMATCH'; - console.log(`python.get_endpoints(py.db) vs python.get_endpoints(ts.db): ${portable}`); - - // 2) TS endpoints() on TS DB, projected to Python's shape, against Python's - // get_endpoints on the same DB. Validates that the V3 verb produces results - // consistent with the Python query layer for fields they share. 
- const db = sqlite3(tsDb, { readonly: true }); - const tsOut = endpoints(db); - db.close(); - const tsProjected = tsEndpointsLikePython(tsOut); - const pyOnTsAsArray = pyOnTs as Array>; - // Drop fields Python returns that we don't compare (max/min). - const pyTrimmed = pyOnTsAsArray - .map((r) => ({ - method: r.method, - endpoint: r.endpoint, - request_count: r.request_count, - avg_elapsed_ms: r.avg_elapsed_ms, - error_count: r.error_count, - })) - .sort((a, b) => - String(a.method + a.endpoint).localeCompare(String(b.method + b.endpoint)) - ); - const verbMatch = - JSON.stringify(tsProjected) === JSON.stringify(pyTrimmed) ? 'OK' : 'MISMATCH'; - console.log(`ts endpoints() vs python.get_endpoints (shared fields): ${verbMatch}`); - if (verbMatch !== 'OK') { - console.log(` python: ${JSON.stringify(pyTrimmed[0])}`); - console.log(` ts: ${JSON.stringify(tsProjected[0])}`); - } -} - -function diff(py: Snapshot, ts: Snapshot): Mismatch[] { - const issues: Mismatch[] = []; - for (const t of TABLES) { - const pyRows = py.rows[t]; - const tsRows = ts.rows[t]; - if (pyRows.length !== tsRows.length) { - issues.push({ - table: t, - reason: `row count differs (python=${pyRows.length}, ts=${tsRows.length})`, - }); - continue; - } - for (let i = 0; i < pyRows.length; i++) { - const a = JSON.stringify(pyRows[i]); - const b = JSON.stringify(tsRows[i]); - if (a !== b) { - issues.push({ - table: t, - reason: `first row diff at index ${i}`, - details: { index: i, py: pyRows[i], ts: tsRows[i] }, - }); - break; - } - } - } - return issues; -} - -async function main(): Promise { - if (!existsSync(PYTHON)) { - console.error(`Python interpreter not found at ${PYTHON}`); - console.error(`Set APPMAP_APM_DIR or install the venv at ${APPMAP_APM_DIR}/.venv`); - process.exit(2); - } - if (!existsSync(FIXTURE_DIR)) { - console.error(`Fixture dir not found: ${FIXTURE_DIR}`); - process.exit(2); - } - - const tmp = mkdtempSync(join(tmpdir(), 'cross-validate-')); - const pyDb = join(tmp, 
'python.db'); - const tsDb = join(tmp, 'ts.db'); - - console.log(`fixture dir: ${FIXTURE_DIR}`); - console.log(`tmp: ${tmp}\n`); - - buildPythonDb(FIXTURE_DIR, pyDb); - await buildTsDb(FIXTURE_DIR, tsDb); - - const py = snapshot(pyDb); - const ts = snapshot(tsDb); - - console.log('\n--- counts ---'); - console.log(`${'table'.padEnd(22)} ${'python'.padStart(8)} ${'ts'.padStart(8)} ok`); - let countsOk = true; - for (const t of TABLES) { - const match = py.counts[t] === ts.counts[t]; - if (!match) countsOk = false; - console.log( - `${t.padEnd(22)} ${String(py.counts[t]).padStart(8)} ${String(ts.counts[t]).padStart(8)} ${ - match ? 'OK' : 'MISMATCH' - }` - ); - } - - const issues = diff(py, ts); - console.log('\n--- diff ---'); - if (issues.length === 0 && countsOk) { - console.log('all tables match'); - } else { - // fall through into the report below - } - if (issues.length === 0 && countsOk) { - diffQueries(pyDb, tsDb); - } else { - for (const issue of issues) { - console.log(`\n${issue.table}: ${issue.reason}`); - if (issue.details) { - console.log(` python: ${JSON.stringify(issue.details.py)}`); - console.log(` ts: ${JSON.stringify(issue.details.ts)}`); - } - } - process.exitCode = 1; - } - - if (process.env.KEEP_TMP) console.log(`\ntmp dir kept: ${tmp}`); - else rmSync(tmp, { recursive: true, force: true }); -} - -void main(); From 188d5970945a7bde1cb199a1d6e8ae3d8c3a6877 Mon Sep 17 00:00:00 2001 From: kgilpin Date: Sat, 2 May 2026 13:23:09 -0400 Subject: [PATCH 05/30] feat(cli): rename --db to --query-db, drop APPMAP_QUERY_DB env, add demo script MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The query-DB path was previously selectable by either an APPMAP_QUERY_DB env var (carried over from the Python prototype's APM_DB_PATH) or a --db flag on the query verbs. 
Both forms had problems: - The env var didn't fit the rest of the CLI's idiom (flags + appmap.yml, not env-driven configuration), and was dead in the verb path because locateAppMapDir ran before the env-var lookup. - --db was ambiguous in a CLI that may grow other databases (thread history, snippet index, etc.); query-db scopes the name. Drop the env var; rename --db to --query-db across appmap index and the three query verbs; route the flag through openQueryDb / openReadOnly. queryDbPath is now a pure SHA-derivation function; explicit overrides go through the flag, not the environment. Also: scripts/demo-query.sh — a small bash demo that builds a query.db from a fixture set (defaults to appmap-apm/tests/fixtures if present, else the bundled ruby fixtures) and exercises endpoints, find queries, find exceptions, tree --format=summary, and tree --filter=sql against it. Useful for attaching to PR descriptions. Co-Authored-By: Claude Opus 4.7 (1M context) --- packages/cli/scripts/demo-query.sh | 91 +++++++++++++++++++ packages/cli/src/cmds/index/index.ts | 6 +- packages/cli/src/cmds/query/db/index.ts | 2 +- packages/cli/src/cmds/query/db/path.ts | 11 +-- .../cli/src/cmds/query/lib/openReadOnly.ts | 2 +- .../cli/src/cmds/query/verbs/endpoints.ts | 8 +- packages/cli/src/cmds/query/verbs/find.ts | 6 +- packages/cli/src/cmds/query/verbs/tree.ts | 6 +- .../cli/tests/unit/cmds/query/db/path.spec.ts | 20 +--- 9 files changed, 114 insertions(+), 38 deletions(-) create mode 100755 packages/cli/scripts/demo-query.sh diff --git a/packages/cli/scripts/demo-query.sh b/packages/cli/scripts/demo-query.sh new file mode 100755 index 0000000000..8df05d94d8 --- /dev/null +++ b/packages/cli/scripts/demo-query.sh @@ -0,0 +1,91 @@ +#!/usr/bin/env bash +# +# Quick demo of the `appmap query` verbs against a fixture set. 
+# +# Usage: +# ./scripts/demo-query.sh # uses appmap-apm fixtures if present, +# # else bundled ruby fixtures +# ./scripts/demo-query.sh /path/to/your/appmaps # any directory of *.appmap.json files +# +# Side effects: copies the fixture set to a temp dir, builds and imports a +# query.db there, leaves the originals untouched. Cleans up on exit. + +set -euo pipefail + +cd "$(dirname "$0")/.." # → packages/cli + +# Pick the richest fixture set available. +DEFAULT="$HOME/source/appland/appmap-apm/tests/fixtures/tmp/appmap" +[ -d "$DEFAULT" ] || DEFAULT="$(pwd)/tests/unit/fixtures/ruby" +SRC="${1:-$DEFAULT}" +[ -d "$SRC" ] || { echo "fixture dir not found: $SRC" >&2; exit 2; } + +# Temp work area: copy the fixtures so `appmap index` can write fingerprint +# sidecars without touching the originals. +TMP="$(mktemp -d -t appmap-demo)" +DATA="$TMP/data" +DB="$TMP/query.db" +mkdir -p "$DATA" +cp -r "$SRC"/. "$DATA"/ +export NODE_NO_WARNINGS=1 +trap 'rm -rf "$TMP"' EXIT + +CLI=( node "$(pwd)/built/cli.js" ) + +echo "Building CLI…" >&2 +npx tsc 2>&1 | grep -v 'navie-local' >&2 || true + +# Filter out diagnostic noise from @appland/models that the verbs themselves +# don't emit (kept loose so we don't suppress real errors). 
+NOISE='\[DEBUG ' + +banner() { + echo + echo "── \$ appmap $*" +} +run() { + banner "$@" + "${CLI[@]}" "$@" 2>&1 | grep -vE "$NOISE" || true +} +run_quiet() { + banner "$@" + "${CLI[@]}" "$@" 2>&1 | grep -vE "$NOISE" | tail -5 || true +} + +cat < --query-db " +"${CLI[@]}" index --appmap-dir "$DATA" --query-db "$DB" >/dev/null 2>&1 +COUNT=$(node -e " + const db = require('better-sqlite3')('$DB', { readonly: true }); + process.stdout.write(String(db.prepare('SELECT COUNT(*) AS n FROM appmaps').get().n)); +") +echo "indexed $COUNT recordings" + +run query endpoints --query-db "$DB" --sort p95 --limit 5 +run query find queries --query-db "$DB" --table users --limit 3 || true +run query find exceptions --query-db "$DB" --limit 5 || true + +# Pick the recording with the most events for the tree demos. +APPMAP="$(node -e " + const db = require('better-sqlite3')('$DB', { readonly: true }); + const r = db.prepare( + 'SELECT name FROM appmaps WHERE event_count > 0 ORDER BY event_count DESC LIMIT 1' + ).get(); + process.stdout.write(r ? r.name : ''); +")" + +if [ -n "$APPMAP" ]; then + run query tree "$APPMAP" --query-db "$DB" --format=summary + run query tree "$APPMAP" --query-db "$DB" --filter=sql +fi + +echo +echo "Done." 
diff --git a/packages/cli/src/cmds/index/index.ts b/packages/cli/src/cmds/index/index.ts index 551ef7f092..77abf4e022 100644 --- a/packages/cli/src/cmds/index/index.ts +++ b/packages/cli/src/cmds/index/index.ts @@ -53,6 +53,10 @@ export const builder = (args: yargs.Argv) => { type: 'number', alias: 'p', }); + args.option('query-db', { + describe: 'path to query.db (overrides default ~/.appmap/data//query.db)', + type: 'string', + }); args.option('navie-provider', { describe: 'navie provider to use', type: 'string', @@ -80,7 +84,7 @@ export const handler = async (argv) => { const runServer = watch || port !== undefined; if (port && !watch) warn(`Note: --port option implies --watch`); - const queryDb = openQueryDb(appmapDir); + const queryDb = openQueryDb(appmapDir, argv.queryDb as string | undefined); const indexer = new QueryDbIndexer(queryDb.db); log( `Query DB at ${queryDb.path} (schema v${queryDb.version}${ diff --git a/packages/cli/src/cmds/query/db/index.ts b/packages/cli/src/cmds/query/db/index.ts index 31386cd996..7a3acb61d0 100644 --- a/packages/cli/src/cmds/query/db/index.ts +++ b/packages/cli/src/cmds/query/db/index.ts @@ -1,3 +1,3 @@ export { SCHEMA, SCHEMA_VERSION, SCHEMA_TABLES } from './schema'; -export { queryDbPath, QUERY_DB_FILENAME, QUERY_DB_ENV } from './path'; +export { queryDbPath, QUERY_DB_FILENAME } from './path'; export { openQueryDb, OpenQueryDbResult } from './openQueryDb'; diff --git a/packages/cli/src/cmds/query/db/path.ts b/packages/cli/src/cmds/query/db/path.ts index 074ed7789d..2cae528308 100644 --- a/packages/cli/src/cmds/query/db/path.ts +++ b/packages/cli/src/cmds/query/db/path.ts @@ -3,19 +3,18 @@ import { homedir } from 'os'; import { join, resolve } from 'path'; export const QUERY_DB_FILENAME = 'query.db'; -export const QUERY_DB_ENV = 'APPMAP_QUERY_DB'; // Derive the on-disk path for the query DB that corresponds to the given // appmap directory. 
The path is rooted at `~/.appmap/data/<id>/query.db`, // where `<id>` is the first 12 hex characters of the SHA-256 digest of -// the resolved directory path. Honors APPMAP_QUERY_DB as a full-path -// override. +// the resolved directory path. // // Pure: returns the path without creating any directories. +// +// To use a different path (tests, CI, demo scripts), call openQueryDb / +// openReadOnly with an explicit `dbPath` argument; the corresponding CLI +// flag is `--query-db`. export function queryDbPath(appmapDir: string): string { - const override = process.env[QUERY_DB_ENV]; - if (override) return override; - const id = createHash('sha256').update(resolve(appmapDir)).digest('hex').slice(0, 12); return join(homedir(), '.appmap', 'data', id, QUERY_DB_FILENAME); } diff --git a/packages/cli/src/cmds/query/lib/openReadOnly.ts b/packages/cli/src/cmds/query/lib/openReadOnly.ts index 38543bd635..cb68998c36 100644 --- a/packages/cli/src/cmds/query/lib/openReadOnly.ts +++ b/packages/cli/src/cmds/query/lib/openReadOnly.ts @@ -6,7 +6,7 @@ import { queryDbPath } from '../db/path'; // Open the query DB read-only for the given appmap directory. // Errors if the DB doesn't exist, prompting the user to run `appmap index`. -// `dbPath` overrides path derivation (used by tests and the --db flag). +// `dbPath` overrides path derivation (used by tests and the --query-db flag). export function openReadOnly(appmapDir: string, dbPath?: string): sqlite3.Database { const path = dbPath ??
queryDbPath(appmapDir); if (!existsSync(path)) { diff --git a/packages/cli/src/cmds/query/verbs/endpoints.ts b/packages/cli/src/cmds/query/verbs/endpoints.ts index b86d457d30..2315a6ebcf 100644 --- a/packages/cli/src/cmds/query/verbs/endpoints.ts +++ b/packages/cli/src/cmds/query/verbs/endpoints.ts @@ -20,7 +20,7 @@ export const builder = (args: yargs.Argv) => { return args .option('directory', { type: 'string', alias: 'd', describe: 'program working directory' }) .option('appmap-dir', { type: 'string', describe: 'directory of recordings' }) - .option('db', { type: 'string', describe: 'path to query.db (overrides default)' }) + .option('query-db', { type: 'string', describe: 'path to query.db (overrides default)' }) .option('since', { type: 'string', describe: 'ISO timestamp or "Nd ago"' }) .option('until', { type: 'string', describe: 'ISO timestamp or "Nd ago"' }) .option('branch', { type: 'string' }) @@ -39,9 +39,9 @@ type Argv = ReturnType extends yargs.Argv ? T : never; export const handler = async (argv: yargs.ArgumentsCamelCase): Promise => { verbose(argv.verbose as boolean | undefined); handleWorkingDirectory(argv.directory); - // When --db is supplied, the appmap dir is irrelevant — the user has + // When --query-db is supplied, the appmap dir is irrelevant — the user has // already named a query.db. Otherwise, derive it from the appmap dir. - const appmapDir = argv.db ? '' : await locateAppMapDir(argv.appmapDir); + const appmapDir = argv.queryDb ? 
'' : await locateAppMapDir(argv.appmapDir); const filter: EndpointsFilter = { sort: argv.sort as EndpointSort }; if (argv.since) filter.since = parseTime(argv.since); @@ -50,7 +50,7 @@ export const handler = async (argv: yargs.ArgumentsCamelCase): Promise { .positional('type', { type: 'string', choices: TYPES }) .option('directory', { type: 'string', alias: 'd' }) .option('appmap-dir', { type: 'string' }) - .option('db', { type: 'string', describe: 'path to query.db (overrides default)' }) + .option('query-db', { type: 'string', describe: 'path to query.db (overrides default)' }) .option('route', { type: 'string', describe: 'e.g. "POST /orders" or "/orders"' }) .option('class', { type: 'string', describe: 'defined_class or fqid Class part' }) .option('method', { type: 'string', describe: 'method_id (not HTTP method)' }) @@ -52,7 +52,7 @@ type Argv = ReturnType extends yargs.Argv ? T : never; export const handler = async (argv: yargs.ArgumentsCamelCase): Promise => { verbose(argv.verbose as boolean | undefined); handleWorkingDirectory(argv.directory); - const appmapDir = argv.db ? '' : await locateAppMapDir(argv.appmapDir); + const appmapDir = argv.queryDb ? '' : await locateAppMapDir(argv.appmapDir); const filter: FindFilter = {}; if (argv.route) filter.route = argv.route; @@ -73,7 +73,7 @@ export const handler = async (argv: yargs.ArgumentsCamelCase): Promise { .positional('appmap', { type: 'string', describe: 'appmap name (or basename of source path)' }) .option('directory', { type: 'string', alias: 'd' }) .option('appmap-dir', { type: 'string' }) - .option('db', { type: 'string', describe: 'path to query.db' }) + .option('query-db', { type: 'string', describe: 'path to query.db' }) .option('format', { type: 'string', choices: ['tree', 'summary'] as const, @@ -35,12 +35,12 @@ type Argv = ReturnType extends yargs.Argv ? 
T : never; export const handler = async (argv: yargs.ArgumentsCamelCase): Promise => { verbose(argv.verbose as boolean | undefined); handleWorkingDirectory(argv.directory); - const appmapDir = argv.db ? '' : await locateAppMapDir(argv.appmapDir); + const appmapDir = argv.queryDb ? '' : await locateAppMapDir(argv.appmapDir); const ref = argv.appmap; if (!ref) throw new Error(' is required'); - const db = openReadOnly(appmapDir, argv.db); + const db = openReadOnly(appmapDir, argv.queryDb); try { if (argv.format === 'summary') { const s = treeSummary(db, ref); diff --git a/packages/cli/tests/unit/cmds/query/db/path.spec.ts b/packages/cli/tests/unit/cmds/query/db/path.spec.ts index 46d57404f3..6186768b59 100644 --- a/packages/cli/tests/unit/cmds/query/db/path.spec.ts +++ b/packages/cli/tests/unit/cmds/query/db/path.spec.ts @@ -1,21 +1,9 @@ import { existsSync } from 'fs'; import { dirname } from 'path'; -import { QUERY_DB_ENV, queryDbPath } from '../../../../../src/cmds/query/db/path'; +import { queryDbPath } from '../../../../../src/cmds/query/db/path'; describe('queryDbPath', () => { - let envBefore: string | undefined; - - beforeEach(() => { - envBefore = process.env[QUERY_DB_ENV]; - delete process.env[QUERY_DB_ENV]; - }); - - afterEach(() => { - if (envBefore === undefined) delete process.env[QUERY_DB_ENV]; - else process.env[QUERY_DB_ENV] = envBefore; - }); - it('returns the same path for equivalent directory inputs', () => { expect(queryDbPath('/tmp/a')).toBe(queryDbPath('/tmp/a/')); expect(queryDbPath('/tmp/a')).toBe(queryDbPath('/tmp/a/./')); @@ -35,12 +23,6 @@ describe('queryDbPath', () => { // The parent may exist if a prior test created it, but queryDbPath // itself must not create anything; assert it returns a path without I/O. expect(typeof path).toBe('string'); - // Sanity: returning the path is decoupled from existence checking. 
void existsSync(dirname(path)); }); - - it('honors APPMAP_QUERY_DB as a full-path override', () => { - process.env[QUERY_DB_ENV] = '/tmp/override-test/over.db'; - expect(queryDbPath('/tmp/whatever')).toBe('/tmp/override-test/over.db'); - }); }); From 4684d60601eb59027649567114cf16b8a01b87d5 Mon Sep 17 00:00:00 2001 From: kgilpin Date: Sat, 2 May 2026 14:39:53 -0400 Subject: [PATCH 06/30] feat(cli): hotspots verb, schema cleanup, normalized class filters MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Three changes batched together because they share schema bumps and the class/method filter touches both code paths. - hotspots verb: function-mode (CALLS / TOTAL_MS / SELF_MS via a one-pass child-time CTE) and sql-mode (COUNT / AVG / TOTAL grouped by sql_text). New verbs/hotspots.ts and queries/hotspots.ts. - lib/scope.ts: extracted recording-scope helpers (parseRoute, appmapWhere, httpScopeClauses, appmapIdScope) that find and hotspots both use. find.ts thinned by ~80 lines. - Schema v2 → v4: v2: drop http_requests.timestamp (denormalized copy of appmaps.timestamp; nothing read it after endpoints switched to filtering on a.timestamp). v3: replace code_objects.defined_class + method_id with package, class, method, is_static — components are stored separately so filters can match by exact equality without parsing the fqid string. v4: replace `class` (::-joined string) with `classes` (JSON array) + denormalized `leaf_class`. Eliminates the implicit delimiter convention and the LIKE '%::' || ? suffix matches for short-form filters. - --class and --method (find calls / hotspots) now use the normalized columns: leaf_class for short forms, classes for explicit chains, package as a stricter qualifier when supplied. parseClassRef (also exported) parses V3-canonical and short forms, including Ruby/C++ Cls1::Cls2 chains and Java/Python dot-form opaque names. 
Falls back to the raw function_calls.defined_class for rows without a code_object link. - Endpoints fixes: --since/--until filter on a.timestamp (consistent with find); --sort sorts nulls last; --status describe documents its HAVING-style semantics. - --query-db replaces --db on appmap index and the query verbs; APPMAP_QUERY_DB env var is gone. - scripts/demo-query.sh exercises endpoints, find, tree, hotspots. Tests: 132 passing. Co-Authored-By: Claude Opus 4.7 (1M context) --- packages/cli/scripts/demo-query.sh | 2 + .../src/cmds/query/db/import/appmapRecord.ts | 5 +- .../src/cmds/query/db/import/codeObjects.ts | 81 +++--- .../src/cmds/query/db/import/httpRequests.ts | 10 +- .../src/cmds/query/db/import/importAppmap.ts | 4 +- packages/cli/src/cmds/query/db/schema.ts | 41 ++- packages/cli/src/cmds/query/lib/scope.ts | 256 ++++++++++++++++ .../cli/src/cmds/query/queries/endpoints.ts | 21 +- packages/cli/src/cmds/query/queries/find.ts | 108 +------ .../cli/src/cmds/query/queries/hotspots.ts | 127 ++++++++ packages/cli/src/cmds/query/query.ts | 2 + .../cli/src/cmds/query/verbs/endpoints.ts | 6 +- packages/cli/src/cmds/query/verbs/hotspots.ts | 90 ++++++ .../cmds/query/db/import/codeObjects.spec.ts | 13 +- .../cmds/query/db/import/httpRequests.spec.ts | 7 +- .../tests/unit/cmds/query/lib/scope.spec.ts | 237 +++++++++++++++ .../unit/cmds/query/queries/endpoints.spec.ts | 32 +- .../unit/cmds/query/queries/find.spec.ts | 131 ++++++++- .../unit/cmds/query/queries/hotspots.spec.ts | 274 ++++++++++++++++++ .../unit/cmds/query/queries/tree.spec.ts | 4 +- 20 files changed, 1268 insertions(+), 183 deletions(-) create mode 100644 packages/cli/src/cmds/query/lib/scope.ts create mode 100644 packages/cli/src/cmds/query/queries/hotspots.ts create mode 100644 packages/cli/src/cmds/query/verbs/hotspots.ts create mode 100644 packages/cli/tests/unit/cmds/query/lib/scope.spec.ts create mode 100644 packages/cli/tests/unit/cmds/query/queries/hotspots.spec.ts diff --git 
a/packages/cli/scripts/demo-query.sh b/packages/cli/scripts/demo-query.sh index 8df05d94d8..85624abec2 100755 --- a/packages/cli/scripts/demo-query.sh +++ b/packages/cli/scripts/demo-query.sh @@ -72,6 +72,8 @@ echo "indexed $COUNT recordings" run query endpoints --query-db "$DB" --sort p95 --limit 5 run query find queries --query-db "$DB" --table users --limit 3 || true run query find exceptions --query-db "$DB" --limit 5 || true +run query hotspots --query-db "$DB" --limit 5 +run query hotspots --query-db "$DB" --type=sql --limit 3 # Pick the recording with the most events for the tree demos. APPMAP="$(node -e " diff --git a/packages/cli/src/cmds/query/db/import/appmapRecord.ts b/packages/cli/src/cmds/query/db/import/appmapRecord.ts index 1d2210b2fb..77ad40de59 100644 --- a/packages/cli/src/cmds/query/db/import/appmapRecord.ts +++ b/packages/cli/src/cmds/query/db/import/appmapRecord.ts @@ -24,12 +24,11 @@ export interface AppmapRecordResult { timestampIso: string; } -// Insert the top-level appmaps row and return its id + the resolved -// timestamp (used by http_requests for the per-row timestamp column). +// Insert the top-level appmaps row and return its id and resolved timestamp. // // Total elapsed is taken from the first return event carrying an // http_server_response. If metadata.timestamp is missing, falls back to the -// file's mtime so time-series queries still work. +// file's mtime so time-range queries still work. 
export function insertAppmapRecord( db: sqlite3.Database, absolutePath: string, diff --git a/packages/cli/src/cmds/query/db/import/codeObjects.ts b/packages/cli/src/cmds/query/db/import/codeObjects.ts index d9e74d6c01..f437afbf94 100644 --- a/packages/cli/src/cmds/query/db/import/codeObjects.ts +++ b/packages/cli/src/cmds/query/db/import/codeObjects.ts @@ -18,18 +18,22 @@ export interface ClassMapNode { // insert its labels, and return a map of classMap location → code_object_id // (used by function_calls to link events to code objects via path:lineno). // -// fqid construction mirrors @appland/models' codeObjectId.js exactly: -// - between package and child: '/' -// - between class and child: '::' -// - between any node and a function child: '.' (static) or '#' (instance) +// Each function is decomposed into: +// - package : slash-joined package path (e.g. "app/services/idempotency") +// - class : ::-joined class chain (e.g. "Outer::Inner") +// - method : leaf method name (e.g. "generate") +// - is_static : 1 for static, 0 for instance // -// The defined_class column keeps the prototype's dot-form (resets to bare -// package name on package descent, accumulates on nested classes) — it is -// independent of fqid and pinned by existing tests. +// fqid is derived from these and matches @appland/models' codeObjectId.js: +// - between package and class: '/' +// - between class and child class: '::' +// - between class and function method: '.' (static) or '#' (instance) // // Behavior preserved from the Python prototype: // - Function node names with an auxtype suffix like " (get)" are trimmed. // - Functions without a location are skipped (e.g., C-extensions). +// - When descending from a package, the class chain resets (a class +// directly under a package starts a fresh chain). 
export function importCodeObjects( db: sqlite3.Database, classMap: readonly ClassMapNode[] @@ -37,33 +41,33 @@ export function importCodeObjects( const lookup = new Map(); const insertCodeObject = db.prepare( - `INSERT OR IGNORE INTO code_objects (fqid, defined_class, method_id) VALUES (?, ?, ?)` + `INSERT OR IGNORE INTO code_objects (fqid, package, classes, leaf_class, method, is_static) + VALUES (?, ?, ?, ?, ?, ?)` ); const selectCodeObjectId = db.prepare(`SELECT id FROM code_objects WHERE fqid = ?`); const insertLabel = db.prepare( `INSERT OR IGNORE INTO labels (code_object_id, label) VALUES (?, ?)` ); - function appendToken( - parentTokens: readonly string[], - name: string, - parentType: CodeObjectType | undefined, - nodeType: CodeObjectType, + function buildFqid( + packageTokens: readonly string[], + classTokens: readonly string[], + method: string, isStatic: boolean - ): readonly string[] { - if (parentTokens.length === 0) return [name]; - let separator = ''; - if (parentType === 'package') separator = '/'; - else if (parentType === 'class') separator = '::'; - if (nodeType === 'function') separator = isStatic ? '.' : '#'; - return [...parentTokens, separator, name]; + ): string { + const pkg = packageTokens.join('/'); + const cls = classTokens.join('::'); + const methodSep = isStatic ? '.' : '#'; + if (pkg && cls) return `${pkg}/${cls}${methodSep}${method}`; + if (pkg) return `${pkg}${methodSep}${method}`; + if (cls) return `${cls}${methodSep}${method}`; + return `${methodSep}${method}`; } function walk( node: ClassMapNode, - classPath: string, - fqidTokens: readonly string[], - parentType: CodeObjectType | undefined + packageTokens: readonly string[], + classTokens: readonly string[] ): void { const nodeType = node.type; const name = node.name ?? ''; @@ -76,10 +80,17 @@ export function importCodeObjects( const methodName = parenIdx >= 0 ? 
name.slice(0, parenIdx) : name; const isStatic = !!node.static; - const tokens = appendToken(fqidTokens, methodName, parentType, 'function', isStatic); - const fqid = tokens.join(''); + const fqid = buildFqid(packageTokens, classTokens, methodName, isStatic); - insertCodeObject.run(fqid, classPath, methodName); + const leafClass = classTokens.length > 0 ? classTokens[classTokens.length - 1] : ''; + insertCodeObject.run( + fqid, + packageTokens.join('/'), + JSON.stringify([...classTokens]), + leafClass, + methodName, + isStatic ? 1 : 0 + ); const row = selectCodeObjectId.get(fqid) as { id: number }; lookup.set(location, row.id); @@ -88,24 +99,20 @@ export function importCodeObjects( return; } - let nextClassPath: string; - let nextFqidTokens: readonly string[]; + let nextPackageTokens = packageTokens; + let nextClassTokens = classTokens; if (nodeType === 'package') { - nextClassPath = name; - nextFqidTokens = appendToken(fqidTokens, name, parentType, 'package', false); + nextPackageTokens = [...packageTokens, name]; + nextClassTokens = []; // package descent resets the class chain } else if (nodeType === 'class') { - nextClassPath = classPath ? `${classPath}.${name}` : name; - nextFqidTokens = appendToken(fqidTokens, name, parentType, 'class', false); - } else { - nextClassPath = classPath; - nextFqidTokens = fqidTokens; + nextClassTokens = [...classTokens, name]; } const children = node.children ?? []; - for (const child of children) walk(child, nextClassPath, nextFqidTokens, nodeType ?? 
parentType); + for (const child of children) walk(child, nextPackageTokens, nextClassTokens); } - for (const root of classMap) walk(root, '', [], undefined); + for (const root of classMap) walk(root, [], []); return lookup; } diff --git a/packages/cli/src/cmds/query/db/import/httpRequests.ts b/packages/cli/src/cmds/query/db/import/httpRequests.ts index c9d43f9272..b54a293a61 100644 --- a/packages/cli/src/cmds/query/db/import/httpRequests.ts +++ b/packages/cli/src/cmds/query/db/import/httpRequests.ts @@ -18,14 +18,13 @@ export function importHttpRequests( appmapId: number, events: readonly Record[], returnEvents: Map, - parentEventMap: Map, - timestampIso: string + parentEventMap: Map ): void { const stmt = db.prepare( `INSERT INTO http_requests (appmap_id, event_id, thread_id, parent_event_id, method, path, normalized_path, protocol, status_code, mime_type, - elapsed_ms, timestamp) - VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)` + elapsed_ms) + VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)` ); for (const ev of events) { @@ -46,8 +45,7 @@ export function importHttpRequests( req.protocol ?? null, resp.status_code ?? 0, resp.mime_type ?? null, - typeof elapsed === 'number' ? elapsed * 1000 : null, - timestampIso + typeof elapsed === 'number' ? 
elapsed * 1000 : null ); } } diff --git a/packages/cli/src/cmds/query/db/import/importAppmap.ts b/packages/cli/src/cmds/query/db/import/importAppmap.ts index b32294de88..47443c910e 100644 --- a/packages/cli/src/cmds/query/db/import/importAppmap.ts +++ b/packages/cli/src/cmds/query/db/import/importAppmap.ts @@ -33,13 +33,13 @@ export function importAppmap(db: sqlite3.Database, filePath: string): ImportResu const tx = db.transaction((): ImportResult => { db.prepare('DELETE FROM appmaps WHERE source_path = ?').run(absolutePath); - const { appmapId, timestampIso } = insertAppmapRecord(db, absolutePath, parsed); + const { appmapId } = insertAppmapRecord(db, absolutePath, parsed); const codeObjectLookup = importCodeObjects(db, classMap); const returnEvents = buildReturnEventMap(events); const parentEventMap = buildParentEventMap(events); - importHttpRequests(db, appmapId, events, returnEvents, parentEventMap, timestampIso); + importHttpRequests(db, appmapId, events, returnEvents, parentEventMap); importHttpClientRequests(db, appmapId, events, returnEvents, parentEventMap); importSqlQueries(db, appmapId, events, returnEvents, parentEventMap); importFunctionCalls(db, appmapId, events, returnEvents, parentEventMap, codeObjectLookup); diff --git a/packages/cli/src/cmds/query/db/schema.ts b/packages/cli/src/cmds/query/db/schema.ts index 4d1298526b..b4e48325a5 100644 --- a/packages/cli/src/cmds/query/db/schema.ts +++ b/packages/cli/src/cmds/query/db/schema.ts @@ -4,7 +4,7 @@ // queries an APM dashboard or LLM agent needs. Ported from appmap-apm // (server/db/schema.py); shape preserved unchanged. -export const SCHEMA_VERSION = 1; +export const SCHEMA_VERSION = 4; export const SCHEMA = ` CREATE TABLE IF NOT EXISTS appmaps ( @@ -26,15 +26,30 @@ CREATE TABLE IF NOT EXISTS appmaps ( ); -- Code objects from classMap entries (one per unique instrumented function). 
--- This is a lookup table for stable fqids — it intentionally does NOT store --- path, lineno, or location because those can vary across appmaps (e.g. when --- the same function is recorded from different branches or revisions). --- Per-recording location data lives on function_calls instead. +-- A lookup table for stable fqids. Components are stored separately so +-- filters can match exactly without parsing the fqid string: +-- package slash-joined package path ('' for top-level fn) +-- classes JSON array of class names ('[]' for package-level fn) +-- leaf_class last element of classes, denormalized for fast lookup +-- on short-form filters like --class Cipher (matches both +-- top-level Cipher and OpenSSL::Cipher). +-- method leaf method name +-- is_static 1 for static / class methods, 0 for instance methods +-- The fqid is always derivable from these (package + cls-chain + (#|.) + +-- method); we keep it as a stored column for output ergonomics and +-- uniqueness enforcement. +-- +-- This table intentionally does NOT store path/lineno/location, because +-- those vary across recordings of the same code (different branches or +-- revisions). Per-recording location data lives on function_calls. CREATE TABLE IF NOT EXISTS code_objects ( id INTEGER PRIMARY KEY AUTOINCREMENT, - fqid TEXT NOT NULL UNIQUE, -- stable ID: package/Class#method or package/Class.method - defined_class TEXT NOT NULL, - method_id TEXT NOT NULL + fqid TEXT NOT NULL UNIQUE, + package TEXT NOT NULL, + classes TEXT NOT NULL, + leaf_class TEXT NOT NULL, + method TEXT NOT NULL, + is_static INTEGER NOT NULL DEFAULT 0 ); CREATE TABLE IF NOT EXISTS http_requests ( @@ -49,8 +64,9 @@ CREATE TABLE IF NOT EXISTS http_requests ( protocol TEXT, status_code INTEGER NOT NULL, mime_type TEXT, - elapsed_ms REAL, - timestamp TEXT + elapsed_ms REAL + -- Note: no per-row timestamp. Time range queries JOIN to appmaps and + -- filter on appmaps.timestamp (the recording-level value). 
); CREATE TABLE IF NOT EXISTS http_client_requests ( @@ -120,7 +136,6 @@ CREATE TABLE IF NOT EXISTS labels ( CREATE INDEX IF NOT EXISTS idx_http_requests_appmap ON http_requests(appmap_id); CREATE INDEX IF NOT EXISTS idx_http_requests_path ON http_requests(normalized_path, method); CREATE INDEX IF NOT EXISTS idx_http_requests_status ON http_requests(status_code); -CREATE INDEX IF NOT EXISTS idx_http_requests_timestamp ON http_requests(timestamp); CREATE INDEX IF NOT EXISTS idx_http_client_requests_appmap ON http_client_requests(appmap_id); CREATE INDEX IF NOT EXISTS idx_sql_queries_appmap ON sql_queries(appmap_id); CREATE INDEX IF NOT EXISTS idx_sql_queries_elapsed ON sql_queries(elapsed_ms DESC); @@ -131,7 +146,9 @@ CREATE INDEX IF NOT EXISTS idx_function_calls_parent ON function_calls(appmap_id CREATE INDEX IF NOT EXISTS idx_exceptions_appmap ON exceptions(appmap_id); CREATE INDEX IF NOT EXISTS idx_exceptions_class ON exceptions(exception_class); CREATE INDEX IF NOT EXISTS idx_code_objects_fqid ON code_objects(fqid); -CREATE INDEX IF NOT EXISTS idx_code_objects_class_method ON code_objects(defined_class, method_id); +CREATE INDEX IF NOT EXISTS idx_code_objects_leaf_class ON code_objects(leaf_class); +CREATE INDEX IF NOT EXISTS idx_code_objects_method ON code_objects(method); +CREATE INDEX IF NOT EXISTS idx_code_objects_leaf_method ON code_objects(leaf_class, method); CREATE INDEX IF NOT EXISTS idx_labels_label ON labels(label); CREATE INDEX IF NOT EXISTS idx_labels_code_object ON labels(code_object_id); CREATE INDEX IF NOT EXISTS idx_appmaps_timestamp ON appmaps(timestamp); diff --git a/packages/cli/src/cmds/query/lib/scope.ts b/packages/cli/src/cmds/query/lib/scope.ts new file mode 100644 index 0000000000..9caa9a4e92 --- /dev/null +++ b/packages/cli/src/cmds/query/lib/scope.ts @@ -0,0 +1,256 @@ +import type { NumberFilter } from './parseFilter'; + +// Shared recording-scope filter shape — the subset of CLI filter flags that +// constrain *which recordings* a 
verb considers (as opposed to row-level +// filters like --duration or --label that constrain rows within a recording). +// +// Verb-specific filter types should extend this; helpers in this file accept +// any shape with the relevant fields. +export interface RecordingScope { + branch?: string; + commit?: string; + since?: string; + until?: string; + appmap?: string; + // HTTP-level filters that scope to "the recording must contain ≥1 + // matching server request": + route?: string; // "POST /orders" or "/orders" + status?: NumberFilter; +} + +export interface RouteSpec { + method?: string; + path: string; +} + +const HTTP_METHODS = /^(GET|POST|PUT|PATCH|DELETE|HEAD|OPTIONS)\s+(.+)$/; + +export function parseRoute(s: string): RouteSpec { + const m = s.match(HTTP_METHODS); + if (m) return { method: m[1], path: m[2] }; + return { path: s }; +} + +export interface Clauses { + where: string[]; + params: (string | number)[]; +} + +// Recording-level filters that go on the appmaps row directly. +export function appmapWhere(filter: RecordingScope, alias: string): Clauses { + const where: string[] = []; + const params: (string | number)[] = []; + if (filter.branch) { + where.push(`${alias}.git_branch = ?`); + params.push(filter.branch); + } + if (filter.commit) { + where.push(`${alias}.git_commit = ?`); + params.push(filter.commit); + } + if (filter.since) { + where.push(`${alias}.timestamp >= ?`); + params.push(filter.since); + } + if (filter.until) { + where.push(`${alias}.timestamp <= ?`); + params.push(filter.until); + } + if (filter.appmap) { + where.push(`(${alias}.name = ? OR ${alias}.source_path LIKE ?)`); + params.push(filter.appmap, `%/${filter.appmap}.appmap.json`); + } + return { where, params }; +} + +// HTTP-level filters that scope to "the recording must contain ≥1 matching +// server request." Used as a subquery for non-request finds. 
+export function httpScopeClauses(filter: RecordingScope): Clauses { + const where: string[] = []; + const params: (string | number)[] = []; + if (filter.route) { + const route = parseRoute(filter.route); + where.push(`COALESCE(h.normalized_path, h.path) = ?`); + params.push(route.path); + if (route.method) { + where.push(`h.method = ?`); + params.push(route.method); + } + } + if (filter.status) { + where.push(`h.status_code ${filter.status.op} ?`); + params.push(filter.status.value); + } + return { where, params }; +} + +// Parse a --class flag value into (package, class, method) components, per +// V3's accepted forms: +// short : "UserRepository" / "Cls1::Cls2" +// class+method : "UserRepository#findById" / "Cls1::Cls2#m" +// pkg+class : "app/services/UserRepository" +// full fqid : "app/services/UserRepository#findById" +// "app/Outer::Inner.parse" (static method) +// +// Class chains use `::` as the inner separator (canonical V3, also Ruby / +// C++ idiomatic); we treat that as part of the class name, not a split +// point. The method separator (`#` or `.`) only applies when the input is +// in canonical form (contains `/`) or uses `#` explicitly. A short-form +// dot like "org.example.Foo" is kept whole — Java/Python dot-form class +// names match the defined_class fallback, not the normalized columns. +export interface ClassMethodParts { + package?: string; + class?: string; + method?: string; +} + +export function parseClassRef(input: string): ClassMethodParts { + const slashIdx = input.lastIndexOf('/'); + + if (slashIdx < 0) { + // Short form. `#` is the only unambiguous method separator here; `.` + // is left in place because it could be part of a Java/Python class name + // ("org.example.Foo") rather than a method separator. + const hashIdx = input.lastIndexOf('#'); + if (hashIdx > 0) { + return { + class: input.slice(0, hashIdx) || undefined, + method: input.slice(hashIdx + 1), + }; + } + return { class: input.length > 0 ? 
input : undefined }; + } + + // Canonical fqid (slash present). The method separator is the rightmost + // `#` or `.` AFTER the last `/`. (Inner `.` characters in the package + // path don't apply — packages are slash-separated.) + let methodSepIdx = -1; + for (let i = input.length - 1; i > slashIdx; i--) { + const ch = input[i]; + if (ch === '#' || ch === '.') { + methodSepIdx = i; + break; + } + } + + let classPart = input; + let methodPart: string | undefined; + if (methodSepIdx > 0) { + classPart = input.slice(0, methodSepIdx); + methodPart = input.slice(methodSepIdx + 1); + } + + const classSlashIdx = classPart.lastIndexOf('/'); + const pkg = classPart.slice(0, classSlashIdx); + const cls = classPart.slice(classSlashIdx + 1); + return { + package: pkg.length > 0 ? pkg : undefined, + class: cls.length > 0 ? cls : undefined, + method: methodPart, + }; +} + +// Match a --class input against function_calls via the normalized +// code_objects columns. The `class` part of the user input is interpreted +// as either: +// - A `::`-separated chain ("Outer::Inner") → match `classes` exactly +// (canonical JSON form). +// - A single-segment short form ("Cipher") → match `leaf_class` +// exactly (hits any chain ending in that class — top-level Cipher +// and OpenSSL::Cipher both qualify). +// Adding a package narrows further: `... AND package = ?`. +// +// Falls back to function_calls.defined_class for rows that aren't linked +// to a code_object (sparse classMap recordings). The fallback recognizes +// `.` (Java/Python), `::` (Ruby/C++) as suffix separators for short-form +// matching against the raw event field. +export function classFilterClauses(input: string, fcAlias: string): Clauses { + const parts = parseClassRef(input); + if (!parts.class) { + return { where: ['1 = 0'], params: [] }; + } + + const coWhere: string[] = []; + const coParams: (string | number)[] = []; + if (parts.class.includes('::')) { + // Full chain — match classes JSON exactly. 
+ coWhere.push('classes = ?'); + coParams.push(JSON.stringify(parts.class.split('::'))); + } else { + // Single segment — match the leaf. + coWhere.push('leaf_class = ?'); + coParams.push(parts.class); + } + if (parts.package) { + coWhere.push('package = ?'); + coParams.push(parts.package); + } + if (parts.method) { + coWhere.push('method = ?'); + coParams.push(parts.method); + } + + // Fallback for unlinked function_calls. + const fbWhere: string[] = [ + `${fcAlias}.defined_class = ?`, + `${fcAlias}.defined_class LIKE '%.' || ?`, + `${fcAlias}.defined_class LIKE '%::' || ?`, + ]; + const fbParams: (string | number)[] = [parts.class, parts.class, parts.class]; + + return { + where: [ + `(${fcAlias}.code_object_id IN ( + SELECT id FROM code_objects WHERE ${coWhere.join(' AND ')} + ) + OR (${fcAlias}.code_object_id IS NULL AND (${fbWhere.join(' OR ')})))`, + ], + params: [...coParams, ...fbParams], + }; +} + +// Match a --method input against function_calls via the normalized +// code_objects.method column, with a fallback to function_calls.method_id +// for rows that aren't linked to a code_object. +export function methodFilterClauses(input: string, fcAlias: string): Clauses { + return { + where: [ + `(${fcAlias}.code_object_id IN ( + SELECT id FROM code_objects WHERE method = ? + ) + OR (${fcAlias}.code_object_id IS NULL AND ${fcAlias}.method_id = ?))`, + ], + params: [input, input], + }; +} + +// Build ".appmap_id IN (SELECT a.id …)" for tables where filtering at +// the appmap-id level is the right shape (sql_queries, function_calls, +// exceptions, http_client_requests). Returns null if no recording-level +// filtering is needed. 
+export function appmapIdScope( + filter: RecordingScope, + rowAlias: string +): { sql: string; params: (string | number)[] } | null { + const a = appmapWhere(filter, 'a'); + const h = httpScopeClauses(filter); + if (a.where.length === 0 && h.where.length === 0) return null; + + if (h.where.length > 0) { + const all = [...a.where, ...h.where].join(' AND '); + return { + sql: `${rowAlias}.appmap_id IN ( + SELECT DISTINCT a.id FROM appmaps a + JOIN http_requests h ON h.appmap_id = a.id + WHERE ${all} + )`, + params: [...a.params, ...h.params], + }; + } + return { + sql: `${rowAlias}.appmap_id IN ( + SELECT a.id FROM appmaps a WHERE ${a.where.join(' AND ')} + )`, + params: a.params, + }; +} diff --git a/packages/cli/src/cmds/query/queries/endpoints.ts b/packages/cli/src/cmds/query/queries/endpoints.ts index 6959f7e6a7..b5b2a2a34d 100644 --- a/packages/cli/src/cmds/query/queries/endpoints.ts +++ b/packages/cli/src/cmds/query/queries/endpoints.ts @@ -48,12 +48,16 @@ export function endpoints( where.push('a.git_branch = ?'); params.push(filter.branch); } + // --since/--until filter on the recording's timestamp (a.timestamp) for + // consistency with `find`. The importer copies that value into + // http_requests.timestamp too, but treating it as a recording-level + // attribute makes the dependency on that copy explicit. if (filter.since) { - where.push('hr.timestamp >= ?'); + where.push('a.timestamp >= ?'); params.push(filter.since); } if (filter.until) { - where.push('hr.timestamp <= ?'); + where.push('a.timestamp <= ?'); params.push(filter.until); } @@ -132,9 +136,18 @@ function percentile(sorted: readonly number[], p: number): number | null { return sorted[idx]; } +// Descending sort, nulls last (so a route with no measured durations doesn't +// rank alongside a genuinely 0 ms route). 
+function descNullsLast(a: number | null, b: number | null): number { + if (a == null && b == null) return 0; + if (a == null) return 1; + if (b == null) return -1; + return b - a; +} + const comparators: Record number> = { count: (a, b) => b.count - a.count, - avg: (a, b) => (b.avg_ms ?? 0) - (a.avg_ms ?? 0), - p95: (a, b) => (b.p95_ms ?? 0) - (a.p95_ms ?? 0), + avg: (a, b) => descNullsLast(a.avg_ms, b.avg_ms), + p95: (a, b) => descNullsLast(a.p95_ms, b.p95_ms), err: (a, b) => b.err_pct - a.err_pct, }; diff --git a/packages/cli/src/cmds/query/queries/find.ts b/packages/cli/src/cmds/query/queries/find.ts index eebe7e1a1d..75241cb541 100644 --- a/packages/cli/src/cmds/query/queries/find.ts +++ b/packages/cli/src/cmds/query/queries/find.ts @@ -1,6 +1,14 @@ import sqlite3 from 'better-sqlite3'; import type { NumberFilter } from '../lib/parseFilter'; +import { + appmapIdScope, + appmapWhere, + classFilterClauses, + httpScopeClauses, + methodFilterClauses, + parseRoute, +} from '../lib/scope'; export type FindType = 'appmaps' | 'requests' | 'queries' | 'calls' | 'exceptions'; @@ -71,101 +79,13 @@ export interface FindExceptionRow { lineno: number | null; } -// --- internal helpers --- - -interface RouteSpec { - method?: string; - path: string; -} - -function parseRoute(s: string): RouteSpec { - const m = s.match(/^(GET|POST|PUT|PATCH|DELETE|HEAD|OPTIONS)\s+(.+)$/); - if (m) return { method: m[1], path: m[2] }; - return { path: s }; -} +// --- internal helpers (find-specific) --- interface Clauses { where: string[]; params: (string | number)[]; } -// Recording-level filters that go on the appmaps row directly. 
-function appmapWhere(filter: FindFilter, alias: string): Clauses { - const where: string[] = []; - const params: (string | number)[] = []; - if (filter.branch) { - where.push(`${alias}.git_branch = ?`); - params.push(filter.branch); - } - if (filter.commit) { - where.push(`${alias}.git_commit = ?`); - params.push(filter.commit); - } - if (filter.since) { - where.push(`${alias}.timestamp >= ?`); - params.push(filter.since); - } - if (filter.until) { - where.push(`${alias}.timestamp <= ?`); - params.push(filter.until); - } - if (filter.appmap) { - where.push(`(${alias}.name = ? OR ${alias}.source_path LIKE ?)`); - params.push(filter.appmap, `%/${filter.appmap}.appmap.json`); - } - return { where, params }; -} - -// HTTP-level filters that scope to "the recording must contain ≥1 matching -// request." Used as a subquery for non-request finds. -function httpScopeClauses(filter: FindFilter): Clauses { - const where: string[] = []; - const params: (string | number)[] = []; - if (filter.route) { - const route = parseRoute(filter.route); - where.push(`COALESCE(h.normalized_path, h.path) = ?`); - params.push(route.path); - if (route.method) { - where.push(`h.method = ?`); - params.push(route.method); - } - } - if (filter.status) { - where.push(`h.status_code ${filter.status.op} ?`); - params.push(filter.status.value); - } - return { where, params }; -} - -// Build ".appmap_id IN (SELECT a.id ...)" for non-appmap finds. -// Returns null if no recording-level filtering is needed. 
-function appmapIdScope( - filter: FindFilter, - rowAlias: string -): { sql: string; params: (string | number)[] } | null { - const a = appmapWhere(filter, 'a'); - const h = httpScopeClauses(filter); - if (a.where.length === 0 && h.where.length === 0) return null; - - if (h.where.length > 0) { - const all = [...a.where, ...h.where].join(' AND '); - return { - sql: `${rowAlias}.appmap_id IN ( - SELECT DISTINCT a.id FROM appmaps a - JOIN http_requests h ON h.appmap_id = a.id - WHERE ${all} - )`, - params: [...a.params, ...h.params], - }; - } - return { - sql: `${rowAlias}.appmap_id IN ( - SELECT a.id FROM appmaps a WHERE ${a.where.join(' AND ')} - )`, - params: a.params, - }; -} - function durationClause(filter: FindFilter, column: string): Clauses { const where: string[] = []; const params: (string | number)[] = []; @@ -336,12 +256,14 @@ export function findCalls(db: sqlite3.Database, filter: FindFilter): FindCallRow } if (filter.className) { - where.push(`fc.defined_class = ?`); - params.push(filter.className); + const c = classFilterClauses(filter.className, 'fc'); + where.push(...c.where); + params.push(...c.params); } if (filter.method) { - where.push(`fc.method_id = ?`); - params.push(filter.method); + const m = methodFilterClauses(filter.method, 'fc'); + where.push(...m.where); + params.push(...m.params); } if (filter.label) { where.push( diff --git a/packages/cli/src/cmds/query/queries/hotspots.ts b/packages/cli/src/cmds/query/queries/hotspots.ts new file mode 100644 index 0000000000..8fa49c4eb2 --- /dev/null +++ b/packages/cli/src/cmds/query/queries/hotspots.ts @@ -0,0 +1,127 @@ +import sqlite3 from 'better-sqlite3'; + +import { appmapIdScope, classFilterClauses, RecordingScope } from '../lib/scope'; + +export type HotspotType = 'function' | 'sql'; + +export interface HotspotsFilter extends RecordingScope { + type?: HotspotType; + className?: string; // function mode only + limit?: number; +} + +export interface FunctionHotspotRow { + fqid: string | null; + 
defined_class: string; + method_id: string; + calls: number; + total_ms: number; + self_ms: number; +} + +export interface SqlHotspotRow { + count: number; + avg_ms: number; + total_ms: number; + sql_text: string; +} + +// SELF_MS = elapsed_ms - sum of immediate children's elapsed_ms, where a +// child is any function_call / sql_query / http_client_request whose +// parent_event_id points at this call. Computed via a one-pass CTE that +// pre-aggregates per-event child time, so the join is O(rows) regardless of +// nesting depth. +const CHILD_TIME_CTE = ` + WITH child_events AS ( + SELECT appmap_id, parent_event_id, elapsed_ms FROM function_calls + UNION ALL + SELECT appmap_id, parent_event_id, elapsed_ms FROM sql_queries + UNION ALL + SELECT appmap_id, parent_event_id, elapsed_ms FROM http_client_requests + ), + child_time AS ( + SELECT appmap_id, parent_event_id AS event_id, + SUM(COALESCE(elapsed_ms, 0)) AS sum_children + FROM child_events + WHERE parent_event_id IS NOT NULL + GROUP BY appmap_id, parent_event_id + ) +`; + +export function functionHotspots( + db: sqlite3.Database, + filter: HotspotsFilter +): FunctionHotspotRow[] { + const where: string[] = []; + const params: (string | number)[] = []; + + const scope = appmapIdScope(filter, 'fc'); + if (scope) { + where.push(scope.sql); + params.push(...scope.params); + } + if (filter.className) { + const c = classFilterClauses(filter.className, 'fc'); + where.push(...c.where); + params.push(...c.params); + } + + let sql = ` + ${CHILD_TIME_CTE} + SELECT + co.fqid AS fqid, + fc.defined_class AS defined_class, + fc.method_id AS method_id, + COUNT(*) AS calls, + SUM(COALESCE(fc.elapsed_ms, 0)) AS total_ms, + SUM(COALESCE(fc.elapsed_ms, 0) + - COALESCE(ct.sum_children, 0)) AS self_ms + FROM function_calls fc + LEFT JOIN child_time ct + ON ct.appmap_id = fc.appmap_id AND ct.event_id = fc.event_id + LEFT JOIN code_objects co ON co.id = fc.code_object_id + ${where.length > 0 ? 
`WHERE ${where.join(' AND ')}` : ''} + GROUP BY fc.code_object_id, fc.defined_class, fc.method_id + ORDER BY total_ms DESC + `; + if (filter.limit !== undefined) { + sql += ' LIMIT ?'; + params.push(filter.limit); + } + return db.prepare(sql).all(...params) as FunctionHotspotRow[]; +} + +export function sqlHotspots(db: sqlite3.Database, filter: HotspotsFilter): SqlHotspotRow[] { + const where: string[] = []; + const params: (string | number)[] = []; + + const scope = appmapIdScope(filter, 'q'); + if (scope) { + where.push(scope.sql); + params.push(...scope.params); + } + + let sql = ` + SELECT + COUNT(*) AS count, + AVG(q.elapsed_ms) AS avg_ms, + SUM(COALESCE(q.elapsed_ms, 0)) AS total_ms, + q.sql_text AS sql_text + FROM sql_queries q + ${where.length > 0 ? `WHERE ${where.join(' AND ')}` : ''} + GROUP BY q.sql_text + ORDER BY total_ms DESC + `; + if (filter.limit !== undefined) { + sql += ' LIMIT ?'; + params.push(filter.limit); + } + return db.prepare(sql).all(...params) as SqlHotspotRow[]; +} + +export function hotspots( + db: sqlite3.Database, + filter: HotspotsFilter +): FunctionHotspotRow[] | SqlHotspotRow[] { + return filter.type === 'sql' ? 
sqlHotspots(db, filter) : functionHotspots(db, filter); +} diff --git a/packages/cli/src/cmds/query/query.ts b/packages/cli/src/cmds/query/query.ts index c8418e19cd..03056098ce 100644 --- a/packages/cli/src/cmds/query/query.ts +++ b/packages/cli/src/cmds/query/query.ts @@ -2,6 +2,7 @@ import yargs from 'yargs'; import * as EndpointsVerb from './verbs/endpoints'; import * as FindVerb from './verbs/find'; +import * as HotspotsVerb from './verbs/hotspots'; import * as TreeVerb from './verbs/tree'; export const command = 'query'; @@ -11,6 +12,7 @@ export const builder = (args: yargs.Argv) => args .command(EndpointsVerb) .command(FindVerb) + .command(HotspotsVerb) .command(TreeVerb) .demandCommand(1, 'specify a query verb') .strict(); diff --git a/packages/cli/src/cmds/query/verbs/endpoints.ts b/packages/cli/src/cmds/query/verbs/endpoints.ts index 2315a6ebcf..d938378846 100644 --- a/packages/cli/src/cmds/query/verbs/endpoints.ts +++ b/packages/cli/src/cmds/query/verbs/endpoints.ts @@ -24,7 +24,11 @@ export const builder = (args: yargs.Argv) => { .option('since', { type: 'string', describe: 'ISO timestamp or "Nd ago"' }) .option('until', { type: 'string', describe: 'ISO timestamp or "Nd ago"' }) .option('branch', { type: 'string' }) - .option('status', { type: 'string', describe: 'e.g. 500, ">=500"' }) + .option('status', { + type: 'string', + describe: + 'route filter — e.g. 
500, ">=500" (route is shown if any request matches; aggregates still cover all of that route\'s requests)', + }) .option('sort', { type: 'string', choices: ['count', 'avg', 'p95', 'err'] as const, diff --git a/packages/cli/src/cmds/query/verbs/hotspots.ts b/packages/cli/src/cmds/query/verbs/hotspots.ts new file mode 100644 index 0000000000..992c42e2bc --- /dev/null +++ b/packages/cli/src/cmds/query/verbs/hotspots.ts @@ -0,0 +1,90 @@ +import yargs from 'yargs'; +import { log } from 'console'; + +import { handleWorkingDirectory } from '../../../lib/handleWorkingDirectory'; +import { locateAppMapDir } from '../../../lib/locateAppMapDir'; +import { verbose } from '../../../utils'; +import { openReadOnly } from '../lib/openReadOnly'; +import { parseTime } from '../lib/parseFilter'; +import { + FunctionHotspotRow, + HotspotsFilter, + HotspotType, + SqlHotspotRow, + hotspots, +} from '../queries/hotspots'; +import { formatCount, formatMs, formatTable } from '../lib/format'; + +export const command = 'hotspots'; +export const describe = 'Rank functions or SQL queries by cumulative elapsed'; + +export const builder = (args: yargs.Argv) => { + return args + .option('directory', { type: 'string', alias: 'd' }) + .option('appmap-dir', { type: 'string' }) + .option('query-db', { type: 'string', describe: 'path to query.db (overrides default)' }) + .option('type', { + type: 'string', + choices: ['function', 'sql'] as const, + default: 'function', + }) + .option('route', { type: 'string', describe: 'e.g. "GET /reports"' }) + .option('class', { type: 'string', describe: 'defined_class (function mode)' }) + .option('branch', { type: 'string' }) + .option('since', { type: 'string' }) + .option('until', { type: 'string' }) + .option('limit', { type: 'number' }) + .option('json', { type: 'boolean', default: false }); +}; + +type Argv = ReturnType extends yargs.Argv ? 
T : never; + +export const handler = async (argv: yargs.ArgumentsCamelCase): Promise => { + verbose(argv.verbose as boolean | undefined); + handleWorkingDirectory(argv.directory); + const appmapDir = argv.queryDb ? '' : await locateAppMapDir(argv.appmapDir); + + const filter: HotspotsFilter = { type: argv.type as HotspotType }; + if (argv.route) filter.route = argv.route; + if (argv.class) filter.className = argv.class; + if (argv.branch) filter.branch = argv.branch; + if (argv.since) filter.since = parseTime(argv.since); + if (argv.until) filter.until = parseTime(argv.until); + if (argv.limit !== undefined) filter.limit = argv.limit; + + const db = openReadOnly(appmapDir, argv.queryDb); + try { + const rows = hotspots(db, filter); + if (argv.json) { + log(JSON.stringify(rows, null, 2)); + return; + } + log(filter.type === 'sql' ? renderSql(rows as SqlHotspotRow[]) : renderFunctions(rows as FunctionHotspotRow[])); + } finally { + db.close(); + } +}; + +function renderFunctions(rows: readonly FunctionHotspotRow[]): string { + return formatTable( + ['FQID', 'CALLS', 'TOTAL_MS', 'SELF_MS'], + rows.map((r) => [ + r.fqid ?? 
`${r.defined_class}#${r.method_id}`, + formatCount(r.calls), + formatMs(r.total_ms), + formatMs(r.self_ms), + ]) + ); +} + +function renderSql(rows: readonly SqlHotspotRow[]): string { + return formatTable( + ['COUNT', 'AVG', 'TOTAL', 'SQL'], + rows.map((r) => [ + formatCount(r.count), + formatMs(r.avg_ms), + formatMs(r.total_ms), + r.sql_text, + ]) + ); +} diff --git a/packages/cli/tests/unit/cmds/query/db/import/codeObjects.spec.ts b/packages/cli/tests/unit/cmds/query/db/import/codeObjects.spec.ts index b4a195b856..27692bb4e8 100644 --- a/packages/cli/tests/unit/cmds/query/db/import/codeObjects.spec.ts +++ b/packages/cli/tests/unit/cmds/query/db/import/codeObjects.spec.ts @@ -48,11 +48,14 @@ describe('importCodeObjects', () => { const lookup = importCodeObjects(db, tree); const row = db - .prepare('SELECT fqid, defined_class, method_id FROM code_objects') + .prepare('SELECT fqid, package, classes, leaf_class, method, is_static FROM code_objects') .get() as any; expect(row.fqid).toBe('app/User#save'); - expect(row.method_id).toBe('save'); - expect(row.defined_class).toBe('app.User'); + expect(row.package).toBe('app'); + expect(JSON.parse(row.classes)).toEqual(['User']); + expect(row.leaf_class).toBe('User'); + expect(row.method).toBe('save'); + expect(row.is_static).toBe(0); expect(lookup.get('app/models/user.rb:10')).toBe(1); } finally { db.close(); @@ -113,8 +116,8 @@ describe('importCodeObjects', () => { }, ]; importCodeObjects(db, tree); - const row = db.prepare('SELECT fqid, method_id FROM code_objects').get() as any; - expect(row.method_id).toBe('is_authenticated'); + const row = db.prepare('SELECT fqid, method FROM code_objects').get() as any; + expect(row.method).toBe('is_authenticated'); expect(row.fqid).toBe('app/User#is_authenticated'); } finally { db.close(); diff --git a/packages/cli/tests/unit/cmds/query/db/import/httpRequests.spec.ts b/packages/cli/tests/unit/cmds/query/db/import/httpRequests.spec.ts index c87ff284f0..570c686582 100644 --- 
a/packages/cli/tests/unit/cmds/query/db/import/httpRequests.spec.ts +++ b/packages/cli/tests/unit/cmds/query/db/import/httpRequests.spec.ts @@ -51,8 +51,7 @@ describe('importHttpRequests', () => { appmapId, events, buildReturnEventMap(events), - buildParentEventMap(events), - '2026-04-29T14:21:08.000Z' + buildParentEventMap(events) ); const row = db.prepare('SELECT * FROM http_requests').get() as any; @@ -64,7 +63,6 @@ describe('importHttpRequests', () => { expect(row.mime_type).toBe('application/json'); expect(row.elapsed_ms).toBeCloseTo(520); expect(row.thread_id).toBe(99); - expect(row.timestamp).toBe('2026-04-29T14:21:08.000Z'); expect(row.parent_event_id).toBeNull(); } finally { db.close(); @@ -87,8 +85,7 @@ describe('importHttpRequests', () => { appmapId, events, buildReturnEventMap(events), - buildParentEventMap(events), - '2026-04-29T14:21:08.000Z' + buildParentEventMap(events) ); const row = db.prepare('SELECT status_code, elapsed_ms FROM http_requests').get() as any; expect(row.status_code).toBe(0); diff --git a/packages/cli/tests/unit/cmds/query/lib/scope.spec.ts b/packages/cli/tests/unit/cmds/query/lib/scope.spec.ts new file mode 100644 index 0000000000..00aa9f94e0 --- /dev/null +++ b/packages/cli/tests/unit/cmds/query/lib/scope.spec.ts @@ -0,0 +1,237 @@ +import sqlite3 from 'better-sqlite3'; + +import { openQueryDb } from '../../../../../src/cmds/query/db/openQueryDb'; +import { + classFilterClauses, + methodFilterClauses, + parseClassRef, +} from '../../../../../src/cmds/query/lib/scope'; + +function freshDb(): sqlite3.Database { + return openQueryDb('/tmp/ignored', ':memory:').db; +} + +describe('parseClassRef', () => { + it('short form is just a class', () => { + expect(parseClassRef('UserRepository')).toEqual({ class: 'UserRepository' }); + }); + + it('short form with method via #', () => { + expect(parseClassRef('UserRepository#findById')).toEqual({ + class: 'UserRepository', + method: 'findById', + }); + }); + + it('Ruby/C++ "::" chain in short 
form is kept as one class name', () => { + expect(parseClassRef('Cls1::Cls2')).toEqual({ class: 'Cls1::Cls2' }); + expect(parseClassRef('OpenSSL::Cipher')).toEqual({ class: 'OpenSSL::Cipher' }); + }); + + it('Ruby/C++ "::" chain with method via #', () => { + expect(parseClassRef('Net::HTTP#get')).toEqual({ class: 'Net::HTTP', method: 'get' }); + }); + + it('Java/Python dot-form in short form is kept whole (no method split)', () => { + // We can't unambiguously split "org.example.Foo" without context; treat + // the whole input as the class name and let the fallback match it via + // defined_class. + expect(parseClassRef('org.example.Foo')).toEqual({ class: 'org.example.Foo' }); + }); + + it('canonical fqid: package + class', () => { + expect(parseClassRef('app/services/UserRepository')).toEqual({ + package: 'app/services', + class: 'UserRepository', + }); + }); + + it('canonical fqid: package + class + instance method', () => { + expect(parseClassRef('app/services/UserRepository#findById')).toEqual({ + package: 'app/services', + class: 'UserRepository', + method: 'findById', + }); + }); + + it('canonical fqid: package + class + static method', () => { + expect(parseClassRef('core/date/Date.parse')).toEqual({ + package: 'core/date', + class: 'Date', + method: 'parse', + }); + }); + + it('canonical fqid with nested classes via ::', () => { + expect(parseClassRef('app/Outer::Inner#foo')).toEqual({ + package: 'app', + class: 'Outer::Inner', + method: 'foo', + }); + expect(parseClassRef('lib/Outer::Inner.parse')).toEqual({ + package: 'lib', + class: 'Outer::Inner', + method: 'parse', + }); + }); +}); + +function seedCodeObject( + db: sqlite3.Database, + fqid: string, + pkg: string, + classes: string[], + method: string, + isStatic = 0 +): number { + const leaf = classes.length > 0 ? 
classes[classes.length - 1] : ''; + db.prepare( + `INSERT OR IGNORE INTO code_objects (fqid, package, classes, leaf_class, method, is_static) + VALUES (?, ?, ?, ?, ?, ?)` + ).run(fqid, pkg, JSON.stringify(classes), leaf, method, isStatic); + return (db.prepare(`SELECT id FROM code_objects WHERE fqid = ?`).get(fqid) as { id: number }).id; +} + +function seedAppmap(db: sqlite3.Database): number { + return Number( + db + .prepare(`INSERT INTO appmaps (name, source_path) VALUES ('a', '/tmp/a.appmap.json')`) + .run().lastInsertRowid + ); +} + +function seedCall( + db: sqlite3.Database, + appmapId: number, + eventId: number, + defined_class: string, + method_id: string, + code_object_id: number | null +): void { + db.prepare( + `INSERT INTO function_calls (appmap_id, event_id, defined_class, method_id, code_object_id) + VALUES (?, ?, ?, ?, ?)` + ).run(appmapId, eventId, defined_class, method_id, code_object_id); +} + +describe('classFilterClauses', () => { + it('short class name matches a nested ::-chain via suffix', () => { + const db = freshDb(); + try { + const aid = seedAppmap(db); + const co1 = seedCodeObject(db, 'app/OpenSSL::Cipher#decrypt', 'app', ['OpenSSL', 'Cipher'], 'decrypt'); + const co2 = seedCodeObject(db, 'app/Cipher#decrypt', 'app', ['Cipher'], 'decrypt'); + const co3 = seedCodeObject(db, 'app/Other#m', 'app', ['Other'], 'm'); + seedCall(db, aid, 1, 'OpenSSL::Cipher', 'decrypt', co1); + seedCall(db, aid, 2, 'Cipher', 'decrypt', co2); + seedCall(db, aid, 3, 'Other', 'm', co3); + + const c = classFilterClauses('Cipher', 'fc'); + const sql = `SELECT fc.event_id FROM function_calls fc WHERE ${c.where.join(' AND ')} ORDER BY fc.event_id`; + const eids = (db.prepare(sql).all(...c.params) as Array<{ event_id: number }>).map((r) => r.event_id); + // Both co1 (nested) and co2 (top-level) should match; co3 should not. 
+ expect(eids).toEqual([1, 2]); + } finally { + db.close(); + } + }); + + it('full canonical fqid matches strictly on all components', () => { + const db = freshDb(); + try { + const aid = seedAppmap(db); + const co1 = seedCodeObject( + db, + 'org/example/UserRepository#findById', + 'org/example', + ['UserRepository'], + 'findById' + ); + const co2 = seedCodeObject( + db, + 'org/other/UserRepository#findById', + 'org/other', + ['UserRepository'], + 'findById' + ); + seedCall(db, aid, 1, 'org.example.UserRepository', 'findById', co1); + seedCall(db, aid, 2, 'org.other.UserRepository', 'findById', co2); + + const c = classFilterClauses('org/example/UserRepository#findById', 'fc'); + const sql = `SELECT fc.event_id FROM function_calls fc WHERE ${c.where.join(' AND ')}`; + const eids = (db.prepare(sql).all(...c.params) as Array<{ event_id: number }>).map((r) => r.event_id); + expect(eids).toEqual([1]); + } finally { + db.close(); + } + }); + + it('Ruby short-form matches via defined_class fallback when not linked to a code_object', () => { + const db = freshDb(); + try { + const aid = seedAppmap(db); + // Unlinked call (code_object_id = NULL); defined_class is Ruby-form. 
+ seedCall(db, aid, 1, 'OpenSSL::Cipher', 'decrypt', null); + seedCall(db, aid, 2, 'Some::Other::Class', 'm', null); + + const c = classFilterClauses('Cipher', 'fc'); + const sql = `SELECT fc.event_id FROM function_calls fc WHERE ${c.where.join(' AND ')}`; + const eids = (db.prepare(sql).all(...c.params) as Array<{ event_id: number }>).map((r) => r.event_id); + expect(eids).toEqual([1]); + } finally { + db.close(); + } + }); + + it('Java dot-form input matches the full defined_class on unlinked rows', () => { + const db = freshDb(); + try { + const aid = seedAppmap(db); + seedCall(db, aid, 1, 'org.example.Foo', 'm', null); + seedCall(db, aid, 2, 'org.example.Bar', 'm', null); + + const c = classFilterClauses('org.example.Foo', 'fc'); + const sql = `SELECT fc.event_id FROM function_calls fc WHERE ${c.where.join(' AND ')}`; + const eids = (db.prepare(sql).all(...c.params) as Array<{ event_id: number }>).map((r) => r.event_id); + expect(eids).toEqual([1]); + } finally { + db.close(); + } + }); +}); + +describe('methodFilterClauses', () => { + it('matches via normalized code_objects.method', () => { + const db = freshDb(); + try { + const aid = seedAppmap(db); + const co1 = seedCodeObject(db, 'app/X#findById', 'app', ['X'], 'findById'); + const co2 = seedCodeObject(db, 'app/Y#save', 'app', ['Y'], 'save'); + seedCall(db, aid, 1, 'X', 'findById', co1); + seedCall(db, aid, 2, 'Y', 'save', co2); + + const m = methodFilterClauses('findById', 'fc'); + const sql = `SELECT fc.event_id FROM function_calls fc WHERE ${m.where.join(' AND ')}`; + const eids = (db.prepare(sql).all(...m.params) as Array<{ event_id: number }>).map((r) => r.event_id); + expect(eids).toEqual([1]); + } finally { + db.close(); + } + }); + + it('falls back to function_calls.method_id for unlinked rows', () => { + const db = freshDb(); + try { + const aid = seedAppmap(db); + seedCall(db, aid, 1, 'X', 'findById', null); + seedCall(db, aid, 2, 'Y', 'save', null); + + const m = methodFilterClauses('findById', 
'fc'); + const sql = `SELECT fc.event_id FROM function_calls fc WHERE ${m.where.join(' AND ')}`; + const eids = (db.prepare(sql).all(...m.params) as Array<{ event_id: number }>).map((r) => r.event_id); + expect(eids).toEqual([1]); + } finally { + db.close(); + } + }); +}); diff --git a/packages/cli/tests/unit/cmds/query/queries/endpoints.spec.ts b/packages/cli/tests/unit/cmds/query/queries/endpoints.spec.ts index 327c9b0664..88160bd4a1 100644 --- a/packages/cli/tests/unit/cmds/query/queries/endpoints.spec.ts +++ b/packages/cli/tests/unit/cmds/query/queries/endpoints.spec.ts @@ -20,19 +20,21 @@ interface SeedReq { let nextEvent = 1; function seed(db: sqlite3.Database, reqs: SeedReq[]): void { const insertAppmap = db.prepare( - `INSERT INTO appmaps (name, source_path, git_branch) VALUES (?, ?, ?)` + `INSERT INTO appmaps (name, source_path, git_branch, timestamp) VALUES (?, ?, ?, ?)` ); const insertReq = db.prepare( `INSERT INTO http_requests (appmap_id, event_id, method, path, normalized_path, - status_code, elapsed_ms, timestamp) - VALUES (?, ?, ?, ?, ?, ?, ?, ?)` + status_code, elapsed_ms) + VALUES (?, ?, ?, ?, ?, ?, ?)` ); for (let i = 0; i < reqs.length; i++) { const r = reqs[i]; + const ts = r.timestamp ?? '2026-04-29T12:00:00.000Z'; const am = insertAppmap.run( `rec-${i}`, `/tmp/rec-${i}.appmap.json`, - r.branch ?? null + r.branch ?? null, + ts ); insertReq.run( am.lastInsertRowid, @@ -41,8 +43,7 @@ function seed(db: sqlite3.Database, reqs: SeedReq[]): void { r.path, r.normalized_path ?? null, r.status, - r.elapsed_ms, - r.timestamp ?? 
'2026-04-29T12:00:00.000Z' + r.elapsed_ms ); } } @@ -183,13 +184,30 @@ describe('endpoints', () => { { method: 'GET', path: '/b', status: 200, elapsed_ms: 50 }, { method: 'GET', path: '/b', status: 200, elapsed_ms: 50 }, { method: 'GET', path: '/c', status: 500, elapsed_ms: 20 }, + { method: 'GET', path: '/d', status: 200, elapsed_ms: 200 }, ]); const byCount = endpoints(db, { sort: 'count' }).map((r) => r.route); expect(byCount[0]).toBe('/b'); // count 2 const byErr = endpoints(db, { sort: 'err' }).map((r) => r.route); expect(byErr[0]).toBe('/c'); // 100% err const byAvg = endpoints(db, { sort: 'avg' }).map((r) => r.route); - expect(byAvg[0]).toBe('/a'); // 100ms avg + expect(byAvg[0]).toBe('/d'); // 200ms avg + const byP95 = endpoints(db, { sort: 'p95' }).map((r) => r.route); + expect(byP95[0]).toBe('/d'); // 200ms p95 + } finally { + db.close(); + } + }); + + it('sorts nulls last (a route with no measured duration ranks below a real 0)', () => { + const db = freshDb(); + try { + seed(db, [ + { method: 'GET', path: '/measured', status: 200, elapsed_ms: 0 }, + { method: 'GET', path: '/unmeasured', status: 200, elapsed_ms: null }, + ]); + const byP95 = endpoints(db, { sort: 'p95' }).map((r) => r.route); + expect(byP95).toEqual(['/measured', '/unmeasured']); } finally { db.close(); } diff --git a/packages/cli/tests/unit/cmds/query/queries/find.spec.ts b/packages/cli/tests/unit/cmds/query/queries/find.spec.ts index 96f44a2c24..a466563450 100644 --- a/packages/cli/tests/unit/cmds/query/queries/find.spec.ts +++ b/packages/cli/tests/unit/cmds/query/queries/find.spec.ts @@ -54,15 +54,20 @@ function seed(db: sqlite3.Database, recs: Recording[]): void { VALUES (?, ?, ?, ?, ?, ?, ?)` ); const insReq = db.prepare( - `INSERT INTO http_requests (appmap_id, event_id, method, path, normalized_path, status_code, elapsed_ms, timestamp) - VALUES (?, ?, ?, ?, ?, ?, ?, ?)` + `INSERT INTO http_requests (appmap_id, event_id, method, path, normalized_path, status_code, elapsed_ms) + 
VALUES (?, ?, ?, ?, ?, ?, ?)` ); const insQ = db.prepare( `INSERT INTO sql_queries (appmap_id, event_id, sql_text, caller_class, caller_method, elapsed_ms) VALUES (?, ?, ?, ?, ?, ?)` ); + // Test seed: derive package + class chain from defined_class so call + // sites don't have to specify them. defined_class may be Java dot-form + // ("org.example.Foo"), in which case we treat the trailing segment as + // the leaf class and the rest as a slash-form package. const insCo = db.prepare( - `INSERT OR IGNORE INTO code_objects (fqid, defined_class, method_id) VALUES (?, ?, ?)` + `INSERT OR IGNORE INTO code_objects (fqid, package, classes, leaf_class, method, is_static) + VALUES (?, ?, ?, ?, ?, ?)` ); const selCoId = db.prepare(`SELECT id FROM code_objects WHERE fqid = ?`); const insLabel = db.prepare( @@ -97,8 +102,7 @@ function seed(db: sqlite3.Database, recs: Recording[]): void { req.path, req.normalized_path ?? null, req.status, - req.elapsed_ms ?? null, - r.timestamp ?? '2026-04-29T12:00:00.000Z' + req.elapsed_ms ?? null ); } for (const q of r.queries ?? []) { @@ -113,7 +117,10 @@ function seed(db: sqlite3.Database, recs: Recording[]): void { } for (const c of r.calls ?? []) { const fqid = c.fqid ?? `${c.defined_class}#${c.method_id}`; - insCo.run(fqid, c.defined_class, c.method_id); + const dotIdx = c.defined_class.lastIndexOf('.'); + const pkg = dotIdx >= 0 ? c.defined_class.slice(0, dotIdx).replace(/\./g, '/') : ''; + const leaf = dotIdx >= 0 ? c.defined_class.slice(dotIdx + 1) : c.defined_class; + insCo.run(fqid, pkg, JSON.stringify([leaf]), leaf, c.method_id, 0); const coId = (selCoId.get(fqid) as { id: number }).id; for (const label of c.labels ?? 
[]) insLabel.run(coId, label); insCall.run( @@ -328,6 +335,118 @@ describe('findCalls', () => { }); }); +describe('findCalls --class / --method (fqid-aware)', () => { + it('matches the canonical fqid prefix', () => { + const db = freshDb(); + try { + seed(db, [ + { + name: 'a', + calls: [ + { + event_id: 1, + defined_class: 'org.example.UserRepository', + method_id: 'findById', + fqid: 'org/example/UserRepository#findById', + }, + ], + }, + ]); + // Canonical V3 fqid prefix (slash form, sans method) + const rows = findCalls(db, { className: 'org/example/UserRepository' }); + expect(rows).toHaveLength(1); + } finally { + db.close(); + } + }); + + it('matches a short class name as the trailing fqid segment', () => { + const db = freshDb(); + try { + seed(db, [ + { + name: 'a', + calls: [ + { + event_id: 1, + defined_class: 'org.example.UserRepository', + method_id: 'findById', + fqid: 'org/example/UserRepository#findById', + }, + { + event_id: 2, + defined_class: 'org.other.OrdersController', + method_id: 'create', + fqid: 'org/other/OrdersController#create', + }, + ], + }, + ]); + const rows = findCalls(db, { className: 'UserRepository' }); + expect(rows).toHaveLength(1); + expect(rows[0].method_id).toBe('findById'); + } finally { + db.close(); + } + }); + + it('matches the trailing dot-segment of a Java-style defined_class even without code_object', () => { + const db = freshDb(); + try { + // Insert a function_call that has NO code_object (code_object_id NULL) + // but has a Java dot-form defined_class. 
+ const am = db + .prepare(`INSERT INTO appmaps (name, source_path) VALUES ('a', '/tmp/a.appmap.json')`) + .run(); + db.prepare( + `INSERT INTO function_calls (appmap_id, event_id, defined_class, method_id) + VALUES (?, 1, 'org.example.UserRepository', 'findById')` + ).run(am.lastInsertRowid); + + const rows = findCalls(db, { className: 'UserRepository' }); + expect(rows).toHaveLength(1); + } finally { + db.close(); + } + }); + + it('--method matches via fqid suffix', () => { + const db = freshDb(); + try { + seed(db, [ + { + name: 'a', + calls: [ + { + event_id: 1, + defined_class: 'org.example.UserRepository', + method_id: 'findById', + fqid: 'org/example/UserRepository#findById', + }, + { + event_id: 2, + defined_class: 'org.example.OrderRepository', + method_id: 'findById', + fqid: 'org/example/OrderRepository#findById', + }, + { + event_id: 3, + defined_class: 'org.example.UserRepository', + method_id: 'save', + fqid: 'org/example/UserRepository#save', + }, + ], + }, + ]); + const rows = findCalls(db, { method: 'findById' }); + expect(rows).toHaveLength(2); + expect(rows.every((r) => r.method_id === 'findById')).toBe(true); + } finally { + db.close(); + } + }); +}); + describe('findExceptions', () => { it('--exception filters by class; --route scopes by recording', () => { const db = freshDb(); diff --git a/packages/cli/tests/unit/cmds/query/queries/hotspots.spec.ts b/packages/cli/tests/unit/cmds/query/queries/hotspots.spec.ts new file mode 100644 index 0000000000..37b8f0144f --- /dev/null +++ b/packages/cli/tests/unit/cmds/query/queries/hotspots.spec.ts @@ -0,0 +1,274 @@ +import sqlite3 from 'better-sqlite3'; + +import { openQueryDb } from '../../../../../src/cmds/query/db/openQueryDb'; +import { + functionHotspots, + sqlHotspots, +} from '../../../../../src/cmds/query/queries/hotspots'; + +function freshDb(): sqlite3.Database { + return openQueryDb('/tmp/ignored', ':memory:').db; +} + +interface CallSeed { + event_id: number; + parent_event_id?: number; + 
defined_class: string; + method_id: string; + fqid?: string; + elapsed_ms: number; +} + +function seedRecording( + db: sqlite3.Database, + opts: { + name: string; + branch?: string; + request?: { event_id: number; method: string; path: string; status: number }; + calls?: CallSeed[]; + queries?: Array<{ event_id: number; parent_event_id?: number; sql: string; elapsed_ms: number }>; + } +): number { + const am = db + .prepare( + `INSERT INTO appmaps (name, source_path, git_branch) VALUES (?, ?, ?)` + ) + .run(opts.name, `/tmp/${opts.name}.appmap.json`, opts.branch ?? null); + const aid = Number(am.lastInsertRowid); + + if (opts.request) { + db.prepare( + `INSERT INTO http_requests (appmap_id, event_id, method, path, status_code) + VALUES (?, ?, ?, ?, ?)` + ).run(aid, opts.request.event_id, opts.request.method, opts.request.path, opts.request.status); + } + + for (const c of opts.calls ?? []) { + const fqid = c.fqid ?? `${c.defined_class}#${c.method_id}`; + const dotIdx = c.defined_class.lastIndexOf('.'); + const pkg = dotIdx >= 0 ? c.defined_class.slice(0, dotIdx).replace(/\./g, '/') : ''; + const leaf = dotIdx >= 0 ? c.defined_class.slice(dotIdx + 1) : c.defined_class; + db.prepare( + `INSERT OR IGNORE INTO code_objects (fqid, package, classes, leaf_class, method, is_static) + VALUES (?, ?, ?, ?, ?, ?)` + ).run(fqid, pkg, JSON.stringify([leaf]), leaf, c.method_id, 0); + const coId = (db + .prepare(`SELECT id FROM code_objects WHERE fqid = ?`) + .get(fqid) as { id: number }).id; + db.prepare( + `INSERT INTO function_calls (appmap_id, event_id, parent_event_id, + code_object_id, defined_class, method_id, elapsed_ms) + VALUES (?, ?, ?, ?, ?, ?, ?)` + ).run(aid, c.event_id, c.parent_event_id ?? null, coId, c.defined_class, c.method_id, c.elapsed_ms); + } + + for (const q of opts.queries ?? []) { + db.prepare( + `INSERT INTO sql_queries (appmap_id, event_id, parent_event_id, sql_text, elapsed_ms) + VALUES (?, ?, ?, ?, ?)` + ).run(aid, q.event_id, q.parent_event_id ?? 
null, q.sql, q.elapsed_ms); + } + + return aid; +} + +describe('functionHotspots', () => { + it('groups by code_object_id and ranks by total_ms desc', () => { + const db = freshDb(); + try { + seedRecording(db, { + name: 'a', + calls: [ + { event_id: 1, defined_class: 'X', method_id: 'fast', fqid: 'app/X#fast', elapsed_ms: 5 }, + { event_id: 2, defined_class: 'X', method_id: 'fast', fqid: 'app/X#fast', elapsed_ms: 5 }, + { event_id: 3, defined_class: 'Y', method_id: 'slow', fqid: 'app/Y#slow', elapsed_ms: 100 }, + ], + }); + const rows = functionHotspots(db, {}); + expect(rows[0].fqid).toBe('app/Y#slow'); + expect(rows[0].calls).toBe(1); + expect(rows[0].total_ms).toBe(100); + expect(rows[1].fqid).toBe('app/X#fast'); + expect(rows[1].calls).toBe(2); + expect(rows[1].total_ms).toBe(10); + } finally { + db.close(); + } + }); + + it('computes self_ms as elapsed minus the sum of immediate children', () => { + const db = freshDb(); + try { + // outer call (10) calls inner1 (3) and inner2 (4) and an SQL (1). + // self_ms(outer) should be 10 - (3 + 4 + 1) = 2. 
+ seedRecording(db, { + name: 'a', + calls: [ + { event_id: 1, defined_class: 'X', method_id: 'outer', fqid: 'app/X#outer', elapsed_ms: 10 }, + { event_id: 2, parent_event_id: 1, defined_class: 'X', method_id: 'inner1', fqid: 'app/X#inner1', elapsed_ms: 3 }, + { event_id: 3, parent_event_id: 1, defined_class: 'X', method_id: 'inner2', fqid: 'app/X#inner2', elapsed_ms: 4 }, + ], + queries: [{ event_id: 4, parent_event_id: 1, sql: 'SELECT 1', elapsed_ms: 1 }], + }); + const outer = functionHotspots(db, {}).find((r) => r.fqid === 'app/X#outer')!; + expect(outer.total_ms).toBe(10); + expect(outer.self_ms).toBe(2); + const inner1 = functionHotspots(db, {}).find((r) => r.fqid === 'app/X#inner1')!; + expect(inner1.self_ms).toBe(3); // leaf — self equals total + } finally { + db.close(); + } + }); + + it('aggregates across recordings', () => { + const db = freshDb(); + try { + seedRecording(db, { + name: 'a', + calls: [{ event_id: 1, defined_class: 'X', method_id: 'm', fqid: 'app/X#m', elapsed_ms: 10 }], + }); + seedRecording(db, { + name: 'b', + calls: [{ event_id: 1, defined_class: 'X', method_id: 'm', fqid: 'app/X#m', elapsed_ms: 20 }], + }); + const rows = functionHotspots(db, {}); + expect(rows).toHaveLength(1); + expect(rows[0].calls).toBe(2); + expect(rows[0].total_ms).toBe(30); + } finally { + db.close(); + } + }); + + it('--route scopes to recordings with a matching server request', () => { + const db = freshDb(); + try { + seedRecording(db, { + name: 'reports', + request: { event_id: 0, method: 'GET', path: '/reports', status: 200 }, + calls: [{ event_id: 1, defined_class: 'R', method_id: 'calc', fqid: 'app/R#calc', elapsed_ms: 100 }], + }); + seedRecording(db, { + name: 'orders', + request: { event_id: 0, method: 'POST', path: '/orders', status: 200 }, + calls: [{ event_id: 1, defined_class: 'O', method_id: 'create', fqid: 'app/O#create', elapsed_ms: 50 }], + }); + const rows = functionHotspots(db, { route: 'GET /reports' }); + expect(rows).toHaveLength(1); + 
expect(rows[0].fqid).toBe('app/R#calc'); + } finally { + db.close(); + } + }); + + it('--class filters by defined_class', () => { + const db = freshDb(); + try { + seedRecording(db, { + name: 'a', + calls: [ + { event_id: 1, defined_class: 'OrdersController', method_id: 'create', elapsed_ms: 100 }, + { event_id: 2, defined_class: 'OrdersController', method_id: 'index', elapsed_ms: 50 }, + { event_id: 3, defined_class: 'UsersController', method_id: 'show', elapsed_ms: 200 }, + ], + }); + const rows = functionHotspots(db, { className: 'OrdersController' }); + expect(rows.map((r) => r.method_id).sort()).toEqual(['create', 'index']); + } finally { + db.close(); + } + }); + + it('--class also matches via the canonical fqid (not just defined_class)', () => { + const db = freshDb(); + try { + seedRecording(db, { + name: 'a', + calls: [ + { + event_id: 1, + defined_class: 'org.example.UserRepository', + method_id: 'findById', + fqid: 'org/example/UserRepository#findById', + elapsed_ms: 100, + }, + { + event_id: 2, + defined_class: 'org.example.OrderRepository', + method_id: 'findById', + fqid: 'org/example/OrderRepository#findById', + elapsed_ms: 50, + }, + ], + }); + const rows = functionHotspots(db, { className: 'UserRepository' }); + expect(rows).toHaveLength(1); + expect(rows[0].fqid).toBe('org/example/UserRepository#findById'); + } finally { + db.close(); + } + }); + + it('--limit truncates the result set', () => { + const db = freshDb(); + try { + seedRecording(db, { + name: 'a', + calls: [ + { event_id: 1, defined_class: 'X', method_id: 'a', elapsed_ms: 100 }, + { event_id: 2, defined_class: 'X', method_id: 'b', elapsed_ms: 50 }, + { event_id: 3, defined_class: 'X', method_id: 'c', elapsed_ms: 25 }, + ], + }); + expect(functionHotspots(db, { limit: 2 })).toHaveLength(2); + } finally { + db.close(); + } + }); +}); + +describe('sqlHotspots', () => { + it('groups by sql_text, ranks by total_ms desc', () => { + const db = freshDb(); + try { + seedRecording(db, { + name: 
'a', + queries: [ + { event_id: 1, sql: 'SELECT * FROM users WHERE id = ?', elapsed_ms: 2 }, + { event_id: 2, sql: 'SELECT * FROM users WHERE id = ?', elapsed_ms: 2 }, + { event_id: 3, sql: 'SELECT * FROM tenants WHERE slug = ?', elapsed_ms: 80 }, + ], + }); + const rows = sqlHotspots(db, {}); + expect(rows[0].sql_text).toBe('SELECT * FROM tenants WHERE slug = ?'); + expect(rows[0].count).toBe(1); + expect(rows[0].avg_ms).toBeCloseTo(80); + expect(rows[1].count).toBe(2); + expect(rows[1].avg_ms).toBeCloseTo(2); + expect(rows[1].total_ms).toBe(4); + } finally { + db.close(); + } + }); + + it('--branch filter applies', () => { + const db = freshDb(); + try { + seedRecording(db, { + name: 'a', + branch: 'main', + queries: [{ event_id: 1, sql: 'SELECT 1', elapsed_ms: 1 }], + }); + seedRecording(db, { + name: 'b', + branch: 'feature', + queries: [{ event_id: 1, sql: 'SELECT 2', elapsed_ms: 2 }], + }); + const main = sqlHotspots(db, { branch: 'main' }); + expect(main).toHaveLength(1); + expect(main[0].sql_text).toBe('SELECT 1'); + } finally { + db.close(); + } + }); +}); diff --git a/packages/cli/tests/unit/cmds/query/queries/tree.spec.ts b/packages/cli/tests/unit/cmds/query/queries/tree.spec.ts index dd96048ea8..d68dd806d6 100644 --- a/packages/cli/tests/unit/cmds/query/queries/tree.spec.ts +++ b/packages/cli/tests/unit/cmds/query/queries/tree.spec.ts @@ -46,8 +46,8 @@ function seed( // event_id 2: controller call, parent = 1 let coId = 1; db.prepare( - `INSERT OR IGNORE INTO code_objects (fqid, defined_class, method_id) - VALUES ('app/OrdersController#create', 'OrdersController', 'create')` + `INSERT OR IGNORE INTO code_objects (fqid, package, classes, leaf_class, method, is_static) + VALUES ('app/OrdersController#create', 'app', '["OrdersController"]', 'OrdersController', 'create', 0)` ).run(); coId = (db .prepare(`SELECT id FROM code_objects WHERE fqid = 'app/OrdersController#create'`) From 12d91faf2869d281c7e6ad7ba6186e06bb9b5aeb Mon Sep 17 00:00:00 2001 From: 
kgilpin Date: Sat, 2 May 2026 14:42:29 -0400 Subject: [PATCH 07/30] feat(cli): per-find-type flag validation `find <type>` previously accepted every filter flag silently and just ignored the ones that didn't apply to the chosen type, which is a foot-gun (--class against find appmaps returned all appmaps as if no filter were given). Validation rules: - Universal flags work everywhere: --branch, --commit, --since, --until, --appmap (recording-scope) plus --limit, --offset, --json (output). - Per-type filter flags are accepted only on types where they make sense; using one elsewhere errors with a clear message: find appmaps: rejects class, method, label, duration, table, exception find requests: rejects class, method, label, table, exception find queries: rejects label, exception find calls: rejects table, exception find exceptions: rejects class, method, label, duration, table Eight tests in tests/unit/cmds/query/verbs/find.spec.ts pin each rule. Co-Authored-By: Claude Opus 4.7 (1M context) --- packages/cli/src/cmds/query/verbs/find.ts | 33 ++++++++- .../tests/unit/cmds/query/verbs/find.spec.ts | 69 +++++++++++++++++++ 2 files changed, 100 insertions(+), 2 deletions(-) create mode 100644 packages/cli/tests/unit/cmds/query/verbs/find.spec.ts diff --git a/packages/cli/src/cmds/query/verbs/find.ts b/packages/cli/src/cmds/query/verbs/find.ts index 77b1ea9fff..4275c35f4f 100644 --- a/packages/cli/src/cmds/query/verbs/find.ts +++ b/packages/cli/src/cmds/query/verbs/find.ts @@ -49,11 +49,42 @@ export const builder = (args: yargs.Argv) => { type Argv = ReturnType extends yargs.Argv ? T : never; +// Per-type flag rejection list. Universal flags (--branch, --commit, +// --since, --until, --appmap) are accepted everywhere, as are output flags +// (--limit, --offset, --json). Other filter flags are accepted only on +// types where they make sense; flagging them on the wrong type is an +// error rather than a silent no-op.
+const REJECTED_FLAGS: Record = { + appmaps: ['class', 'method', 'label', 'duration', 'table', 'exception'], + requests: ['class', 'method', 'label', 'table', 'exception'], + queries: ['label', 'exception'], + calls: ['table', 'exception'], + exceptions: ['class', 'method', 'label', 'duration', 'table'], +}; + +// Exported for tests. Operates on a generic flag map so unit tests don't +// need a full yargs argv. +export function validateFlags(type: FindType, flags: Record): void { + const used: string[] = []; + for (const flag of REJECTED_FLAGS[type]) { + if (flags[flag] != null) used.push(`--${flag}`); + } + if (used.length > 0) { + const verb = used.length === 1 ? 'is' : 'are'; + throw new Error( + `find ${type}: ${used.join(', ')} ${verb} not supported for this type` + ); + } +} + export const handler = async (argv: yargs.ArgumentsCamelCase): Promise => { verbose(argv.verbose as boolean | undefined); handleWorkingDirectory(argv.directory); const appmapDir = argv.queryDb ? '' : await locateAppMapDir(argv.appmapDir); + const type = argv.type as FindType; + validateFlags(type, argv); + const filter: FindFilter = {}; if (argv.route) filter.route = argv.route; if (argv.class) filter.className = argv.class; @@ -71,8 +102,6 @@ export const handler = async (argv: yargs.ArgumentsCamelCase): Promise { + it('accepts the universal flags on every type', () => { + const universal = { + branch: 'main', + commit: 'abc', + since: '2026-01-01', + until: '2026-12-31', + appmap: 'demo', + // output flags are also always allowed + limit: 5, + offset: 0, + json: true, + }; + for (const type of ['appmaps', 'requests', 'queries', 'calls', 'exceptions'] as const) { + expect(() => validateFlags(type, universal)).not.toThrow(); + } + }); + + it('rejects --class on find appmaps', () => { + expect(() => validateFlags('appmaps', { class: 'Foo' })).toThrow(/--class/); + expect(() => validateFlags('appmaps', { class: 'Foo' })).toThrow(/find appmaps/); + }); + + it('accepts --route and --status 
on find appmaps', () => { + expect(() => validateFlags('appmaps', { route: '/x', status: '500' })).not.toThrow(); + }); + + it('rejects --table on find calls and find exceptions', () => { + expect(() => validateFlags('calls', { table: 'orders' })).toThrow(/--table/); + expect(() => validateFlags('exceptions', { table: 'orders' })).toThrow(/--table/); + }); + + it('rejects --exception except on find exceptions', () => { + expect(() => validateFlags('exceptions', { exception: 'IntegrityError' })).not.toThrow(); + expect(() => validateFlags('calls', { exception: 'IntegrityError' })).toThrow(/--exception/); + expect(() => validateFlags('queries', { exception: 'IntegrityError' })).toThrow(/--exception/); + }); + + it('rejects --label everywhere except find calls', () => { + expect(() => validateFlags('calls', { label: 'log' })).not.toThrow(); + expect(() => validateFlags('appmaps', { label: 'log' })).toThrow(/--label/); + expect(() => validateFlags('requests', { label: 'log' })).toThrow(/--label/); + expect(() => validateFlags('queries', { label: 'log' })).toThrow(/--label/); + expect(() => validateFlags('exceptions', { label: 'log' })).toThrow(/--label/); + }); + + it('rejects --duration on appmaps and exceptions', () => { + expect(() => validateFlags('appmaps', { duration: '>1s' })).toThrow(/--duration/); + expect(() => validateFlags('exceptions', { duration: '>1s' })).toThrow(/--duration/); + // calls/queries/requests all accept duration + expect(() => validateFlags('calls', { duration: '>1s' })).not.toThrow(); + expect(() => validateFlags('queries', { duration: '>1s' })).not.toThrow(); + expect(() => validateFlags('requests', { duration: '>1s' })).not.toThrow(); + }); + + it('lists multiple incompatible flags in a single error', () => { + expect(() => + validateFlags('appmaps', { class: 'Foo', table: 'orders' }) + ).toThrow(/--class.*--table|--table.*--class/); + }); + + it('ignores undefined / null flag values', () => { + expect(() => + validateFlags('appmaps', { 
class: undefined, table: null }) + ).not.toThrow(); + }); +}); From 14b300bebbcca0ef2950e7ee449404588c59d5f7 Mon Sep 17 00:00:00 2001 From: kgilpin Date: Sat, 2 May 2026 14:44:28 -0400 Subject: [PATCH 08/30] fix(query/tree): include exception source location in tree/summary output MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ExceptionNode was built without path/lineno even though the importer stores them and findExceptions exposes them. The tree view of an exception was strictly less informative than \`find exceptions\` — just the class and message, no source location. - ExceptionNode now carries path/lineno; tree() reads them from the exceptions table. - TreeSummary.exceptions exposes the same fields. - renderTree / renderFlat / renderSummary append " @ <path>:<lineno>" to the EXC line when a path is present, matching what find exceptions shows. Test pinned in tree.spec.ts. Co-Authored-By: Claude Opus 4.7 (1M context) --- packages/cli/src/cmds/query/lib/treeRender.ts | 18 ++++++++++++----- packages/cli/src/cmds/query/queries/tree.ts | 18 +++++++++++++++-- .../unit/cmds/query/queries/tree.spec.ts | 20 +++++++++++++++++-- 3 files changed, 47 insertions(+), 9 deletions(-) diff --git a/packages/cli/src/cmds/query/lib/treeRender.ts b/packages/cli/src/cmds/query/lib/treeRender.ts index c087e1661d..53084c5ff3 100644 --- a/packages/cli/src/cmds/query/lib/treeRender.ts +++ b/packages/cli/src/cmds/query/lib/treeRender.ts @@ -1,4 +1,5 @@ import { + ExceptionNode, FunctionNode, HttpClientNode, HttpServerNode, @@ -33,12 +34,15 @@ function renderTreeLine(node: TreeNode): string { case 'sql': return `${indent}SQL ${renderSql(node)}`; case 'exception': - return `${indent}EXC ${node.exception_class}${ - node.message ? `: ${node.message}` : '' - }`; + return `${indent}EXC ${renderException(node)}`; } } +function renderException(n: ExceptionNode): string { + const where = n.path ? ` @ ${n.path}${n.lineno != null ?
`:${n.lineno}` : ''}` : ''; + return `${n.exception_class}${n.message ? `: ${n.message}` : ''}${where}`; +} + function renderHttpServer(n: HttpServerNode): string { return `${n.method} ${n.route} → HTTP ${n.status_code} ${bracket(n.elapsed_ms)}`.trim(); } @@ -81,7 +85,7 @@ export function renderFlat(nodes: readonly TreeNode[]): string { case 'function': return `CALL ${renderFunction(n)}`; case 'exception': - return `EXC ${n.exception_class}${n.message ? `: ${n.message}` : ''}`; + return `EXC ${renderException(n)}`; } }) .join('\n'); @@ -117,7 +121,11 @@ export function renderSummary(s: TreeSummary): string { } for (const e of s.exceptions) { - rows.push(['EXCEPTION', e.exception_class + (e.message ? `: ${e.message}` : '')]); + const where = e.path ? ` @ ${e.path}${e.lineno != null ? `:${e.lineno}` : ''}` : ''; + rows.push([ + 'EXCEPTION', + e.exception_class + (e.message ? `: ${e.message}` : '') + where, + ]); } if (s.labels.length > 0) { diff --git a/packages/cli/src/cmds/query/queries/tree.ts b/packages/cli/src/cmds/query/queries/tree.ts index 537abbe80e..6ac6125bd4 100644 --- a/packages/cli/src/cmds/query/queries/tree.ts +++ b/packages/cli/src/cmds/query/queries/tree.ts @@ -49,6 +49,8 @@ export interface ExceptionNode extends BaseNode { kind: 'exception'; exception_class: string; message: string | null; + path: string | null; + lineno: number | null; } export type TreeNode = @@ -208,7 +210,8 @@ export function tree(db: sqlite3.Database, appmapRef: string): TreeNode[] { for (const r of db .prepare( - `SELECT event_id, parent_event_id, thread_id, exception_class, message + `SELECT event_id, parent_event_id, thread_id, exception_class, message, + path, lineno FROM exceptions WHERE appmap_id = ?` ) .all(am.id) as Array<{ @@ -217,6 +220,8 @@ export function tree(db: sqlite3.Database, appmapRef: string): TreeNode[] { thread_id: number | null; exception_class: string; message: string | null; + path: string | null; + lineno: number | null; }>) { events.push({ kind: 
'exception', @@ -226,6 +231,8 @@ export function tree(db: sqlite3.Database, appmapRef: string): TreeNode[] { depth: 0, exception_class: r.exception_class, message: r.message, + path: r.path, + lineno: r.lineno, }); } @@ -253,7 +260,12 @@ export interface TreeSummary { entry: { method: string; route: string; status_code: number; elapsed_ms: number | null } | null; sql: { count: number; total_ms: number }; http_client: { count: number; total_ms: number }; - exceptions: Array<{ exception_class: string; message: string | null }>; + exceptions: Array<{ + exception_class: string; + message: string | null; + path: string | null; + lineno: number | null; + }>; labels: Array<{ label: string; count: number }>; } @@ -299,6 +311,8 @@ export function treeSummary(db: sqlite3.Database, appmapRef: string): TreeSummar exceptions: excs.map((e) => ({ exception_class: e.exception_class, message: e.message, + path: e.path, + lineno: e.lineno, })), labels: labelRows.map((r) => ({ label: r.label, count: r.n })), }; diff --git a/packages/cli/tests/unit/cmds/query/queries/tree.spec.ts b/packages/cli/tests/unit/cmds/query/queries/tree.spec.ts index d68dd806d6..11743aff89 100644 --- a/packages/cli/tests/unit/cmds/query/queries/tree.spec.ts +++ b/packages/cli/tests/unit/cmds/query/queries/tree.spec.ts @@ -73,8 +73,9 @@ function seed( // event_id 4: exception, owned by call 2 (carried by its return event) db.prepare( `INSERT INTO exceptions (appmap_id, event_id, parent_event_id, thread_id, - exception_class, message) - VALUES (?, 2, 1, 1, 'IntegrityError', 'duplicate key')` + exception_class, message, path, lineno) + VALUES (?, 2, 1, 1, 'IntegrityError', 'duplicate key', + 'app/models/order.rb', 42)` ).run(id); if (opts.addOutbound) { @@ -160,6 +161,21 @@ describe('tree', () => { } }); + it('exception nodes carry path and lineno', () => { + const db = freshDb(); + try { + seed(db); + const exc = tree(db, 'orders_create_42').find((n) => n.kind === 'exception'); + expect(exc).toBeDefined(); + if 
(exc?.kind === 'exception') { + expect(exc.path).toBe('app/models/order.rb'); + expect(exc.lineno).toBe(42); + } + } finally { + db.close(); + } + }); + it('joins fqid into function nodes', () => { const db = freshDb(); try { From e2ea96cacf9df0bf4e8d6a09e23e82547a0c2183 Mon Sep 17 00:00:00 2001 From: kgilpin Date: Sat, 2 May 2026 14:50:29 -0400 Subject: [PATCH 09/30] fix(query): make verb builders generic so CommandModule inference holds MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit CI surfaced a TS2345 against query.ts: Argument of type 'typeof import("…/verbs/endpoints")' is not assignable to parameter of type 'CommandModule<{}, any>'. The verbs declared their builder as `(args: yargs.Argv) => …` (no generic), which doesn't match `CommandModule.builder?: (args: Argv) => Argv`. The existing rpc command works because its builder is generic: `<T>(args: yargs.Argv<T>) => …`. Match that pattern across all four query verbs and the parent query command. No runtime behavior change.
Co-Authored-By: Claude Opus 4.7 (1M context) --- packages/cli/src/cmds/query/query.ts | 2 +- packages/cli/src/cmds/query/verbs/endpoints.ts | 2 +- packages/cli/src/cmds/query/verbs/find.ts | 2 +- packages/cli/src/cmds/query/verbs/hotspots.ts | 2 +- packages/cli/src/cmds/query/verbs/tree.ts | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/cli/src/cmds/query/query.ts b/packages/cli/src/cmds/query/query.ts index 03056098ce..cd1779a51c 100644 --- a/packages/cli/src/cmds/query/query.ts +++ b/packages/cli/src/cmds/query/query.ts @@ -8,7 +8,7 @@ import * as TreeVerb from './verbs/tree'; export const command = 'query'; export const describe = 'Query AppMap recordings (endpoints, find, tree, related, hotspots, compare)'; -export const builder = (args: yargs.Argv) => +export const builder = (args: yargs.Argv) => args .command(EndpointsVerb) .command(FindVerb) diff --git a/packages/cli/src/cmds/query/verbs/endpoints.ts b/packages/cli/src/cmds/query/verbs/endpoints.ts index d938378846..e69eec9611 100644 --- a/packages/cli/src/cmds/query/verbs/endpoints.ts +++ b/packages/cli/src/cmds/query/verbs/endpoints.ts @@ -16,7 +16,7 @@ import { formatCount, formatMs, formatPct, formatTable } from '../lib/format'; export const command = 'endpoints'; export const describe = 'Per-route summary table (orient verb)'; -export const builder = (args: yargs.Argv) => { +export const builder = (args: yargs.Argv) => { return args .option('directory', { type: 'string', alias: 'd', describe: 'program working directory' }) .option('appmap-dir', { type: 'string', describe: 'directory of recordings' }) diff --git a/packages/cli/src/cmds/query/verbs/find.ts b/packages/cli/src/cmds/query/verbs/find.ts index 4275c35f4f..d645a94ac7 100644 --- a/packages/cli/src/cmds/query/verbs/find.ts +++ b/packages/cli/src/cmds/query/verbs/find.ts @@ -23,7 +23,7 @@ const TYPES: readonly FindType[] = ['appmaps', 'requests', 'queries', 'calls', ' export const command = 'find '; export const 
describe = 'Row-level search across recordings'; -export const builder = (args: yargs.Argv) => { +export const builder = (args: yargs.Argv) => { return args .positional('type', { type: 'string', choices: TYPES }) .option('directory', { type: 'string', alias: 'd' }) diff --git a/packages/cli/src/cmds/query/verbs/hotspots.ts b/packages/cli/src/cmds/query/verbs/hotspots.ts index 992c42e2bc..5ec0bbf63d 100644 --- a/packages/cli/src/cmds/query/verbs/hotspots.ts +++ b/packages/cli/src/cmds/query/verbs/hotspots.ts @@ -18,7 +18,7 @@ import { formatCount, formatMs, formatTable } from '../lib/format'; export const command = 'hotspots'; export const describe = 'Rank functions or SQL queries by cumulative elapsed'; -export const builder = (args: yargs.Argv) => { +export const builder = (args: yargs.Argv) => { return args .option('directory', { type: 'string', alias: 'd' }) .option('appmap-dir', { type: 'string' }) diff --git a/packages/cli/src/cmds/query/verbs/tree.ts b/packages/cli/src/cmds/query/verbs/tree.ts index c72658dad2..ecd9ab78d5 100644 --- a/packages/cli/src/cmds/query/verbs/tree.ts +++ b/packages/cli/src/cmds/query/verbs/tree.ts @@ -11,7 +11,7 @@ import { renderFlat, renderSummary, renderTree } from '../lib/treeRender'; export const command = 'tree '; export const describe = 'Render the call tree of one recording'; -export const builder = (args: yargs.Argv) => { +export const builder = (args: yargs.Argv) => { return args .positional('appmap', { type: 'string', describe: 'appmap name (or basename of source path)' }) .option('directory', { type: 'string', alias: 'd' }) From 6570cd70771382432ccfa37e9ed9763a74232af3 Mon Sep 17 00:00:00 2001 From: kgilpin Date: Sat, 2 May 2026 15:00:21 -0400 Subject: [PATCH 10/30] fix(query/find): determinism, --duration on appmaps, basename matching, hints MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Six small but visible fixes from review: 1. 
findAppmaps's route-filtered branch was non-deterministic — GROUP BY a.id with non-aggregated h.* lets SQLite pick any row. Replaced with a correlated subquery that picks the http_request with the smallest event_id matching --route / --status. Tested against a recording with two POST /orders entries (event_id=1 elapsed=100, event_id=2 elapsed=999); the row consistently shows 100. 2. find appmaps --duration was silently rejected. Now applies on a.elapsed_ms (the recording-level total). 3. parseRoute matched HTTP methods case-sensitively. Now matches case-insensitively and normalizes to upper-case so `--route "post /orders"` works. 4. Appmap-ref matching (find / hotspots / tree) used `source_path LIKE '%/<name>.appmap.json'` — broken on Windows separators and on stores that don't end in .appmap.json. Replaced with a GLOB clause that handles either separator and matches with or without the .appmap.json suffix; factored as appmapRefClause() so all three call sites use it. 5. find requests rejects --method (correct — there's no function method on a server request) but the error didn't help users find the right flag. Added a hint: find requests: --method is not supported for this type --method: to filter by HTTP method, use --route "METHOD /path" 6. New tests covering each of the above (case-insensitive method, deterministic findAppmaps, --duration on appmaps, basename-match with Unix/Windows/no-extension paths, and the --method hint).
Co-Authored-By: Claude Opus 4.7 (1M context) --- packages/cli/src/cmds/query/lib/scope.ts | 39 +++++++-- packages/cli/src/cmds/query/queries/find.ts | 72 ++++++++-------- packages/cli/src/cmds/query/queries/tree.ts | 11 ++- packages/cli/src/cmds/query/verbs/find.ts | 29 +++++-- .../tests/unit/cmds/query/lib/scope.spec.ts | 84 +++++++++++++++++++ .../unit/cmds/query/queries/find.spec.ts | 43 ++++++++++ .../tests/unit/cmds/query/verbs/find.spec.ts | 12 ++- 7 files changed, 227 insertions(+), 63 deletions(-) diff --git a/packages/cli/src/cmds/query/lib/scope.ts b/packages/cli/src/cmds/query/lib/scope.ts index 9caa9a4e92..a8a343d3d1 100644 --- a/packages/cli/src/cmds/query/lib/scope.ts +++ b/packages/cli/src/cmds/query/lib/scope.ts @@ -23,14 +23,32 @@ export interface RouteSpec { path: string; } -const HTTP_METHODS = /^(GET|POST|PUT|PATCH|DELETE|HEAD|OPTIONS)\s+(.+)$/; +const HTTP_METHODS = /^(GET|POST|PUT|PATCH|DELETE|HEAD|OPTIONS)\s+(.+)$/i; export function parseRoute(s: string): RouteSpec { const m = s.match(HTTP_METHODS); - if (m) return { method: m[1], path: m[2] }; + if (m) return { method: m[1].toUpperCase(), path: m[2] }; return { path: s }; } +// Returns SQL clauses (and params) that match an --appmap reference against +// the appmaps table, accepting any of: +// - exact appmap.name +// - source_path ending in `.appmap.json` (Unix or Windows sep) +// - source_path ending in `` (non-`.appmap.json` stores) +// Used by find / tree / hotspots so the lookup behaves the same everywhere. +export function appmapRefClause( + ref: string, + alias: string +): { sql: string; params: string[] } { + return { + sql: `(${alias}.name = ? + OR ${alias}.source_path GLOB '*[/\\\\]' || ? 
|| '.appmap.json' + OR ${alias}.source_path GLOB '*[/\\\\]' || ?)`, + params: [ref, ref, ref], + }; +} + export interface Clauses { where: string[]; params: (string | number)[]; @@ -57,28 +75,31 @@ export function appmapWhere(filter: RecordingScope, alias: string): Clauses { params.push(filter.until); } if (filter.appmap) { - where.push(`(${alias}.name = ? OR ${alias}.source_path LIKE ?)`); - params.push(filter.appmap, `%/${filter.appmap}.appmap.json`); + const ref = appmapRefClause(filter.appmap, alias); + where.push(ref.sql); + params.push(...ref.params); } return { where, params }; } // HTTP-level filters that scope to "the recording must contain ≥1 matching -// server request." Used as a subquery for non-request finds. -export function httpScopeClauses(filter: RecordingScope): Clauses { +// server request." Used as a subquery for non-request finds. The alias +// defaults to `h`; override when emitting clauses inside a nested subquery +// where the outer alias is taken. +export function httpScopeClauses(filter: RecordingScope, alias = 'h'): Clauses { const where: string[] = []; const params: (string | number)[] = []; if (filter.route) { const route = parseRoute(filter.route); - where.push(`COALESCE(h.normalized_path, h.path) = ?`); + where.push(`COALESCE(${alias}.normalized_path, ${alias}.path) = ?`); params.push(route.path); if (route.method) { - where.push(`h.method = ?`); + where.push(`${alias}.method = ?`); params.push(route.method); } } if (filter.status) { - where.push(`h.status_code ${filter.status.op} ?`); + where.push(`${alias}.status_code ${filter.status.op} ?`); params.push(filter.status.value); } return { where, params }; diff --git a/packages/cli/src/cmds/query/queries/find.ts b/packages/cli/src/cmds/query/queries/find.ts index 75241cb541..f33050545b 100644 --- a/packages/cli/src/cmds/query/queries/find.ts +++ b/packages/cli/src/cmds/query/queries/find.ts @@ -117,49 +117,43 @@ function appendLimitOffset(sql: string, filter: FindFilter, params: 
(string | nu export function findAppmaps(db: sqlite3.Database, filter: FindFilter): FindAppmapRow[] { const a = appmapWhere(filter, 'a'); - const h = httpScopeClauses(filter); + const h = httpScopeClauses(filter, 'h2'); + const requireHttpMatch = h.where.length > 0; - let sql: string; - const params: (string | number)[] = []; + // Pick a deterministic "sample" request per appmap via a correlated + // subquery: the http_request with the smallest event_id among those + // matching --route / --status (or any request if no http filter). This + // avoids the non-determinism of GROUP BY a.id with non-aggregated h.*. + const innerHttpFilter = requireHttpMatch ? ` AND ${h.where.join(' AND ')}` : ''; - // Show one row per appmap, joining its first matching (or any) HTTP request. - if (h.where.length > 0) { - const where = [...a.where, ...h.where].filter(Boolean).join(' AND '); - sql = ` - SELECT a.name AS appmap_name, - COALESCE(h.normalized_path, h.path) AS route, - h.status_code AS status_code, - h.elapsed_ms AS elapsed_ms, - a.sql_query_count AS sql_count, - a.git_branch AS branch, - a.timestamp AS timestamp - FROM appmaps a - JOIN http_requests h ON h.appmap_id = a.id - ${where ? `WHERE ${where}` : ''} - GROUP BY a.id - ORDER BY a.timestamp, a.name - `; - params.push(...a.params, ...h.params); - } else { - const where = a.where.join(' AND '); - sql = ` - SELECT a.name AS appmap_name, - (SELECT COALESCE(h.normalized_path, h.path) - FROM http_requests h WHERE h.appmap_id = a.id - ORDER BY h.event_id LIMIT 1) AS route, - (SELECT h.status_code FROM http_requests h - WHERE h.appmap_id = a.id ORDER BY h.event_id LIMIT 1) AS status_code, - a.elapsed_ms, - a.sql_query_count AS sql_count, - a.git_branch AS branch, - a.timestamp AS timestamp - FROM appmaps a - ${where ? 
`WHERE ${where}` : ''} - ORDER BY a.timestamp, a.name - `; - params.push(...a.params); + const whereParts: string[] = [...a.where]; + if (requireHttpMatch) whereParts.push('h.id IS NOT NULL'); + if (filter.duration) { + whereParts.push(`a.elapsed_ms ${filter.duration.op} ?`); } + const whereSql = whereParts.length > 0 ? `WHERE ${whereParts.join(' AND ')}` : ''; + + // Param order: inner subquery http filters → outer WHERE (appmap, then duration). + const params: (string | number)[] = [...h.params, ...a.params]; + if (filter.duration) params.push(filter.duration.value); + let sql = ` + SELECT a.name AS appmap_name, + COALESCE(h.normalized_path, h.path) AS route, + h.status_code AS status_code, + COALESCE(h.elapsed_ms, a.elapsed_ms) AS elapsed_ms, + a.sql_query_count AS sql_count, + a.git_branch AS branch, + a.timestamp AS timestamp + FROM appmaps a + LEFT JOIN http_requests h ON h.id = ( + SELECT h2.id FROM http_requests h2 + WHERE h2.appmap_id = a.id${innerHttpFilter} + ORDER BY h2.event_id LIMIT 1 + ) + ${whereSql} + ORDER BY a.timestamp, a.name + `; sql = appendLimitOffset(sql, filter, params); return db.prepare(sql).all(...params) as FindAppmapRow[]; } diff --git a/packages/cli/src/cmds/query/queries/tree.ts b/packages/cli/src/cmds/query/queries/tree.ts index 6ac6125bd4..92bdc469cd 100644 --- a/packages/cli/src/cmds/query/queries/tree.ts +++ b/packages/cli/src/cmds/query/queries/tree.ts @@ -1,5 +1,7 @@ import sqlite3 from 'better-sqlite3'; +import { appmapRefClause } from '../lib/scope'; + // Discriminated union of tree nodes. Each node corresponds to one row in // one of the per-event tables; `depth` is computed from parent_event_id // chains within the same recording. @@ -70,13 +72,14 @@ export interface AppmapInfo { // the row in `appmaps`. Throws on miss or ambiguity (returns candidates in // the message so the user can disambiguate). 
export function resolveAppmap(db: sqlite3.Database, ref: string): AppmapInfo { + const m = appmapRefClause(ref, 'a'); const rows = db .prepare( - `SELECT id, name, source_path FROM appmaps - WHERE name = ? OR source_path LIKE ? - ORDER BY source_path` + `SELECT a.id, a.name, a.source_path FROM appmaps a + WHERE ${m.sql} + ORDER BY a.source_path` ) - .all(ref, `%/${ref}.appmap.json`) as AppmapInfo[]; + .all(...m.params) as AppmapInfo[]; if (rows.length === 0) throw new Error(`appmap not found: ${ref}`); if (rows.length > 1) { const list = rows.map((r) => ` - ${r.source_path}`).join('\n'); diff --git a/packages/cli/src/cmds/query/verbs/find.ts b/packages/cli/src/cmds/query/verbs/find.ts index d645a94ac7..6d44d410c7 100644 --- a/packages/cli/src/cmds/query/verbs/find.ts +++ b/packages/cli/src/cmds/query/verbs/find.ts @@ -55,26 +55,39 @@ type Argv = ReturnType extends yargs.Argv ? T : never; // types where they make sense; flagging them on the wrong type is an // error rather than a silent no-op. const REJECTED_FLAGS: Record = { - appmaps: ['class', 'method', 'label', 'duration', 'table', 'exception'], + appmaps: ['class', 'method', 'label', 'table', 'exception'], requests: ['class', 'method', 'label', 'table', 'exception'], queries: ['label', 'exception'], calls: ['table', 'exception'], exceptions: ['class', 'method', 'label', 'duration', 'table'], }; +// Per-flag hints, attached to error messages when a rejected flag is used. +// Useful for nudging users toward the right flag (e.g., HTTP method +// belongs in --route, not --method, which is a function-method-name flag). +const REJECTED_HINTS: Partial>>> = { + requests: { + method: 'to filter by HTTP method, use --route "METHOD /path"', + }, +}; + // Exported for tests. Operates on a generic flag map so unit tests don't // need a full yargs argv. 
export function validateFlags(type: FindType, flags: Record): void { const used: string[] = []; + const hints: string[] = []; for (const flag of REJECTED_FLAGS[type]) { - if (flags[flag] != null) used.push(`--${flag}`); - } - if (used.length > 0) { - const verb = used.length === 1 ? 'is' : 'are'; - throw new Error( - `find ${type}: ${used.join(', ')} ${verb} not supported for this type` - ); + if (flags[flag] != null) { + used.push(`--${flag}`); + const hint = REJECTED_HINTS[type]?.[flag]; + if (hint) hints.push(` --${flag}: ${hint}`); + } } + if (used.length === 0) return; + const verb = used.length === 1 ? 'is' : 'are'; + let message = `find ${type}: ${used.join(', ')} ${verb} not supported for this type`; + if (hints.length > 0) message += `\n${hints.join('\n')}`; + throw new Error(message); } export const handler = async (argv: yargs.ArgumentsCamelCase): Promise => { diff --git a/packages/cli/tests/unit/cmds/query/lib/scope.spec.ts b/packages/cli/tests/unit/cmds/query/lib/scope.spec.ts index 00aa9f94e0..97cd4b3e33 100644 --- a/packages/cli/tests/unit/cmds/query/lib/scope.spec.ts +++ b/packages/cli/tests/unit/cmds/query/lib/scope.spec.ts @@ -2,15 +2,99 @@ import sqlite3 from 'better-sqlite3'; import { openQueryDb } from '../../../../../src/cmds/query/db/openQueryDb'; import { + appmapRefClause, classFilterClauses, methodFilterClauses, parseClassRef, + parseRoute, } from '../../../../../src/cmds/query/lib/scope'; function freshDb(): sqlite3.Database { return openQueryDb('/tmp/ignored', ':memory:').db; } +describe('parseRoute', () => { + it('parses an HTTP method case-insensitively and uppercases it', () => { + expect(parseRoute('post /orders')).toEqual({ method: 'POST', path: '/orders' }); + expect(parseRoute('Get /reports')).toEqual({ method: 'GET', path: '/reports' }); + expect(parseRoute('DELETE /orders/:id')).toEqual({ + method: 'DELETE', + path: '/orders/:id', + }); + }); + + it('treats an unrecognised prefix as part of the path', () => { + 
expect(parseRoute('FOO /bar')).toEqual({ path: 'FOO /bar' }); + expect(parseRoute('/orders')).toEqual({ path: '/orders' }); + }); +}); + +describe('appmapRefClause (basename matching)', () => { + it('matches Unix-style source_path with .appmap.json suffix', () => { + const db = freshDb(); + try { + db.prepare( + `INSERT INTO appmaps (name, source_path) VALUES ('rec1', '/tmp/path/rec1.appmap.json')` + ).run(); + const m = appmapRefClause('rec1', 'a'); + const row = db + .prepare(`SELECT a.name FROM appmaps a WHERE ${m.sql}`) + .get(...m.params) as { name: string } | undefined; + expect(row?.name).toBe('rec1'); + } finally { + db.close(); + } + }); + + it('matches Windows-style source_path with backslash separator', () => { + const db = freshDb(); + try { + db.prepare( + `INSERT INTO appmaps (name, source_path) VALUES ('rec1', 'C:\\Users\\me\\rec1.appmap.json')` + ).run(); + const m = appmapRefClause('rec1', 'a'); + const row = db + .prepare(`SELECT a.name FROM appmaps a WHERE ${m.sql}`) + .get(...m.params) as { name: string } | undefined; + expect(row?.name).toBe('rec1'); + } finally { + db.close(); + } + }); + + it('matches source_path without the .appmap.json suffix', () => { + const db = freshDb(); + try { + db.prepare( + `INSERT INTO appmaps (name, source_path) VALUES ('odd', '/store/abc/odd')` + ).run(); + const m = appmapRefClause('odd', 'a'); + const row = db + .prepare(`SELECT a.name FROM appmaps a WHERE ${m.sql}`) + .get(...m.params) as { name: string } | undefined; + expect(row?.name).toBe('odd'); + } finally { + db.close(); + } + }); + + it('matches by appmap.name when source_path differs', () => { + const db = freshDb(); + try { + db.prepare( + `INSERT INTO appmaps (name, source_path) VALUES ('Friendly Name', '/x/foo.appmap.json')` + ).run(); + const m = appmapRefClause('Friendly Name', 'a'); + const row = db + .prepare(`SELECT a.name FROM appmaps a WHERE ${m.sql}`) + .get(...m.params) as { name: string } | undefined; + expect(row?.name).toBe('Friendly 
Name'); + } finally { + db.close(); + } + }); +}); + describe('parseClassRef', () => { it('short form is just a class', () => { expect(parseClassRef('UserRepository')).toEqual({ class: 'UserRepository' }); diff --git a/packages/cli/tests/unit/cmds/query/queries/find.spec.ts b/packages/cli/tests/unit/cmds/query/queries/find.spec.ts index a466563450..548473e2ea 100644 --- a/packages/cli/tests/unit/cmds/query/queries/find.spec.ts +++ b/packages/cli/tests/unit/cmds/query/queries/find.spec.ts @@ -228,6 +228,49 @@ describe('findAppmaps', () => { } }); + it('is deterministic when route filtering — picks the lowest event_id matching request', () => { + const db = freshDb(); + try { + // Two POST /orders requests in one recording with different elapsed. + // The query must consistently pick event_id=1 (the smaller). + seed(db, [ + { + name: 'a', + requests: [ + { event_id: 1, method: 'POST', path: '/orders', status: 500, elapsed_ms: 100 }, + { event_id: 2, method: 'POST', path: '/orders', status: 500, elapsed_ms: 999 }, + ], + }, + ]); + const rows = findAppmaps(db, { route: 'POST /orders' }); + expect(rows).toHaveLength(1); + expect(rows[0].elapsed_ms).toBe(100); // event_id=1 wins, not 2 + } finally { + db.close(); + } + }); + + it('--duration filters on the appmap row (a.elapsed_ms)', () => { + const db = freshDb(); + try { + seed(db, [ + { + name: 'fast', + requests: [{ event_id: 1, method: 'GET', path: '/x', status: 200, elapsed_ms: 50 }], + }, + { + name: 'slow', + requests: [{ event_id: 1, method: 'GET', path: '/x', status: 200, elapsed_ms: 5000 }], + }, + ]); + const rows = findAppmaps(db, { duration: { op: '>', value: 1000 } }); + expect(rows).toHaveLength(1); + expect(rows[0].appmap_name).toBe('slow'); + } finally { + db.close(); + } + }); + it('--route narrows to recordings with a matching request and reports that request', () => { const db = freshDb(); try { diff --git a/packages/cli/tests/unit/cmds/query/verbs/find.spec.ts 
b/packages/cli/tests/unit/cmds/query/verbs/find.spec.ts index 96716feda8..c5672fc578 100644 --- a/packages/cli/tests/unit/cmds/query/verbs/find.spec.ts +++ b/packages/cli/tests/unit/cmds/query/verbs/find.spec.ts @@ -46,15 +46,21 @@ describe('find verb flag validation', () => { expect(() => validateFlags('exceptions', { label: 'log' })).toThrow(/--label/); }); - it('rejects --duration on appmaps and exceptions', () => { - expect(() => validateFlags('appmaps', { duration: '>1s' })).toThrow(/--duration/); + it('rejects --duration on exceptions; accepts elsewhere', () => { expect(() => validateFlags('exceptions', { duration: '>1s' })).toThrow(/--duration/); - // calls/queries/requests all accept duration + // appmaps (recording-level), calls, queries, requests all accept duration + expect(() => validateFlags('appmaps', { duration: '>1s' })).not.toThrow(); expect(() => validateFlags('calls', { duration: '>1s' })).not.toThrow(); expect(() => validateFlags('queries', { duration: '>1s' })).not.toThrow(); expect(() => validateFlags('requests', { duration: '>1s' })).not.toThrow(); }); + it('hint message guides --method users on find requests', () => { + expect(() => validateFlags('requests', { method: 'findById' })).toThrow( + /--route/ + ); + }); + it('lists multiple incompatible flags in a single error', () => { expect(() => validateFlags('appmaps', { class: 'Foo', table: 'orders' }) From 7eb22bd53ab078ed397e62a29aee6d5cbc990230 Mon Sep 17 00:00:00 2001 From: kgilpin Date: Sat, 2 May 2026 15:08:38 -0400 Subject: [PATCH 11/30] fix(query): SQL pushdown for endpoints; canonical class match for find queries MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Two larger pieces plus three smaller fixes batched together. 
- endpoints aggregation moves to SQL: a CTE ranks each request within its (method, route) partition by elapsed_ms NULLS LAST; the outer GROUP BY computes count, avg_ms, p95_ms (via the rank that hits ceil(0.95 * measured_count)), and err_pct in a single query, with HAVING for --status and ORDER BY ... DESC NULLS LAST + LIMIT in SQL. No more fetching the full row set into JS just to slice it. - find queries --class / --method now resolve via the parent function_call's code_object (parent_event_id → function_calls → code_objects), not just the denormalized caller_class string. New sqlCallerClassClauses / sqlCallerMethodClauses helpers that mirror classFilterClauses' code-object-first / fallback-to-raw-string pattern, scoped to sql_queries via parent_event_id. The fallback fires when the parent function_call has no code_object link. rawClassMatchClauses removed (replaced). - hotspots --type=sql now rejects --class instead of silently dropping it. Function-mode is the only place --class applies. - tree --format=summary now rejects a non-default --filter — the summary aggregates over all event types so combining with --filter is ambiguous. - findQueries seed in tests now supports parent_event_id, used by the new test that pins code-object matching even when caller_class is wrong. - Stale comment in endpoints.ts referencing the dropped http_requests.timestamp column updated. Tests: 159 passing (+8 new for class/duration/since/commit coverage, parent-link findQueries match, etc.). 
Co-Authored-By: Claude Opus 4.7 (1M context) --- packages/cli/src/cmds/query/lib/scope.ts | 85 +++++++ .../cli/src/cmds/query/queries/endpoints.ts | 184 +++++++-------- packages/cli/src/cmds/query/queries/find.ts | 16 +- packages/cli/src/cmds/query/verbs/hotspots.ts | 7 +- packages/cli/src/cmds/query/verbs/tree.ts | 8 + .../unit/cmds/query/queries/find.spec.ts | 223 +++++++++++++++++- 6 files changed, 422 insertions(+), 101 deletions(-) diff --git a/packages/cli/src/cmds/query/lib/scope.ts b/packages/cli/src/cmds/query/lib/scope.ts index a8a343d3d1..cdefcedd5b 100644 --- a/packages/cli/src/cmds/query/lib/scope.ts +++ b/packages/cli/src/cmds/query/lib/scope.ts @@ -245,6 +245,91 @@ export function methodFilterClauses(input: string, fcAlias: string): Clauses { }; } +// Match a --class input against a sql_query by following its +// parent_event_id back to the function_call that issued the query, then +// looking up that call's code_object — same canonical path as +// classFilterClauses uses for direct function_calls. Falls back to the +// row's denormalized caller_class string when the parent function_call +// has no code_object link. +// +// When the user supplies --package, only the code_object path is used +// (the caller_class string has no package component to match against). 
+export function sqlCallerClassClauses(input: string, qAlias: string): Clauses { + const parts = parseClassRef(input); + if (!parts.class) { + return { where: ['1 = 0'], params: [] }; + } + + const coWhere: string[] = []; + const coParams: (string | number)[] = []; + if (parts.class.includes('::')) { + coWhere.push('classes = ?'); + coParams.push(JSON.stringify(parts.class.split('::'))); + } else { + coWhere.push('leaf_class = ?'); + coParams.push(parts.class); + } + if (parts.package) { + coWhere.push('package = ?'); + coParams.push(parts.package); + } + if (parts.method) { + coWhere.push('method = ?'); + coParams.push(parts.method); + } + + const coClause = `${qAlias}.parent_event_id IN ( + SELECT fc.event_id FROM function_calls fc + WHERE fc.appmap_id = ${qAlias}.appmap_id + AND fc.code_object_id IN ( + SELECT id FROM code_objects WHERE ${coWhere.join(' AND ')} + ) + )`; + + // If the user gave an explicit package, only the code_object path can + // honor it (caller_class has no package column to match). + if (parts.package) { + return { where: [coClause], params: coParams }; + } + + // Fallback: match the row's raw caller_class with suffix-aware logic. + const fbConditions: string[] = [ + `${qAlias}.caller_class = ?`, + `${qAlias}.caller_class LIKE '%.' || ?`, + `${qAlias}.caller_class LIKE '%::' || ?`, + ]; + const fbParams: (string | number)[] = [parts.class, parts.class, parts.class]; + const fbParts: string[] = [`(${fbConditions.join(' OR ')})`]; + if (parts.method) { + fbParts.push(`${qAlias}.caller_method = ?`); + fbParams.push(parts.method); + } + + return { + where: [`(${coClause} OR (${fbParts.join(' AND ')}))`], + params: [...coParams, ...fbParams], + }; +} + +// Match a --method input against a sql_query via its parent function_call's +// code_object.method, with a fallback to caller_method for unlinked +// parents. 
+export function sqlCallerMethodClauses(input: string, qAlias: string): Clauses { + return { + where: [ + `(${qAlias}.parent_event_id IN ( + SELECT fc.event_id FROM function_calls fc + WHERE fc.appmap_id = ${qAlias}.appmap_id + AND fc.code_object_id IN ( + SELECT id FROM code_objects WHERE method = ? + ) + ) + OR ${qAlias}.caller_method = ?)`, + ], + params: [input, input], + }; +} + // Build ".appmap_id IN (SELECT a.id …)" for tables where filtering at // the appmap-id level is the right shape (sql_queries, function_calls, // exceptions, http_client_requests). Returns null if no recording-level diff --git a/packages/cli/src/cmds/query/queries/endpoints.ts b/packages/cli/src/cmds/query/queries/endpoints.ts index b5b2a2a34d..80b8c61e76 100644 --- a/packages/cli/src/cmds/query/queries/endpoints.ts +++ b/packages/cli/src/cmds/query/queries/endpoints.ts @@ -1,6 +1,6 @@ import sqlite3 from 'better-sqlite3'; -import type { NumberFilter } from '../lib/parseFilter'; +import type { Comparator, NumberFilter } from '../lib/parseFilter'; export interface EndpointRow { method: string; @@ -30,13 +30,26 @@ export interface EndpointsFilter { // independent of any --status filter. const ERR_THRESHOLD = 500; -interface RawRow { - method: string; - route: string; - elapsed_ms: number | null; - status_code: number; -} +const SORT_COLUMNS: Record = { + count: 'count', + avg: 'avg_ms', + p95: 'p95_ms', + err: 'err_pct', +}; +const VALID_OPS = new Set(['=', '>=', '<=', '>', '<']); + +// Aggregation runs entirely in SQL. Per route: +// count COUNT(*) over the partition +// avg_ms AVG(elapsed_ms) over non-null values +// p95_ms elapsed_ms at rank ceil(0.95 * measured_count) within +// the partition (computed with a ROW_NUMBER() window). +// err_pct 100 * SUM(status >= 500) / COUNT(*) +// --status acts as a HAVING filter: route is shown iff ≥1 of its rows +// matches; aggregates remain over all rows. 
+// +// SQL injection surface: filter.sort and filter.status.op are validated +// against fixed allow-lists before being interpolated. export function endpoints( db: sqlite3.Database, filter: EndpointsFilter = {} @@ -48,10 +61,9 @@ export function endpoints( where.push('a.git_branch = ?'); params.push(filter.branch); } - // --since/--until filter on the recording's timestamp (a.timestamp) for - // consistency with `find`. The importer copies that value into - // http_requests.timestamp too, but treating it as a recording-level - // attribute makes the dependency on that copy explicit. + // --since/--until filter on the recording's timestamp (a.timestamp) — + // the canonical recording-level attribute. find verbs use the same + // column. if (filter.since) { where.push('a.timestamp >= ?'); params.push(filter.since); @@ -60,94 +72,78 @@ export function endpoints( where.push('a.timestamp <= ?'); params.push(filter.until); } - const whereSql = where.length > 0 ? `WHERE ${where.join(' AND ')}` : ''; - const rows = db - .prepare( - `SELECT - hr.method AS method, - COALESCE(hr.normalized_path, hr.path) AS route, - hr.elapsed_ms AS elapsed_ms, - hr.status_code AS status_code - FROM http_requests hr - JOIN appmaps a ON a.id = hr.appmap_id - ${whereSql}` - ) - .all(...params) as RawRow[]; - - interface Group { - method: string; - route: string; - elapsed: number[]; - err: number; - matched: number; - total: number; - } - const groups = new Map(); - const matchPredicate = (s: number): boolean => { - if (!filter.status) return true; - const { op, value } = filter.status; - return ( - (op === '=' && s === value) || - (op === '>=' && s >= value) || - (op === '<=' && s <= value) || - (op === '>' && s > value) || - (op === '<' && s < value) - ); - }; - - for (const r of rows) { - const key = `${r.method}\t${r.route}`; - let g = groups.get(key); - if (!g) { - g = { method: r.method, route: r.route, elapsed: [], err: 0, matched: 0, total: 0 }; - groups.set(key, g); + let havingSql = 
''; + if (filter.status) { + if (!VALID_OPS.has(filter.status.op)) { + throw new Error(`invalid status op: ${filter.status.op}`); } - g.total += 1; - if (typeof r.elapsed_ms === 'number') g.elapsed.push(r.elapsed_ms); - if (r.status_code >= ERR_THRESHOLD) g.err += 1; - if (matchPredicate(r.status_code)) g.matched += 1; + havingSql = `HAVING SUM(CASE WHEN status_code ${filter.status.op} ? THEN 1 ELSE 0 END) > 0`; + params.push(filter.status.value); } - const result: EndpointRow[] = []; - for (const g of groups.values()) { - if (filter.status && g.matched === 0) continue; - const sorted = [...g.elapsed].sort((a, b) => a - b); - result.push({ - method: g.method, - route: g.route, - count: g.total, - avg_ms: sorted.length === 0 ? null : sorted.reduce((s, v) => s + v, 0) / sorted.length, - p95_ms: percentile(sorted, 0.95), - err_pct: g.total > 0 ? (g.err / g.total) * 100 : 0, - }); + const sortKey = filter.sort ?? 'count'; + if (!(sortKey in SORT_COLUMNS)) { + throw new Error(`invalid sort key: ${sortKey}`); + } + const sortColumn = SORT_COLUMNS[sortKey]; + + let sql = ` + WITH ranked AS ( + SELECT + h.method AS method, + COALESCE(h.normalized_path, h.path) AS route, + h.elapsed_ms AS elapsed_ms, + h.status_code AS status_code, + ROW_NUMBER() OVER ( + PARTITION BY h.method, COALESCE(h.normalized_path, h.path) + ORDER BY h.elapsed_ms NULLS LAST + ) AS rn, + SUM(CASE WHEN h.elapsed_ms IS NOT NULL THEN 1 ELSE 0 END) OVER ( + PARTITION BY h.method, COALESCE(h.normalized_path, h.path) + ) AS measured_count + FROM http_requests h + JOIN appmaps a ON a.id = h.appmap_id + ${whereSql} + ) + SELECT + method, + route, + COUNT(*) AS count, + AVG(CASE WHEN elapsed_ms IS NOT NULL THEN elapsed_ms END) AS avg_ms, + MAX(CASE + WHEN measured_count > 0 + AND rn = (measured_count * 19 + 19) / 20 + THEN elapsed_ms + END) AS p95_ms, + CAST(SUM(CASE WHEN status_code >= ${ERR_THRESHOLD} THEN 1 ELSE 0 END) AS REAL) + * 100.0 / COUNT(*) AS err_pct + FROM ranked + GROUP BY method, route + 
${havingSql} + ORDER BY ${sortColumn} DESC NULLS LAST, method, route + `; + if (filter.limit !== undefined) { + sql += ' LIMIT ?'; + params.push(filter.limit); } - const sortKey: EndpointSort = filter.sort ?? 'count'; - result.sort(comparators[sortKey]); - - return filter.limit !== undefined ? result.slice(0, filter.limit) : result; -} - -function percentile(sorted: readonly number[], p: number): number | null { - if (sorted.length === 0) return null; - const idx = Math.max(0, Math.ceil(sorted.length * p) - 1); - return sorted[idx]; -} - -// Descending sort, nulls last (so a route with no measured durations doesn't -// rank alongside a genuinely 0 ms route). -function descNullsLast(a: number | null, b: number | null): number { - if (a == null && b == null) return 0; - if (a == null) return 1; - if (b == null) return -1; - return b - a; + const rows = db.prepare(sql).all(...params) as Array<{ + method: string; + route: string; + count: number; + avg_ms: number | null; + p95_ms: number | null; + err_pct: number | null; + }>; + + return rows.map((r) => ({ + method: r.method, + route: r.route, + count: r.count, + avg_ms: r.avg_ms, + p95_ms: r.p95_ms, + err_pct: r.err_pct ?? 
0, + })); } - -const comparators: Record number> = { - count: (a, b) => b.count - a.count, - avg: (a, b) => descNullsLast(a.avg_ms, b.avg_ms), - p95: (a, b) => descNullsLast(a.p95_ms, b.p95_ms), - err: (a, b) => b.err_pct - a.err_pct, -}; diff --git a/packages/cli/src/cmds/query/queries/find.ts b/packages/cli/src/cmds/query/queries/find.ts index f33050545b..62b5ad3665 100644 --- a/packages/cli/src/cmds/query/queries/find.ts +++ b/packages/cli/src/cmds/query/queries/find.ts @@ -8,6 +8,8 @@ import { httpScopeClauses, methodFilterClauses, parseRoute, + sqlCallerClassClauses, + sqlCallerMethodClauses, } from '../lib/scope'; export type FindType = 'appmaps' | 'requests' | 'queries' | 'calls' | 'exceptions'; @@ -212,12 +214,18 @@ export function findQueries(db: sqlite3.Database, filter: FindFilter): FindQuery params.push(`%${filter.table}%`); } if (filter.className) { - where.push(`q.caller_class = ?`); - params.push(filter.className); + // The caller of a sql_query is the function_call referenced by + // q.parent_event_id, which has its own code_object link. Use that + // canonical path; fall back to the denormalized caller_class string + // when the parent function_call has no code_object link. 
+ const c = sqlCallerClassClauses(filter.className, 'q'); + where.push(...c.where); + params.push(...c.params); } if (filter.method) { - where.push(`q.caller_method = ?`); - params.push(filter.method); + const m = sqlCallerMethodClauses(filter.method, 'q'); + where.push(...m.where); + params.push(...m.params); } const dur = durationClause(filter, 'q.elapsed_ms'); where.push(...dur.where); diff --git a/packages/cli/src/cmds/query/verbs/hotspots.ts b/packages/cli/src/cmds/query/verbs/hotspots.ts index 5ec0bbf63d..726b51299c 100644 --- a/packages/cli/src/cmds/query/verbs/hotspots.ts +++ b/packages/cli/src/cmds/query/verbs/hotspots.ts @@ -44,7 +44,12 @@ export const handler = async (argv: yargs.ArgumentsCamelCase): Promise): Promise; queries?: Array<{ event_id: number; + parent_event_id?: number; sql: string; caller_class?: string; caller_method?: string; @@ -58,8 +59,9 @@ function seed(db: sqlite3.Database, recs: Recording[]): void { VALUES (?, ?, ?, ?, ?, ?, ?)` ); const insQ = db.prepare( - `INSERT INTO sql_queries (appmap_id, event_id, sql_text, caller_class, caller_method, elapsed_ms) - VALUES (?, ?, ?, ?, ?, ?)` + `INSERT INTO sql_queries (appmap_id, event_id, parent_event_id, sql_text, + caller_class, caller_method, elapsed_ms) + VALUES (?, ?, ?, ?, ?, ?, ?)` ); // Test seed: derive package + class chain from defined_class so call // sites don't have to specify them. defined_class may be Java dot-form @@ -109,6 +111,7 @@ function seed(db: sqlite3.Database, recs: Recording[]): void { insQ.run( aid, q.event_id, + q.parent_event_id ?? null, q.sql, q.caller_class ?? null, q.caller_method ?? 
null, @@ -325,6 +328,222 @@ describe('findQueries', () => { }); }); +describe('find filters: --commit, --since/--until, --duration', () => { + it('--commit on findRequests', () => { + const db = freshDb(); + try { + seed(db, [ + { + name: 'a', + commit: 'abc123', + requests: [{ event_id: 1, method: 'GET', path: '/x', status: 200 }], + }, + { + name: 'b', + commit: 'def456', + requests: [{ event_id: 1, method: 'GET', path: '/y', status: 200 }], + }, + ]); + const rows = findRequests(db, { commit: 'abc123' }); + expect(rows).toHaveLength(1); + expect(rows[0].appmap_name).toBe('a'); + } finally { + db.close(); + } + }); + + it('--commit on findAppmaps', () => { + const db = freshDb(); + try { + seed(db, [ + { name: 'a', commit: 'abc' }, + { name: 'b', commit: 'def' }, + ]); + expect(findAppmaps(db, { commit: 'abc' })).toHaveLength(1); + } finally { + db.close(); + } + }); + + it('--since / --until on findRequests filter via the appmap timestamp', () => { + const db = freshDb(); + try { + seed(db, [ + { + name: 'a', + timestamp: '2026-04-01T00:00:00.000Z', + requests: [{ event_id: 1, method: 'GET', path: '/x', status: 200 }], + }, + { + name: 'b', + timestamp: '2026-04-15T00:00:00.000Z', + requests: [{ event_id: 1, method: 'GET', path: '/x', status: 200 }], + }, + { + name: 'c', + timestamp: '2026-04-30T00:00:00.000Z', + requests: [{ event_id: 1, method: 'GET', path: '/x', status: 200 }], + }, + ]); + const rows = findRequests(db, { + since: '2026-04-10T00:00:00.000Z', + until: '2026-04-20T00:00:00.000Z', + }); + expect(rows).toHaveLength(1); + expect(rows[0].appmap_name).toBe('b'); + } finally { + db.close(); + } + }); + + it('--since on findCalls scopes via the recording', () => { + const db = freshDb(); + try { + seed(db, [ + { + name: 'old', + timestamp: '2026-04-01T00:00:00.000Z', + calls: [{ event_id: 1, defined_class: 'X', method_id: 'm' }], + }, + { + name: 'new', + timestamp: '2026-04-30T00:00:00.000Z', + calls: [{ event_id: 1, defined_class: 'X', method_id: 
'm' }], + }, + ]); + const rows = findCalls(db, { since: '2026-04-15T00:00:00.000Z' }); + expect(rows).toHaveLength(1); + expect(rows[0].appmap_name).toBe('new'); + } finally { + db.close(); + } + }); + + it('--duration on findCalls filters per-row elapsed_ms', () => { + const db = freshDb(); + try { + seed(db, [ + { + name: 'a', + calls: [ + { event_id: 1, defined_class: 'X', method_id: 'fast', elapsed_ms: 5 }, + { event_id: 2, defined_class: 'X', method_id: 'slow', elapsed_ms: 500 }, + ], + }, + ]); + const rows = findCalls(db, { duration: { op: '>', value: 100 } }); + expect(rows).toHaveLength(1); + expect(rows[0].method_id).toBe('slow'); + } finally { + db.close(); + } + }); + + it('--duration on findQueries filters per-row elapsed_ms', () => { + const db = freshDb(); + try { + seed(db, [ + { + name: 'a', + queries: [ + { event_id: 1, sql: 'SELECT 1', elapsed_ms: 1 }, + { event_id: 2, sql: 'SELECT 2', elapsed_ms: 50 }, + ], + }, + ]); + const rows = findQueries(db, { duration: { op: '>=', value: 10 } }); + expect(rows).toHaveLength(1); + expect(rows[0].sql_text).toBe('SELECT 2'); + } finally { + db.close(); + } + }); + + it('findQueries --class matches via the parent function_call code_object when the linked parent has the right class', () => { + const db = freshDb(); + try { + // Seed a function_call with a code_object link, then a sql_query + // whose parent_event_id references that call. caller_class is set + // to a deliberately mismatching raw string so we can prove the + // code_object path (not the fallback) is matching. 
+ seed(db, [ + { + name: 'a', + calls: [ + { + event_id: 10, + defined_class: 'org.example.UserRepository', + method_id: 'findById', + fqid: 'org/example/UserRepository#findById', + }, + ], + queries: [ + { + event_id: 11, + parent_event_id: 10, + sql: 'SELECT 1', + caller_class: 'WrongClassName', + caller_method: 'wrong', + }, + ], + }, + ]); + // Class part is read from code_objects (UserRepository), not from + // the WrongClassName caller_class string. + expect(findQueries(db, { className: 'UserRepository' })).toHaveLength(1); + // Full chain match also works. + expect(findQueries(db, { className: 'org/example/UserRepository' })).toHaveLength( + 1 + ); + // Misspelled — no match. + expect(findQueries(db, { className: 'OtherRepository' })).toHaveLength(0); + } finally { + db.close(); + } + }); + + it('findQueries --class matches caller_class via the suffix-aware helper', () => { + const db = freshDb(); + try { + seed(db, [ + { + name: 'a', + queries: [ + { + event_id: 1, + sql: 'SELECT 1', + caller_class: 'org.example.UserRepository', + caller_method: 'findById', + }, + { + event_id: 2, + sql: 'SELECT 2', + caller_class: 'OpenSSL::Cipher', + caller_method: 'decrypt', + }, + { + event_id: 3, + sql: 'SELECT 3', + caller_class: 'Other', + caller_method: 'm', + }, + ], + }, + ]); + // Java dot-suffix + expect(findQueries(db, { className: 'UserRepository' })).toHaveLength(1); + // Ruby ::-suffix + expect(findQueries(db, { className: 'Cipher' })).toHaveLength(1); + // Exact match also works + expect(findQueries(db, { className: 'OpenSSL::Cipher' })).toHaveLength(1); + // Top-level + expect(findQueries(db, { className: 'Other' })).toHaveLength(1); + } finally { + db.close(); + } + }); +}); + describe('findCalls', () => { it('--class and --method filter directly; --route scopes by recording', () => { const db = freshDb(); From bd95c0c8c005c9d26962c4285853bb2e89dcd405 Mon Sep 17 00:00:00 2001 From: kgilpin Date: Sat, 2 May 2026 15:18:42 -0400 Subject: [PATCH 12/30] 
fix(query): importer mis-link, verb-layer Class#method split, hotspots flag validation - Importer: code_objects lookup keyed by "{location}|{method}" rather than just "{location}", so multiple functions sharing a path:lineno (e.g. Spring Data proxy methods) bind to their own code_objects instead of clobbering one another. Verified against PetClinic fixtures: findById and findPetTypes now resolve to distinct code_object_ids; previously both linked to the same one. - find verb: added buildFindFilter() that parses --class via parseClassRef and routes the parsed method through filter.method. Makes the documented "Class#method" form's behavior explicit at the verb layer rather than depending on internal helpers each parsing it again. Explicit --method wins over a method embedded in --class. Exported for tests. - hotspots verb: replaced the inline `if (sql && class) throw` with a REJECTED_FLAGS table mirroring find's. Same shape, easy to extend when future filters land. - Coverage: tests for hotspots --since/--until and tree --filter paths (previously unexercised), plus a test pinning the importer fix (two functions at the same path:lineno disambiguated by name). - --route describe text now calls out "path is exact match; method case-insensitive" on both find and hotspots. Tests: 171 passing.
Co-Authored-By: Claude Opus 4.7 (1M context) --- .../src/cmds/query/db/import/codeObjects.ts | 11 ++- .../src/cmds/query/db/import/functionCalls.ts | 6 +- packages/cli/src/cmds/query/verbs/find.ts | 72 +++++++++++++------ packages/cli/src/cmds/query/verbs/hotspots.ts | 33 +++++++-- .../cmds/query/db/import/codeObjects.spec.ts | 41 ++++++++++- .../unit/cmds/query/queries/hotspots.spec.ts | 32 +++++++++ .../unit/cmds/query/queries/tree.spec.ts | 27 +++++++ .../tests/unit/cmds/query/verbs/find.spec.ts | 35 ++++++++- .../unit/cmds/query/verbs/hotspots.spec.ts | 29 ++++++++ 9 files changed, 255 insertions(+), 31 deletions(-) create mode 100644 packages/cli/tests/unit/cmds/query/verbs/hotspots.spec.ts diff --git a/packages/cli/src/cmds/query/db/import/codeObjects.ts b/packages/cli/src/cmds/query/db/import/codeObjects.ts index f437afbf94..b93689e4c1 100644 --- a/packages/cli/src/cmds/query/db/import/codeObjects.ts +++ b/packages/cli/src/cmds/query/db/import/codeObjects.ts @@ -15,8 +15,10 @@ export interface ClassMapNode { } // Walk the classMap tree, insert one code_objects row per function node, -// insert its labels, and return a map of classMap location → code_object_id -// (used by function_calls to link events to code objects via path:lineno). +// insert its labels, and return a map of "{location}|{method}" → +// code_object_id. The method component disambiguates classMap entries +// that share a path:lineno (e.g. Spring Data proxy methods), preventing +// the function_calls linker from binding events to the wrong code_object. // // Each function is decomposed into: // - package : slash-joined package path (e.g. "app/services/idempotency") @@ -92,7 +94,10 @@ export function importCodeObjects( isStatic ? 1 : 0 ); const row = selectCodeObjectId.get(fqid) as { id: number }; - lookup.set(location, row.id); + // Key includes the method name so multiple functions sharing a + // path:lineno (e.g. 
Spring Data proxy methods) each map to their + // own code_object instead of clobbering one another. + lookup.set(`${location}|${methodName}`, row.id); const labels = node.labels ?? []; for (const label of labels) insertLabel.run(row.id, label); diff --git a/packages/cli/src/cmds/query/db/import/functionCalls.ts b/packages/cli/src/cmds/query/db/import/functionCalls.ts index 0c97111ca8..43f1035857 100644 --- a/packages/cli/src/cmds/query/db/import/functionCalls.ts +++ b/packages/cli/src/cmds/query/db/import/functionCalls.ts @@ -46,7 +46,11 @@ export function importFunctionCalls( const evPath = ev.path; const evLineno = ev.lineno; if (evPath != null && evLineno != null) { - coId = codeObjectLookup.get(`${evPath}:${evLineno}`) ?? null; + // Lookup key matches the importer's: ":|". + // The method component disambiguates classMap entries that share + // a path:lineno, so two events at the same source location bind to + // their own code_object rather than colliding. + coId = codeObjectLookup.get(`${evPath}:${evLineno}|${ev.method_id}`) ?? null; } let paramsJson: string | null = null; diff --git a/packages/cli/src/cmds/query/verbs/find.ts b/packages/cli/src/cmds/query/verbs/find.ts index 6d44d410c7..31adcae6d0 100644 --- a/packages/cli/src/cmds/query/verbs/find.ts +++ b/packages/cli/src/cmds/query/verbs/find.ts @@ -6,6 +6,7 @@ import { locateAppMapDir } from '../../../lib/locateAppMapDir'; import { verbose } from '../../../utils'; import { openReadOnly } from '../lib/openReadOnly'; import { parseDuration, parseStatus, parseTime } from '../lib/parseFilter'; +import { parseClassRef } from '../lib/scope'; import { find, FindFilter, @@ -29,7 +30,11 @@ export const builder = (args: yargs.Argv) => { .option('directory', { type: 'string', alias: 'd' }) .option('appmap-dir', { type: 'string' }) .option('query-db', { type: 'string', describe: 'path to query.db (overrides default)' }) - .option('route', { type: 'string', describe: 'e.g. 
"POST /orders" or "/orders"' }) + .option('route', { + type: 'string', + describe: + 'e.g. "POST /orders" or "/orders" (path is exact match; method case-insensitive)', + }) .option('class', { type: 'string', describe: 'defined_class or fqid Class part' }) .option('method', { type: 'string', describe: 'method_id (not HTTP method)' }) .option('label', { type: 'string' }) @@ -90,30 +95,57 @@ export function validateFlags(type: FindType, flags: Record): v throw new Error(message); } -export const handler = async (argv: yargs.ArgumentsCamelCase): Promise => { - verbose(argv.verbose as boolean | undefined); - handleWorkingDirectory(argv.directory); - const appmapDir = argv.queryDb ? '' : await locateAppMapDir(argv.appmapDir); +// Build a FindFilter from a yargs argv. Exported for testing — also makes +// the verb-layer transformations (e.g. splitting Class#method off of +// --class so the method composes via filter.method) directly assertable. +export interface ParsedFind { + type: FindType; + filter: FindFilter; +} +export function buildFindFilter(argv: Record): ParsedFind { const type = argv.type as FindType; validateFlags(type, argv); const filter: FindFilter = {}; - if (argv.route) filter.route = argv.route; - if (argv.class) filter.className = argv.class; - if (argv.method) filter.method = argv.method; - if (argv.label) filter.label = argv.label; - if (argv.branch) filter.branch = argv.branch; - if (argv.commit) filter.commit = argv.commit; - if (argv.status) filter.status = parseStatus(argv.status); - if (argv.duration) filter.duration = parseDuration(argv.duration); - if (argv.since) filter.since = parseTime(argv.since); - if (argv.until) filter.until = parseTime(argv.until); - if (argv.appmap) filter.appmap = argv.appmap; - if (argv.table) filter.table = argv.table; - if (argv.exception) filter.exception = argv.exception; - if (argv.limit !== undefined) filter.limit = argv.limit; - if (argv.offset !== undefined) filter.offset = argv.offset; + if (typeof 
argv.route === 'string') filter.route = argv.route; + if (typeof argv.label === 'string') filter.label = argv.label; + if (typeof argv.branch === 'string') filter.branch = argv.branch; + if (typeof argv.commit === 'string') filter.commit = argv.commit; + if (typeof argv.status === 'string') filter.status = parseStatus(argv.status); + if (typeof argv.duration === 'string') filter.duration = parseDuration(argv.duration); + if (typeof argv.since === 'string') filter.since = parseTime(argv.since); + if (typeof argv.until === 'string') filter.until = parseTime(argv.until); + if (typeof argv.appmap === 'string') filter.appmap = argv.appmap; + if (typeof argv.table === 'string') filter.table = argv.table; + if (typeof argv.exception === 'string') filter.exception = argv.exception; + if (typeof argv.limit === 'number') filter.limit = argv.limit; + if (typeof argv.offset === 'number') filter.offset = argv.offset; + + // The documented --class form is "[pkg/]Class[#method]". Split the + // method off here so it composes through filter.method even when the + // user only supplied --class. Internal helpers (classFilterClauses / + // sqlCallerClassClauses) also re-parse, but doing it at the verb gives + // us a uniform contract: filter.className is "[pkg/]Class" only; + // method, if any, lives on filter.method (and an explicit --method + // wins over a method embedded in --class). + let methodFilter = typeof argv.method === 'string' ? argv.method : undefined; + if (typeof argv.class === 'string') { + const parsed = parseClassRef(argv.class); + if (parsed.method && !methodFilter) methodFilter = parsed.method; + filter.className = argv.class; + } + if (methodFilter) filter.method = methodFilter; + + return { type, filter }; +} + +export const handler = async (argv: yargs.ArgumentsCamelCase): Promise => { + verbose(argv.verbose as boolean | undefined); + handleWorkingDirectory(argv.directory); + const appmapDir = argv.queryDb ? 
'' : await locateAppMapDir(argv.appmapDir); + + const { type, filter } = buildFindFilter(argv as Record); const db = openReadOnly(appmapDir, argv.queryDb); try { diff --git a/packages/cli/src/cmds/query/verbs/hotspots.ts b/packages/cli/src/cmds/query/verbs/hotspots.ts index 726b51299c..7f273024d6 100644 --- a/packages/cli/src/cmds/query/verbs/hotspots.ts +++ b/packages/cli/src/cmds/query/verbs/hotspots.ts @@ -28,8 +28,11 @@ export const builder = (args: yargs.Argv) => { choices: ['function', 'sql'] as const, default: 'function', }) - .option('route', { type: 'string', describe: 'e.g. "GET /reports"' }) - .option('class', { type: 'string', describe: 'defined_class (function mode)' }) + .option('route', { + type: 'string', + describe: 'e.g. "GET /reports" (path is exact match; method case-insensitive)', + }) + .option('class', { type: 'string', describe: 'class filter (function mode only)' }) .option('branch', { type: 'string' }) .option('since', { type: 'string' }) .option('until', { type: 'string' }) @@ -39,15 +42,35 @@ export const builder = (args: yargs.Argv) => { type Argv = ReturnType extends yargs.Argv ? T : never; +// Per-type flag rejection list. Same shape as find's: a small allow-list +// surfaces user mistakes (e.g. --class on --type=sql) instead of silently +// dropping them, and pre-empts future filter additions that only one +// type can honor. +const REJECTED_FLAGS: Record = { + function: [], + sql: ['class'], +}; + +// Exported for tests. +export function validateFlags(type: HotspotType, flags: Record): void { + const used: string[] = []; + for (const flag of REJECTED_FLAGS[type]) { + if (flags[flag] != null) used.push(`--${flag}`); + } + if (used.length === 0) return; + const verb = used.length === 1 ? 
'is' : 'are'; + throw new Error( + `hotspots --type=${type}: ${used.join(', ')} ${verb} not supported for this type` + ); +} + export const handler = async (argv: yargs.ArgumentsCamelCase): Promise => { verbose(argv.verbose as boolean | undefined); handleWorkingDirectory(argv.directory); const appmapDir = argv.queryDb ? '' : await locateAppMapDir(argv.appmapDir); const type = argv.type as HotspotType; - if (type === 'sql' && argv.class) { - throw new Error('hotspots --type=sql does not accept --class (only function-mode supports class filtering)'); - } + validateFlags(type, argv as Record); const filter: HotspotsFilter = { type }; if (argv.route) filter.route = argv.route; diff --git a/packages/cli/tests/unit/cmds/query/db/import/codeObjects.spec.ts b/packages/cli/tests/unit/cmds/query/db/import/codeObjects.spec.ts index 27692bb4e8..8845f61423 100644 --- a/packages/cli/tests/unit/cmds/query/db/import/codeObjects.spec.ts +++ b/packages/cli/tests/unit/cmds/query/db/import/codeObjects.spec.ts @@ -56,7 +56,7 @@ describe('importCodeObjects', () => { expect(row.leaf_class).toBe('User'); expect(row.method).toBe('save'); expect(row.is_static).toBe(0); - expect(lookup.get('app/models/user.rb:10')).toBe(1); + expect(lookup.get('app/models/user.rb:10|save')).toBe(1); } finally { db.close(); } @@ -288,6 +288,45 @@ describe('importCodeObjects', () => { } }); + it('disambiguates two functions sharing the same path:lineno by method name', () => { + const db = freshDb(); + try { + // Spring Data proxy-style: two distinct methods at the same + // synthetic source location. + const tree: ClassMapNode[] = [ + { + type: 'package', + name: 'org/example', + children: [ + { + type: 'class', + name: 'OwnerRepository', + children: [ + { type: 'function', name: 'findById', location: 'Proxy.java:0' }, + { type: 'function', name: 'findPetTypes', location: 'Proxy.java:0' }, + ], + }, + ], + }, + ]; + const lookup = importCodeObjects(db, tree); + + // Both code_objects exist... 
+ expect( + (db.prepare('SELECT COUNT(*) AS n FROM code_objects').get() as any).n + ).toBe(2); + + // ...and the lookup keys disambiguate them by method. + const findByIdId = lookup.get('Proxy.java:0|findById'); + const findPetTypesId = lookup.get('Proxy.java:0|findPetTypes'); + expect(findByIdId).toBeDefined(); + expect(findPetTypesId).toBeDefined(); + expect(findByIdId).not.toBe(findPetTypesId); + } finally { + db.close(); + } + }); + it('is idempotent on re-import (INSERT OR IGNORE)', () => { const db = freshDb(); try { diff --git a/packages/cli/tests/unit/cmds/query/queries/hotspots.spec.ts b/packages/cli/tests/unit/cmds/query/queries/hotspots.spec.ts index 37b8f0144f..13245b2579 100644 --- a/packages/cli/tests/unit/cmds/query/queries/hotspots.spec.ts +++ b/packages/cli/tests/unit/cmds/query/queries/hotspots.spec.ts @@ -251,6 +251,38 @@ describe('sqlHotspots', () => { } }); + it('--since / --until scope to recordings within the time range', () => { + const db = freshDb(); + try { + const oldId = (db + .prepare( + `INSERT INTO appmaps (name, source_path, timestamp) VALUES ('old', '/o', '2026-04-01T00:00:00.000Z')` + ) + .run().lastInsertRowid as number); + const newId = (db + .prepare( + `INSERT INTO appmaps (name, source_path, timestamp) VALUES ('new', '/n', '2026-04-30T00:00:00.000Z')` + ) + .run().lastInsertRowid as number); + db.prepare( + `INSERT INTO sql_queries (appmap_id, event_id, sql_text, elapsed_ms) VALUES (?, 1, 'SELECT a', 1)` + ).run(oldId); + db.prepare( + `INSERT INTO sql_queries (appmap_id, event_id, sql_text, elapsed_ms) VALUES (?, 1, 'SELECT b', 2)` + ).run(newId); + + const since = sqlHotspots(db, { since: '2026-04-15T00:00:00.000Z' }); + expect(since).toHaveLength(1); + expect(since[0].sql_text).toBe('SELECT b'); + + const until = sqlHotspots(db, { until: '2026-04-15T00:00:00.000Z' }); + expect(until).toHaveLength(1); + expect(until[0].sql_text).toBe('SELECT a'); + } finally { + db.close(); + } + }); + it('--branch filter applies', () => { 
const db = freshDb(); try { diff --git a/packages/cli/tests/unit/cmds/query/queries/tree.spec.ts b/packages/cli/tests/unit/cmds/query/queries/tree.spec.ts index 11743aff89..541e8cf137 100644 --- a/packages/cli/tests/unit/cmds/query/queries/tree.spec.ts +++ b/packages/cli/tests/unit/cmds/query/queries/tree.spec.ts @@ -190,6 +190,33 @@ describe('tree', () => { }); }); +describe('tree --filter', () => { + it('returns only http events when filter=http', () => { + const db = freshDb(); + try { + seed(db, { addOutbound: true }); + const nodes = tree(db, 'orders_create_42').filter( + (n) => n.kind === 'http_server' || n.kind === 'http_client' + ); + expect(nodes.map((n) => n.kind).sort()).toEqual(['http_client', 'http_server']); + } finally { + db.close(); + } + }); + + it('returns only sql events when filter=sql', () => { + const db = freshDb(); + try { + seed(db); + const nodes = tree(db, 'orders_create_42').filter((n) => n.kind === 'sql'); + expect(nodes).toHaveLength(1); + expect(nodes[0].kind).toBe('sql'); + } finally { + db.close(); + } + }); +}); + describe('treeSummary', () => { it('counts SQL, surfaces entry/exception, and tallies labels', () => { const db = freshDb(); diff --git a/packages/cli/tests/unit/cmds/query/verbs/find.spec.ts b/packages/cli/tests/unit/cmds/query/verbs/find.spec.ts index c5672fc578..a296d72988 100644 --- a/packages/cli/tests/unit/cmds/query/verbs/find.spec.ts +++ b/packages/cli/tests/unit/cmds/query/verbs/find.spec.ts @@ -1,4 +1,4 @@ -import { validateFlags } from '../../../../../src/cmds/query/verbs/find'; +import { buildFindFilter, validateFlags } from '../../../../../src/cmds/query/verbs/find'; describe('find verb flag validation', () => { it('accepts the universal flags on every type', () => { @@ -73,3 +73,36 @@ describe('find verb flag validation', () => { ).not.toThrow(); }); }); + +describe('buildFindFilter', () => { + it('splits Class#method off of --class so the method composes via filter.method', () => { + const { filter } = 
buildFindFilter({ + type: 'queries', + class: 'org.example.UserRepo#findById', + }); + expect(filter.className).toBe('org.example.UserRepo#findById'); + expect(filter.method).toBe('findById'); + }); + + it('explicit --method wins over a method embedded in --class', () => { + const { filter } = buildFindFilter({ + type: 'calls', + class: 'X#fromClass', + method: 'fromMethod', + }); + expect(filter.method).toBe('fromMethod'); + }); + + it('--class without # leaves filter.method undefined', () => { + const { filter } = buildFindFilter({ + type: 'calls', + class: 'OpenSSL::Cipher', + }); + expect(filter.className).toBe('OpenSSL::Cipher'); + expect(filter.method).toBeUndefined(); + }); + + it('returns the parsed type', () => { + expect(buildFindFilter({ type: 'appmaps' }).type).toBe('appmaps'); + }); +}); diff --git a/packages/cli/tests/unit/cmds/query/verbs/hotspots.spec.ts b/packages/cli/tests/unit/cmds/query/verbs/hotspots.spec.ts new file mode 100644 index 0000000000..7c119dd997 --- /dev/null +++ b/packages/cli/tests/unit/cmds/query/verbs/hotspots.spec.ts @@ -0,0 +1,29 @@ +import { validateFlags } from '../../../../../src/cmds/query/verbs/hotspots'; + +describe('hotspots verb flag validation', () => { + it('function-mode accepts --class', () => { + expect(() => validateFlags('function', { class: 'UserRepository' })).not.toThrow(); + }); + + it('sql-mode rejects --class', () => { + expect(() => validateFlags('sql', { class: 'UserRepository' })).toThrow( + /--type=sql:.*--class.*not supported/ + ); + }); + + it('sql-mode accepts --route, --branch, --since, --until, --limit', () => { + expect(() => + validateFlags('sql', { + route: '/x', + branch: 'main', + since: '2026-01-01', + until: '2026-12-31', + limit: 5, + }) + ).not.toThrow(); + }); + + it('ignores undefined / null flag values', () => { + expect(() => validateFlags('sql', { class: undefined })).not.toThrow(); + }); +}); From 72512721f7e5c98922485b74234ad508d569d24c Mon Sep 17 00:00:00 2001 From: kgilpin Date: 
Sat, 2 May 2026 15:29:02 -0400 Subject: [PATCH 13/30] chore(query): widen handler argv type for CommandModule<{}, any> assignability MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Each verb's handler was typed `(argv: ArgumentsCamelCase) => …`, which is more specific than what `args.command(VerbModule)` infers when the verb is imported as `import * as VerbModule`. yargs widens U to `any` at that boundary, and a strict-typed handler isn't assignable to the widened form. CI surfaced this as TS2345. Match the pattern landed earlier on rpc.ts and confirmed for the query verbs in 5a661bd9: accept `ArgumentsCamelCase` at the export and immediately downcast to the precise `ArgumentsCamelCase` inside the handler. No runtime change. Co-Authored-By: Claude Opus 4.7 (1M context) --- packages/cli/src/cmds/query/verbs/endpoints.ts | 4 +++- packages/cli/src/cmds/query/verbs/find.ts | 4 +++- packages/cli/src/cmds/query/verbs/hotspots.ts | 4 +++- packages/cli/src/cmds/query/verbs/tree.ts | 4 +++- 4 files changed, 12 insertions(+), 4 deletions(-) diff --git a/packages/cli/src/cmds/query/verbs/endpoints.ts b/packages/cli/src/cmds/query/verbs/endpoints.ts index e69eec9611..8352536377 100644 --- a/packages/cli/src/cmds/query/verbs/endpoints.ts +++ b/packages/cli/src/cmds/query/verbs/endpoints.ts @@ -40,7 +40,9 @@ export const builder = (args: yargs.Argv) => { type Argv = ReturnType extends yargs.Argv ? T : never; -export const handler = async (argv: yargs.ArgumentsCamelCase): Promise => { +// Widened at the export so this module is assignable to CommandModule. 
+export const handler = async (argvIn: yargs.ArgumentsCamelCase): Promise => { + const argv = argvIn as yargs.ArgumentsCamelCase; verbose(argv.verbose as boolean | undefined); handleWorkingDirectory(argv.directory); // When --query-db is supplied, the appmap dir is irrelevant — the user has diff --git a/packages/cli/src/cmds/query/verbs/find.ts b/packages/cli/src/cmds/query/verbs/find.ts index 31adcae6d0..ec04904547 100644 --- a/packages/cli/src/cmds/query/verbs/find.ts +++ b/packages/cli/src/cmds/query/verbs/find.ts @@ -140,7 +140,9 @@ export function buildFindFilter(argv: Record): ParsedFind { return { type, filter }; } -export const handler = async (argv: yargs.ArgumentsCamelCase): Promise => { +// Widened at the export so this module is assignable to CommandModule. +export const handler = async (argvIn: yargs.ArgumentsCamelCase): Promise => { + const argv = argvIn as yargs.ArgumentsCamelCase; verbose(argv.verbose as boolean | undefined); handleWorkingDirectory(argv.directory); const appmapDir = argv.queryDb ? '' : await locateAppMapDir(argv.appmapDir); diff --git a/packages/cli/src/cmds/query/verbs/hotspots.ts b/packages/cli/src/cmds/query/verbs/hotspots.ts index 7f273024d6..b1caa3914b 100644 --- a/packages/cli/src/cmds/query/verbs/hotspots.ts +++ b/packages/cli/src/cmds/query/verbs/hotspots.ts @@ -64,7 +64,9 @@ export function validateFlags(type: HotspotType, flags: Record) ); } -export const handler = async (argv: yargs.ArgumentsCamelCase): Promise => { +// Widened at the export so this module is assignable to CommandModule. +export const handler = async (argvIn: yargs.ArgumentsCamelCase): Promise => { + const argv = argvIn as yargs.ArgumentsCamelCase; verbose(argv.verbose as boolean | undefined); handleWorkingDirectory(argv.directory); const appmapDir = argv.queryDb ? 
'' : await locateAppMapDir(argv.appmapDir); diff --git a/packages/cli/src/cmds/query/verbs/tree.ts b/packages/cli/src/cmds/query/verbs/tree.ts index 7a030c6df8..5b456597a1 100644 --- a/packages/cli/src/cmds/query/verbs/tree.ts +++ b/packages/cli/src/cmds/query/verbs/tree.ts @@ -32,7 +32,9 @@ export const builder = (args: yargs.Argv) => { type Argv = ReturnType extends yargs.Argv ? T : never; -export const handler = async (argv: yargs.ArgumentsCamelCase): Promise => { +// Widened at the export so this module is assignable to CommandModule. +export const handler = async (argvIn: yargs.ArgumentsCamelCase): Promise => { + const argv = argvIn as yargs.ArgumentsCamelCase; verbose(argv.verbose as boolean | undefined); handleWorkingDirectory(argv.directory); const appmapDir = argv.queryDb ? '' : await locateAppMapDir(argv.appmapDir); From 302873f0195cda232a762c41fce5e7b3500f9212 Mon Sep 17 00:00:00 2001 From: kgilpin Date: Sat, 2 May 2026 15:42:45 -0400 Subject: [PATCH 14/30] style(query): replace Array with T[] to satisfy lint rule MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit @typescript-eslint/array-type flagged ten Array usages across queries/endpoints.ts, queries/tree.ts, verbs/find.ts and three test files. Mechanical rewrite — no behavior change. 
Co-Authored-By: Claude Opus 4.7 (1M context) --- .../cli/src/cmds/query/queries/endpoints.ts | 4 +-- packages/cli/src/cmds/query/queries/tree.ts | 28 +++++++++---------- packages/cli/src/cmds/query/verbs/find.ts | 2 +- .../tests/unit/cmds/query/lib/scope.spec.ts | 12 ++++---- .../unit/cmds/query/queries/find.spec.ts | 16 +++++------ .../unit/cmds/query/queries/hotspots.spec.ts | 2 +- 6 files changed, 32 insertions(+), 32 deletions(-) diff --git a/packages/cli/src/cmds/query/queries/endpoints.ts b/packages/cli/src/cmds/query/queries/endpoints.ts index 80b8c61e76..05aa202b24 100644 --- a/packages/cli/src/cmds/query/queries/endpoints.ts +++ b/packages/cli/src/cmds/query/queries/endpoints.ts @@ -129,14 +129,14 @@ export function endpoints( params.push(filter.limit); } - const rows = db.prepare(sql).all(...params) as Array<{ + const rows = db.prepare(sql).all(...params) as { method: string; route: string; count: number; avg_ms: number | null; p95_ms: number | null; err_pct: number | null; - }>; + }[]; return rows.map((r) => ({ method: r.method, diff --git a/packages/cli/src/cmds/query/queries/tree.ts b/packages/cli/src/cmds/query/queries/tree.ts index 92bdc469cd..67eca9c574 100644 --- a/packages/cli/src/cmds/query/queries/tree.ts +++ b/packages/cli/src/cmds/query/queries/tree.ts @@ -100,7 +100,7 @@ export function tree(db: sqlite3.Database, appmapRef: string): TreeNode[] { COALESCE(normalized_path, path) AS route, status_code, elapsed_ms FROM http_requests WHERE appmap_id = ?` ) - .all(am.id) as Array<{ + .all(am.id) as { event_id: number; parent_event_id: number | null; thread_id: number | null; @@ -108,7 +108,7 @@ export function tree(db: sqlite3.Database, appmapRef: string): TreeNode[] { route: string; status_code: number; elapsed_ms: number | null; - }>) { + }[]) { events.push({ kind: 'http_server', event_id: r.event_id, @@ -127,7 +127,7 @@ export function tree(db: sqlite3.Database, appmapRef: string): TreeNode[] { `SELECT event_id, parent_event_id, thread_id, 
method, url, status_code, elapsed_ms FROM http_client_requests WHERE appmap_id = ?` ) - .all(am.id) as Array<{ + .all(am.id) as { event_id: number; parent_event_id: number | null; thread_id: number | null; @@ -135,7 +135,7 @@ export function tree(db: sqlite3.Database, appmapRef: string): TreeNode[] { url: string; status_code: number | null; elapsed_ms: number | null; - }>) { + }[]) { events.push({ kind: 'http_client', event_id: r.event_id, @@ -154,14 +154,14 @@ export function tree(db: sqlite3.Database, appmapRef: string): TreeNode[] { `SELECT event_id, parent_event_id, thread_id, sql_text, database_type, elapsed_ms FROM sql_queries WHERE appmap_id = ?` ) - .all(am.id) as Array<{ + .all(am.id) as { event_id: number; parent_event_id: number | null; thread_id: number | null; sql_text: string; database_type: string | null; elapsed_ms: number | null; - }>) { + }[]) { events.push({ kind: 'sql', event_id: r.event_id, @@ -183,7 +183,7 @@ export function tree(db: sqlite3.Database, appmapRef: string): TreeNode[] { LEFT JOIN code_objects co ON co.id = fc.code_object_id WHERE fc.appmap_id = ?` ) - .all(am.id) as Array<{ + .all(am.id) as { event_id: number; parent_event_id: number | null; thread_id: number | null; @@ -194,7 +194,7 @@ export function tree(db: sqlite3.Database, appmapRef: string): TreeNode[] { elapsed_ms: number | null; parameters_json: string | null; return_value: string | null; - }>) { + }[]) { events.push({ kind: 'function', event_id: r.event_id, @@ -217,7 +217,7 @@ export function tree(db: sqlite3.Database, appmapRef: string): TreeNode[] { path, lineno FROM exceptions WHERE appmap_id = ?` ) - .all(am.id) as Array<{ + .all(am.id) as { event_id: number; parent_event_id: number | null; thread_id: number | null; @@ -225,7 +225,7 @@ export function tree(db: sqlite3.Database, appmapRef: string): TreeNode[] { message: string | null; path: string | null; lineno: number | null; - }>) { + }[]) { events.push({ kind: 'exception', event_id: r.event_id, @@ -263,13 +263,13 
@@ export interface TreeSummary { entry: { method: string; route: string; status_code: number; elapsed_ms: number | null } | null; sql: { count: number; total_ms: number }; http_client: { count: number; total_ms: number }; - exceptions: Array<{ + exceptions: { exception_class: string; message: string | null; path: string | null; lineno: number | null; - }>; - labels: Array<{ label: string; count: number }>; + }[]; + labels: { label: string; count: number }[]; } export function treeSummary(db: sqlite3.Database, appmapRef: string): TreeSummary { @@ -290,7 +290,7 @@ export function treeSummary(db: sqlite3.Database, appmapRef: string): TreeSummar GROUP BY l.label ORDER BY n DESC, l.label` ) - .all(am.id) as Array<{ label: string; n: number }>; + .all(am.id) as { label: string; n: number }[]; return { appmap_name: am.name, diff --git a/packages/cli/src/cmds/query/verbs/find.ts b/packages/cli/src/cmds/query/verbs/find.ts index ec04904547..6d4ff30b41 100644 --- a/packages/cli/src/cmds/query/verbs/find.ts +++ b/packages/cli/src/cmds/query/verbs/find.ts @@ -226,7 +226,7 @@ function renderTable(type: FindType, rows: unknown[]): string { function formatParams(json: string | null): string { if (!json) return ''; try { - const parsed = JSON.parse(json) as Array<{ name?: string; value?: unknown }>; + const parsed = JSON.parse(json) as { name?: string; value?: unknown }[]; return parsed .map((p) => `${p.name ?? '?'}=${typeof p.value === 'string' ? 
p.value : JSON.stringify(p.value)}`) .join(', '); diff --git a/packages/cli/tests/unit/cmds/query/lib/scope.spec.ts b/packages/cli/tests/unit/cmds/query/lib/scope.spec.ts index 97cd4b3e33..9279662bc9 100644 --- a/packages/cli/tests/unit/cmds/query/lib/scope.spec.ts +++ b/packages/cli/tests/unit/cmds/query/lib/scope.spec.ts @@ -212,7 +212,7 @@ describe('classFilterClauses', () => { const c = classFilterClauses('Cipher', 'fc'); const sql = `SELECT fc.event_id FROM function_calls fc WHERE ${c.where.join(' AND ')} ORDER BY fc.event_id`; - const eids = (db.prepare(sql).all(...c.params) as Array<{ event_id: number }>).map((r) => r.event_id); + const eids = (db.prepare(sql).all(...c.params) as { event_id: number }[]).map((r) => r.event_id); // Both co1 (nested) and co2 (top-level) should match; co3 should not. expect(eids).toEqual([1, 2]); } finally { @@ -243,7 +243,7 @@ describe('classFilterClauses', () => { const c = classFilterClauses('org/example/UserRepository#findById', 'fc'); const sql = `SELECT fc.event_id FROM function_calls fc WHERE ${c.where.join(' AND ')}`; - const eids = (db.prepare(sql).all(...c.params) as Array<{ event_id: number }>).map((r) => r.event_id); + const eids = (db.prepare(sql).all(...c.params) as { event_id: number }[]).map((r) => r.event_id); expect(eids).toEqual([1]); } finally { db.close(); @@ -260,7 +260,7 @@ describe('classFilterClauses', () => { const c = classFilterClauses('Cipher', 'fc'); const sql = `SELECT fc.event_id FROM function_calls fc WHERE ${c.where.join(' AND ')}`; - const eids = (db.prepare(sql).all(...c.params) as Array<{ event_id: number }>).map((r) => r.event_id); + const eids = (db.prepare(sql).all(...c.params) as { event_id: number }[]).map((r) => r.event_id); expect(eids).toEqual([1]); } finally { db.close(); @@ -276,7 +276,7 @@ describe('classFilterClauses', () => { const c = classFilterClauses('org.example.Foo', 'fc'); const sql = `SELECT fc.event_id FROM function_calls fc WHERE ${c.where.join(' AND ')}`; - const eids 
= (db.prepare(sql).all(...c.params) as Array<{ event_id: number }>).map((r) => r.event_id); + const eids = (db.prepare(sql).all(...c.params) as { event_id: number }[]).map((r) => r.event_id); expect(eids).toEqual([1]); } finally { db.close(); @@ -296,7 +296,7 @@ describe('methodFilterClauses', () => { const m = methodFilterClauses('findById', 'fc'); const sql = `SELECT fc.event_id FROM function_calls fc WHERE ${m.where.join(' AND ')}`; - const eids = (db.prepare(sql).all(...m.params) as Array<{ event_id: number }>).map((r) => r.event_id); + const eids = (db.prepare(sql).all(...m.params) as { event_id: number }[]).map((r) => r.event_id); expect(eids).toEqual([1]); } finally { db.close(); @@ -312,7 +312,7 @@ describe('methodFilterClauses', () => { const m = methodFilterClauses('findById', 'fc'); const sql = `SELECT fc.event_id FROM function_calls fc WHERE ${m.where.join(' AND ')}`; - const eids = (db.prepare(sql).all(...m.params) as Array<{ event_id: number }>).map((r) => r.event_id); + const eids = (db.prepare(sql).all(...m.params) as { event_id: number }[]).map((r) => r.event_id); expect(eids).toEqual([1]); } finally { db.close(); diff --git a/packages/cli/tests/unit/cmds/query/queries/find.spec.ts b/packages/cli/tests/unit/cmds/query/queries/find.spec.ts index c05af88fe6..d47f99464d 100644 --- a/packages/cli/tests/unit/cmds/query/queries/find.spec.ts +++ b/packages/cli/tests/unit/cmds/query/queries/find.spec.ts @@ -18,35 +18,35 @@ interface Recording { branch?: string; commit?: string; timestamp?: string; - requests?: Array<{ + requests?: { event_id: number; method: string; path: string; normalized_path?: string; status: number; elapsed_ms?: number; - }>; - queries?: Array<{ + }[]; + queries?: { event_id: number; parent_event_id?: number; sql: string; caller_class?: string; caller_method?: string; elapsed_ms?: number; - }>; - calls?: Array<{ + }[]; + calls?: { event_id: number; defined_class: string; method_id: string; elapsed_ms?: number; fqid?: string; labels?: 
string[]; - }>; - exceptions?: Array<{ + }[]; + exceptions?: { event_id: number; exception_class: string; message?: string; - }>; + }[]; } function seed(db: sqlite3.Database, recs: Recording[]): void { diff --git a/packages/cli/tests/unit/cmds/query/queries/hotspots.spec.ts b/packages/cli/tests/unit/cmds/query/queries/hotspots.spec.ts index 13245b2579..47d9732524 100644 --- a/packages/cli/tests/unit/cmds/query/queries/hotspots.spec.ts +++ b/packages/cli/tests/unit/cmds/query/queries/hotspots.spec.ts @@ -26,7 +26,7 @@ function seedRecording( branch?: string; request?: { event_id: number; method: string; path: string; status: number }; calls?: CallSeed[]; - queries?: Array<{ event_id: number; parent_event_id?: number; sql: string; elapsed_ms: number }>; + queries?: { event_id: number; parent_event_id?: number; sql: string; elapsed_ms: number }[]; } ): number { const am = db From 848a3eeb30475e7c8f1ca6ad3b1cb812ca396b9f Mon Sep 17 00:00:00 2001 From: kgilpin Date: Sat, 2 May 2026 15:56:52 -0400 Subject: [PATCH 15/30] feat(query): related and compare verbs MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Closes the V3 query verb set: related (similarity ranking) and compare (per-route latency delta between branches). Both ride on the existing shared helpers — appmapWhere/httpScopeClauses for filtering, endpoints() for compare's p95 — so the semantics line up across verbs. - related : Loads the source recording's signature (route, SQL tables, classes), then for each candidate appmap (filters: --branch, --commit, --since/--until, --status, --route, --limit) computes a score and a list of contributing signals. Score weights from V3: route ×5 (binary), per-shared-table ×3, per-shared-class ×2. Source is excluded; results are ranked by score descending. 
SQL-table extraction from sql_text is a regex over FROM/JOIN/INTO/UPDATE — heuristic (won't handle nested subqueries cleanly), but the score is itself a heuristic so the precision floor is acceptable. Class-set merges code_objects.leaf_class (linked rows) with a leaf-extracted defined_class for unlinked rows so sparsely-linked recordings still contribute to the score. - compare : Calls endpoints() twice, one per branch, then merges results by (method, route). Reuses the same SQL window-function p95 that endpoints does, so the two verbs agree per-route. delta = b_p95 / a_p95; the verb formats it as "+Nx" / "-Nx" / "+N%" / "~" / "?". --sort=delta|p95-a|p95-b, --include-counts, --since/--until, --limit, --json. - demo-query.sh exercises related against the PetClinic /oups recordings; compare is silent on PetClinic (single branch) but the verb is wired and tested. Tests: 181 passing. Co-Authored-By: Claude Opus 4.7 (1M context) --- packages/cli/scripts/demo-query.sh | 10 + .../cli/src/cmds/query/queries/compare.ts | 107 +++++++++ .../cli/src/cmds/query/queries/related.ts | 194 +++++++++++++++++ packages/cli/src/cmds/query/query.ts | 4 + packages/cli/src/cmds/query/verbs/compare.ts | 115 ++++++++++ packages/cli/src/cmds/query/verbs/related.ts | 84 +++++++ .../unit/cmds/query/queries/compare.spec.ts | 133 ++++++++++++ .../unit/cmds/query/queries/related.spec.ts | 205 ++++++++++++++++++ 8 files changed, 852 insertions(+) create mode 100644 packages/cli/src/cmds/query/queries/compare.ts create mode 100644 packages/cli/src/cmds/query/queries/related.ts create mode 100644 packages/cli/src/cmds/query/verbs/compare.ts create mode 100644 packages/cli/src/cmds/query/verbs/related.ts create mode 100644 packages/cli/tests/unit/cmds/query/queries/compare.spec.ts create mode 100644 packages/cli/tests/unit/cmds/query/queries/related.spec.ts diff --git a/packages/cli/scripts/demo-query.sh b/packages/cli/scripts/demo-query.sh index 85624abec2..6c1d189631 100755 --- 
a/packages/cli/scripts/demo-query.sh +++ b/packages/cli/scripts/demo-query.sh @@ -75,6 +75,16 @@ run query find exceptions --query-db "$DB" --limit 5 || true run query hotspots --query-db "$DB" --limit 5 run query hotspots --query-db "$DB" --type=sql --limit 3 +# related: find passing baselines for a recording (with whatever data exists) +RELATED_SOURCE="$(node -e " + const db = require('better-sqlite3')('$DB', { readonly: true }); + const r = db.prepare(\"SELECT name FROM appmaps WHERE name LIKE '%oups%' LIMIT 1\").get(); + process.stdout.write(r ? r.name : ''); +")" +if [ -n "$RELATED_SOURCE" ]; then + run query related "$RELATED_SOURCE" --query-db "$DB" --limit 5 +fi + # Pick the recording with the most events for the tree demos. APPMAP="$(node -e " const db = require('better-sqlite3')('$DB', { readonly: true }); diff --git a/packages/cli/src/cmds/query/queries/compare.ts b/packages/cli/src/cmds/query/queries/compare.ts new file mode 100644 index 0000000000..db6fbda4c5 --- /dev/null +++ b/packages/cli/src/cmds/query/queries/compare.ts @@ -0,0 +1,107 @@ +import sqlite3 from 'better-sqlite3'; + +import { endpoints } from './endpoints'; + +export interface CompareRow { + method: string; + route: string; + a_count: number; + a_p95_ms: number | null; + b_count: number; + b_p95_ms: number | null; + // b_p95 / a_p95 — undefined when either side has no measured durations. + delta: number | null; +} + +export type CompareSort = 'delta' | 'p95-a' | 'p95-b'; + +export interface CompareFilter { + branch_a: string; + branch_b: string; + since?: string; + until?: string; + sort?: CompareSort; + limit?: number; +} + +// Computes per-route p95 for two branches and merges the results, exposing +// delta = b_p95 / a_p95 alongside both sides' counts and p95s. Implementation +// reuses endpoints() (which already does the SQL window-function p95) so the +// p95 semantics match the endpoints verb exactly. 
+export function compare(db: sqlite3.Database, filter: CompareFilter): CompareRow[] { + const a = endpoints(db, { + branch: filter.branch_a, + since: filter.since, + until: filter.until, + }); + const b = endpoints(db, { + branch: filter.branch_b, + since: filter.since, + until: filter.until, + }); + + const rows = new Map(); + const key = (method: string, route: string) => `${method}\t${route}`; + + for (const r of a) { + rows.set(key(r.method, r.route), { + method: r.method, + route: r.route, + a_count: r.count, + a_p95_ms: r.p95_ms, + b_count: 0, + b_p95_ms: null, + delta: null, + }); + } + for (const r of b) { + const k = key(r.method, r.route); + const existing = rows.get(k); + if (existing) { + existing.b_count = r.count; + existing.b_p95_ms = r.p95_ms; + } else { + rows.set(k, { + method: r.method, + route: r.route, + a_count: 0, + a_p95_ms: null, + b_count: r.count, + b_p95_ms: r.p95_ms, + delta: null, + }); + } + } + + for (const row of rows.values()) { + if (row.a_p95_ms != null && row.a_p95_ms > 0 && row.b_p95_ms != null) { + row.delta = row.b_p95_ms / row.a_p95_ms; + } + } + + const result = [...rows.values()]; + const sortKey: CompareSort = filter.sort ?? 'delta'; + result.sort(comparators[sortKey]); + + return filter.limit !== undefined ? result.slice(0, filter.limit) : result; +} + +// "delta" sorts by absolute deviation from 1× — biggest changes (in +// either direction) at the top. "p95-a" / "p95-b" sort by the named side +// descending. All keys put nulls last. +function descNullsLast(a: number | null, b: number | null): number { + if (a == null && b == null) return 0; + if (a == null) return 1; + if (b == null) return -1; + return b - a; +} + +const comparators: Record number> = { + delta: (x, y) => { + const xd = x.delta == null ? null : Math.abs(Math.log(x.delta)); + const yd = y.delta == null ? 
null : Math.abs(Math.log(y.delta)); + return descNullsLast(xd, yd); + }, + 'p95-a': (x, y) => descNullsLast(x.a_p95_ms, y.a_p95_ms), + 'p95-b': (x, y) => descNullsLast(x.b_p95_ms, y.b_p95_ms), +}; diff --git a/packages/cli/src/cmds/query/queries/related.ts b/packages/cli/src/cmds/query/queries/related.ts new file mode 100644 index 0000000000..dd6021f761 --- /dev/null +++ b/packages/cli/src/cmds/query/queries/related.ts @@ -0,0 +1,194 @@ +import sqlite3 from 'better-sqlite3'; + +import { appmapWhere, httpScopeClauses, RecordingScope } from '../lib/scope'; +import { resolveAppmap } from './tree'; + +// Score weights, from V3: +// same HTTP route ×5 (binary) +// same SQL tables ×3 (per shared table) +// same classes ×2 (per shared class) +const ROUTE_WEIGHT = 5; +const TABLE_WEIGHT = 3; +const CLASS_WEIGHT = 2; + +// Heuristic table-name extraction. Matches identifiers following +// FROM/JOIN/INTO/UPDATE; strips a single leading schema qualifier and +// lowercases for case-insensitive matching. Imperfect (won't handle +// nested subqueries / unusual quoting cleanly) but adequate for the +// similarity score, which is itself a heuristic. 
+const TABLE_PATTERN = /\b(?:FROM|JOIN|INTO|UPDATE)\s+["`]?(?:\w+\.)?(\w+)["`]?/gi; + +export function extractTables(sqlText: string): Set { + const tables = new Set(); + TABLE_PATTERN.lastIndex = 0; + let m: RegExpExecArray | null; + while ((m = TABLE_PATTERN.exec(sqlText)) !== null) { + tables.add(m[1].toLowerCase()); + } + return tables; +} + +export interface RelatedRow { + appmap_name: string; + score: number; + method: string | null; + route: string | null; + status_code: number | null; + elapsed_ms: number | null; + shared: string[]; +} + +export interface RelatedFilter extends RecordingScope { + limit?: number; +} + +interface AppmapSig { + id: number; + name: string; + method: string | null; + route: string | null; + status_code: number | null; + elapsed_ms: number | null; + tables: Set; + classes: Set; +} + +// Strip trailing class name from a defined_class string. Handles Java/Python +// dot-form ("org.example.X" → "X") and Ruby/C++ chain ("Foo::Bar" → "Bar"). +function leafFromDefinedClass(s: string): string { + const ddIdx = s.lastIndexOf('::'); + const dotIdx = s.lastIndexOf('.'); + const idx = Math.max(ddIdx, dotIdx); + return idx >= 0 ? s.slice(idx + (s[idx] === ':' ? 
2 : 1)) : s; +} + +function loadSignature(db: sqlite3.Database, appmapId: number): AppmapSig { + const meta = db + .prepare( + `SELECT a.id, a.name, a.elapsed_ms, + (SELECT h.method FROM http_requests h WHERE h.appmap_id = a.id + ORDER BY h.event_id LIMIT 1) AS method, + (SELECT COALESCE(h.normalized_path, h.path) FROM http_requests h + WHERE h.appmap_id = a.id ORDER BY h.event_id LIMIT 1) AS route, + (SELECT h.status_code FROM http_requests h + WHERE h.appmap_id = a.id ORDER BY h.event_id LIMIT 1) AS status_code + FROM appmaps a WHERE a.id = ?` + ) + .get(appmapId) as { + id: number; + name: string; + elapsed_ms: number | null; + method: string | null; + route: string | null; + status_code: number | null; + }; + + const sqlRows = db + .prepare(`SELECT sql_text FROM sql_queries WHERE appmap_id = ?`) + .all(appmapId) as { sql_text: string }[]; + const tables = new Set(); + for (const r of sqlRows) for (const t of extractTables(r.sql_text)) tables.add(t); + + const classes = new Set(); + for (const r of db + .prepare( + `SELECT DISTINCT co.leaf_class AS name FROM code_objects co + JOIN function_calls fc ON fc.code_object_id = co.id + WHERE fc.appmap_id = ?` + ) + .all(appmapId) as { name: string }[]) { + if (r.name) classes.add(r.name); + } + // Fall back to defined_class for unlinked rows so sparsely-linked + // recordings still contribute classes to the score. + for (const r of db + .prepare( + `SELECT DISTINCT fc.defined_class AS name FROM function_calls fc + WHERE fc.appmap_id = ? 
AND fc.code_object_id IS NULL` + ) + .all(appmapId) as { name: string }[]) { + if (r.name) classes.add(leafFromDefinedClass(r.name)); + } + + return { ...meta, tables, classes }; +} + +export function related( + db: sqlite3.Database, + sourceRef: string, + filter: RelatedFilter = {} +): RelatedRow[] { + const source = resolveAppmap(db, sourceRef); + const sourceSig = loadSignature(db, source.id); + + // Candidate pool: appmaps matching recording-level / http filters, + // excluding the source itself. + const a = appmapWhere(filter, 'a'); + const h = httpScopeClauses(filter); + + const whereParts: string[] = ['a.id != ?']; + const params: (string | number)[] = [source.id]; + + whereParts.push(...a.where); + params.push(...a.params); + + if (h.where.length > 0) { + whereParts.push(`EXISTS ( + SELECT 1 FROM http_requests h WHERE h.appmap_id = a.id AND ${h.where.join(' AND ')} + )`); + params.push(...h.params); + } + + const candidates = db + .prepare(`SELECT a.id FROM appmaps a WHERE ${whereParts.join(' AND ')} ORDER BY a.id`) + .all(...params) as { id: number }[]; + + const scored: RelatedRow[] = []; + for (const c of candidates) { + const sig = loadSignature(db, c.id); + + let score = 0; + const shared: string[] = []; + + // Route match (binary). Method is part of the comparison only if the + // source has one — recordings without an http_server_request are + // matched purely on path. 
+ if ( + sourceSig.route && + sig.route === sourceSig.route && + (!sourceSig.method || sig.method === sourceSig.method) + ) { + score += ROUTE_WEIGHT; + shared.push('route'); + } + + for (const t of sig.tables) { + if (sourceSig.tables.has(t)) { + score += TABLE_WEIGHT; + shared.push(t); + } + } + + for (const cls of sig.classes) { + if (sourceSig.classes.has(cls)) { + score += CLASS_WEIGHT; + shared.push(cls); + } + } + + if (score > 0) { + scored.push({ + appmap_name: sig.name, + score, + method: sig.method, + route: sig.route, + status_code: sig.status_code, + elapsed_ms: sig.elapsed_ms, + shared, + }); + } + } + + scored.sort((a, b) => b.score - a.score); + return filter.limit !== undefined ? scored.slice(0, filter.limit) : scored; +} diff --git a/packages/cli/src/cmds/query/query.ts b/packages/cli/src/cmds/query/query.ts index cd1779a51c..83f271ea98 100644 --- a/packages/cli/src/cmds/query/query.ts +++ b/packages/cli/src/cmds/query/query.ts @@ -1,8 +1,10 @@ import yargs from 'yargs'; +import * as CompareVerb from './verbs/compare'; import * as EndpointsVerb from './verbs/endpoints'; import * as FindVerb from './verbs/find'; import * as HotspotsVerb from './verbs/hotspots'; +import * as RelatedVerb from './verbs/related'; import * as TreeVerb from './verbs/tree'; export const command = 'query'; @@ -10,9 +12,11 @@ export const describe = 'Query AppMap recordings (endpoints, find, tree, related export const builder = (args: yargs.Argv) => args + .command(CompareVerb) .command(EndpointsVerb) .command(FindVerb) .command(HotspotsVerb) + .command(RelatedVerb) .command(TreeVerb) .demandCommand(1, 'specify a query verb') .strict(); diff --git a/packages/cli/src/cmds/query/verbs/compare.ts b/packages/cli/src/cmds/query/verbs/compare.ts new file mode 100644 index 0000000000..dc6dfc7e5e --- /dev/null +++ b/packages/cli/src/cmds/query/verbs/compare.ts @@ -0,0 +1,115 @@ +import yargs from 'yargs'; +import { log } from 'console'; + +import { handleWorkingDirectory } from 
'../../../lib/handleWorkingDirectory'; +import { locateAppMapDir } from '../../../lib/locateAppMapDir'; +import { verbose } from '../../../utils'; +import { openReadOnly } from '../lib/openReadOnly'; +import { parseTime } from '../lib/parseFilter'; +import { + compare, + CompareFilter, + CompareRow, + CompareSort, +} from '../queries/compare'; +import { formatCount, formatMs, formatTable } from '../lib/format'; + +export const command = 'compare '; +export const describe = 'Per-route latency delta between two branches'; + +export const builder = (args: yargs.Argv) => { + return args + .positional('branch-a', { type: 'string', describe: 'baseline branch' }) + .positional('branch-b', { type: 'string', describe: 'comparison branch' }) + .option('directory', { type: 'string', alias: 'd' }) + .option('appmap-dir', { type: 'string' }) + .option('query-db', { type: 'string', describe: 'path to query.db (overrides default)' }) + .option('since', { type: 'string' }) + .option('until', { type: 'string' }) + .option('sort', { + type: 'string', + choices: ['delta', 'p95-a', 'p95-b'] as const, + default: 'delta', + }) + .option('include-counts', { type: 'boolean', default: false }) + .option('limit', { type: 'number' }) + .option('json', { type: 'boolean', default: false }); +}; + +type Argv = ReturnType extends yargs.Argv ? T : never; + +export const handler = async (argvIn: yargs.ArgumentsCamelCase): Promise => { + const argv = argvIn as yargs.ArgumentsCamelCase; + verbose(argv.verbose as boolean | undefined); + handleWorkingDirectory(argv.directory); + const appmapDir = argv.queryDb ? 
'' : await locateAppMapDir(argv.appmapDir); + + const branchA = argv.branchA as string | undefined; + const branchB = argv.branchB as string | undefined; + if (!branchA || !branchB) throw new Error(' and are required'); + + const filter: CompareFilter = { + branch_a: branchA, + branch_b: branchB, + sort: argv.sort as CompareSort, + }; + if (argv.since) filter.since = parseTime(argv.since); + if (argv.until) filter.until = parseTime(argv.until); + if (argv.limit !== undefined) filter.limit = argv.limit; + + const db = openReadOnly(appmapDir, argv.queryDb); + try { + const rows = compare(db, filter); + if (argv.json) { + log(JSON.stringify(rows, null, 2)); + return; + } + log(renderCompare(rows, branchA, branchB, !!argv.includeCounts)); + } finally { + db.close(); + } +}; + +// Format a delta ratio (b_p95 / a_p95) for display: +// ~ if 0.8 ≤ ratio ≤ 1.25 (no meaningful change) +// +Nx / -Nx for ≥2× or ≤0.5 +// +N% / -N% otherwise +// ? if delta is null +function formatDelta(d: number | null): string { + if (d == null) return '?'; + if (d >= 0.8 && d <= 1.25) return '~'; + if (d >= 2) return `+${d.toFixed(1)}×`; + if (d <= 0.5) return `-${(1 / d).toFixed(1)}×`; + const pct = (d - 1) * 100; + return `${pct > 0 ? 
'+' : ''}${pct.toFixed(0)}%`; +} + +function renderCompare( + rows: readonly CompareRow[], + branchA: string, + branchB: string, + includeCounts: boolean +): string { + if (includeCounts) { + return formatTable( + ['ROUTE', `${branchA}_p95`, `${branchA}_n`, `${branchB}_p95`, `${branchB}_n`, 'Δ'], + rows.map((r) => [ + `${r.method} ${r.route}`, + formatMs(r.a_p95_ms), + formatCount(r.a_count), + formatMs(r.b_p95_ms), + formatCount(r.b_count), + formatDelta(r.delta), + ]) + ); + } + return formatTable( + ['ROUTE', `${branchA}_p95`, `${branchB}_p95`, 'Δ'], + rows.map((r) => [ + `${r.method} ${r.route}`, + formatMs(r.a_p95_ms), + formatMs(r.b_p95_ms), + formatDelta(r.delta), + ]) + ); +} diff --git a/packages/cli/src/cmds/query/verbs/related.ts b/packages/cli/src/cmds/query/verbs/related.ts new file mode 100644 index 0000000000..aeb1d0c3a9 --- /dev/null +++ b/packages/cli/src/cmds/query/verbs/related.ts @@ -0,0 +1,84 @@ +import yargs from 'yargs'; +import { log } from 'console'; + +import { handleWorkingDirectory } from '../../../lib/handleWorkingDirectory'; +import { locateAppMapDir } from '../../../lib/locateAppMapDir'; +import { verbose } from '../../../utils'; +import { openReadOnly } from '../lib/openReadOnly'; +import { parseStatus, parseTime } from '../lib/parseFilter'; +import { related, RelatedFilter, RelatedRow } from '../queries/related'; +import { formatCount, formatMs, formatTable } from '../lib/format'; + +export const command = 'related '; +export const describe = 'Rank recordings similar to '; + +export const builder = (args: yargs.Argv) => { + return args + .positional('appmap', { type: 'string', describe: 'source appmap (name or basename)' }) + .option('directory', { type: 'string', alias: 'd' }) + .option('appmap-dir', { type: 'string' }) + .option('query-db', { type: 'string', describe: 'path to query.db (overrides default)' }) + .option('branch', { type: 'string' }) + .option('commit', { type: 'string' }) + .option('since', { type: 'string' }) + 
.option('until', { type: 'string' }) + .option('status', { + type: 'string', + describe: + 'route filter — e.g. 200, ">=500" (route is shown if any request matches)', + }) + .option('route', { + type: 'string', + describe: 'e.g. "POST /orders" (path is exact match; method case-insensitive)', + }) + .option('limit', { type: 'number' }) + .option('json', { type: 'boolean', default: false }); +}; + +type Argv = ReturnType extends yargs.Argv ? T : never; + +// Widened at the export so this module is assignable to CommandModule. +export const handler = async (argvIn: yargs.ArgumentsCamelCase): Promise => { + const argv = argvIn as yargs.ArgumentsCamelCase; + verbose(argv.verbose as boolean | undefined); + handleWorkingDirectory(argv.directory); + const appmapDir = argv.queryDb ? '' : await locateAppMapDir(argv.appmapDir); + + const ref = argv.appmap; + if (!ref) throw new Error(' is required'); + + const filter: RelatedFilter = {}; + if (argv.branch) filter.branch = argv.branch; + if (argv.commit) filter.commit = argv.commit; + if (argv.since) filter.since = parseTime(argv.since); + if (argv.until) filter.until = parseTime(argv.until); + if (argv.status) filter.status = parseStatus(argv.status); + if (argv.route) filter.route = argv.route; + if (argv.limit !== undefined) filter.limit = argv.limit; + + const db = openReadOnly(appmapDir, argv.queryDb); + try { + const rows = related(db, ref, filter); + if (argv.json) { + log(JSON.stringify(rows, null, 2)); + return; + } + log(renderRelated(rows)); + } finally { + db.close(); + } +}; + +function renderRelated(rows: readonly RelatedRow[]): string { + return formatTable( + ['APPMAP', 'SCORE', 'ROUTE', 'STATUS', 'ELAPSED', 'SHARED'], + rows.map((r) => [ + r.appmap_name, + formatCount(r.score), + r.method && r.route ? `${r.method} ${r.route}` : r.route ?? '', + r.status_code != null ? 
String(r.status_code) : '', + formatMs(r.elapsed_ms), + r.shared.join(', '), + ]) + ); +} diff --git a/packages/cli/tests/unit/cmds/query/queries/compare.spec.ts b/packages/cli/tests/unit/cmds/query/queries/compare.spec.ts new file mode 100644 index 0000000000..1a260621ef --- /dev/null +++ b/packages/cli/tests/unit/cmds/query/queries/compare.spec.ts @@ -0,0 +1,133 @@ +import sqlite3 from 'better-sqlite3'; + +import { openQueryDb } from '../../../../../src/cmds/query/db/openQueryDb'; +import { compare } from '../../../../../src/cmds/query/queries/compare'; + +function freshDb(): sqlite3.Database { + return openQueryDb('/tmp/ignored', ':memory:').db; +} + +interface SeedReq { + branch: string; + method: string; + path: string; + status: number; + elapsed_ms?: number; +} + +let nextEvent = 1; +function seed(db: sqlite3.Database, reqs: SeedReq[]): void { + const insAm = db.prepare( + `INSERT INTO appmaps (name, source_path, git_branch, timestamp) VALUES (?, ?, ?, ?)` + ); + const insReq = db.prepare( + `INSERT INTO http_requests (appmap_id, event_id, method, path, status_code, elapsed_ms) + VALUES (?, ?, ?, ?, ?, ?)` + ); + for (let i = 0; i < reqs.length; i++) { + const r = reqs[i]; + const am = insAm.run( + `rec-${i}`, + `/tmp/rec-${i}.appmap.json`, + r.branch, + '2026-04-29T12:00:00.000Z' + ); + insReq.run( + am.lastInsertRowid, + nextEvent++, + r.method, + r.path, + r.status, + r.elapsed_ms ?? null + ); + } +} + +describe('compare', () => { + beforeEach(() => { + nextEvent = 1; + }); + + it('reports a per-route delta = b_p95 / a_p95', () => { + const db = freshDb(); + try { + // Same route on both branches: main is fast, feature is slow. 
+ seed(db, [ + { branch: 'main', method: 'GET', path: '/reports', status: 200, elapsed_ms: 200 }, + { branch: 'main', method: 'GET', path: '/reports', status: 200, elapsed_ms: 210 }, + { branch: 'feat', method: 'GET', path: '/reports', status: 200, elapsed_ms: 6000 }, + { branch: 'feat', method: 'GET', path: '/reports', status: 200, elapsed_ms: 6100 }, + ]); + const rows = compare(db, { branch_a: 'main', branch_b: 'feat' }); + expect(rows).toHaveLength(1); + const r = rows[0]; + expect(r.method).toBe('GET'); + expect(r.route).toBe('/reports'); + expect(r.delta).not.toBeNull(); + // ~30× slowdown + expect(r.delta!).toBeGreaterThan(20); + } finally { + db.close(); + } + }); + + it('preserves routes that exist on only one side', () => { + const db = freshDb(); + try { + seed(db, [ + { branch: 'main', method: 'GET', path: '/old', status: 200, elapsed_ms: 100 }, + { branch: 'feat', method: 'GET', path: '/new', status: 200, elapsed_ms: 50 }, + ]); + const rows = compare(db, { branch_a: 'main', branch_b: 'feat' }); + const old = rows.find((r) => r.route === '/old')!; + const fresh = rows.find((r) => r.route === '/new')!; + expect(old.a_p95_ms).toBe(100); + expect(old.b_p95_ms).toBeNull(); + expect(old.delta).toBeNull(); + expect(fresh.a_p95_ms).toBeNull(); + expect(fresh.b_p95_ms).toBe(50); + expect(fresh.delta).toBeNull(); + } finally { + db.close(); + } + }); + + it('--sort=delta puts the biggest absolute change first (in either direction)', () => { + const db = freshDb(); + try { + seed(db, [ + // /a: 10× slowdown + { branch: 'main', method: 'GET', path: '/a', status: 200, elapsed_ms: 100 }, + { branch: 'feat', method: 'GET', path: '/a', status: 200, elapsed_ms: 1000 }, + // /b: 5× speedup + { branch: 'main', method: 'GET', path: '/b', status: 200, elapsed_ms: 500 }, + { branch: 'feat', method: 'GET', path: '/b', status: 200, elapsed_ms: 100 }, + // /c: ~unchanged + { branch: 'main', method: 'GET', path: '/c', status: 200, elapsed_ms: 100 }, + { branch: 'feat', method: 
'GET', path: '/c', status: 200, elapsed_ms: 105 }, + ]); + const rows = compare(db, { branch_a: 'main', branch_b: 'feat', sort: 'delta' }); + // /a (10×) and /b (1/5×) have the largest log-delta; /c last. + expect(rows[rows.length - 1].route).toBe('/c'); + } finally { + db.close(); + } + }); + + it('--limit caps the result set', () => { + const db = freshDb(); + try { + seed(db, [ + { branch: 'main', method: 'GET', path: '/a', status: 200, elapsed_ms: 100 }, + { branch: 'main', method: 'GET', path: '/b', status: 200, elapsed_ms: 100 }, + { branch: 'feat', method: 'GET', path: '/a', status: 200, elapsed_ms: 100 }, + { branch: 'feat', method: 'GET', path: '/b', status: 200, elapsed_ms: 100 }, + ]); + expect( + compare(db, { branch_a: 'main', branch_b: 'feat', limit: 1 }) + ).toHaveLength(1); + } finally { + db.close(); + } + }); +}); diff --git a/packages/cli/tests/unit/cmds/query/queries/related.spec.ts b/packages/cli/tests/unit/cmds/query/queries/related.spec.ts new file mode 100644 index 0000000000..017fa441a0 --- /dev/null +++ b/packages/cli/tests/unit/cmds/query/queries/related.spec.ts @@ -0,0 +1,205 @@ +import sqlite3 from 'better-sqlite3'; + +import { openQueryDb } from '../../../../../src/cmds/query/db/openQueryDb'; +import { + extractTables, + related, +} from '../../../../../src/cmds/query/queries/related'; + +function freshDb(): sqlite3.Database { + return openQueryDb('/tmp/ignored', ':memory:').db; +} + +interface SeedRecording { + name: string; + branch?: string; + request?: { method: string; path: string; status: number; elapsed_ms?: number }; + sqls?: string[]; + classes?: string[]; // leaf names; we register code_objects for each +} + +function seed(db: sqlite3.Database, recs: SeedRecording[]): void { + const insAm = db.prepare( + `INSERT INTO appmaps (name, source_path, git_branch, elapsed_ms) VALUES (?, ?, ?, ?)` + ); + const insReq = db.prepare( + `INSERT INTO http_requests (appmap_id, event_id, method, path, status_code, elapsed_ms) + VALUES (?, 
1, ?, ?, ?, ?)` + ); + const insQ = db.prepare( + `INSERT INTO sql_queries (appmap_id, event_id, sql_text) VALUES (?, ?, ?)` + ); + const insCo = db.prepare( + `INSERT OR IGNORE INTO code_objects (fqid, package, classes, leaf_class, method, is_static) + VALUES (?, ?, ?, ?, ?, ?)` + ); + const selCo = db.prepare(`SELECT id FROM code_objects WHERE fqid = ?`); + const insCall = db.prepare( + `INSERT INTO function_calls (appmap_id, event_id, defined_class, method_id, code_object_id) + VALUES (?, ?, ?, ?, ?)` + ); + + let nextEvent = 100; + for (const r of recs) { + const am = insAm.run( + r.name, + `/tmp/${r.name}.appmap.json`, + r.branch ?? null, + r.request?.elapsed_ms ?? null + ); + const aid = Number(am.lastInsertRowid); + if (r.request) { + insReq.run( + aid, + r.request.method, + r.request.path, + r.request.status, + r.request.elapsed_ms ?? null + ); + } + for (const sql of r.sqls ?? []) { + insQ.run(aid, nextEvent++, sql); + } + for (const cls of r.classes ?? []) { + const fqid = `app/${cls}#m`; + insCo.run(fqid, 'app', JSON.stringify([cls]), cls, 'm', 0); + const co = selCo.get(fqid) as { id: number }; + insCall.run(aid, nextEvent++, cls, 'm', co.id); + } + } +} + +describe('extractTables', () => { + it('extracts FROM/JOIN/INTO/UPDATE table names case-insensitively', () => { + const sql = + "SELECT * FROM users u JOIN orders o ON u.id = o.user_id WHERE u.id = 1; INSERT INTO logs VALUES (1); UPDATE Sessions SET x=1"; + expect([...extractTables(sql)].sort()).toEqual(['logs', 'orders', 'sessions', 'users']); + }); + + it('strips a single leading schema qualifier', () => { + expect(extractTables('SELECT * FROM public.orders')).toEqual(new Set(['orders'])); + }); +}); + +describe('related', () => { + it('scores by route + tables + classes; excludes the source', () => { + const db = freshDb(); + try { + seed(db, [ + { + name: 'source', + request: { method: 'POST', path: '/orders', status: 500 }, + sqls: ['INSERT INTO orders VALUES (1)', 'SELECT * FROM users WHERE id 
= 1'], + classes: ['OrdersController', 'IdempotencyKey'], + }, + { + name: 'best', + request: { method: 'POST', path: '/orders', status: 201, elapsed_ms: 140 }, + sqls: ['INSERT INTO orders VALUES (1)', 'SELECT * FROM users WHERE id = 1'], + classes: ['OrdersController', 'IdempotencyKey'], + }, + { + name: 'partial', + request: { method: 'POST', path: '/orders', status: 201 }, + sqls: ['INSERT INTO orders VALUES (1)'], + classes: ['OrdersController'], + }, + { + name: 'unrelated', + request: { method: 'GET', path: '/healthz', status: 200 }, + sqls: [], + classes: ['HealthController'], + }, + ]); + const rows = related(db, 'source'); + expect(rows.find((r) => r.appmap_name === 'source')).toBeUndefined(); + expect(rows.find((r) => r.appmap_name === 'unrelated')).toBeUndefined(); + const best = rows.find((r) => r.appmap_name === 'best')!; + const partial = rows.find((r) => r.appmap_name === 'partial')!; + // best: route(5) + 2 tables*3 + 2 classes*2 = 15 + expect(best.score).toBe(15); + expect(best.shared).toContain('route'); + expect(best.shared).toContain('orders'); + expect(best.shared).toContain('users'); + expect(best.shared).toContain('OrdersController'); + // partial: route(5) + 1 table*3 + 1 class*2 = 10 + expect(partial.score).toBe(10); + // best ranks first + expect(rows[0].appmap_name).toBe('best'); + } finally { + db.close(); + } + }); + + it('--branch scopes the candidate pool', () => { + const db = freshDb(); + try { + seed(db, [ + { + name: 'src', + branch: 'main', + request: { method: 'GET', path: '/x', status: 200 }, + classes: ['Foo'], + }, + { + name: 'main_match', + branch: 'main', + request: { method: 'GET', path: '/x', status: 200 }, + classes: ['Foo'], + }, + { + name: 'feature_match', + branch: 'feature', + request: { method: 'GET', path: '/x', status: 200 }, + classes: ['Foo'], + }, + ]); + const rows = related(db, 'src', { branch: 'main' }); + expect(rows.map((r) => r.appmap_name)).toEqual(['main_match']); + } finally { + db.close(); + } + 
}); + + it('--status filters candidates to recordings with a matching response', () => { + const db = freshDb(); + try { + seed(db, [ + { + name: 'src', + request: { method: 'POST', path: '/orders', status: 500 }, + classes: ['Foo'], + }, + { + name: 'succeeded', + request: { method: 'POST', path: '/orders', status: 201 }, + classes: ['Foo'], + }, + { + name: 'also_failed', + request: { method: 'POST', path: '/orders', status: 500 }, + classes: ['Foo'], + }, + ]); + const rows = related(db, 'src', { status: { op: '<', value: 400 } }); + expect(rows.map((r) => r.appmap_name)).toEqual(['succeeded']); + } finally { + db.close(); + } + }); + + it('--limit caps the result set', () => { + const db = freshDb(); + try { + seed(db, [ + { name: 'src', request: { method: 'GET', path: '/x', status: 200 }, classes: ['A', 'B'] }, + { name: 'a', request: { method: 'GET', path: '/x', status: 200 }, classes: ['A', 'B'] }, + { name: 'b', request: { method: 'GET', path: '/x', status: 200 }, classes: ['A'] }, + { name: 'c', request: { method: 'GET', path: '/x', status: 200 }, classes: ['B'] }, + ]); + expect(related(db, 'src', { limit: 2 })).toHaveLength(2); + } finally { + db.close(); + } + }); +}); From a63b8a3f3cc27eb2f663e382d3a4497369c3390b Mon Sep 17 00:00:00 2001 From: kgilpin Date: Sat, 2 May 2026 16:09:09 -0400 Subject: [PATCH 16/30] feat(query/tree): focus, ancestors/descendants, min-elapsed-ms MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit `tree` previously dumped every event for the recording — fine on a 10-event test fixture, useless on a real production trace with thousands of events. V3 lists --focus-fn / --focus-sql / --ancestors / --descendants for exactly this; we'd just deferred them. 
Adds focus options to the tree() query function: --focus-fn function calls matching code_object.fqid --focus-sql sql_queries whose text contains --focus-route server requests matching normalized_path --focus-url outbound HTTP whose URL contains --ancestors N ancestor levels above each match (default 5) --descendants N descendant levels below each match (default 3) --min-elapsed-ms N prune subtrees with max elapsed below N Multiple focus flags allowed; their match sets are unioned. The filtered subset includes: focus events + N ancestors + the direct children of every ancestor (so siblings of the focus are visible) + M descendants of focus. Depth is recomputed relative to the highest included ancestor so indentation reads cleanly. --min-elapsed-ms walks subtrees post-order and drops any whose entire branch's max elapsed is below the threshold — useful for trimming traces dominated by fast leaf calls. Tests cover each focus type, depth budget edges (ancestors=1, descendants=0), multi-focus union, min-elapsed pruning, no-match returning empty, and the relative-depth reanchoring. 190 total passing. Co-Authored-By: Claude Opus 4.7 (1M context) --- packages/cli/src/cmds/query/queries/tree.ts | 199 +++++++++++++++++- packages/cli/src/cmds/query/verbs/tree.ts | 41 +++- .../unit/cmds/query/queries/tree.spec.ts | 199 ++++++++++++++++++ 3 files changed, 433 insertions(+), 6 deletions(-) diff --git a/packages/cli/src/cmds/query/queries/tree.ts b/packages/cli/src/cmds/query/queries/tree.ts index 67eca9c574..86f01e835d 100644 --- a/packages/cli/src/cmds/query/queries/tree.ts +++ b/packages/cli/src/cmds/query/queries/tree.ts @@ -88,9 +88,33 @@ export function resolveAppmap(db: sqlite3.Database, ref: string): AppmapInfo { return rows[0]; } +export interface TreeOptions { + // Focus criteria — multiple may be supplied; results are the union of + // matches' neighborhoods. Without any focus, the full tree is returned. 
+ focusFn?: string; // exact code_object fqid + focusSql?: string; // case-insensitive substring of sql_text + focusRoute?: string; // normalized_path (or raw path) of a server request + focusUrl?: string; // case-insensitive substring of an outbound URL + + // Depth budgets, in effect only when focus is active. + ancestors?: number; // ancestor levels to keep above each match (default 5) + descendants?: number; // descendant levels below each match (default 3) + + // Prune subtrees whose maximum elapsed time is below this threshold — + // useful for trimming traces dominated by fast leaf calls. + minElapsedMs?: number; +} + +const DEFAULT_ANCESTORS = 5; +const DEFAULT_DESCENDANTS = 3; + // Build the flat-but-depth-annotated tree for a recording. Events are // returned in event_id order; consumers can render with indentation. -export function tree(db: sqlite3.Database, appmapRef: string): TreeNode[] { +export function tree( + db: sqlite3.Database, + appmapRef: string, + options: TreeOptions = {} +): TreeNode[] { const am = resolveAppmap(db, appmapRef); const events: TreeNode[] = []; @@ -240,9 +264,26 @@ export function tree(db: sqlite3.Database, appmapRef: string): TreeNode[] { } events.sort((a, b) => a.event_id - b.event_id); + computeDepths(events); + + let result = events; + if (hasFocus(options)) { + result = applyFocus(result, options); + } + if (options.minElapsedMs && options.minElapsedMs > 0) { + result = pruneByElapsed(result, options.minElapsedMs); + } + // Re-anchor depth to the highest included ancestor in the surviving + // set, so the rendered indentation starts at column 0 instead of + // floating wherever the original absolute depth happened to be. + if (result !== events) recomputeDepthsRelative(result); + + return result; +} - // Compute depths in event_id order. Parents come before children, so - // each node's depth is parent's depth + 1 (or 0 if no parent / orphan). 
+function computeDepths(events: TreeNode[]): void { + // Events are sorted by event_id; parents always precede children, so a + // single forward pass suffices. const depthByEventId = new Map(); for (const ev of events) { let depth = 0; @@ -253,8 +294,158 @@ export function tree(db: sqlite3.Database, appmapRef: string): TreeNode[] { ev.depth = depth; depthByEventId.set(ev.event_id, depth); } +} - return events; +function recomputeDepthsRelative(events: readonly TreeNode[]): void { + const includedIds = new Set(events.map((e) => e.event_id)); + const eventsByEventId = new Map(); + for (const e of events) { + if (!eventsByEventId.has(e.event_id)) eventsByEventId.set(e.event_id, e); + } + for (const e of events) { + let d = 0; + let pid = e.parent_event_id; + while (pid !== null && includedIds.has(pid)) { + d += 1; + const parent = eventsByEventId.get(pid); + if (!parent) break; + pid = parent.parent_event_id; + } + e.depth = d; + } +} + +function hasFocus(options: TreeOptions): boolean { + return !!(options.focusFn || options.focusSql || options.focusRoute || options.focusUrl); +} + +function matchesFocus(node: TreeNode, options: TreeOptions): boolean { + if (options.focusFn && node.kind === 'function') { + return node.fqid === options.focusFn; + } + if (options.focusSql && node.kind === 'sql') { + return node.sql_text.toLowerCase().includes(options.focusSql.toLowerCase()); + } + if (options.focusRoute && node.kind === 'http_server') { + return node.route === options.focusRoute; + } + if (options.focusUrl && node.kind === 'http_client') { + return node.url.toLowerCase().includes(options.focusUrl.toLowerCase()); + } + return false; +} + +// Filter `events` to a neighborhood around the focus matches: +// - the matches themselves +// - up to `ancestors` parent levels above each match +// - the direct children of each ancestor (so siblings of the match are visible) +// - up to `descendants` levels below each match +function applyFocus(events: readonly TreeNode[], 
options: TreeOptions): TreeNode[] { + const ancestorBudget = options.ancestors ?? DEFAULT_ANCESTORS; + const descendantBudget = options.descendants ?? DEFAULT_DESCENDANTS; + + // Build helpers. Multiple TreeNodes can share an event_id (e.g. a call + // event can also have an exception attached), so children are keyed + // by event_id and we dedupe. + const nodeByEventId = new Map(); + for (const e of events) { + if (!nodeByEventId.has(e.event_id)) nodeByEventId.set(e.event_id, e); + } + const childrenByParent = new Map>(); + for (const e of events) { + if (e.parent_event_id !== null) { + let bucket = childrenByParent.get(e.parent_event_id); + if (!bucket) { + bucket = new Set(); + childrenByParent.set(e.parent_event_id, bucket); + } + bucket.add(e.event_id); + } + } + + const focusIds = new Set(); + for (const e of events) { + if (matchesFocus(e, options)) focusIds.add(e.event_id); + } + if (focusIds.size === 0) return []; + + const included = new Set(); + for (const fid of focusIds) { + included.add(fid); + + // Walk up to `ancestorBudget` ancestors; record them and remember the + // path so we can include their direct children. + const ancestorIds: number[] = []; + let cur = fid; + for (let i = 0; i < ancestorBudget; i++) { + const node = nodeByEventId.get(cur); + if (!node || node.parent_event_id === null) break; + const parentId = node.parent_event_id; + if (!nodeByEventId.has(parentId)) break; + ancestorIds.push(parentId); + included.add(parentId); + cur = parentId; + } + // Direct children of every ancestor (so the focus's siblings — and + // siblings of every node on the path to root — are visible). + for (const aid of ancestorIds) { + const kids = childrenByParent.get(aid); + if (kids) for (const k of kids) included.add(k); + } + + // Descendants up to `descendantBudget` levels (BFS). 
+ const queue: { id: number; depth: number }[] = [{ id: fid, depth: 0 }]; + while (queue.length > 0) { + const next = queue.shift(); + if (!next) break; + if (next.depth >= descendantBudget) continue; + const kids = childrenByParent.get(next.id); + if (!kids) continue; + for (const k of kids) { + if (!included.has(k)) { + included.add(k); + queue.push({ id: k, depth: next.depth + 1 }); + } + } + } + } + + return events.filter((e) => included.has(e.event_id)); +} + +// Prune subtrees whose entire branch's maximum elapsed_ms is below the +// threshold. A node is kept iff it (or any of its descendants) has an +// elapsed_ms ≥ threshold. Events without elapsed (exceptions, http +// requests with no return) are kept iff their owning subtree qualifies. +function pruneByElapsed(events: readonly TreeNode[], minMs: number): TreeNode[] { + const childrenByParent = new Map>(); + for (const e of events) { + if (e.parent_event_id !== null) { + let bucket = childrenByParent.get(e.parent_event_id); + if (!bucket) { + bucket = new Set(); + childrenByParent.set(e.parent_event_id, bucket); + } + bucket.add(e.event_id); + } + } + const elapsedById = new Map(); + for (const e of events) { + const cur = elapsedById.get(e.event_id) ?? 0; + const here = 'elapsed_ms' in e && typeof e.elapsed_ms === 'number' ? e.elapsed_ms : 0; + if (here > cur) elapsedById.set(e.event_id, here); + } + const maxByEventId = new Map(); + function maxFor(id: number): number { + const cached = maxByEventId.get(id); + if (cached !== undefined) return cached; + let m = elapsedById.get(id) ?? 
0; + const kids = childrenByParent.get(id); + if (kids) for (const k of kids) m = Math.max(m, maxFor(k)); + maxByEventId.set(id, m); + return m; + } + return events.filter((e) => maxFor(e.event_id) >= minMs); } export interface TreeSummary { diff --git a/packages/cli/src/cmds/query/verbs/tree.ts b/packages/cli/src/cmds/query/verbs/tree.ts index 5b456597a1..d3d3768ac8 100644 --- a/packages/cli/src/cmds/query/verbs/tree.ts +++ b/packages/cli/src/cmds/query/verbs/tree.ts @@ -5,7 +5,7 @@ import { handleWorkingDirectory } from '../../../lib/handleWorkingDirectory'; import { locateAppMapDir } from '../../../lib/locateAppMapDir'; import { verbose } from '../../../utils'; import { openReadOnly } from '../lib/openReadOnly'; -import { tree, treeSummary, TreeNode } from '../queries/tree'; +import { tree, treeSummary, TreeNode, TreeOptions } from '../queries/tree'; import { renderFlat, renderSummary, renderTree } from '../lib/treeRender'; export const command = 'tree '; @@ -27,6 +27,34 @@ export const builder = (args: yargs.Argv) => { choices: ['all', 'http', 'sql'] as const, default: 'all', }) + .option('focus-fn', { + type: 'string', + describe: 'centre on function calls matching this fqid', + }) + .option('focus-sql', { + type: 'string', + describe: 'centre on SQL queries containing this substring', + }) + .option('focus-route', { + type: 'string', + describe: 'centre on a server request matching this normalized path', + }) + .option('focus-url', { + type: 'string', + describe: 'centre on an outbound HTTP call whose URL contains this substring', + }) + .option('ancestors', { + type: 'number', + describe: 'ancestor levels to keep above each focus match (default 5)', + }) + .option('descendants', { + type: 'number', + describe: 'descendant levels below each focus match (default 3)', + }) + .option('min-elapsed-ms', { + type: 'number', + describe: 'prune subtrees whose max elapsed is below this threshold', + }) .option('json', { type: 'boolean', default: false }); }; @@ -59,7 
+87,16 @@ export const handler = async (argvIn: yargs.ArgumentsCamelCase): Promis return; } - const nodes = tree(db, ref); + const treeOptions: TreeOptions = {}; + if (argv.focusFn) treeOptions.focusFn = argv.focusFn as string; + if (argv.focusSql) treeOptions.focusSql = argv.focusSql as string; + if (argv.focusRoute) treeOptions.focusRoute = argv.focusRoute as string; + if (argv.focusUrl) treeOptions.focusUrl = argv.focusUrl as string; + if (argv.ancestors !== undefined) treeOptions.ancestors = argv.ancestors as number; + if (argv.descendants !== undefined) treeOptions.descendants = argv.descendants as number; + if (argv.minElapsedMs !== undefined) treeOptions.minElapsedMs = argv.minElapsedMs as number; + + const nodes = tree(db, ref, treeOptions); const filtered = applyFilter(nodes, argv.filter as 'all' | 'http' | 'sql'); if (argv.json) { log(JSON.stringify(filtered, null, 2)); diff --git a/packages/cli/tests/unit/cmds/query/queries/tree.spec.ts b/packages/cli/tests/unit/cmds/query/queries/tree.spec.ts index 541e8cf137..00898c318b 100644 --- a/packages/cli/tests/unit/cmds/query/queries/tree.spec.ts +++ b/packages/cli/tests/unit/cmds/query/queries/tree.spec.ts @@ -190,6 +190,205 @@ describe('tree', () => { }); }); +describe('tree focus', () => { + // Build a richer recording: HTTP root → controller → 3 sibling calls + // (one of which calls a deeper helper) → SQL + EXC under controller. 
+ function seedRich(db: sqlite3.Database): void { + const am = db + .prepare( + `INSERT INTO appmaps (name, source_path, sql_query_count) VALUES ('rich', '/tmp/rich.appmap.json', 1)` + ) + .run(); + const id = am.lastInsertRowid; + db.prepare( + `INSERT INTO http_requests (appmap_id, event_id, parent_event_id, method, path, status_code, elapsed_ms) + VALUES (?, 1, NULL, 'POST', '/orders', 500, 520.0)` + ).run(id); + db.prepare( + `INSERT INTO function_calls (appmap_id, event_id, parent_event_id, defined_class, method_id, elapsed_ms) + VALUES (?, 2, 1, 'OrdersController', 'create', 519.0)` + ).run(id); + db.prepare( + `INSERT OR IGNORE INTO code_objects (fqid, package, classes, leaf_class, method, is_static) + VALUES ('app/IdempotencyKey.generate', 'app', '["IdempotencyKey"]', 'IdempotencyKey', 'generate', 1)` + ).run(); + const co = (db + .prepare(`SELECT id FROM code_objects WHERE fqid = 'app/IdempotencyKey.generate'`) + .get() as { id: number }).id; + db.prepare( + `INSERT INTO function_calls (appmap_id, event_id, parent_event_id, code_object_id, defined_class, method_id, elapsed_ms) + VALUES (?, 3, 2, ?, 'IdempotencyKey', 'generate', 0.2)` + ).run(id, co); + db.prepare( + `INSERT INTO function_calls (appmap_id, event_id, parent_event_id, defined_class, method_id, elapsed_ms) + VALUES (?, 4, 2, 'Order', 'new', 0.4)` + ).run(id); + db.prepare( + `INSERT INTO sql_queries (appmap_id, event_id, parent_event_id, sql_text, elapsed_ms) + VALUES (?, 5, 2, 'INSERT INTO orders (id, name) VALUES (?, ?)', 14.0)` + ).run(id); + db.prepare( + `INSERT INTO exceptions (appmap_id, event_id, parent_event_id, exception_class, message) + VALUES (?, 2, 1, 'IntegrityError', 'duplicate key')` + ).run(id); + // Add an outbound HTTP call as a separate child of controller + db.prepare( + `INSERT INTO http_client_requests (appmap_id, event_id, parent_event_id, method, url, status_code, elapsed_ms) + VALUES (?, 6, 2, 'GET', 'https://api.example/v1', 200, 40.0)` + ).run(id); + // A deeper 
descendant under IdempotencyKey.generate (event_id 3) + db.prepare( + `INSERT INTO function_calls (appmap_id, event_id, parent_event_id, defined_class, method_id, elapsed_ms) + VALUES (?, 7, 3, 'Digest', 'sha256', 0.05)` + ).run(id); + } + + it('--focus-sql narrows to the matching SQL plus its ancestors and their children', () => { + const db = freshDb(); + try { + seedRich(db); + const nodes = tree(db, 'rich', { focusSql: 'INSERT INTO orders' }); + const ids = new Set(nodes.map((n) => n.event_id)); + // Includes: HTTP (1), controller (2), SQL (5), and the controller's + // direct children (3, 4, 5, 6, 7? No — children of controller are + // 3, 4, 5, 6 only; 7 is a descendant of 3, not a sibling of focus). + expect(ids.has(1)).toBe(true); + expect(ids.has(2)).toBe(true); + expect(ids.has(5)).toBe(true); + expect(ids.has(3)).toBe(true); // sibling + expect(ids.has(4)).toBe(true); // sibling + expect(ids.has(6)).toBe(true); // sibling + // 7 is a child of 3, not of an ancestor of the focus. + expect(ids.has(7)).toBe(false); + } finally { + db.close(); + } + }); + + it('--focus-fn matches by canonical fqid', () => { + const db = freshDb(); + try { + seedRich(db); + const nodes = tree(db, 'rich', { focusFn: 'app/IdempotencyKey.generate' }); + const ids = new Set(nodes.map((n) => n.event_id)); + // Focus event_id 3; ancestors 2, 1; children of ancestors include + // siblings 4, 5, 6 (children of 2). Descendants of 3: 7. + expect(ids.has(1)).toBe(true); + expect(ids.has(2)).toBe(true); + expect(ids.has(3)).toBe(true); + expect(ids.has(4)).toBe(true); + expect(ids.has(5)).toBe(true); + expect(ids.has(6)).toBe(true); + expect(ids.has(7)).toBe(true); // descendant + } finally { + db.close(); + } + }); + + it('--focus-route matches a server request', () => { + const db = freshDb(); + try { + seedRich(db); + const nodes = tree(db, 'rich', { focusRoute: '/orders' }); + // The HTTP request matches; ancestors of HTTP = none; descendants + // of the focus drill down. 
With descendants=3 we get the full tree + // up to depth 3 from the request. + expect(nodes.find((n) => n.kind === 'http_server')).toBeDefined(); + expect(nodes.find((n) => n.kind === 'function' && n.method_id === 'create')).toBeDefined(); + } finally { + db.close(); + } + }); + + it('--focus-url matches an outbound call', () => { + const db = freshDb(); + try { + seedRich(db); + const nodes = tree(db, 'rich', { focusUrl: 'api.example' }); + const ids = new Set(nodes.map((n) => n.event_id)); + expect(ids.has(6)).toBe(true); + } finally { + db.close(); + } + }); + + it('--ancestors=1 trims the path to root', () => { + const db = freshDb(); + try { + seedRich(db); + const nodes = tree(db, 'rich', { focusFn: 'app/IdempotencyKey.generate', ancestors: 1 }); + const ids = new Set(nodes.map((n) => n.event_id)); + // Only controller (1 ancestor) — not HTTP. + expect(ids.has(2)).toBe(true); + expect(ids.has(1)).toBe(false); + } finally { + db.close(); + } + }); + + it('--descendants=0 drops the subtree below focus', () => { + const db = freshDb(); + try { + seedRich(db); + const nodes = tree(db, 'rich', { + focusFn: 'app/IdempotencyKey.generate', + descendants: 0, + }); + const ids = new Set(nodes.map((n) => n.event_id)); + // 7 (descendant of focus) excluded. + expect(ids.has(3)).toBe(true); + expect(ids.has(7)).toBe(false); + } finally { + db.close(); + } + }); + + it('--min-elapsed-ms prunes fast subtrees', () => { + const db = freshDb(); + try { + seedRich(db); + const nodes = tree(db, 'rich', { minElapsedMs: 10 }); + const ids = new Set(nodes.map((n) => n.event_id)); + // SQL (14ms) and outbound HTTP (40ms) survive; IdempotencyKey + // (0.2ms with no fast descendant) and Order.new (0.4ms) are pruned. 
+ expect(ids.has(5)).toBe(true); + expect(ids.has(6)).toBe(true); + expect(ids.has(3)).toBe(false); + expect(ids.has(4)).toBe(false); + } finally { + db.close(); + } + }); + + it('focus with no matches returns no events', () => { + const db = freshDb(); + try { + seedRich(db); + const nodes = tree(db, 'rich', { focusFn: 'app/Nope.nothing' }); + expect(nodes).toEqual([]); + } finally { + db.close(); + } + }); + + it('depths are recomputed relative to the highest included ancestor', () => { + const db = freshDb(); + try { + seedRich(db); + // Without focus, HTTP is depth 0, controller 1, IdempotencyKey 2. + const focused = tree(db, 'rich', { focusFn: 'app/IdempotencyKey.generate', ancestors: 1 }); + // Highest included is controller (event_id=2). It should now be + // depth 0; its child IdempotencyKey (focus) should be depth 1. + const controller = focused.find((n) => n.kind === 'function' && n.method_id === 'create'); + const idem = focused.find((n) => n.kind === 'function' && n.method_id === 'generate'); + expect(controller?.depth).toBe(0); + expect(idem?.depth).toBe(1); + } finally { + db.close(); + } + }); +}); + describe('tree --filter', () => { it('returns only http events when filter=http', () => { const db = freshDb(); From 67b0259c8b5a3f28171fe835341a152c157e7c6c Mon Sep 17 00:00:00 2001 From: kgilpin Date: Sat, 2 May 2026 16:13:16 -0400 Subject: [PATCH 17/30] feat(query/mcp): MCP server exposing the V3 query surface MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit `appmap query mcp` runs an MCP (Model Context Protocol) server on stdio. Tool names match the Python prototype's MCP surface so existing clients work unchanged; under the hood each tool is a thin wrapper around the V3 query functions we already have. 
Tools shipped (9): get_endpoint_detail → find requests filtered by method+path get_slow_queries → find queries, sorted by elapsed desc get_function_hotspots → hotspots(type=function) get_exceptions → find exceptions get_log_events → find calls --label=log get_labeled_events → find calls --label= compare_branches → compare(branch_a, branch_b) get_request_trace → tree() with focus support get_related → related() Resource shipped: appmap://endpoints → endpoints(limit=200) `get_request_trace` accepts the Python signature: focus_type + focus_value (function | sql_query | http_server_request | http_client_request) → maps to the V3 tree focus flags. parent_depth / child_depth / min_elapsed_ms thread through. Works against either a numeric appmap_id or a name/basename ref. Implementation: - queries/mcp.ts: tool registry + JSON-RPC dispatcher (initialize, tools/list, tools/call, resources/list, resources/read, notifications/* are no-op). Hand-rolled — no SDK dependency. ~360 lines. - verbs/mcp.ts: stdio loop. Reads newline-delimited JSON-RPC from stdin, writes responses to stdout, logging only to stderr (so the protocol stream stays clean). - tests/unit/cmds/query/queries/mcp.spec.ts: 11 cases covering initialize, tools/list, tools/call (with content envelope), resources/list, resources/read, unknown method/tool errors, appmap_id resolution by id and name. Smoke test against the PetClinic fixture set returns real hotspot rows; piped JSON-RPC roundtrip verified end-to-end. Total tests: 201 passing. 
Deferred (have V3-CLI workarounds; can add as separate tools later): - search (free-text + structured) - get_dashboard, get_code_context, get_class_context, get_labels Rejected (no findings table in our schema): - get_findings, get_findings_summary Co-Authored-By: Claude Opus 4.7 (1M context) --- packages/cli/src/cmds/query/queries/mcp.ts | 490 ++++++++++++++++++ packages/cli/src/cmds/query/query.ts | 5 +- packages/cli/src/cmds/query/verbs/mcp.ts | 81 +++ .../tests/unit/cmds/query/queries/mcp.spec.ts | 242 +++++++++ 4 files changed, 817 insertions(+), 1 deletion(-) create mode 100644 packages/cli/src/cmds/query/queries/mcp.ts create mode 100644 packages/cli/src/cmds/query/verbs/mcp.ts create mode 100644 packages/cli/tests/unit/cmds/query/queries/mcp.spec.ts diff --git a/packages/cli/src/cmds/query/queries/mcp.ts b/packages/cli/src/cmds/query/queries/mcp.ts new file mode 100644 index 0000000000..1b29b423c7 --- /dev/null +++ b/packages/cli/src/cmds/query/queries/mcp.ts @@ -0,0 +1,490 @@ +// MCP (Model Context Protocol) handler. Exposes the V3 query surface as +// MCP tools and resources, using the Python prototype's tool names so +// existing clients work unchanged. +// +// Wire format: newline-delimited JSON-RPC 2.0 over stdio. This module +// implements the message dispatch logic only; the stdio loop lives in +// the verb so this file stays testable without process I/O. 
+ +import sqlite3 from 'better-sqlite3'; + +import { compare } from './compare'; +import { endpoints } from './endpoints'; +import { + FindCallRow, + FindExceptionRow, + FindFilter, + FindQueryRow, + FindRequestRow, + find, +} from './find'; +import { hotspots } from './hotspots'; +import { related, RelatedFilter } from './related'; +import { resolveAppmap, tree, treeSummary, AppmapInfo, TreeOptions } from './tree'; +import { parseTime } from '../lib/parseFilter'; + +export interface JsonRpcRequest { + jsonrpc: '2.0'; + id?: string | number | null; + method: string; + params?: Record; +} + +export interface JsonRpcResponse { + jsonrpc: '2.0'; + id: string | number | null; + result?: unknown; + error?: { code: number; message: string; data?: unknown }; +} + +interface ToolSpec { + name: string; + description: string; + inputSchema: { + type: 'object'; + properties: Record; + required?: string[]; + }; +} + +interface ToolImpl { + spec: ToolSpec; + handler: (args: Record, db: sqlite3.Database) => unknown; +} + +interface ResourceSpec { + uri: string; + name: string; + description: string; + mimeType: string; +} + +interface ResourceImpl { + spec: ResourceSpec; + read: (db: sqlite3.Database) => unknown; +} + +const SERVER_INFO = { name: 'appmap-query', version: '1.0.0' }; +const PROTOCOL_VERSION = '2024-11-05'; + +// --- helpers ------------------------------------------------------------ + +// Accept either a numeric appmap.id or a name/basename ref. Resolves to +// the appmaps row; throws if missing or ambiguous (the underlying +// resolveAppmap surfaces both messages). +function resolveByIdOrRef(db: sqlite3.Database, idOrRef: unknown): AppmapInfo { + const s = String(idOrRef); + if (/^\d+$/.test(s)) { + const row = db + .prepare(`SELECT id, name, source_path FROM appmaps WHERE id = ?`) + .get(Number(s)) as AppmapInfo | undefined; + if (row) return row; + // Fall through to name match if the numeric id doesn't exist — + // surfaces a clearer error from resolveAppmap. 
+ } + return resolveAppmap(db, s); +} + +function maybeTime(s: unknown): string | undefined { + if (typeof s !== 'string' || s.length === 0) return undefined; + return parseTime(s); +} + +function maybeNumber(n: unknown): number | undefined { + if (typeof n === 'number' && Number.isFinite(n)) return n; + if (typeof n === 'string' && /^-?\d+(\.\d+)?$/.test(n)) return Number(n); + return undefined; +} + +function maybeString(s: unknown): string | undefined { + return typeof s === 'string' && s.length > 0 ? s : undefined; +} + +// --- tools -------------------------------------------------------------- + +// Tool name + description + JSON Schema + handler. Names match the +// Python prototype's MCP surface so existing clients work unchanged. +const TOOLS: ToolImpl[] = [ + { + spec: { + name: 'get_endpoint_detail', + description: + 'Individual HTTP request rows for a (method, path), with status, elapsed, branch, and the recording each came from.', + inputSchema: { + type: 'object', + properties: { + method: { type: 'string', description: 'HTTP method (GET, POST, …).' }, + path: { type: 'string', description: 'Endpoint path (exact normalized_path or path).' }, + since: { type: 'string', description: 'ISO timestamp lower bound.' }, + until: { type: 'string', description: 'ISO timestamp upper bound.' }, + limit: { type: 'integer' }, + }, + required: ['method', 'path'], + }, + }, + handler: (args, db) => { + const filter: FindFilter = { + route: `${String(args.method)} ${String(args.path)}`, + }; + filter.since = maybeTime(args.since); + filter.until = maybeTime(args.until); + filter.limit = maybeNumber(args.limit); + return find(db, 'requests', filter) as FindRequestRow[]; + }, + }, + + { + spec: { + name: 'get_slow_queries', + description: + 'SQL query rows ordered by elapsed time, slowest first. 
Use to find performance bottlenecks at the database layer.', + inputSchema: { + type: 'object', + properties: { + limit: { type: 'integer', description: 'Maximum rows (default 20).' }, + since: { type: 'string' }, + until: { type: 'string' }, + }, + }, + }, + handler: (args, db) => { + const filter: FindFilter = {}; + filter.since = maybeTime(args.since); + filter.until = maybeTime(args.until); + // Sort happens client-side after fetch so we don't pull the entire + // sql_queries table for large corpora; for now we rely on the + // implicit caller LIMIT to bound the work. + const rows = find(db, 'queries', filter) as FindQueryRow[]; + rows.sort((a, b) => (b.elapsed_ms ?? 0) - (a.elapsed_ms ?? 0)); + const limit = maybeNumber(args.limit) ?? 20; + return rows.slice(0, limit); + }, + }, + + { + spec: { + name: 'get_function_hotspots', + description: + 'Functions ranked by total elapsed time across recordings, with calls / total_ms / self_ms columns.', + inputSchema: { + type: 'object', + properties: { + limit: { type: 'integer' }, + since: { type: 'string' }, + until: { type: 'string' }, + }, + }, + }, + handler: (args, db) => + hotspots(db, { + type: 'function', + since: maybeTime(args.since), + until: maybeTime(args.until), + limit: maybeNumber(args.limit) ?? 20, + }), + }, + + { + spec: { + name: 'get_exceptions', + description: + 'Recent exception rows with class, message, source location, and the appmap they were captured in.', + inputSchema: { + type: 'object', + properties: { + limit: { type: 'integer' }, + offset: { type: 'integer' }, + since: { type: 'string' }, + until: { type: 'string' }, + }, + }, + }, + handler: (args, db) => { + const filter: FindFilter = {}; + filter.since = maybeTime(args.since); + filter.until = maybeTime(args.until); + filter.limit = maybeNumber(args.limit) ?? 
50; + filter.offset = maybeNumber(args.offset); + return find(db, 'exceptions', filter) as FindExceptionRow[]; + }, + }, + + { + spec: { + name: 'get_log_events', + description: + 'Function calls labeled "log" — the application log output captured during recording, with parameter values.', + inputSchema: { + type: 'object', + properties: { + appmap_id: { description: 'Optional — filter to one recording (id or name).' }, + limit: { type: 'integer' }, + since: { type: 'string' }, + until: { type: 'string' }, + }, + }, + }, + handler: (args, db) => { + const filter: FindFilter = { label: 'log' }; + if (args.appmap_id !== undefined && args.appmap_id !== null) { + filter.appmap = resolveByIdOrRef(db, args.appmap_id).name; + } + filter.since = maybeTime(args.since); + filter.until = maybeTime(args.until); + filter.limit = maybeNumber(args.limit) ?? 200; + return find(db, 'calls', filter) as FindCallRow[]; + }, + }, + + { + spec: { + name: 'get_labeled_events', + description: + 'Function calls carrying an AppMap label. Common labels: log, security.authentication, security.authorization, dao, secret. Pass an exact label name.', + inputSchema: { + type: 'object', + properties: { + label: { type: 'string' }, + appmap_id: {}, + limit: { type: 'integer' }, + }, + required: ['label'], + }, + }, + handler: (args, db) => { + const filter: FindFilter = { label: String(args.label) }; + if (args.appmap_id !== undefined && args.appmap_id !== null) { + filter.appmap = resolveByIdOrRef(db, args.appmap_id).name; + } + filter.limit = maybeNumber(args.limit) ?? 200; + return find(db, 'calls', filter) as FindCallRow[]; + }, + }, + + { + spec: { + name: 'compare_branches', + description: + 'Per-route p95 latency for two branches with a delta column. Use to surface performance regressions introduced on a feature branch.', + inputSchema: { + type: 'object', + properties: { + branch_a: { type: 'string', description: 'Baseline branch.' 
}, + branch_b: { type: 'string', description: 'Comparison branch.' }, + since: { type: 'string' }, + until: { type: 'string' }, + limit: { type: 'integer' }, + }, + required: ['branch_a', 'branch_b'], + }, + }, + handler: (args, db) => + compare(db, { + branch_a: String(args.branch_a), + branch_b: String(args.branch_b), + since: maybeTime(args.since), + until: maybeTime(args.until), + limit: maybeNumber(args.limit), + }), + }, + + { + spec: { + name: 'get_request_trace', + description: + 'Call tree for one recording. Without focus, returns every event. With focus_type + focus_value, narrows to the neighborhood of matching events. focus_type is one of: function (focus_value = code_object fqid), sql_query (focus_value = SQL substring), http_server_request (focus_value = normalized_path), http_client_request (focus_value = URL substring).', + inputSchema: { + type: 'object', + properties: { + appmap_id: { description: 'Recording id or name.' }, + focus_type: { + type: 'string', + enum: ['function', 'sql_query', 'http_server_request', 'http_client_request'], + }, + focus_value: { type: 'string' }, + parent_depth: { type: 'integer', description: 'Ancestor levels to keep (default 5).' }, + child_depth: { type: 'integer', description: 'Descendant levels to keep (default 3).' 
}, + min_elapsed_ms: { type: 'number' }, + }, + required: ['appmap_id'], + }, + }, + handler: (args, db) => { + const am = resolveByIdOrRef(db, args.appmap_id); + const opts: TreeOptions = {}; + const focusType = maybeString(args.focus_type); + const focusValue = maybeString(args.focus_value); + if (focusType && focusValue) { + if (focusType === 'function') opts.focusFn = focusValue; + else if (focusType === 'sql_query') opts.focusSql = focusValue; + else if (focusType === 'http_server_request') opts.focusRoute = focusValue; + else if (focusType === 'http_client_request') opts.focusUrl = focusValue; + } + opts.ancestors = maybeNumber(args.parent_depth); + opts.descendants = maybeNumber(args.child_depth); + opts.minElapsedMs = maybeNumber(args.min_elapsed_ms); + return tree(db, am.name, opts); + }, + }, + + { + spec: { + name: 'get_related', + description: + 'Recordings ranked by similarity to a source recording. Score combines: same HTTP route (×5), shared SQL tables (×3 each), shared classes (×2 each). Primary use: find passing baselines for a failing recording with --status filter.', + inputSchema: { + type: 'object', + properties: { + appmap_id: { description: 'Source recording (id or name).' }, + status: { type: 'string', description: 'e.g. "200", ">=500".' }, + route: { type: 'string' }, + branch: { type: 'string' }, + since: { type: 'string' }, + until: { type: 'string' }, + limit: { type: 'integer' }, + }, + required: ['appmap_id'], + }, + }, + handler: (args, db) => { + const am = resolveByIdOrRef(db, args.appmap_id); + const filter: RelatedFilter = {}; + if (args.status !== undefined && args.status !== null) { + // parseStatus is in lib/parseFilter; import lazily to avoid a cycle + // (mcp.ts → queries/related → lib/scope → lib/parseFilter is fine). 
+ const { parseStatus } = require('../lib/parseFilter'); + filter.status = parseStatus(String(args.status)); + } + filter.route = maybeString(args.route); + filter.branch = maybeString(args.branch); + filter.since = maybeTime(args.since); + filter.until = maybeTime(args.until); + filter.limit = maybeNumber(args.limit); + return related(db, am.name, filter); + }, + }, +]; + +// --- resources ----------------------------------------------------------- + +const RESOURCES: ResourceImpl[] = [ + { + spec: { + uri: 'appmap://endpoints', + name: 'endpoints', + description: + 'All HTTP endpoints with request count, average latency, p95, and error rate.', + mimeType: 'application/json', + }, + read: (db) => endpoints(db, { limit: 200 }), + }, +]; + +// --- handler ------------------------------------------------------------- + +export interface McpHandler { + (msg: JsonRpcRequest): JsonRpcResponse | null; +} + +// Build a JSON-RPC dispatcher backed by the given DB. Returns null for +// notifications (no response expected). +export function buildMcpHandler(db: sqlite3.Database): McpHandler { + return (msg: JsonRpcRequest): JsonRpcResponse | null => { + const id = (msg.id ?? null) as string | number | null; + const method = msg.method; + + if (method.startsWith('notifications/')) return null; + + if (method === 'initialize') { + return { + jsonrpc: '2.0', + id, + result: { + protocolVersion: PROTOCOL_VERSION, + serverInfo: SERVER_INFO, + capabilities: { tools: {}, resources: {} }, + }, + }; + } + + if (method === 'tools/list') { + return { + jsonrpc: '2.0', + id, + result: { tools: TOOLS.map((t) => t.spec) }, + }; + } + + if (method === 'tools/call') { + const params = (msg.params ?? {}) as { name?: string; arguments?: Record }; + const name = params.name; + const args = params.arguments ?? 
{}; + const tool = TOOLS.find((t) => t.spec.name === name); + if (!tool) return errorResponse(id, -32601, `unknown tool: ${name}`); + try { + const result = tool.handler(args, db); + return { + jsonrpc: '2.0', + id, + result: { + content: [{ type: 'text', text: JSON.stringify(result, null, 2) }], + }, + }; + } catch (e) { + return errorResponse(id, -32000, (e as Error).message); + } + } + + if (method === 'resources/list') { + return { + jsonrpc: '2.0', + id, + result: { resources: RESOURCES.map((r) => r.spec) }, + }; + } + + if (method === 'resources/read') { + const params = (msg.params ?? {}) as { uri?: string }; + const resource = RESOURCES.find((r) => r.spec.uri === params.uri); + if (!resource) return errorResponse(id, -32602, `unknown resource: ${params.uri}`); + try { + const result = resource.read(db); + return { + jsonrpc: '2.0', + id, + result: { + contents: [ + { + uri: resource.spec.uri, + mimeType: resource.spec.mimeType, + text: JSON.stringify(result, null, 2), + }, + ], + }, + }; + } catch (e) { + return errorResponse(id, -32000, (e as Error).message); + } + } + + return errorResponse(id, -32601, `method not found: ${method}`); + }; +} + +function errorResponse( + id: string | number | null, + code: number, + message: string +): JsonRpcResponse { + return { jsonrpc: '2.0', id, error: { code, message } }; +} + +// Exposed for the demo / docs; keeps the tool list discoverable without +// going through the protocol. 
+export function listTools(): readonly ToolSpec[] { + return TOOLS.map((t) => t.spec); +} + +export function listResources(): readonly ResourceSpec[] { + return RESOURCES.map((r) => r.spec); +} diff --git a/packages/cli/src/cmds/query/query.ts b/packages/cli/src/cmds/query/query.ts index 83f271ea98..8bad243e44 100644 --- a/packages/cli/src/cmds/query/query.ts +++ b/packages/cli/src/cmds/query/query.ts @@ -4,11 +4,13 @@ import * as CompareVerb from './verbs/compare'; import * as EndpointsVerb from './verbs/endpoints'; import * as FindVerb from './verbs/find'; import * as HotspotsVerb from './verbs/hotspots'; +import * as McpVerb from './verbs/mcp'; import * as RelatedVerb from './verbs/related'; import * as TreeVerb from './verbs/tree'; export const command = 'query'; -export const describe = 'Query AppMap recordings (endpoints, find, tree, related, hotspots, compare)'; +export const describe = + 'Query AppMap recordings (endpoints, find, tree, related, hotspots, compare, mcp)'; export const builder = (args: yargs.Argv) => args @@ -16,6 +18,7 @@ export const builder = (args: yargs.Argv) => .command(EndpointsVerb) .command(FindVerb) .command(HotspotsVerb) + .command(McpVerb) .command(RelatedVerb) .command(TreeVerb) .demandCommand(1, 'specify a query verb') diff --git a/packages/cli/src/cmds/query/verbs/mcp.ts b/packages/cli/src/cmds/query/verbs/mcp.ts new file mode 100644 index 0000000000..c0f9f25057 --- /dev/null +++ b/packages/cli/src/cmds/query/verbs/mcp.ts @@ -0,0 +1,81 @@ +import { createInterface } from 'readline'; + +import yargs from 'yargs'; + +import { handleWorkingDirectory } from '../../../lib/handleWorkingDirectory'; +import { locateAppMapDir } from '../../../lib/locateAppMapDir'; +import { verbose } from '../../../utils'; +import { openReadOnly } from '../lib/openReadOnly'; +import { + buildMcpHandler, + JsonRpcRequest, + JsonRpcResponse, +} from '../queries/mcp'; + +export const command = 'mcp'; +export const describe = + 'Run an MCP (Model Context 
Protocol) server on stdio that exposes the query verbs as tools'; + +export const builder = (args: yargs.Argv) => { + return args + .option('directory', { type: 'string', alias: 'd' }) + .option('appmap-dir', { type: 'string' }) + .option('query-db', { + type: 'string', + describe: 'path to query.db (overrides default)', + }); +}; + +type Argv = ReturnType extends yargs.Argv ? T : never; + +// Widened at the export so this module is assignable to CommandModule. +export const handler = async (argvIn: yargs.ArgumentsCamelCase): Promise => { + const argv = argvIn as yargs.ArgumentsCamelCase; + verbose(argv.verbose as boolean | undefined); + handleWorkingDirectory(argv.directory); + const appmapDir = argv.queryDb ? '' : await locateAppMapDir(argv.appmapDir); + + const db = openReadOnly(appmapDir, argv.queryDb); + const handle = buildMcpHandler(db); + + // MCP transport: newline-delimited JSON-RPC 2.0 over stdio. Logging + // goes to stderr only — anything on stdout corrupts the protocol stream. + process.stderr.write(`appmap mcp listening on stdio\n`); + + const rl = createInterface({ input: process.stdin }); + rl.on('line', (line) => { + const trimmed = line.trim(); + if (trimmed.length === 0) return; + let msg: JsonRpcRequest; + try { + msg = JSON.parse(trimmed) as JsonRpcRequest; + } catch (err) { + writeResponse({ + jsonrpc: '2.0', + id: null, + error: { code: -32700, message: `parse error: ${(err as Error).message}` }, + }); + return; + } + let response: JsonRpcResponse | null; + try { + response = handle(msg); + } catch (err) { + response = { + jsonrpc: '2.0', + id: (msg.id ?? 
null) as string | number | null, + error: { code: -32603, message: (err as Error).message }, + }; + } + if (response) writeResponse(response); + }); + + rl.on('close', () => { + db.close(); + process.exit(0); + }); +}; + +function writeResponse(response: JsonRpcResponse): void { + process.stdout.write(`${JSON.stringify(response)}\n`); +} diff --git a/packages/cli/tests/unit/cmds/query/queries/mcp.spec.ts b/packages/cli/tests/unit/cmds/query/queries/mcp.spec.ts new file mode 100644 index 0000000000..31f3233801 --- /dev/null +++ b/packages/cli/tests/unit/cmds/query/queries/mcp.spec.ts @@ -0,0 +1,242 @@ +import sqlite3 from 'better-sqlite3'; + +import { openQueryDb } from '../../../../../src/cmds/query/db/openQueryDb'; +import { + buildMcpHandler, + JsonRpcRequest, + listResources, + listTools, +} from '../../../../../src/cmds/query/queries/mcp'; + +function freshDb(): sqlite3.Database { + return openQueryDb('/tmp/ignored', ':memory:').db; +} + +function seedMinimal(db: sqlite3.Database): void { + // One recording with a request, a SQL query, an exception, and a labelled + // function call — enough to exercise most tools. 
+ const am = db + .prepare( + `INSERT INTO appmaps (name, source_path, git_branch, sql_query_count, elapsed_ms, timestamp) + VALUES ('rec', '/tmp/rec.appmap.json', 'main', 1, 100, '2026-04-29T12:00:00.000Z')` + ) + .run(); + const id = am.lastInsertRowid; + db.prepare( + `INSERT INTO http_requests (appmap_id, event_id, parent_event_id, method, path, status_code, elapsed_ms) + VALUES (?, 1, NULL, 'POST', '/orders', 500, 100)` + ).run(id); + db.prepare( + `INSERT OR IGNORE INTO code_objects (fqid, package, classes, leaf_class, method, is_static) + VALUES ('app/Logger#error', 'app', '["Logger"]', 'Logger', 'error', 0)` + ).run(); + const co = (db + .prepare(`SELECT id FROM code_objects WHERE fqid = 'app/Logger#error'`) + .get() as { id: number }).id; + db.prepare(`INSERT OR IGNORE INTO labels (code_object_id, label) VALUES (?, 'log')`).run(co); + db.prepare( + `INSERT INTO function_calls (appmap_id, event_id, parent_event_id, code_object_id, defined_class, method_id, elapsed_ms) + VALUES (?, 2, 1, ?, 'Logger', 'error', 0.1)` + ).run(id, co); + db.prepare( + `INSERT INTO sql_queries (appmap_id, event_id, parent_event_id, sql_text, elapsed_ms) + VALUES (?, 3, 2, 'INSERT INTO orders (id) VALUES (?)', 14)` + ).run(id); + db.prepare( + `INSERT INTO exceptions (appmap_id, event_id, parent_event_id, exception_class, message) + VALUES (?, 2, 1, 'IntegrityError', 'duplicate key')` + ).run(id); +} + +function call(handler: ReturnType, msg: JsonRpcRequest) { + const r = handler(msg); + return r; +} + +describe('MCP handler', () => { + it('initialize returns server info and capabilities', () => { + const db = freshDb(); + try { + const r = call(buildMcpHandler(db), { jsonrpc: '2.0', id: 1, method: 'initialize' }); + expect(r).not.toBeNull(); + expect((r!.result as any).serverInfo.name).toBe('appmap-query'); + expect((r!.result as any).protocolVersion).toBeDefined(); + expect((r!.result as any).capabilities.tools).toBeDefined(); + expect((r!.result as 
any).capabilities.resources).toBeDefined(); + } finally { + db.close(); + } + }); + + it('notifications/initialized returns null (notification, no response)', () => { + const db = freshDb(); + try { + const r = call(buildMcpHandler(db), { + jsonrpc: '2.0', + method: 'notifications/initialized', + }); + expect(r).toBeNull(); + } finally { + db.close(); + } + }); + + it('tools/list returns the V3 tool surface', () => { + const db = freshDb(); + try { + const r = call(buildMcpHandler(db), { jsonrpc: '2.0', id: 2, method: 'tools/list' }); + const names = ((r!.result as any).tools as Array<{ name: string }>).map((t) => t.name); + expect(names).toEqual( + expect.arrayContaining([ + 'get_endpoint_detail', + 'get_slow_queries', + 'get_function_hotspots', + 'get_exceptions', + 'get_log_events', + 'get_labeled_events', + 'compare_branches', + 'get_request_trace', + 'get_related', + ]) + ); + } finally { + db.close(); + } + }); + + it('resources/list returns the appmap://endpoints resource', () => { + const db = freshDb(); + try { + const r = call(buildMcpHandler(db), { jsonrpc: '2.0', id: 3, method: 'resources/list' }); + const uris = ((r!.result as any).resources as Array<{ uri: string }>).map((x) => x.uri); + expect(uris).toContain('appmap://endpoints'); + } finally { + db.close(); + } + }); + + it('unknown method → -32601 method-not-found', () => { + const db = freshDb(); + try { + const r = call(buildMcpHandler(db), { jsonrpc: '2.0', id: 4, method: 'no/such/method' }); + expect(r!.error?.code).toBe(-32601); + } finally { + db.close(); + } + }); + + it('tools/call to an unknown tool → -32601', () => { + const db = freshDb(); + try { + const r = call(buildMcpHandler(db), { + jsonrpc: '2.0', + id: 5, + method: 'tools/call', + params: { name: 'no_such_tool', arguments: {} }, + }); + expect(r!.error?.code).toBe(-32601); + } finally { + db.close(); + } + }); + + it('tools/call wraps the result as a content block of type=text', () => { + const db = freshDb(); + try { + 
seedMinimal(db); + const r = call(buildMcpHandler(db), { + jsonrpc: '2.0', + id: 6, + method: 'tools/call', + params: { name: 'get_exceptions', arguments: { limit: 10 } }, + }); + const content = (r!.result as any).content; + expect(Array.isArray(content)).toBe(true); + expect(content[0].type).toBe('text'); + const parsed = JSON.parse(content[0].text); + expect(parsed).toHaveLength(1); + expect(parsed[0].exception_class).toBe('IntegrityError'); + } finally { + db.close(); + } + }); + + it('get_request_trace resolves appmap_id (numeric or name) and applies focus_type', () => { + const db = freshDb(); + try { + seedMinimal(db); + const handler = buildMcpHandler(db); + + // Numeric id. + const byId = call(handler, { + jsonrpc: '2.0', + id: 7, + method: 'tools/call', + params: { + name: 'get_request_trace', + arguments: { appmap_id: 1, focus_type: 'sql_query', focus_value: 'INSERT INTO orders' }, + }, + }); + const idRows = JSON.parse((byId!.result as any).content[0].text); + expect(Array.isArray(idRows)).toBe(true); + expect(idRows.some((n: any) => n.kind === 'sql')).toBe(true); + + // Name-based ref. 
+ const byName = call(handler, { + jsonrpc: '2.0', + id: 8, + method: 'tools/call', + params: { + name: 'get_request_trace', + arguments: { appmap_id: 'rec' }, + }, + }); + const nameRows = JSON.parse((byName!.result as any).content[0].text); + expect(Array.isArray(nameRows)).toBe(true); + } finally { + db.close(); + } + }); + + it('get_log_events filters function_calls by the log label', () => { + const db = freshDb(); + try { + seedMinimal(db); + const r = call(buildMcpHandler(db), { + jsonrpc: '2.0', + id: 9, + method: 'tools/call', + params: { name: 'get_log_events', arguments: {} }, + }); + const rows = JSON.parse((r!.result as any).content[0].text); + expect(rows).toHaveLength(1); + expect(rows[0].method_id).toBe('error'); + } finally { + db.close(); + } + }); + + it('resources/read returns the endpoints summary as JSON', () => { + const db = freshDb(); + try { + seedMinimal(db); + const r = call(buildMcpHandler(db), { + jsonrpc: '2.0', + id: 10, + method: 'resources/read', + params: { uri: 'appmap://endpoints' }, + }); + const contents = (r!.result as any).contents; + expect(contents[0].uri).toBe('appmap://endpoints'); + const parsed = JSON.parse(contents[0].text); + expect(parsed[0].route).toBe('/orders'); + } finally { + db.close(); + } + }); + + it('listTools / listResources are stable for documentation use', () => { + expect(listTools().length).toBeGreaterThan(0); + expect(listResources().length).toBeGreaterThan(0); + }); +}); From e85517c2d89a83b255f45209c507a654333c333d Mon Sep 17 00:00:00 2001 From: kgilpin Date: Sat, 2 May 2026 16:19:40 -0400 Subject: [PATCH 18/30] feat(query/mcp): rename tools to descriptive verb-noun forms MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Drop the Python-prototype tool names — we don't need to match them and the new surface reads better for LLM consumption. 
Each tool name now uniquely identifies what gets returned, with consistent "find_*" / "list_*" / "get_*" prefixes where the verb adds clarity and bare nouns where the type itself is the answer. Renames / new (11 tools, was 9): list_endpoints (new) per-route summary function_hotspots was get_function_hotspots sql_hotspots (new) sql_hotspots ranking find_recordings (new) appmap rows find_requests (replaces get_endpoint_detail; generic filter) find_queries (replaces get_slow_queries; sort via duration) find_calls (replaces get_log_events / get_labeled_events; filter by --label=log or =security.* etc.) find_exceptions was get_exceptions get_call_tree was get_request_trace find_related was get_related compare_branches unchanged Also: appmap arg renamed from "appmap_id" to "appmap" (it accepts both numeric id and name; the previous name implied id-only). The old Python-named tools are gone — clients will need to update. Internally each tool is still a thin wrapper around the V3 query functions; the renames are purely cosmetic. Tests updated; 202 passing. tools/list verified end-to-end. Co-Authored-By: Claude Opus 4.7 (1M context) --- packages/cli/src/cmds/query/queries/mcp.ts | 362 +++++++++++------- .../tests/unit/cmds/query/queries/mcp.spec.ts | 69 +++- 2 files changed, 282 insertions(+), 149 deletions(-) diff --git a/packages/cli/src/cmds/query/queries/mcp.ts b/packages/cli/src/cmds/query/queries/mcp.ts index 1b29b423c7..6db0839532 100644 --- a/packages/cli/src/cmds/query/queries/mcp.ts +++ b/packages/cli/src/cmds/query/queries/mcp.ts @@ -1,6 +1,11 @@ // MCP (Model Context Protocol) handler. Exposes the V3 query surface as -// MCP tools and resources, using the Python prototype's tool names so -// existing clients work unchanged. +// MCP tools and resources. 
+// +// Tool names are LLM-readable and uniquely identify what each tool +// returns: `find_*` returns row-level matches, `function_hotspots` / +// `sql_hotspots` return rankings, `get_call_tree` returns one recording's +// tree, etc. Mirrors V3's CLI verbs but with descriptive names rather +// than the terse single-noun forms the CLI uses. // // Wire format: newline-delimited JSON-RPC 2.0 over stdio. This module // implements the message dispatch logic only; the stdio loop lives in @@ -9,7 +14,7 @@ import sqlite3 from 'better-sqlite3'; import { compare } from './compare'; -import { endpoints } from './endpoints'; +import { endpoints, EndpointSort, EndpointsFilter } from './endpoints'; import { FindCallRow, FindExceptionRow, @@ -20,8 +25,8 @@ import { } from './find'; import { hotspots } from './hotspots'; import { related, RelatedFilter } from './related'; -import { resolveAppmap, tree, treeSummary, AppmapInfo, TreeOptions } from './tree'; -import { parseTime } from '../lib/parseFilter'; +import { resolveAppmap, tree, AppmapInfo, TreeOptions } from './tree'; +import { parseDuration, parseStatus, parseTime } from '../lib/parseFilter'; export interface JsonRpcRequest { jsonrpc: '2.0'; @@ -69,9 +74,7 @@ const PROTOCOL_VERSION = '2024-11-05'; // --- helpers ------------------------------------------------------------ -// Accept either a numeric appmap.id or a name/basename ref. Resolves to -// the appmaps row; throws if missing or ambiguous (the underlying -// resolveAppmap surfaces both messages). +// Accept either a numeric appmap.id or a name/basename ref. 
function resolveByIdOrRef(db: sqlite3.Database, idOrRef: unknown): AppmapInfo { const s = String(idOrRef); if (/^\d+$/.test(s)) { @@ -79,15 +82,12 @@ function resolveByIdOrRef(db: sqlite3.Database, idOrRef: unknown): AppmapInfo { .prepare(`SELECT id, name, source_path FROM appmaps WHERE id = ?`) .get(Number(s)) as AppmapInfo | undefined; if (row) return row; - // Fall through to name match if the numeric id doesn't exist — - // surfaces a clearer error from resolveAppmap. } return resolveAppmap(db, s); } function maybeTime(s: unknown): string | undefined { - if (typeof s !== 'string' || s.length === 0) return undefined; - return parseTime(s); + return typeof s === 'string' && s.length > 0 ? parseTime(s) : undefined; } function maybeNumber(n: unknown): number | undefined { @@ -100,217 +100,293 @@ function maybeString(s: unknown): string | undefined { return typeof s === 'string' && s.length > 0 ? s : undefined; } +// Common filter shape shared by the find_* tools and the hotspots tools. +const COMMON_FILTER_PROPERTIES: Record = { + route: { type: 'string', description: 'e.g. "POST /orders" or "/orders".' }, + status: { type: 'string', description: 'e.g. "500", ">=500", "<400".' }, + duration: { type: 'string', description: 'e.g. ">1s", ">=500ms".' }, + branch: { type: 'string' }, + commit: { type: 'string' }, + since: { type: 'string', description: 'ISO timestamp lower bound.' }, + until: { type: 'string', description: 'ISO timestamp upper bound.' }, + appmap: { type: 'string', description: 'Restrict to one recording (name or basename).' }, + limit: { type: 'integer' }, + offset: { type: 'integer' }, +}; + +// Build a FindFilter from MCP tool args, parsing structured fields. 
+function buildFindFilter(args: Record): FindFilter { + const f: FindFilter = {}; + if (typeof args.route === 'string') f.route = args.route; + if (typeof args.class === 'string') f.className = args.class; + if (typeof args.method === 'string') f.method = args.method; + if (typeof args.label === 'string') f.label = args.label; + if (typeof args.branch === 'string') f.branch = args.branch; + if (typeof args.commit === 'string') f.commit = args.commit; + if (typeof args.status === 'string') f.status = parseStatus(args.status); + if (typeof args.duration === 'string') f.duration = parseDuration(args.duration); + if (typeof args.appmap === 'string') f.appmap = args.appmap; + if (typeof args.table === 'string') f.table = args.table; + if (typeof args.exception === 'string') f.exception = args.exception; + f.since = maybeTime(args.since); + f.until = maybeTime(args.until); + f.limit = maybeNumber(args.limit); + f.offset = maybeNumber(args.offset); + return f; +} + // --- tools -------------------------------------------------------------- -// Tool name + description + JSON Schema + handler. Names match the -// Python prototype's MCP surface so existing clients work unchanged. const TOOLS: ToolImpl[] = [ + // ----- aggregations ---------------------------------------------------- + { spec: { - name: 'get_endpoint_detail', + name: 'list_endpoints', description: - 'Individual HTTP request rows for a (method, path), with status, elapsed, branch, and the recording each came from.', + 'Per-route summary table. Returns count, average latency, p95, and error-rate columns for each (method, normalized_path). The first thing to call when orienting against an unfamiliar query database.', inputSchema: { type: 'object', properties: { - method: { type: 'string', description: 'HTTP method (GET, POST, …).' }, - path: { type: 'string', description: 'Endpoint path (exact normalized_path or path).' }, - since: { type: 'string', description: 'ISO timestamp lower bound.' 
}, - until: { type: 'string', description: 'ISO timestamp upper bound.' }, + branch: { type: 'string' }, + since: { type: 'string' }, + until: { type: 'string' }, + status: { + type: 'string', + description: + 'Route filter — e.g. ">=500". A route is shown if any request matches; aggregates remain over all of that route\'s requests.', + }, + sort: { type: 'string', enum: ['count', 'avg', 'p95', 'err'] }, limit: { type: 'integer' }, }, - required: ['method', 'path'], }, }, handler: (args, db) => { - const filter: FindFilter = { - route: `${String(args.method)} ${String(args.path)}`, - }; - filter.since = maybeTime(args.since); - filter.until = maybeTime(args.until); - filter.limit = maybeNumber(args.limit); - return find(db, 'requests', filter) as FindRequestRow[]; + const f: EndpointsFilter = {}; + f.branch = maybeString(args.branch); + f.since = maybeTime(args.since); + f.until = maybeTime(args.until); + if (typeof args.status === 'string') f.status = parseStatus(args.status); + if (typeof args.sort === 'string') f.sort = args.sort as EndpointSort; + f.limit = maybeNumber(args.limit); + return endpoints(db, f); }, }, { spec: { - name: 'get_slow_queries', + name: 'function_hotspots', description: - 'SQL query rows ordered by elapsed time, slowest first. Use to find performance bottlenecks at the database layer.', + 'Functions ranked by total elapsed time across recordings. Returns calls / total_ms / self_ms per function. Filter by route to scope to a specific entry point or by class to focus on one component.', inputSchema: { type: 'object', properties: { - limit: { type: 'integer', description: 'Maximum rows (default 20).' }, + route: { type: 'string' }, + class: { type: 'string', description: 'class identifier; accepts short or canonical fqid form.' 
}, + branch: { type: 'string' }, since: { type: 'string' }, until: { type: 'string' }, + limit: { type: 'integer' }, }, }, }, - handler: (args, db) => { - const filter: FindFilter = {}; - filter.since = maybeTime(args.since); - filter.until = maybeTime(args.until); - // Sort happens client-side after fetch so we don't pull the entire - // sql_queries table for large corpora; for now we rely on the - // implicit caller LIMIT to bound the work. - const rows = find(db, 'queries', filter) as FindQueryRow[]; - rows.sort((a, b) => (b.elapsed_ms ?? 0) - (a.elapsed_ms ?? 0)); - const limit = maybeNumber(args.limit) ?? 20; - return rows.slice(0, limit); - }, + handler: (args, db) => + hotspots(db, { + type: 'function', + route: maybeString(args.route), + className: maybeString(args.class), + branch: maybeString(args.branch), + since: maybeTime(args.since), + until: maybeTime(args.until), + limit: maybeNumber(args.limit) ?? 20, + }), }, { spec: { - name: 'get_function_hotspots', + name: 'sql_hotspots', description: - 'Functions ranked by total elapsed time across recordings, with calls / total_ms / self_ms columns.', + 'SQL queries ranked by total elapsed time, deduplicated by text. Returns count / avg_ms / total_ms / sql_text per distinct query.', inputSchema: { type: 'object', properties: { - limit: { type: 'integer' }, + route: { type: 'string' }, + branch: { type: 'string' }, since: { type: 'string' }, until: { type: 'string' }, + limit: { type: 'integer' }, }, }, }, handler: (args, db) => hotspots(db, { - type: 'function', + type: 'sql', + route: maybeString(args.route), + branch: maybeString(args.branch), since: maybeTime(args.since), until: maybeTime(args.until), limit: maybeNumber(args.limit) ?? 
20, }), }, + // ----- row-level finders ---------------------------------------------- + { spec: { - name: 'get_exceptions', + name: 'find_recordings', description: - 'Recent exception rows with class, message, source location, and the appmap they were captured in.', + 'Recording-level rows matching filters. Each row is one .appmap.json file with its sample request, branch, and counts. Use to identify which recordings exercised a route, returned a particular status, or were taken on a branch.', inputSchema: { type: 'object', properties: { - limit: { type: 'integer' }, - offset: { type: 'integer' }, - since: { type: 'string' }, - until: { type: 'string' }, + route: COMMON_FILTER_PROPERTIES.route, + status: COMMON_FILTER_PROPERTIES.status, + branch: COMMON_FILTER_PROPERTIES.branch, + commit: COMMON_FILTER_PROPERTIES.commit, + since: COMMON_FILTER_PROPERTIES.since, + until: COMMON_FILTER_PROPERTIES.until, + duration: COMMON_FILTER_PROPERTIES.duration, + appmap: COMMON_FILTER_PROPERTIES.appmap, + limit: COMMON_FILTER_PROPERTIES.limit, + offset: COMMON_FILTER_PROPERTIES.offset, }, }, }, - handler: (args, db) => { - const filter: FindFilter = {}; - filter.since = maybeTime(args.since); - filter.until = maybeTime(args.until); - filter.limit = maybeNumber(args.limit) ?? 50; - filter.offset = maybeNumber(args.offset); - return find(db, 'exceptions', filter) as FindExceptionRow[]; - }, + handler: (args, db) => find(db, 'appmaps', buildFindFilter(args)), }, { spec: { - name: 'get_log_events', + name: 'find_requests', description: - 'Function calls labeled "log" — the application log output captured during recording, with parameter values.', + 'Individual HTTP request rows with status, elapsed time, and the recording each came from. Filter by route, status, duration, branch, time window.', inputSchema: { type: 'object', properties: { - appmap_id: { description: 'Optional — filter to one recording (id or name).' 
}, - limit: { type: 'integer' }, - since: { type: 'string' }, - until: { type: 'string' }, + route: COMMON_FILTER_PROPERTIES.route, + status: COMMON_FILTER_PROPERTIES.status, + duration: COMMON_FILTER_PROPERTIES.duration, + branch: COMMON_FILTER_PROPERTIES.branch, + commit: COMMON_FILTER_PROPERTIES.commit, + since: COMMON_FILTER_PROPERTIES.since, + until: COMMON_FILTER_PROPERTIES.until, + appmap: COMMON_FILTER_PROPERTIES.appmap, + limit: COMMON_FILTER_PROPERTIES.limit, + offset: COMMON_FILTER_PROPERTIES.offset, }, }, }, - handler: (args, db) => { - const filter: FindFilter = { label: 'log' }; - if (args.appmap_id !== undefined && args.appmap_id !== null) { - filter.appmap = resolveByIdOrRef(db, args.appmap_id).name; - } - filter.since = maybeTime(args.since); - filter.until = maybeTime(args.until); - filter.limit = maybeNumber(args.limit) ?? 200; - return find(db, 'calls', filter) as FindCallRow[]; - }, + handler: (args, db) => + find(db, 'requests', buildFindFilter(args)) as FindRequestRow[], }, { spec: { - name: 'get_labeled_events', + name: 'find_queries', description: - 'Function calls carrying an AppMap label. Common labels: log, security.authentication, security.authorization, dao, secret. Pass an exact label name.', + 'SQL query rows. Filter by table (matches sql_text substring), caller class/method, duration, route, branch. Use duration:">100ms" to find slow queries; use route to scope to a specific request.', inputSchema: { type: 'object', properties: { - label: { type: 'string' }, - appmap_id: {}, - limit: { type: 'integer' }, + table: { type: 'string', description: 'SQL table name (matches sql_text substring).' }, + class: { type: 'string', description: 'Caller class identifier.' }, + method: { type: 'string', description: 'Caller method name.' 
}, + duration: COMMON_FILTER_PROPERTIES.duration, + route: COMMON_FILTER_PROPERTIES.route, + status: COMMON_FILTER_PROPERTIES.status, + branch: COMMON_FILTER_PROPERTIES.branch, + commit: COMMON_FILTER_PROPERTIES.commit, + since: COMMON_FILTER_PROPERTIES.since, + until: COMMON_FILTER_PROPERTIES.until, + appmap: COMMON_FILTER_PROPERTIES.appmap, + limit: COMMON_FILTER_PROPERTIES.limit, + offset: COMMON_FILTER_PROPERTIES.offset, }, - required: ['label'], }, }, - handler: (args, db) => { - const filter: FindFilter = { label: String(args.label) }; - if (args.appmap_id !== undefined && args.appmap_id !== null) { - filter.appmap = resolveByIdOrRef(db, args.appmap_id).name; - } - filter.limit = maybeNumber(args.limit) ?? 200; - return find(db, 'calls', filter) as FindCallRow[]; + handler: (args, db) => find(db, 'queries', buildFindFilter(args)) as FindQueryRow[], + }, + + { + spec: { + name: 'find_calls', + description: + 'Function-call rows. Filter by class, method, label (e.g. "log", "security.authorization"), duration. Use --label=log to retrieve application log output, or --label=security.authorization to find authorization checks.', + inputSchema: { + type: 'object', + properties: { + class: { + type: 'string', + description: 'Class identifier; accepts short ("UserRepository") or canonical fqid form ("app/services/UserRepository") or with method ("UserRepository#findById").', + }, + method: { type: 'string' }, + label: { type: 'string', description: 'AppMap label name (exact match).' 
}, + duration: COMMON_FILTER_PROPERTIES.duration, + route: COMMON_FILTER_PROPERTIES.route, + status: COMMON_FILTER_PROPERTIES.status, + branch: COMMON_FILTER_PROPERTIES.branch, + commit: COMMON_FILTER_PROPERTIES.commit, + since: COMMON_FILTER_PROPERTIES.since, + until: COMMON_FILTER_PROPERTIES.until, + appmap: COMMON_FILTER_PROPERTIES.appmap, + limit: COMMON_FILTER_PROPERTIES.limit, + offset: COMMON_FILTER_PROPERTIES.offset, + }, + }, }, + handler: (args, db) => find(db, 'calls', buildFindFilter(args)) as FindCallRow[], }, { spec: { - name: 'compare_branches', + name: 'find_exceptions', description: - 'Per-route p95 latency for two branches with a delta column. Use to surface performance regressions introduced on a feature branch.', + 'Exception rows with class, message, source location. Filter by exception class name, the request that owns the exception (via route/status), branch, or time window.', inputSchema: { type: 'object', properties: { - branch_a: { type: 'string', description: 'Baseline branch.' }, - branch_b: { type: 'string', description: 'Comparison branch.' }, - since: { type: 'string' }, - until: { type: 'string' }, - limit: { type: 'integer' }, + exception: { type: 'string', description: 'Exception class name (exact match).' 
}, + route: COMMON_FILTER_PROPERTIES.route, + status: COMMON_FILTER_PROPERTIES.status, + branch: COMMON_FILTER_PROPERTIES.branch, + commit: COMMON_FILTER_PROPERTIES.commit, + since: COMMON_FILTER_PROPERTIES.since, + until: COMMON_FILTER_PROPERTIES.until, + appmap: COMMON_FILTER_PROPERTIES.appmap, + limit: COMMON_FILTER_PROPERTIES.limit, + offset: COMMON_FILTER_PROPERTIES.offset, }, - required: ['branch_a', 'branch_b'], }, }, handler: (args, db) => - compare(db, { - branch_a: String(args.branch_a), - branch_b: String(args.branch_b), - since: maybeTime(args.since), - until: maybeTime(args.until), - limit: maybeNumber(args.limit), - }), + find(db, 'exceptions', buildFindFilter(args)) as FindExceptionRow[], }, + // ----- per-recording / cross-recording -------------------------------- + { spec: { - name: 'get_request_trace', + name: 'get_call_tree', description: - 'Call tree for one recording. Without focus, returns every event. With focus_type + focus_value, narrows to the neighborhood of matching events. focus_type is one of: function (focus_value = code_object fqid), sql_query (focus_value = SQL substring), http_server_request (focus_value = normalized_path), http_client_request (focus_value = URL substring).', + 'Call tree of one recording. Without focus, returns every event. With focus_type + focus_value, narrows to the neighborhood of matching events: focus_type ∈ {function, sql_query, http_server_request, http_client_request}, focus_value is the matching identifier (fqid / SQL substring / normalized_path / URL substring). Use min_elapsed_ms to prune fast leaves.', inputSchema: { type: 'object', properties: { - appmap_id: { description: 'Recording id or name.' }, + appmap: { type: 'string', description: 'Recording id (numeric) or name.' 
}, focus_type: { type: 'string', enum: ['function', 'sql_query', 'http_server_request', 'http_client_request'], }, focus_value: { type: 'string' }, - parent_depth: { type: 'integer', description: 'Ancestor levels to keep (default 5).' }, - child_depth: { type: 'integer', description: 'Descendant levels to keep (default 3).' }, + parent_depth: { type: 'integer', description: 'Ancestor levels (default 5).' }, + child_depth: { type: 'integer', description: 'Descendant levels (default 3).' }, min_elapsed_ms: { type: 'number' }, }, - required: ['appmap_id'], + required: ['appmap'], }, }, handler: (args, db) => { - const am = resolveByIdOrRef(db, args.appmap_id); + const am = resolveByIdOrRef(db, args.appmap); const opts: TreeOptions = {}; const focusType = maybeString(args.focus_type); const focusValue = maybeString(args.focus_value); @@ -329,32 +405,27 @@ const TOOLS: ToolImpl[] = [ { spec: { - name: 'get_related', + name: 'find_related', description: - 'Recordings ranked by similarity to a source recording. Score combines: same HTTP route (×5), shared SQL tables (×3 each), shared classes (×2 each). Primary use: find passing baselines for a failing recording with --status filter.', + 'Recordings ranked by similarity to a source recording. Score combines: same HTTP route (×5), shared SQL tables (×3 each), shared classes (×2 each). Primary use: pass a failing recording with status:succeeded to find a passing baseline for side-by-side comparison.', inputSchema: { type: 'object', properties: { - appmap_id: { description: 'Source recording (id or name).' }, - status: { type: 'string', description: 'e.g. "200", ">=500".' }, - route: { type: 'string' }, - branch: { type: 'string' }, - since: { type: 'string' }, - until: { type: 'string' }, - limit: { type: 'integer' }, + appmap: { type: 'string', description: 'Source recording (id or name).' 
}, + status: COMMON_FILTER_PROPERTIES.status, + route: COMMON_FILTER_PROPERTIES.route, + branch: COMMON_FILTER_PROPERTIES.branch, + since: COMMON_FILTER_PROPERTIES.since, + until: COMMON_FILTER_PROPERTIES.until, + limit: COMMON_FILTER_PROPERTIES.limit, }, - required: ['appmap_id'], + required: ['appmap'], }, }, handler: (args, db) => { - const am = resolveByIdOrRef(db, args.appmap_id); + const am = resolveByIdOrRef(db, args.appmap); const filter: RelatedFilter = {}; - if (args.status !== undefined && args.status !== null) { - // parseStatus is in lib/parseFilter; import lazily to avoid a cycle - // (mcp.ts → queries/related → lib/scope → lib/parseFilter is fine). - const { parseStatus } = require('../lib/parseFilter'); - filter.status = parseStatus(String(args.status)); - } + if (typeof args.status === 'string') filter.status = parseStatus(args.status); filter.route = maybeString(args.route); filter.branch = maybeString(args.branch); filter.since = maybeTime(args.since); @@ -363,6 +434,35 @@ const TOOLS: ToolImpl[] = [ return related(db, am.name, filter); }, }, + + { + spec: { + name: 'compare_branches', + description: + 'Per-route p95 latency for two branches with a delta column. Use to surface regressions a feature branch introduces relative to a baseline.', + inputSchema: { + type: 'object', + properties: { + branch_a: { type: 'string', description: 'Baseline branch.' }, + branch_b: { type: 'string', description: 'Comparison branch.' 
}, + since: { type: 'string' }, + until: { type: 'string' }, + sort: { type: 'string', enum: ['delta', 'p95-a', 'p95-b'] }, + limit: { type: 'integer' }, + }, + required: ['branch_a', 'branch_b'], + }, + }, + handler: (args, db) => + compare(db, { + branch_a: String(args.branch_a), + branch_b: String(args.branch_b), + since: maybeTime(args.since), + until: maybeTime(args.until), + sort: maybeString(args.sort) as 'delta' | 'p95-a' | 'p95-b' | undefined, + limit: maybeNumber(args.limit), + }), + }, ]; // --- resources ----------------------------------------------------------- @@ -386,8 +486,6 @@ export interface McpHandler { (msg: JsonRpcRequest): JsonRpcResponse | null; } -// Build a JSON-RPC dispatcher backed by the given DB. Returns null for -// notifications (no response expected). export function buildMcpHandler(db: sqlite3.Database): McpHandler { return (msg: JsonRpcRequest): JsonRpcResponse | null => { const id = (msg.id ?? null) as string | number | null; @@ -479,8 +577,6 @@ function errorResponse( return { jsonrpc: '2.0', id, error: { code, message } }; } -// Exposed for the demo / docs; keeps the tool list discoverable without -// going through the protocol. 
export function listTools(): readonly ToolSpec[] { return TOOLS.map((t) => t.spec); } diff --git a/packages/cli/tests/unit/cmds/query/queries/mcp.spec.ts b/packages/cli/tests/unit/cmds/query/queries/mcp.spec.ts index 31f3233801..a92d1fc36f 100644 --- a/packages/cli/tests/unit/cmds/query/queries/mcp.spec.ts +++ b/packages/cli/tests/unit/cmds/query/queries/mcp.spec.ts @@ -88,15 +88,17 @@ describe('MCP handler', () => { const names = ((r!.result as any).tools as Array<{ name: string }>).map((t) => t.name); expect(names).toEqual( expect.arrayContaining([ - 'get_endpoint_detail', - 'get_slow_queries', - 'get_function_hotspots', - 'get_exceptions', - 'get_log_events', - 'get_labeled_events', + 'list_endpoints', + 'function_hotspots', + 'sql_hotspots', + 'find_recordings', + 'find_requests', + 'find_queries', + 'find_calls', + 'find_exceptions', + 'get_call_tree', + 'find_related', 'compare_branches', - 'get_request_trace', - 'get_related', ]) ); } finally { @@ -148,7 +150,7 @@ describe('MCP handler', () => { jsonrpc: '2.0', id: 6, method: 'tools/call', - params: { name: 'get_exceptions', arguments: { limit: 10 } }, + params: { name: 'find_exceptions', arguments: { limit: 10 } }, }); const content = (r!.result as any).content; expect(Array.isArray(content)).toBe(true); @@ -161,7 +163,7 @@ describe('MCP handler', () => { } }); - it('get_request_trace resolves appmap_id (numeric or name) and applies focus_type', () => { + it('get_call_tree resolves appmap (numeric id or name) and applies focus_type', () => { const db = freshDb(); try { seedMinimal(db); @@ -173,8 +175,8 @@ describe('MCP handler', () => { id: 7, method: 'tools/call', params: { - name: 'get_request_trace', - arguments: { appmap_id: 1, focus_type: 'sql_query', focus_value: 'INSERT INTO orders' }, + name: 'get_call_tree', + arguments: { appmap: 1, focus_type: 'sql_query', focus_value: 'INSERT INTO orders' }, }, }); const idRows = JSON.parse((byId!.result as any).content[0].text); @@ -187,8 +189,8 @@ 
describe('MCP handler', () => { id: 8, method: 'tools/call', params: { - name: 'get_request_trace', - arguments: { appmap_id: 'rec' }, + name: 'get_call_tree', + arguments: { appmap: 'rec' }, }, }); const nameRows = JSON.parse((byName!.result as any).content[0].text); @@ -198,7 +200,7 @@ describe('MCP handler', () => { } }); - it('get_log_events filters function_calls by the log label', () => { + it('find_calls --label filters by the AppMap label', () => { const db = freshDb(); try { seedMinimal(db); @@ -206,7 +208,7 @@ describe('MCP handler', () => { jsonrpc: '2.0', id: 9, method: 'tools/call', - params: { name: 'get_log_events', arguments: {} }, + params: { name: 'find_calls', arguments: { label: 'log' } }, }); const rows = JSON.parse((r!.result as any).content[0].text); expect(rows).toHaveLength(1); @@ -216,6 +218,41 @@ describe('MCP handler', () => { } }); + it('list_endpoints / function_hotspots / sql_hotspots produce expected rows', () => { + const db = freshDb(); + try { + seedMinimal(db); + const handler = buildMcpHandler(db); + + const ep = call(handler, { + jsonrpc: '2.0', + id: 11, + method: 'tools/call', + params: { name: 'list_endpoints', arguments: {} }, + }); + const epRows = JSON.parse((ep!.result as any).content[0].text); + expect(epRows[0].route).toBe('/orders'); + + const fh = call(handler, { + jsonrpc: '2.0', + id: 12, + method: 'tools/call', + params: { name: 'function_hotspots', arguments: { limit: 5 } }, + }); + expect(JSON.parse((fh!.result as any).content[0].text).length).toBeGreaterThan(0); + + const sh = call(handler, { + jsonrpc: '2.0', + id: 13, + method: 'tools/call', + params: { name: 'sql_hotspots', arguments: { limit: 5 } }, + }); + expect(JSON.parse((sh!.result as any).content[0].text).length).toBeGreaterThan(0); + } finally { + db.close(); + } + }); + it('resources/read returns the endpoints summary as JSON', () => { const db = freshDb(); try { From b2213346fb3c661f3f6772dbcf4e61818d90e9eb Mon Sep 17 00:00:00 2001 From: kgilpin 
Date: Sat, 2 May 2026 16:43:50 -0400 Subject: [PATCH 19/30] feat(query): document MCP tool outputs, add appmap_id, alias 'recordings' - Each MCP tool description now lists its return columns inline; function_hotspots and get_call_tree include fqid format examples. - find_recordings rows expose appmap_id alongside appmap_name; get_call_tree accepts either. - `appmap query find recordings` is now an alias for `find appmaps` (matches the MCP find_recordings name and stops `REJECTED_FLAGS[type] is not iterable`). Co-Authored-By: Claude Opus 4.7 (1M context) --- packages/cli/src/cmds/query/queries/find.ts | 4 +++- packages/cli/src/cmds/query/queries/mcp.ts | 22 ++++++++++----------- packages/cli/src/cmds/query/verbs/find.ts | 11 +++++++++-- 3 files changed, 23 insertions(+), 14 deletions(-) diff --git a/packages/cli/src/cmds/query/queries/find.ts b/packages/cli/src/cmds/query/queries/find.ts index 62b5ad3665..e76f56a4c2 100644 --- a/packages/cli/src/cmds/query/queries/find.ts +++ b/packages/cli/src/cmds/query/queries/find.ts @@ -33,6 +33,7 @@ export interface FindFilter { } export interface FindAppmapRow { + appmap_id: number; appmap_name: string; route: string | null; status_code: number | null; @@ -140,7 +141,8 @@ export function findAppmaps(db: sqlite3.Database, filter: FindFilter): FindAppma if (filter.duration) params.push(filter.duration.value); let sql = ` - SELECT a.name AS appmap_name, + SELECT a.id AS appmap_id, + a.name AS appmap_name, COALESCE(h.normalized_path, h.path) AS route, h.status_code AS status_code, COALESCE(h.elapsed_ms, a.elapsed_ms) AS elapsed_ms, diff --git a/packages/cli/src/cmds/query/queries/mcp.ts b/packages/cli/src/cmds/query/queries/mcp.ts index 6db0839532..daae92d963 100644 --- a/packages/cli/src/cmds/query/queries/mcp.ts +++ b/packages/cli/src/cmds/query/queries/mcp.ts @@ -144,7 +144,7 @@ const TOOLS: ToolImpl[] = [ spec: { name: 'list_endpoints', description: - 'Per-route summary table. 
Returns count, average latency, p95, and error-rate columns for each (method, normalized_path). The first thing to call when orienting against an unfamiliar query database.', + 'Per-route summary table; the first call when orienting against an unfamiliar query database. Returns: method, route, count, avg_ms, p95_ms, err_pct.', inputSchema: { type: 'object', properties: { @@ -177,7 +177,7 @@ const TOOLS: ToolImpl[] = [ spec: { name: 'function_hotspots', description: - 'Functions ranked by total elapsed time across recordings. Returns calls / total_ms / self_ms per function. Filter by route to scope to a specific entry point or by class to focus on one component.', + 'Functions ranked by total elapsed time across recordings. Filter by route to scope to a specific entry point or by class to focus on one component. Returns: fqid, defined_class, method_id, calls, total_ms, self_ms. fqid examples: "app/Logger#error" (instance), "app/Util.parse" (static), "src/cmds/query/db/openQueryDb.openQueryDb" (module-level), "app/Outer::Inner#method" (nested classes).', inputSchema: { type: 'object', properties: { @@ -206,7 +206,7 @@ const TOOLS: ToolImpl[] = [ spec: { name: 'sql_hotspots', description: - 'SQL queries ranked by total elapsed time, deduplicated by text. Returns count / avg_ms / total_ms / sql_text per distinct query.', + 'SQL queries ranked by total elapsed time, deduplicated by text. Returns: count, avg_ms, total_ms, sql_text.', inputSchema: { type: 'object', properties: { @@ -235,7 +235,7 @@ const TOOLS: ToolImpl[] = [ spec: { name: 'find_recordings', description: - 'Recording-level rows matching filters. Each row is one .appmap.json file with its sample request, branch, and counts. Use to identify which recordings exercised a route, returned a particular status, or were taken on a branch.', + 'Recording-level rows matching filters. Each row is one .appmap.json file with its sample request, branch, and counts. 
Use to identify which recordings exercised a route, returned a particular status, or were taken on a branch. Returns: appmap_id, appmap_name, route, status_code, elapsed_ms, sql_count, branch, timestamp. Pass appmap_id (numeric) or appmap_name to get_call_tree / find_related.', inputSchema: { type: 'object', properties: { @@ -259,7 +259,7 @@ const TOOLS: ToolImpl[] = [ spec: { name: 'find_requests', description: - 'Individual HTTP request rows with status, elapsed time, and the recording each came from. Filter by route, status, duration, branch, time window.', + 'Individual HTTP request rows with status, elapsed time, and the recording each came from. Filter by route, status, duration, branch, time window. Returns: appmap_name, event_id, method, route, status_code, elapsed_ms, branch.', inputSchema: { type: 'object', properties: { @@ -284,7 +284,7 @@ const TOOLS: ToolImpl[] = [ spec: { name: 'find_queries', description: - 'SQL query rows. Filter by table (matches sql_text substring), caller class/method, duration, route, branch. Use duration:">100ms" to find slow queries; use route to scope to a specific request.', + 'SQL query rows. Filter by table (matches sql_text substring), caller class/method, duration, route, branch. Use duration:">100ms" to find slow queries; use route to scope to a specific request. Returns: appmap_name, event_id, sql_text, elapsed_ms, caller_class, caller_method.', inputSchema: { type: 'object', properties: { @@ -311,7 +311,7 @@ const TOOLS: ToolImpl[] = [ spec: { name: 'find_calls', description: - 'Function-call rows. Filter by class, method, label (e.g. "log", "security.authorization"), duration. Use --label=log to retrieve application log output, or --label=security.authorization to find authorization checks.', + 'Function-call rows. Filter by class, method, label (e.g. "log", "security.authorization"), duration. Use --label=log to retrieve application log output, or --label=security.authorization to find authorization checks. 
Returns: appmap_name, event_id, fqid, defined_class, method_id, elapsed_ms, parameters_json, return_value.', inputSchema: { type: 'object', properties: { @@ -341,7 +341,7 @@ const TOOLS: ToolImpl[] = [ spec: { name: 'find_exceptions', description: - 'Exception rows with class, message, source location. Filter by exception class name, the request that owns the exception (via route/status), branch, or time window.', + 'Exception rows with class, message, source location. Filter by exception class name, the request that owns the exception (via route/status), branch, or time window. Returns: appmap_name, event_id, exception_class, message, path, lineno.', inputSchema: { type: 'object', properties: { @@ -368,7 +368,7 @@ const TOOLS: ToolImpl[] = [ spec: { name: 'get_call_tree', description: - 'Call tree of one recording. Without focus, returns every event. With focus_type + focus_value, narrows to the neighborhood of matching events: focus_type ∈ {function, sql_query, http_server_request, http_client_request}, focus_value is the matching identifier (fqid / SQL substring / normalized_path / URL substring). Use min_elapsed_ms to prune fast leaves.', + 'Call tree of one recording. Without focus, returns every event. With focus_type + focus_value, narrows to the neighborhood of matching events: focus_type ∈ {function, sql_query, http_server_request, http_client_request}, focus_value is the matching identifier (fqid / SQL substring / normalized_path / URL substring). Use min_elapsed_ms to prune fast leaves. The appmap argument accepts a numeric appmap_id or an appmap_name (both surfaced by find_recordings). Returns ordered nodes: each has depth, kind ∈ {function, sql, http_server, http_client, exception}, event_id, parent_event_id, elapsed_ms, plus kind-specific fields (function: fqid/defined_class/method_id; sql: sql_text; http_server: method/route/status_code; http_client: method/url/status_code; exception: exception_class/message). 
fqid examples: "app/Logger#error" (instance), "app/Util.parse" (static).', inputSchema: { type: 'object', properties: { @@ -407,7 +407,7 @@ const TOOLS: ToolImpl[] = [ spec: { name: 'find_related', description: - 'Recordings ranked by similarity to a source recording. Score combines: same HTTP route (×5), shared SQL tables (×3 each), shared classes (×2 each). Primary use: pass a failing recording with status:succeeded to find a passing baseline for side-by-side comparison.', + 'Recordings ranked by similarity to a source recording. Score combines: same HTTP route (×5), shared SQL tables (×3 each), shared classes (×2 each). Primary use: pass a failing recording with status:succeeded to find a passing baseline for side-by-side comparison. Returns: appmap_name, score, method, route, status_code, elapsed_ms, shared (string array of contributing signals).', inputSchema: { type: 'object', properties: { @@ -439,7 +439,7 @@ const TOOLS: ToolImpl[] = [ spec: { name: 'compare_branches', description: - 'Per-route p95 latency for two branches with a delta column. Use to surface regressions a feature branch introduces relative to a baseline.', + 'Per-route p95 latency for two branches with a delta column. Use to surface regressions a feature branch introduces relative to a baseline. 
Returns: method, route, a_count, a_p95_ms, b_count, b_p95_ms, delta (b_p95/a_p95; null when either side has no measured durations).', inputSchema: { type: 'object', properties: { diff --git a/packages/cli/src/cmds/query/verbs/find.ts b/packages/cli/src/cmds/query/verbs/find.ts index 6d4ff30b41..92dcdba212 100644 --- a/packages/cli/src/cmds/query/verbs/find.ts +++ b/packages/cli/src/cmds/query/verbs/find.ts @@ -20,13 +20,20 @@ import { import { formatMs, formatTable } from '../lib/format'; const TYPES: readonly FindType[] = ['appmaps', 'requests', 'queries', 'calls', 'exceptions']; +// 'recordings' is accepted as an alias for 'appmaps' to match the MCP +// naming (find_recordings) and the user-facing concept of a "recording". +const TYPE_CHOICES: readonly string[] = ['appmaps', 'recordings', ...TYPES.filter((t) => t !== 'appmaps')]; + +function normalizeType(input: string): FindType { + return (input === 'recordings' ? 'appmaps' : input) as FindType; +} export const command = 'find '; export const describe = 'Row-level search across recordings'; export const builder = (args: yargs.Argv) => { return args - .positional('type', { type: 'string', choices: TYPES }) + .positional('type', { type: 'string', choices: TYPE_CHOICES }) .option('directory', { type: 'string', alias: 'd' }) .option('appmap-dir', { type: 'string' }) .option('query-db', { type: 'string', describe: 'path to query.db (overrides default)' }) @@ -104,7 +111,7 @@ export interface ParsedFind { } export function buildFindFilter(argv: Record): ParsedFind { - const type = argv.type as FindType; + const type = normalizeType(argv.type as string); validateFlags(type, argv); const filter: FindFilter = {}; From 6d2982edf3d3c4afc4c9df62aadb69531257eac7 Mon Sep 17 00:00:00 2001 From: kgilpin Date: Sat, 2 May 2026 17:00:52 -0400 Subject: [PATCH 20/30] style(query): clear remaining lint errors MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - queries/mcp.ts: collapse 
interface-with-call-signature into a type alias (prefer-function-type) and drop a now-redundant cast. - queries/tree.ts: ancestor walk uses an optional chain (preferred by prefer-optional-chain), with `== null` so a parent_event_id of 0 still walks up rather than terminating. - verbs/compare.ts, verbs/mcp.ts, verbs/tree.ts: drop type assertions that the inferred argv type already provides; the verb-level cast in handler() (argvIn → ArgumentsCamelCase) makes per-flag casts redundant. (no-unnecessary-type-assertion) - tests/unit/.../mcp.spec.ts: rewrite two Array casts as T[]. Mechanical lint cleanup; behavior unchanged. tsc + 202 query tests green. Co-Authored-By: Claude Opus 4.7 (1M context) --- packages/cli/src/cmds/query/queries/mcp.ts | 6 ++---- packages/cli/src/cmds/query/queries/tree.ts | 2 +- packages/cli/src/cmds/query/verbs/compare.ts | 4 ++-- packages/cli/src/cmds/query/verbs/mcp.ts | 2 +- packages/cli/src/cmds/query/verbs/tree.ts | 14 +++++++------- .../cli/tests/unit/cmds/query/queries/mcp.spec.ts | 4 ++-- 6 files changed, 15 insertions(+), 17 deletions(-) diff --git a/packages/cli/src/cmds/query/queries/mcp.ts b/packages/cli/src/cmds/query/queries/mcp.ts index daae92d963..2f00929eff 100644 --- a/packages/cli/src/cmds/query/queries/mcp.ts +++ b/packages/cli/src/cmds/query/queries/mcp.ts @@ -482,13 +482,11 @@ const RESOURCES: ResourceImpl[] = [ // --- handler ------------------------------------------------------------- -export interface McpHandler { - (msg: JsonRpcRequest): JsonRpcResponse | null; -} +export type McpHandler = (msg: JsonRpcRequest) => JsonRpcResponse | null; export function buildMcpHandler(db: sqlite3.Database): McpHandler { return (msg: JsonRpcRequest): JsonRpcResponse | null => { - const id = (msg.id ?? null) as string | number | null; + const id = msg.id ?? 
null; const method = msg.method; if (method.startsWith('notifications/')) return null; diff --git a/packages/cli/src/cmds/query/queries/tree.ts b/packages/cli/src/cmds/query/queries/tree.ts index 86f01e835d..f7e4eec460 100644 --- a/packages/cli/src/cmds/query/queries/tree.ts +++ b/packages/cli/src/cmds/query/queries/tree.ts @@ -379,7 +379,7 @@ function applyFocus(events: readonly TreeNode[], options: TreeOptions): TreeNode let cur = fid; for (let i = 0; i < ancestorBudget; i++) { const node = nodeByEventId.get(cur); - if (!node || node.parent_event_id === null) break; + if (node?.parent_event_id == null) break; const parentId = node.parent_event_id; if (!nodeByEventId.has(parentId)) break; ancestorIds.push(parentId); diff --git a/packages/cli/src/cmds/query/verbs/compare.ts b/packages/cli/src/cmds/query/verbs/compare.ts index dc6dfc7e5e..a5a1709248 100644 --- a/packages/cli/src/cmds/query/verbs/compare.ts +++ b/packages/cli/src/cmds/query/verbs/compare.ts @@ -44,8 +44,8 @@ export const handler = async (argvIn: yargs.ArgumentsCamelCase): Promis handleWorkingDirectory(argv.directory); const appmapDir = argv.queryDb ? '' : await locateAppMapDir(argv.appmapDir); - const branchA = argv.branchA as string | undefined; - const branchB = argv.branchB as string | undefined; + const branchA = argv.branchA; + const branchB = argv.branchB; if (!branchA || !branchB) throw new Error(' and are required'); const filter: CompareFilter = { diff --git a/packages/cli/src/cmds/query/verbs/mcp.ts b/packages/cli/src/cmds/query/verbs/mcp.ts index c0f9f25057..fcabfcd2b1 100644 --- a/packages/cli/src/cmds/query/verbs/mcp.ts +++ b/packages/cli/src/cmds/query/verbs/mcp.ts @@ -63,7 +63,7 @@ export const handler = async (argvIn: yargs.ArgumentsCamelCase): Promis } catch (err) { response = { jsonrpc: '2.0', - id: (msg.id ?? null) as string | number | null, + id: msg.id ?? 
null, error: { code: -32603, message: (err as Error).message }, }; } diff --git a/packages/cli/src/cmds/query/verbs/tree.ts b/packages/cli/src/cmds/query/verbs/tree.ts index d3d3768ac8..98473fdb2b 100644 --- a/packages/cli/src/cmds/query/verbs/tree.ts +++ b/packages/cli/src/cmds/query/verbs/tree.ts @@ -88,13 +88,13 @@ export const handler = async (argvIn: yargs.ArgumentsCamelCase): Promis } const treeOptions: TreeOptions = {}; - if (argv.focusFn) treeOptions.focusFn = argv.focusFn as string; - if (argv.focusSql) treeOptions.focusSql = argv.focusSql as string; - if (argv.focusRoute) treeOptions.focusRoute = argv.focusRoute as string; - if (argv.focusUrl) treeOptions.focusUrl = argv.focusUrl as string; - if (argv.ancestors !== undefined) treeOptions.ancestors = argv.ancestors as number; - if (argv.descendants !== undefined) treeOptions.descendants = argv.descendants as number; - if (argv.minElapsedMs !== undefined) treeOptions.minElapsedMs = argv.minElapsedMs as number; + if (argv.focusFn) treeOptions.focusFn = argv.focusFn; + if (argv.focusSql) treeOptions.focusSql = argv.focusSql; + if (argv.focusRoute) treeOptions.focusRoute = argv.focusRoute; + if (argv.focusUrl) treeOptions.focusUrl = argv.focusUrl; + if (argv.ancestors !== undefined) treeOptions.ancestors = argv.ancestors; + if (argv.descendants !== undefined) treeOptions.descendants = argv.descendants; + if (argv.minElapsedMs !== undefined) treeOptions.minElapsedMs = argv.minElapsedMs; const nodes = tree(db, ref, treeOptions); const filtered = applyFilter(nodes, argv.filter as 'all' | 'http' | 'sql'); diff --git a/packages/cli/tests/unit/cmds/query/queries/mcp.spec.ts b/packages/cli/tests/unit/cmds/query/queries/mcp.spec.ts index a92d1fc36f..63c07f9ee6 100644 --- a/packages/cli/tests/unit/cmds/query/queries/mcp.spec.ts +++ b/packages/cli/tests/unit/cmds/query/queries/mcp.spec.ts @@ -85,7 +85,7 @@ describe('MCP handler', () => { const db = freshDb(); try { const r = call(buildMcpHandler(db), { jsonrpc: '2.0', 
id: 2, method: 'tools/list' }); - const names = ((r!.result as any).tools as Array<{ name: string }>).map((t) => t.name); + const names = ((r!.result as any).tools as { name: string }[]).map((t) => t.name); expect(names).toEqual( expect.arrayContaining([ 'list_endpoints', @@ -110,7 +110,7 @@ describe('MCP handler', () => { const db = freshDb(); try { const r = call(buildMcpHandler(db), { jsonrpc: '2.0', id: 3, method: 'resources/list' }); - const uris = ((r!.result as any).resources as Array<{ uri: string }>).map((x) => x.uri); + const uris = ((r!.result as any).resources as { uri: string }[]).map((x) => x.uri); expect(uris).toContain('appmap://endpoints'); } finally { db.close(); From 2c0f6c58d8568b2aab335dadf9896d6b28fb4137 Mon Sep 17 00:00:00 2001 From: kgilpin Date: Sun, 3 May 2026 11:15:24 -0400 Subject: [PATCH 21/30] chore(cli): add typecheck and verify scripts `yarn typecheck` runs tsc --noEmit; `yarn verify` chains lint + typecheck for a single ~12s pre-commit gate. Catches both the lint failures and the CommandModule TS2345 class of issues that have been landing in CI. 
Co-Authored-By: Claude Opus 4.7 (1M context) --- packages/cli/package.json | 2 ++ 1 file changed, 2 insertions(+) diff --git a/packages/cli/package.json b/packages/cli/package.json index 62270c6a22..d23acb232a 100644 --- a/packages/cli/package.json +++ b/packages/cli/package.json @@ -16,6 +16,8 @@ "scripts": { "lint": "eslint src tests", "lint:fix": "eslint src tests --fix", + "typecheck": "tsc --noEmit", + "verify": "yarn lint && yarn typecheck", "pre-commit": "lint-staged", "test": "jest --filter=./tests/testFilter.js", "test:binary": "jest -c tests/binary/jest.config.js", From 2382d2914ba05bcfce8428bbb83f8d7ef08f9315 Mon Sep 17 00:00:00 2001 From: kgilpin Date: Sun, 3 May 2026 11:19:03 -0400 Subject: [PATCH 22/30] docs: add CLAUDE.md with per-package verify guidance Recommends `yarn verify` from the modified package's directory after substantial change batches; calls out the ~3x speed difference between scoped and full-monorepo verification (~10s vs ~30s). Co-Authored-By: Claude Opus 4.7 (1M context) --- CLAUDE.md | 35 +++++++++++++++++++++++++++++++++++ 1 file changed, 35 insertions(+) create mode 100644 CLAUDE.md diff --git a/CLAUDE.md b/CLAUDE.md new file mode 100644 index 0000000000..c4f8f2cea2 --- /dev/null +++ b/CLAUDE.md @@ -0,0 +1,35 @@ +# Verifying changes + +This is a Yarn 3 monorepo (`packages/*`). CI rejects pushes for both lint errors and tsc errors, and round-trip cost is several minutes per CI run. Verify locally before committing. + +## When to run verify + +After any **substantial batch of changes** to a package — multiple files touched, new functions added, tests added, refactors — run that package's `verify` script before reporting work complete or asking to commit. A one-line typo fix doesn't need it; a multi-file change does. + +Run `verify` for **only the packages with modified files**, not the whole monorepo. Full-monorepo lint takes ~30s; a single package's verify is ~10s. 
+ +## How to run verify + +```sh +# from the package directory: +cd packages/cli && yarn verify +``` + +`yarn verify` chains `yarn lint && yarn typecheck`. Currently defined in: + +- `packages/cli` — `lint` (eslint) + `typecheck` (tsc --noEmit), ~10s total + +Other packages don't yet have `verify`. If a package without `verify` is modified, run its `lint` script (and `tsc --noEmit` if it has a tsconfig) manually, or add a `verify` script following the cli pattern. + +## What it catches + +- ESLint rules: `array-type`, `no-unnecessary-type-assertion`, `prefer-function-type`, `prefer-optional-chain`, `prefer-nullish-coalescing`, etc. — all rejected by CI as errors. +- Type errors that `tsc --noEmit` finds, including yargs `CommandModule` assignability issues that require widening exported handler argv types. + +## Determining which packages changed + +```sh +git diff --name-only HEAD | awk -F/ '/^packages\//{print "packages/"$2}' | sort -u +``` + +Run `yarn verify` (or fall back to `yarn lint`) in each. From 3cb6e4a678bd825428f134227a1ed195a3e8da3e Mon Sep 17 00:00:00 2001 From: kgilpin Date: Sun, 3 May 2026 11:35:11 -0400 Subject: [PATCH 23/30] feat(query): expose path:lineno on function rows; add list_labels MCP tool MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - function_calls already stored path/lineno; FindCallRow, FunctionHotspotRow, and FunctionNode now project them through the SQL/MCP layer so callers can Read source directly without a follow-up grep. - New list_labels MCP tool surfaces labels with code-object counts and a sample fqid — supports the appmap-fix skill's "what's already labeled" step. - Tool descriptions for find_calls and get_call_tree now state that parameters_json/return_value are populated only for labeled functions. - find verb's calls table gains a LOCATION column. 
Co-Authored-By: Claude Opus 4.7 (1M context) --- packages/cli/src/cmds/query/queries/find.ts | 4 +++ .../cli/src/cmds/query/queries/hotspots.ts | 7 +++++ packages/cli/src/cmds/query/queries/mcp.ts | 27 ++++++++++++++++--- packages/cli/src/cmds/query/queries/tree.ts | 7 +++++ packages/cli/src/cmds/query/verbs/find.ts | 3 ++- .../tests/unit/cmds/query/queries/mcp.spec.ts | 18 +++++++++++++ 6 files changed, 62 insertions(+), 4 deletions(-) diff --git a/packages/cli/src/cmds/query/queries/find.ts b/packages/cli/src/cmds/query/queries/find.ts index e76f56a4c2..abbfcc34d4 100644 --- a/packages/cli/src/cmds/query/queries/find.ts +++ b/packages/cli/src/cmds/query/queries/find.ts @@ -68,6 +68,8 @@ export interface FindCallRow { fqid: string | null; defined_class: string; method_id: string; + path: string | null; + lineno: number | null; elapsed_ms: number | null; parameters_json: string | null; return_value: string | null; @@ -285,6 +287,8 @@ export function findCalls(db: sqlite3.Database, filter: FindFilter): FindCallRow co.fqid AS fqid, fc.defined_class AS defined_class, fc.method_id AS method_id, + fc.path AS path, + fc.lineno AS lineno, fc.elapsed_ms AS elapsed_ms, fc.parameters_json AS parameters_json, fc.return_value AS return_value diff --git a/packages/cli/src/cmds/query/queries/hotspots.ts b/packages/cli/src/cmds/query/queries/hotspots.ts index 8fa49c4eb2..42cd76177c 100644 --- a/packages/cli/src/cmds/query/queries/hotspots.ts +++ b/packages/cli/src/cmds/query/queries/hotspots.ts @@ -14,6 +14,11 @@ export interface FunctionHotspotRow { fqid: string | null; defined_class: string; method_id: string; + // Representative source location: one call's path/lineno from the + // aggregated set. Useful for "show me the source of this hotspot" + // without a follow-up lookup. 
+ path: string | null; + lineno: number | null; calls: number; total_ms: number; self_ms: number; @@ -72,6 +77,8 @@ export function functionHotspots( co.fqid AS fqid, fc.defined_class AS defined_class, fc.method_id AS method_id, + MIN(fc.path) AS path, + MIN(fc.lineno) AS lineno, COUNT(*) AS calls, SUM(COALESCE(fc.elapsed_ms, 0)) AS total_ms, SUM(COALESCE(fc.elapsed_ms, 0) diff --git a/packages/cli/src/cmds/query/queries/mcp.ts b/packages/cli/src/cmds/query/queries/mcp.ts index 2f00929eff..afe88d86db 100644 --- a/packages/cli/src/cmds/query/queries/mcp.ts +++ b/packages/cli/src/cmds/query/queries/mcp.ts @@ -177,7 +177,7 @@ const TOOLS: ToolImpl[] = [ spec: { name: 'function_hotspots', description: - 'Functions ranked by total elapsed time across recordings. Filter by route to scope to a specific entry point or by class to focus on one component. Returns: fqid, defined_class, method_id, calls, total_ms, self_ms. fqid examples: "app/Logger#error" (instance), "app/Util.parse" (static), "src/cmds/query/db/openQueryDb.openQueryDb" (module-level), "app/Outer::Inner#method" (nested classes).', + 'Functions ranked by total elapsed time across recordings. Filter by route to scope to a specific entry point or by class to focus on one component. Returns: fqid, defined_class, method_id, path, lineno, calls, total_ms, self_ms. path/lineno are one representative call\'s source location — read directly to see the function. fqid examples: "app/Logger#error" (instance), "app/Util.parse" (static), "src/cmds/query/db/openQueryDb.openQueryDb" (module-level), "app/Outer::Inner#method" (nested classes).', inputSchema: { type: 'object', properties: { @@ -229,6 +229,27 @@ const TOOLS: ToolImpl[] = [ }), }, + { + spec: { + name: 'list_labels', + description: + 'AppMap labels present in the database, ranked by usage. 
Use to discover what semantic anchors exist (canonical: "log", "secret", "security.authentication", "security.authorization", "deserialize", "system.exec", "job.create", "http.session.clear") and any project-specific or investigation labels (e.g. "bug.", "repro"). Pass a returned label to find_calls --label to retrieve its calls. Returns: label, count (distinct code objects bearing it), sample_fqid (one representative function).', + inputSchema: { type: 'object', properties: {} }, + }, + handler: (_args, db) => + db + .prepare( + `SELECT l.label AS label, + COUNT(DISTINCT co.id) AS count, + MIN(co.fqid) AS sample_fqid + FROM labels l + JOIN code_objects co ON co.id = l.code_object_id + GROUP BY l.label + ORDER BY count DESC, l.label` + ) + .all(), + }, + // ----- row-level finders ---------------------------------------------- { @@ -311,7 +332,7 @@ const TOOLS: ToolImpl[] = [ spec: { name: 'find_calls', description: - 'Function-call rows. Filter by class, method, label (e.g. "log", "security.authorization"), duration. Use --label=log to retrieve application log output, or --label=security.authorization to find authorization checks. Returns: appmap_name, event_id, fqid, defined_class, method_id, elapsed_ms, parameters_json, return_value.', + 'Function-call rows. Filter by class, method, label (e.g. "log", "security.authorization"), duration. Use --label=log to retrieve application log output, or --label=security.authorization to find authorization checks. Returns: appmap_name, event_id, fqid, defined_class, method_id, path, lineno, elapsed_ms, parameters_json, return_value. parameters_json and return_value are populated only for labeled functions; unlabeled rows return null. Use path:lineno to read the source.', inputSchema: { type: 'object', properties: { @@ -368,7 +389,7 @@ const TOOLS: ToolImpl[] = [ spec: { name: 'get_call_tree', description: - 'Call tree of one recording. Without focus, returns every event. 
With focus_type + focus_value, narrows to the neighborhood of matching events: focus_type ∈ {function, sql_query, http_server_request, http_client_request}, focus_value is the matching identifier (fqid / SQL substring / normalized_path / URL substring). Use min_elapsed_ms to prune fast leaves. The appmap argument accepts a numeric appmap_id or an appmap_name (both surfaced by find_recordings). Returns ordered nodes: each has depth, kind ∈ {function, sql, http_server, http_client, exception}, event_id, parent_event_id, elapsed_ms, plus kind-specific fields (function: fqid/defined_class/method_id; sql: sql_text; http_server: method/route/status_code; http_client: method/url/status_code; exception: exception_class/message). fqid examples: "app/Logger#error" (instance), "app/Util.parse" (static).', + 'Call tree of one recording. Without focus, returns every event. With focus_type + focus_value, narrows to the neighborhood of matching events: focus_type ∈ {function, sql_query, http_server_request, http_client_request}, focus_value is the matching identifier (fqid / SQL substring / normalized_path / URL substring). Use min_elapsed_ms to prune fast leaves. The appmap argument accepts a numeric appmap_id or an appmap_name (both surfaced by find_recordings). Returns ordered nodes: each has depth, kind ∈ {function, sql, http_server, http_client, exception}, event_id, parent_event_id, elapsed_ms, plus kind-specific fields (function: fqid/defined_class/method_id/path/lineno/parameters_json/return_value; sql: sql_text; http_server: method/route/status_code; http_client: method/url/status_code; exception: exception_class/message/path/lineno). function nodes\' parameters_json and return_value are populated only for labeled functions. Use path:lineno on function and exception nodes to read the source. 
fqid examples: "app/Logger#error" (instance), "app/Util.parse" (static).', inputSchema: { type: 'object', properties: { diff --git a/packages/cli/src/cmds/query/queries/tree.ts b/packages/cli/src/cmds/query/queries/tree.ts index f7e4eec460..ef48cf9d29 100644 --- a/packages/cli/src/cmds/query/queries/tree.ts +++ b/packages/cli/src/cmds/query/queries/tree.ts @@ -41,6 +41,8 @@ export interface FunctionNode extends BaseNode { fqid: string | null; defined_class: string; method_id: string; + path: string | null; + lineno: number | null; is_static: boolean; elapsed_ms: number | null; parameters_json: string | null; @@ -202,6 +204,7 @@ export function tree( .prepare( `SELECT fc.event_id, fc.parent_event_id, fc.thread_id, co.fqid AS fqid, fc.defined_class, fc.method_id, + fc.path, fc.lineno, fc.is_static, fc.elapsed_ms, fc.parameters_json, fc.return_value FROM function_calls fc LEFT JOIN code_objects co ON co.id = fc.code_object_id @@ -214,6 +217,8 @@ export function tree( fqid: string | null; defined_class: string; method_id: string; + path: string | null; + lineno: number | null; is_static: number; elapsed_ms: number | null; parameters_json: string | null; @@ -228,6 +233,8 @@ export function tree( fqid: r.fqid, defined_class: r.defined_class, method_id: r.method_id, + path: r.path, + lineno: r.lineno, is_static: r.is_static === 1, elapsed_ms: r.elapsed_ms, parameters_json: r.parameters_json, diff --git a/packages/cli/src/cmds/query/verbs/find.ts b/packages/cli/src/cmds/query/verbs/find.ts index 92dcdba212..488d267800 100644 --- a/packages/cli/src/cmds/query/verbs/find.ts +++ b/packages/cli/src/cmds/query/verbs/find.ts @@ -208,10 +208,11 @@ function renderTable(type: FindType, rows: unknown[]): string { ); case 'calls': return formatTable( - ['APPMAP', 'FQID', 'ELAPSED', 'PARAMS', 'RETURN'], + ['APPMAP', 'FQID', 'LOCATION', 'ELAPSED', 'PARAMS', 'RETURN'], (rows as FindCallRow[]).map((r) => [ r.appmap_name, r.fqid ?? `${r.defined_class}#${r.method_id}`, + r.path != null ? 
`${r.path}${r.lineno != null ? `:${r.lineno}` : ''}` : '', formatMs(r.elapsed_ms), formatParams(r.parameters_json), r.return_value ?? '', diff --git a/packages/cli/tests/unit/cmds/query/queries/mcp.spec.ts b/packages/cli/tests/unit/cmds/query/queries/mcp.spec.ts index 63c07f9ee6..3a4825e0ab 100644 --- a/packages/cli/tests/unit/cmds/query/queries/mcp.spec.ts +++ b/packages/cli/tests/unit/cmds/query/queries/mcp.spec.ts @@ -91,6 +91,7 @@ describe('MCP handler', () => { 'list_endpoints', 'function_hotspots', 'sql_hotspots', + 'list_labels', 'find_recordings', 'find_requests', 'find_queries', @@ -200,6 +201,23 @@ describe('MCP handler', () => { } }); + it('list_labels returns labels with counts and a sample fqid', () => { + const db = freshDb(); + try { + seedMinimal(db); + const r = call(buildMcpHandler(db), { + jsonrpc: '2.0', + id: 90, + method: 'tools/call', + params: { name: 'list_labels', arguments: {} }, + }); + const rows = JSON.parse((r!.result as any).content[0].text); + expect(rows).toEqual([{ label: 'log', count: 1, sample_fqid: 'app/Logger#error' }]); + } finally { + db.close(); + } + }); + it('find_calls --label filters by the AppMap label', () => { const db = freshDb(); try { From fc9db5afe4d432edbc1b41446bcb6cdbeac7fc9d Mon Sep 17 00:00:00 2001 From: kgilpin Date: Sun, 3 May 2026 11:35:26 -0400 Subject: [PATCH 24/30] chore: hoist verify to repo root, scoped to changed packages MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit scripts/verify.mjs reads `git status` (or `git diff --cached` with --staged), groups changed files by `packages//`, and per package runs ESLint --quiet on only the changed lintable files plus `tsc --noEmit` on the whole package. Skips files outside packages/ and packages without a `typecheck` script. Uses the package-local eslint binary so we pick up its plugin major versions (cli ships ESLint 8 vs root's 7). - root scripts: `yarn verify` (working tree) and `yarn verify:staged` (for hook use). 
- CLAUDE.md updated to call out `yarn verify` from repo root after substantial change batches; ~5–7s scoped vs ~30s full lint. - No pre-commit hook yet — left for the team to opt into. Co-Authored-By: Claude Opus 4.7 (1M context) --- CLAUDE.md | 32 ++++----- package.json | 2 + scripts/verify.mjs | 162 +++++++++++++++++++++++++++++++++++++++++++++ 3 files changed, 178 insertions(+), 18 deletions(-) create mode 100755 scripts/verify.mjs diff --git a/CLAUDE.md b/CLAUDE.md index c4f8f2cea2..541d14ae26 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -1,35 +1,31 @@ # Verifying changes -This is a Yarn 3 monorepo (`packages/*`). CI rejects pushes for both lint errors and tsc errors, and round-trip cost is several minutes per CI run. Verify locally before committing. +This is a Yarn 3 monorepo (`packages/*`). CI rejects pushes for both lint errors and tsc errors, with several-minute round-trip costs per CI run. Verify locally before committing. ## When to run verify -After any **substantial batch of changes** to a package — multiple files touched, new functions added, tests added, refactors — run that package's `verify` script before reporting work complete or asking to commit. A one-line typo fix doesn't need it; a multi-file change does. - -Run `verify` for **only the packages with modified files**, not the whole monorepo. Full-monorepo lint takes ~30s; a single package's verify is ~10s. +After any **substantial batch of changes** — multiple files touched, new functions added, tests added, refactors — run `yarn verify` from the repo root before reporting work complete or asking to commit. A one-line typo fix doesn't need it; a multi-file change does. ## How to run verify ```sh -# from the package directory: -cd packages/cli && yarn verify +yarn verify # check working-tree changes (staged + unstaged + untracked) +yarn verify:staged # check only staged changes ``` -`yarn verify` chains `yarn lint && yarn typecheck`. 
Currently defined in: - -- `packages/cli` — `lint` (eslint) + `typecheck` (tsc --noEmit), ~10s total +`scripts/verify.mjs`: -Other packages don't yet have `verify`. If a package without `verify` is modified, run its `lint` script (and `tsc --noEmit` if it has a tsconfig) manually, or add a `verify` script following the cli pattern. +1. Reads modified files from git. +2. Groups them by `packages/<pkg>/`. +3. For each affected package: runs ESLint (`--quiet`, errors only) on the changed lintable files, then `tsc --noEmit` on the whole package (since TS is project-wide, you can't typecheck a single file). -## What it catches +Typical run on one package: ~5–7s. CI's full lint+typecheck takes ~30s; scoped is ~3× faster. -- ESLint rules: `array-type`, `no-unnecessary-type-assertion`, `prefer-function-type`, `prefer-optional-chain`, `prefer-nullish-coalescing`, etc. — all rejected by CI as errors. -- Type errors that `tsc --noEmit` finds, including yargs `CommandModule` assignability issues that require widening exported handler argv types. +## Adding verify to a package -## Determining which packages changed +If you touch a package that doesn't yet participate, add a `typecheck` script (`tsc --noEmit`) to its `package.json` so `verify.mjs` includes it. The existing `lint` script is enough for ESLint coverage. -```sh -git diff --name-only HEAD | awk -F/ '/^packages\//{print "packages/"$2}' | sort -u -``` +## What verify catches -Run `yarn verify` (or fall back to `yarn lint`) in each. +- ESLint errors that CI rejects: `array-type` (use `T[]` not `Array<T>`), `no-unnecessary-type-assertion`, `prefer-function-type`, `prefer-optional-chain`, etc. Warnings (e.g. `no-unsafe-*`, `prefer-nullish-coalescing`) are suppressed by `--quiet`. +- Type errors that `tsc --noEmit` finds, including yargs `CommandModule` assignability issues that require widening exported handler argv types.
diff --git a/package.json b/package.json index 8b0e76759d..92d534e07b 100644 --- a/package.json +++ b/package.json @@ -6,6 +6,8 @@ ], "scripts": { "lint": "yarn workspaces foreach --exclude root -v run lint", + "verify": "node scripts/verify.mjs", + "verify:staged": "node scripts/verify.mjs --staged", "test": "yarn workspaces foreach --exclude '{root}' -v run test", "build": "yarn workspaces foreach -t --exclude root -v run build", "build-native": "yarn workspaces foreach -t --exclude root -v run build-native", diff --git a/scripts/verify.mjs b/scripts/verify.mjs new file mode 100755 index 0000000000..d96bd61912 --- /dev/null +++ b/scripts/verify.mjs @@ -0,0 +1,162 @@ +#!/usr/bin/env node +// Run lint + typecheck on packages with modified files. +// +// Usage: +// node scripts/verify.mjs # check working-tree changes (staged + unstaged + untracked) +// node scripts/verify.mjs --staged # check only staged changes (intended for pre-commit hook use) +// +// Per affected package: +// 1. eslint on the package's changed *.ts / *.tsx / *.js / *.mjs files +// 2. tsc --noEmit on the whole package, if it has tsconfig.json + a typecheck script +// +// Exits non-zero if any check fails. Skips files outside packages/. + +import { spawnSync } from 'node:child_process'; +import { existsSync, readFileSync } from 'node:fs'; +import path from 'node:path'; +import { fileURLToPath } from 'node:url'; + +const ROOT = path.resolve(path.dirname(fileURLToPath(import.meta.url)), '..'); +const STAGED = process.argv.includes('--staged'); + +const LINT_EXTS = new Set(['.ts', '.tsx', '.js', '.mjs', '.cjs']); + +function git(...args) { + const r = spawnSync('git', args, { cwd: ROOT, encoding: 'utf8' }); + if (r.status !== 0) { + console.error(`git ${args.join(' ')} failed:\n${r.stderr}`); + process.exit(1); + } + return r.stdout; +} + +function modifiedFiles() { + if (STAGED) { + // Staged adds/copies/modifies/renames. 
+ return git('diff', '--cached', '--name-only', '--diff-filter=ACMR') + .split('\n') + .filter(Boolean); + } + // Working tree: tracked changes + untracked (excluding ignored). + // --porcelain output is `XY filename`; strip the 3-char status prefix. + // Renames appear as `R old -> new`; --no-renames keeps both columns simple. + return git('status', '--porcelain', '--no-renames') + .split('\n') + .filter(Boolean) + .map((line) => line.slice(3)); +} + +function groupByPackage(files) { + const byPkg = new Map(); + const skipped = []; + for (const file of files) { + const m = /^packages\/([^/]+)\//.exec(file); + if (!m) { + skipped.push(file); + continue; + } + const pkg = m[1]; + if (!byPkg.has(pkg)) byPkg.set(pkg, []); + byPkg.get(pkg).push(file); + } + return { byPkg, skipped }; +} + +function hasScript(pkgDir, name) { + const pj = path.join(pkgDir, 'package.json'); + if (!existsSync(pj)) return false; + try { + const json = JSON.parse(readFileSync(pj, 'utf8')); + return Boolean(json.scripts && json.scripts[name]); + } catch { + return false; + } +} + +function resolveBin(pkgDir, name) { + const pkgBin = path.join(pkgDir, 'node_modules', '.bin', name); + if (existsSync(pkgBin)) return pkgBin; + const rootBin = path.join(ROOT, 'node_modules', '.bin', name); + if (existsSync(rootBin)) return rootBin; + return null; +} + +function run(label, command, args, opts) { + console.log(`\n→ ${label}: ${command} ${args.join(' ')} (cwd: ${opts.cwd})`); + const r = spawnSync(command, args, { stdio: 'inherit', ...opts }); + return r.status === 0; +} + +function verifyPackage(pkg, files) { + const pkgDir = path.join(ROOT, 'packages', pkg); + const pkgJsonPath = path.join(pkgDir, 'package.json'); + if (!existsSync(pkgJsonPath)) { + console.warn(`Skipping ${pkg}: no packages/${pkg}/package.json`); + return true; + } + + let ok = true; + + // Lint changed lintable files. 
+ const lintable = files + .filter((f) => LINT_EXTS.has(path.extname(f))) + .filter((f) => existsSync(path.join(ROOT, f))) // skip deleted files + .map((f) => path.relative(pkgDir, path.join(ROOT, f))); + + if (lintable.length > 0 && hasScript(pkgDir, 'lint')) { + // Prefer the package-local eslint binary — root and packages can carry + // different major versions (e.g. ESLint 7 at root vs 8 in cli), and + // @typescript-eslint plugins are pinned to one major. + const eslintBin = resolveBin(pkgDir, 'eslint'); + if (!eslintBin) { + console.warn(`Skipping lint for ${pkg}: eslint not found in node_modules/.bin`); + } else { + ok = + run( + `${pkg}: eslint (${lintable.length} file${lintable.length === 1 ? '' : 's'})`, + eslintBin, + // --quiet hides warnings; CI rules that only warn are not blockers. + ['--quiet', ...lintable], + { cwd: pkgDir } + ) && ok; + } + } + + // Typecheck the whole package. + if (hasScript(pkgDir, 'typecheck') && existsSync(path.join(pkgDir, 'tsconfig.json'))) { + const tscBin = resolveBin(pkgDir, 'tsc'); + if (!tscBin) { + console.warn(`Skipping typecheck for ${pkg}: tsc not found in node_modules/.bin`); + } else { + ok = run(`${pkg}: tsc --noEmit`, tscBin, ['--noEmit'], { cwd: pkgDir }) && ok; + } + } + + return ok; +} + +const files = modifiedFiles(); +if (files.length === 0) { + console.log('No modified files. Nothing to verify.'); + process.exit(0); +} + +const { byPkg, skipped } = groupByPackage(files); +console.log(`Verifying ${byPkg.size} package(s) with modified files:`); +for (const [pkg, pkgFiles] of byPkg) { + console.log(` packages/${pkg} (${pkgFiles.length} file${pkgFiles.length === 1 ? 
'' : 's'})`); +} +if (skipped.length > 0) { + console.log(` skipped ${skipped.length} non-package file(s)`); +} + +let allOk = true; +for (const [pkg, pkgFiles] of byPkg) { + allOk = verifyPackage(pkg, pkgFiles) && allOk; +} + +if (!allOk) { + console.error('\nverify: FAILED'); + process.exit(1); +} +console.log('\nverify: OK'); From 91ea859d2b003ea2d9ba58a5dc5a766936922637 Mon Sep 17 00:00:00 2001 From: kgilpin Date: Sun, 3 May 2026 11:47:55 -0400 Subject: [PATCH 25/30] feat(query): add find logs verb + find_logs MCP tool Surface log calls (functions labeled `log`) as a first-class find type. Filter by --message (SQL LIKE against parameters_json and return_value), --logger (class), and the standard appmap-scoped flags. False-positive matches on parameter names are accepted by design; tighten in post-processing. Display projection prefers a structured-return {message, ...} when present, else falls back to a parameter named message/msg or the first string-typed value. Co-Authored-By: Claude Opus 4.7 (1M context) --- packages/cli/src/cmds/query/queries/find.ts | 68 +++++- packages/cli/src/cmds/query/queries/mcp.ts | 34 +++ packages/cli/src/cmds/query/verbs/find.ts | 74 +++++- .../unit/cmds/query/queries/find.spec.ts | 214 +++++++++++++++++- .../tests/unit/cmds/query/queries/mcp.spec.ts | 52 +++++ .../tests/unit/cmds/query/verbs/find.spec.ts | 83 ++++++- 6 files changed, 513 insertions(+), 12 deletions(-) diff --git a/packages/cli/src/cmds/query/queries/find.ts b/packages/cli/src/cmds/query/queries/find.ts index abbfcc34d4..57131507ff 100644 --- a/packages/cli/src/cmds/query/queries/find.ts +++ b/packages/cli/src/cmds/query/queries/find.ts @@ -12,7 +12,7 @@ import { sqlCallerMethodClauses, } from '../lib/scope'; -export type FindType = 'appmaps' | 'requests' | 'queries' | 'calls' | 'exceptions'; +export type FindType = 'appmaps' | 'requests' | 'queries' | 'calls' | 'exceptions' | 'logs'; export interface FindFilter { route?: string; // "POST /orders" or "/orders" @@ 
-28,6 +28,8 @@ export interface FindFilter { appmap?: string; // appmap name (or basename of source_path) table?: string; // SQL table name (find queries) exception?: string; // exception class (find exceptions) + logger?: string; // --logger (find logs); class of the logging fn + message?: string; // --message (find logs); substring of the log line limit?: number; offset?: number; } @@ -75,6 +77,18 @@ export interface FindCallRow { return_value: string | null; } +export interface FindLogRow { + appmap_name: string; + event_id: number; + parent_event_id: number | null; + logger: string; // defined_class of the logging fn + method_id: string; + path: string | null; + lineno: number | null; + parameters_json: string | null; + return_value: string | null; +} + export interface FindExceptionRow { appmap_name: string; event_id: number; @@ -302,6 +316,56 @@ export function findCalls(db: sqlite3.Database, filter: FindFilter): FindCallRow return db.prepare(sql).all(...params) as FindCallRow[]; } +// Log rows: function_calls whose linked code_object has the canonical +// 'log' label. The label is the contract — it tells the importer to +// capture parameters_json + return_value, and tells us which calls are +// loggers. --message is a SQL LIKE substring against both columns; +// false positives (matching a parameter name, a class name, or a JSON +// punctuation byte) are accepted by design and can be tightened in +// post-processing. 
+export function findLogs(db: sqlite3.Database, filter: FindFilter): FindLogRow[] { + const where: string[] = [ + `fc.code_object_id IN (SELECT l.code_object_id FROM labels l WHERE l.label = 'log')`, + ]; + const params: (string | number)[] = []; + + const scope = appmapIdScope(filter, 'fc'); + if (scope) { + where.push(scope.sql); + params.push(...scope.params); + } + + if (filter.logger) { + const c = classFilterClauses(filter.logger, 'fc'); + where.push(...c.where); + params.push(...c.params); + } + + if (filter.message) { + where.push(`(fc.parameters_json LIKE ? OR fc.return_value LIKE ?)`); + const like = `%${filter.message}%`; + params.push(like, like); + } + + let sql = ` + SELECT a.name AS appmap_name, + fc.event_id AS event_id, + fc.parent_event_id AS parent_event_id, + fc.defined_class AS logger, + fc.method_id AS method_id, + fc.path AS path, + fc.lineno AS lineno, + fc.parameters_json AS parameters_json, + fc.return_value AS return_value + FROM function_calls fc + JOIN appmaps a ON a.id = fc.appmap_id + WHERE ${where.join(' AND ')} + ORDER BY a.source_path, fc.event_id + `; + sql = appendLimitOffset(sql, filter, params); + return db.prepare(sql).all(...params) as FindLogRow[]; +} + export function findExceptions(db: sqlite3.Database, filter: FindFilter): FindExceptionRow[] { const where: string[] = []; const params: (string | number)[] = []; @@ -350,5 +414,7 @@ export function find( return findCalls(db, filter); case 'exceptions': return findExceptions(db, filter); + case 'logs': + return findLogs(db, filter); } } diff --git a/packages/cli/src/cmds/query/queries/mcp.ts b/packages/cli/src/cmds/query/queries/mcp.ts index afe88d86db..9610d7101b 100644 --- a/packages/cli/src/cmds/query/queries/mcp.ts +++ b/packages/cli/src/cmds/query/queries/mcp.ts @@ -19,6 +19,7 @@ import { FindCallRow, FindExceptionRow, FindFilter, + FindLogRow, FindQueryRow, FindRequestRow, find, @@ -128,6 +129,8 @@ function buildFindFilter(args: Record): FindFilter { if (typeof 
args.appmap === 'string') f.appmap = args.appmap; if (typeof args.table === 'string') f.table = args.table; if (typeof args.exception === 'string') f.exception = args.exception; + if (typeof args.logger === 'string') f.logger = args.logger; + if (typeof args.message === 'string') f.message = args.message; f.since = maybeTime(args.since); f.until = maybeTime(args.until); f.limit = maybeNumber(args.limit); @@ -358,6 +361,37 @@ const TOOLS: ToolImpl[] = [ handler: (args, db) => find(db, 'calls', buildFindFilter(args)) as FindCallRow[], }, + { + spec: { + name: 'find_logs', + description: + 'Application log lines captured from functions labeled `log`. Filter by message substring (matches across the call\'s parameters and return value), logger class, recording, branch, or time window. Returns: appmap_name, event_id, parent_event_id, logger, method_id, path, lineno, parameters_json, return_value. The message is in parameters_json (a [{name, class, value}, ...] blob) — read the value of the parameter named `message`/`msg`, or the first string-typed parameter, or parse return_value as JSON if the recorder returns a structured `{level, message, ...}`. Use path:lineno to read the call site of the log statement.', + inputSchema: { + type: 'object', + properties: { + message: { + type: 'string', + description: + 'Substring to look for inside the captured log call. Matches a SQL LIKE against parameters_json and return_value — false positives (e.g. matching a class or parameter name) are accepted; tighten in post-processing if needed.', + }, + logger: { + type: 'string', + description: + 'Class of the logging function (defined_class). 
Accepts short or canonical fqid form, same as find_calls --class.', + }, + branch: COMMON_FILTER_PROPERTIES.branch, + commit: COMMON_FILTER_PROPERTIES.commit, + since: COMMON_FILTER_PROPERTIES.since, + until: COMMON_FILTER_PROPERTIES.until, + appmap: COMMON_FILTER_PROPERTIES.appmap, + limit: COMMON_FILTER_PROPERTIES.limit, + offset: COMMON_FILTER_PROPERTIES.offset, + }, + }, + }, + handler: (args, db) => find(db, 'logs', buildFindFilter(args)) as FindLogRow[], + }, + { spec: { name: 'find_exceptions', diff --git a/packages/cli/src/cmds/query/verbs/find.ts b/packages/cli/src/cmds/query/verbs/find.ts index 488d267800..0635651687 100644 --- a/packages/cli/src/cmds/query/verbs/find.ts +++ b/packages/cli/src/cmds/query/verbs/find.ts @@ -14,12 +14,13 @@ import { FindAppmapRow, FindCallRow, FindExceptionRow, + FindLogRow, FindQueryRow, FindRequestRow, } from '../queries/find'; import { formatMs, formatTable } from '../lib/format'; -const TYPES: readonly FindType[] = ['appmaps', 'requests', 'queries', 'calls', 'exceptions']; +const TYPES: readonly FindType[] = ['appmaps', 'requests', 'queries', 'calls', 'exceptions', 'logs']; // 'recordings' is accepted as an alias for 'appmaps' to match the MCP // naming (find_recordings) and the user-facing concept of a "recording". 
const TYPE_CHOICES: readonly string[] = ['appmaps', 'recordings', ...TYPES.filter((t) => t !== 'appmaps')]; @@ -54,6 +55,8 @@ export const builder = (args: yargs.Argv) => { .option('appmap', { type: 'string', describe: 'appmap name' }) .option('table', { type: 'string', describe: 'SQL table name (queries)' }) .option('exception', { type: 'string', describe: 'exception class (exceptions)' }) + .option('logger', { type: 'string', describe: 'logger class (logs)' }) + .option('message', { type: 'string', describe: 'log message substring (logs)' }) .option('limit', { type: 'number' }) .option('offset', { type: 'number' }) .option('json', { type: 'boolean', default: false }); @@ -67,11 +70,12 @@ type Argv = ReturnType extends yargs.Argv ? T : never; // types where they make sense; flagging them on the wrong type is an // error rather than a silent no-op. const REJECTED_FLAGS: Record = { - appmaps: ['class', 'method', 'label', 'table', 'exception'], - requests: ['class', 'method', 'label', 'table', 'exception'], - queries: ['label', 'exception'], - calls: ['table', 'exception'], - exceptions: ['class', 'method', 'label', 'duration', 'table'], + appmaps: ['class', 'method', 'label', 'table', 'exception', 'logger', 'message'], + requests: ['class', 'method', 'label', 'table', 'exception', 'logger', 'message'], + queries: ['label', 'exception', 'logger', 'message'], + calls: ['table', 'exception', 'logger', 'message'], + exceptions: ['class', 'method', 'label', 'duration', 'table', 'logger', 'message'], + logs: ['class', 'method', 'label', 'route', 'status', 'duration', 'table', 'exception'], }; // Per-flag hints, attached to error messages when a rejected flag is used. @@ -81,6 +85,10 @@ const REJECTED_HINTS: Partial>>> requests: { method: 'to filter by HTTP method, use --route "METHOD /path"', }, + logs: { + class: 'to filter logs by logger class, use --logger', + label: '--label is implied (logs always means label=log)', + }, }; // Exported for tests. 
Operates on a generic flag map so unit tests don't @@ -126,6 +134,8 @@ export function buildFindFilter(argv: Record): ParsedFind { if (typeof argv.appmap === 'string') filter.appmap = argv.appmap; if (typeof argv.table === 'string') filter.table = argv.table; if (typeof argv.exception === 'string') filter.exception = argv.exception; + if (typeof argv.logger === 'string') filter.logger = argv.logger; + if (typeof argv.message === 'string') filter.message = argv.message; if (typeof argv.limit === 'number') filter.limit = argv.limit; if (typeof argv.offset === 'number') filter.offset = argv.offset; @@ -228,7 +238,59 @@ function renderTable(type: FindType, rows: unknown[]): string { String(r.event_id), ]) ); + case 'logs': + return formatTable( + ['APPMAP', 'LOGGER', 'METHOD', 'MESSAGE', 'EVENT'], + (rows as FindLogRow[]).map((r) => [ + r.appmap_name, + r.logger, + r.method_id, + projectLogMessage(r.parameters_json, r.return_value), + String(r.event_id), + ]) + ); + } +} + +// Pick a displayable message from a log row's captured fields. +// 1. If return_value parses as JSON with a `message` field, use it +// (this is the structured-return contract). +// 2. Otherwise, look in parameters_json for a parameter whose `name` +// is `message` or `msg`; fall back to the first string-typed value. +// 3. Otherwise, stringify whatever's available so the row isn't blank. +// Display-only — does not affect filtering. The `--message` SQL LIKE +// runs against the raw columns and may return rows whose projected +// message doesn't contain the substring (e.g., matched a class name); +// that's the documented FP-tolerant behavior. 
+export function projectLogMessage( + parametersJson: string | null, + returnValue: string | null +): string { + if (returnValue) { + try { + const parsed = JSON.parse(returnValue) as Record; + if (parsed && typeof parsed === 'object' && typeof parsed.message === 'string') { + return parsed.message; + } + } catch { + // not structured — fall through + } + } + if (parametersJson) { + try { + const params = JSON.parse(parametersJson) as { name?: string; class?: string; value?: unknown }[]; + const named = params.find((p) => p.name === 'message' || p.name === 'msg'); + if (named?.value != null) return String(named.value); + const firstString = params.find((p) => typeof p.value === 'string'); + if (firstString) return String(firstString.value); + if (params.length > 0) return JSON.stringify(params.map((p) => p.value)); + } catch { + return parametersJson; + } } + // No structured message available. Return blank rather than the raw + // `return_value` (which is often noise like "true" / "None"). 
+ return ''; } function formatParams(json: string | null): string { diff --git a/packages/cli/tests/unit/cmds/query/queries/find.spec.ts b/packages/cli/tests/unit/cmds/query/queries/find.spec.ts index d47f99464d..4a9481ca0b 100644 --- a/packages/cli/tests/unit/cmds/query/queries/find.spec.ts +++ b/packages/cli/tests/unit/cmds/query/queries/find.spec.ts @@ -5,6 +5,7 @@ import { findAppmaps, findCalls, findExceptions, + findLogs, findQueries, findRequests, } from '../../../../../src/cmds/query/queries/find'; @@ -36,11 +37,14 @@ interface Recording { }[]; calls?: { event_id: number; + parent_event_id?: number; defined_class: string; method_id: string; elapsed_ms?: number; fqid?: string; labels?: string[]; + parameters?: { name: string; class?: string; value: unknown }[]; + return_value?: unknown; }[]; exceptions?: { event_id: number; @@ -76,8 +80,9 @@ function seed(db: sqlite3.Database, recs: Recording[]): void { `INSERT OR IGNORE INTO labels (code_object_id, label) VALUES (?, ?)` ); const insCall = db.prepare( - `INSERT INTO function_calls (appmap_id, event_id, defined_class, method_id, code_object_id, elapsed_ms) - VALUES (?, ?, ?, ?, ?, ?)` + `INSERT INTO function_calls (appmap_id, event_id, parent_event_id, defined_class, method_id, + code_object_id, elapsed_ms, parameters_json, return_value) + VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)` ); const insExc = db.prepare( `INSERT INTO exceptions (appmap_id, event_id, exception_class, message) @@ -126,13 +131,23 @@ function seed(db: sqlite3.Database, recs: Recording[]): void { insCo.run(fqid, pkg, JSON.stringify([leaf]), leaf, c.method_id, 0); const coId = (selCoId.get(fqid) as { id: number }).id; for (const label of c.labels ?? []) insLabel.run(coId, label); + const paramsJson = c.parameters ? JSON.stringify(c.parameters) : null; + const returnVal = + c.return_value === undefined + ? null + : typeof c.return_value === 'string' + ? 
c.return_value + : JSON.stringify(c.return_value); insCall.run( aid, c.event_id, + c.parent_event_id ?? null, c.defined_class, c.method_id, coId, - c.elapsed_ms ?? null + c.elapsed_ms ?? null, + paramsJson, + returnVal ); } for (const e of r.exceptions ?? []) { @@ -597,6 +612,199 @@ describe('findCalls', () => { }); }); +describe('findLogs', () => { + it('returns only label=log calls; non-log calls are excluded', () => { + const db = freshDb(); + try { + seed(db, [ + { + name: 'a', + calls: [ + { + event_id: 1, + defined_class: 'Logger', + method_id: 'info', + labels: ['log'], + parameters: [{ name: 'message', class: 'String', value: 'starting up' }], + }, + { event_id: 2, defined_class: 'OrdersController', method_id: 'create' }, + ], + }, + ]); + const rows = findLogs(db, {}); + expect(rows).toHaveLength(1); + expect(rows[0].logger).toBe('Logger'); + expect(rows[0].method_id).toBe('info'); + } finally { + db.close(); + } + }); + + it('--message matches a substring inside parameters_json', () => { + const db = freshDb(); + try { + seed(db, [ + { + name: 'a', + calls: [ + { + event_id: 1, + defined_class: 'Logger', + method_id: 'error', + labels: ['log'], + parameters: [{ name: 'message', class: 'String', value: 'connection refused' }], + }, + { + event_id: 2, + defined_class: 'Logger', + method_id: 'info', + labels: ['log'], + parameters: [{ name: 'message', class: 'String', value: 'started worker' }], + }, + ], + }, + ]); + const rows = findLogs(db, { message: 'refused' }); + expect(rows).toHaveLength(1); + expect(rows[0].event_id).toBe(1); + } finally { + db.close(); + } + }); + + it('--message also matches against return_value (structured-return contract)', () => { + const db = freshDb(); + try { + seed(db, [ + { + name: 'a', + calls: [ + { + event_id: 1, + defined_class: 'Logger', + method_id: 'info', + labels: ['log'], + // No params; the message lives in a structured return_value. 
+ return_value: { level: 'info', message: 'connection refused at host:5432' }, + }, + { + event_id: 2, + defined_class: 'Logger', + method_id: 'info', + labels: ['log'], + return_value: { level: 'info', message: 'all systems nominal' }, + }, + ], + }, + ]); + const rows = findLogs(db, { message: 'refused' }); + expect(rows).toHaveLength(1); + expect(rows[0].event_id).toBe(1); + } finally { + db.close(); + } + }); + + it('--logger filters by the logging class (uses classFilterClauses)', () => { + const db = freshDb(); + try { + seed(db, [ + { + name: 'a', + calls: [ + { + event_id: 1, + defined_class: 'app.AppLogger', + method_id: 'info', + labels: ['log'], + parameters: [{ name: 'msg', class: 'String', value: 'hello' }], + }, + { + event_id: 2, + defined_class: 'lib.AuditLogger', + method_id: 'info', + labels: ['log'], + parameters: [{ name: 'msg', class: 'String', value: 'audited' }], + }, + ], + }, + ]); + // Suffix-aware short-form match: "AppLogger" hits "app.AppLogger". + const rows = findLogs(db, { logger: 'AppLogger' }); + expect(rows).toHaveLength(1); + expect(rows[0].event_id).toBe(1); + } finally { + db.close(); + } + }); + + it('combines --message with appmap-scope filters (--branch)', () => { + const db = freshDb(); + try { + seed(db, [ + { + name: 'a', + branch: 'main', + calls: [ + { + event_id: 1, + defined_class: 'Logger', + method_id: 'info', + labels: ['log'], + parameters: [{ name: 'message', class: 'String', value: 'connection refused' }], + }, + ], + }, + { + name: 'b', + branch: 'feature', + calls: [ + { + event_id: 1, + defined_class: 'Logger', + method_id: 'info', + labels: ['log'], + parameters: [{ name: 'message', class: 'String', value: 'connection refused' }], + }, + ], + }, + ]); + const rows = findLogs(db, { message: 'refused', branch: 'feature' }); + expect(rows).toHaveLength(1); + expect(rows[0].appmap_name).toBe('b'); + } finally { + db.close(); + } + }); + + it('false positives are accepted: --message matches a parameter name', () 
=> { + // Documents the design choice — broad LIKE over the JSON blob means + // a search for "message" matches the parameter name, not just the + // value. Display-time projection can tighten this if needed. + const db = freshDb(); + try { + seed(db, [ + { + name: 'a', + calls: [ + { + event_id: 1, + defined_class: 'Logger', + method_id: 'info', + labels: ['log'], + parameters: [{ name: 'message', class: 'String', value: 'hi' }], + }, + ], + }, + ]); + const rows = findLogs(db, { message: 'message' }); + expect(rows).toHaveLength(1); + } finally { + db.close(); + } + }); +}); + describe('findCalls --class / --method (fqid-aware)', () => { it('matches the canonical fqid prefix', () => { const db = freshDb(); diff --git a/packages/cli/tests/unit/cmds/query/queries/mcp.spec.ts b/packages/cli/tests/unit/cmds/query/queries/mcp.spec.ts index 3a4825e0ab..e88775d13b 100644 --- a/packages/cli/tests/unit/cmds/query/queries/mcp.spec.ts +++ b/packages/cli/tests/unit/cmds/query/queries/mcp.spec.ts @@ -96,6 +96,7 @@ describe('MCP handler', () => { 'find_requests', 'find_queries', 'find_calls', + 'find_logs', 'find_exceptions', 'get_call_tree', 'find_related', @@ -218,6 +219,57 @@ describe('MCP handler', () => { } }); + it('find_logs returns label=log calls and filters by --message', () => { + const db = freshDb(); + try { + seedMinimal(db); + // Add a parameters_json + return_value to the seeded log call so + // --message has something to LIKE against. + db.prepare( + `UPDATE function_calls + SET parameters_json = ?, return_value = ? + WHERE method_id = 'error'` + ).run( + JSON.stringify([{ name: 'message', class: 'String', value: 'connection refused' }]), + null + ); + const handler = buildMcpHandler(db); + + // No filter: returns the log row. 
+ const all = call(handler, { + jsonrpc: '2.0', + id: 100, + method: 'tools/call', + params: { name: 'find_logs', arguments: {} }, + }); + const allRows = JSON.parse((all!.result as any).content[0].text); + expect(allRows).toHaveLength(1); + expect(allRows[0].logger).toBe('Logger'); + expect(allRows[0].method_id).toBe('error'); + expect(allRows[0].parameters_json).toContain('connection refused'); + + // Substring filter against parameters_json. + const matched = call(handler, { + jsonrpc: '2.0', + id: 101, + method: 'tools/call', + params: { name: 'find_logs', arguments: { message: 'refused' } }, + }); + expect(JSON.parse((matched!.result as any).content[0].text)).toHaveLength(1); + + // Substring that doesn't appear: zero rows. + const empty = call(handler, { + jsonrpc: '2.0', + id: 102, + method: 'tools/call', + params: { name: 'find_logs', arguments: { message: 'this never appears' } }, + }); + expect(JSON.parse((empty!.result as any).content[0].text)).toHaveLength(0); + } finally { + db.close(); + } + }); + it('find_calls --label filters by the AppMap label', () => { const db = freshDb(); try { diff --git a/packages/cli/tests/unit/cmds/query/verbs/find.spec.ts b/packages/cli/tests/unit/cmds/query/verbs/find.spec.ts index a296d72988..fe61c0f5cf 100644 --- a/packages/cli/tests/unit/cmds/query/verbs/find.spec.ts +++ b/packages/cli/tests/unit/cmds/query/verbs/find.spec.ts @@ -1,4 +1,8 @@ -import { buildFindFilter, validateFlags } from '../../../../../src/cmds/query/verbs/find'; +import { + buildFindFilter, + projectLogMessage, + validateFlags, +} from '../../../../../src/cmds/query/verbs/find'; describe('find verb flag validation', () => { it('accepts the universal flags on every type', () => { @@ -13,7 +17,7 @@ describe('find verb flag validation', () => { offset: 0, json: true, }; - for (const type of ['appmaps', 'requests', 'queries', 'calls', 'exceptions'] as const) { + for (const type of ['appmaps', 'requests', 'queries', 'calls', 'exceptions', 'logs'] as 
const) { expect(() => validateFlags(type, universal)).not.toThrow(); } }); @@ -72,6 +76,28 @@ describe('find verb flag validation', () => { validateFlags('appmaps', { class: undefined, table: null }) ).not.toThrow(); }); + + it('logs accepts --logger and --message; rejects --class with a hint', () => { + expect(() => validateFlags('logs', { logger: 'AppLogger' })).not.toThrow(); + expect(() => validateFlags('logs', { message: 'connection refused' })).not.toThrow(); + expect(() => validateFlags('logs', { class: 'AppLogger' })).toThrow(/--logger/); + expect(() => validateFlags('logs', { label: 'log' })).toThrow(/implied/); + }); + + it('logs rejects row-level filters that don\'t apply', () => { + expect(() => validateFlags('logs', { route: '/x' })).toThrow(/--route/); + expect(() => validateFlags('logs', { status: '500' })).toThrow(/--status/); + expect(() => validateFlags('logs', { duration: '>1s' })).toThrow(/--duration/); + expect(() => validateFlags('logs', { table: 'users' })).toThrow(/--table/); + expect(() => validateFlags('logs', { exception: 'X' })).toThrow(/--exception/); + }); + + it('--logger and --message are rejected on non-logs types', () => { + for (const type of ['appmaps', 'requests', 'queries', 'calls', 'exceptions'] as const) { + expect(() => validateFlags(type, { logger: 'X' })).toThrow(/--logger/); + expect(() => validateFlags(type, { message: 'x' })).toThrow(/--message/); + } + }); }); describe('buildFindFilter', () => { @@ -105,4 +131,57 @@ describe('buildFindFilter', () => { it('returns the parsed type', () => { expect(buildFindFilter({ type: 'appmaps' }).type).toBe('appmaps'); }); + + it('plumbs --logger and --message into the filter for logs', () => { + const { type, filter } = buildFindFilter({ + type: 'logs', + logger: 'AppLogger', + message: 'connection refused', + }); + expect(type).toBe('logs'); + expect(filter.logger).toBe('AppLogger'); + expect(filter.message).toBe('connection refused'); + }); +}); + +describe('projectLogMessage', () 
=> { + it('prefers a structured-return message field when return_value is JSON', () => { + const r = JSON.stringify({ level: 'info', message: 'hello world' }); + expect(projectLogMessage(null, r)).toBe('hello world'); + }); + + it('uses a parameter named message when return_value lacks a structured message', () => { + const params = JSON.stringify([ + { name: 'tag', class: 'String', value: 'auth' }, + { name: 'message', class: 'String', value: 'login ok' }, + ]); + expect(projectLogMessage(params, null)).toBe('login ok'); + }); + + it('accepts msg as an alias for message', () => { + const params = JSON.stringify([{ name: 'msg', class: 'String', value: 'queued' }]); + expect(projectLogMessage(params, null)).toBe('queued'); + }); + + it('falls back to the first string-typed parameter value', () => { + const params = JSON.stringify([ + { name: 'count', class: 'Integer', value: 5 }, + { name: 'note', class: 'String', value: 'first text' }, + ]); + expect(projectLogMessage(params, null)).toBe('first text'); + }); + + it('returns a non-empty repr even when nothing matches', () => { + const params = JSON.stringify([{ name: 'count', class: 'Integer', value: 5 }]); + expect(projectLogMessage(params, null)).toBe('[5]'); + }); + + it('returns an empty string when both inputs are null', () => { + expect(projectLogMessage(null, null)).toBe(''); + }); + + it('return_value that is not JSON is treated as opaque (not the message)', () => { + // Falls through to parameters_json; if that's null, returns ''. + expect(projectLogMessage(null, 'true')).toBe(''); + }); }); From b87f5b62057dac23203ee969d6b844aff44d7007 Mon Sep 17 00:00:00 2001 From: kgilpin Date: Sun, 3 May 2026 11:54:37 -0400 Subject: [PATCH 26/30] feat(query): attach recent_logs to find_exceptions on opt-in Pass --with-logs N (CLI) or with_logs=N (MCP) to enrich each exception row with the last N log calls preceding it in the same recording, ordered chronologically. The default behavior is unchanged. 
The exception row also now exposes appmap_id alongside appmap_name so follow-up queries can use either as a join key. Implemented as a per-exception prepared-statement loop scoped by appmap_id and event_id < exception.event_id. Synthetic exceptions with NULL event_id receive an empty recent_logs array. Co-Authored-By: Claude Opus 4.7 (1M context) --- packages/cli/src/cmds/query/queries/find.ts | 51 ++++++- packages/cli/src/cmds/query/queries/mcp.ts | 8 +- packages/cli/src/cmds/query/verbs/find.ts | 40 ++++-- .../unit/cmds/query/queries/find.spec.ts | 131 ++++++++++++++++++ .../tests/unit/cmds/query/queries/mcp.spec.ts | 43 ++++++ .../tests/unit/cmds/query/verbs/find.spec.ts | 13 ++ 6 files changed, 270 insertions(+), 16 deletions(-) diff --git a/packages/cli/src/cmds/query/queries/find.ts b/packages/cli/src/cmds/query/queries/find.ts index 57131507ff..8469e30950 100644 --- a/packages/cli/src/cmds/query/queries/find.ts +++ b/packages/cli/src/cmds/query/queries/find.ts @@ -30,6 +30,7 @@ export interface FindFilter { exception?: string; // exception class (find exceptions) logger?: string; // --logger (find logs); class of the logging fn message?: string; // --message (find logs); substring of the log line + withLogs?: number; // --with-logs N (find exceptions); attach N preceding logs limit?: number; offset?: number; } @@ -90,12 +91,17 @@ export interface FindLogRow { } export interface FindExceptionRow { + appmap_id: number; appmap_name: string; event_id: number; exception_class: string; message: string | null; path: string | null; lineno: number | null; + // Populated only when filter.withLogs > 0. Ordered chronologically + // (oldest first), capped at filter.withLogs entries. Each row has + // event_id < this exception's event_id. 
+ recent_logs?: FindLogRow[]; } // --- internal helpers (find-specific) --- @@ -382,7 +388,8 @@ export function findExceptions(db: sqlite3.Database, filter: FindFilter): FindEx } let sql = ` - SELECT a.name AS appmap_name, + SELECT e.appmap_id AS appmap_id, + a.name AS appmap_name, e.event_id AS event_id, e.exception_class AS exception_class, e.message AS message, @@ -394,7 +401,47 @@ export function findExceptions(db: sqlite3.Database, filter: FindFilter): FindEx ORDER BY a.source_path, e.event_id, e.exception_class `; sql = appendLimitOffset(sql, filter, params); - return db.prepare(sql).all(...params) as FindExceptionRow[]; + const rows = db.prepare(sql).all(...params) as FindExceptionRow[]; + + // Enrichment: for each exception, attach the last N log calls in the + // same recording with event_id strictly less than the exception's. We + // use event order rather than parent_event_id subtree walking — it's + // a strict subset of the call-tree relevant to most debugging + // questions ("what did the app log before it crashed?") and avoids a + // recursive CTE per row. Exceptions whose event_id is NULL (synthetic) + // can't be ordered, so they don't get logs attached. + if (filter.withLogs && filter.withLogs > 0) { + const logStmt = db.prepare(` + SELECT a.name AS appmap_name, + fc.event_id AS event_id, + fc.parent_event_id AS parent_event_id, + fc.defined_class AS logger, + fc.method_id AS method_id, + fc.path AS path, + fc.lineno AS lineno, + fc.parameters_json AS parameters_json, + fc.return_value AS return_value + FROM function_calls fc + JOIN appmaps a ON a.id = fc.appmap_id + WHERE fc.appmap_id = ? + AND fc.event_id < ? + AND fc.code_object_id IN ( + SELECT l.code_object_id FROM labels l WHERE l.label = 'log' + ) + ORDER BY fc.event_id DESC + LIMIT ? 
+ `); + for (const row of rows) { + if (row.event_id == null) { + row.recent_logs = []; + continue; + } + const logs = logStmt.all(row.appmap_id, row.event_id, filter.withLogs) as FindLogRow[]; + row.recent_logs = logs.reverse(); // chronological + } + } + + return rows; } // Dispatcher. diff --git a/packages/cli/src/cmds/query/queries/mcp.ts b/packages/cli/src/cmds/query/queries/mcp.ts index 9610d7101b..733eeba36f 100644 --- a/packages/cli/src/cmds/query/queries/mcp.ts +++ b/packages/cli/src/cmds/query/queries/mcp.ts @@ -131,6 +131,8 @@ function buildFindFilter(args: Record): FindFilter { if (typeof args.exception === 'string') f.exception = args.exception; if (typeof args.logger === 'string') f.logger = args.logger; if (typeof args.message === 'string') f.message = args.message; + const withLogs = maybeNumber(args.with_logs); + if (withLogs !== undefined) f.withLogs = withLogs; f.since = maybeTime(args.since); f.until = maybeTime(args.until); f.limit = maybeNumber(args.limit); @@ -396,11 +398,15 @@ const TOOLS: ToolImpl[] = [ spec: { name: 'find_exceptions', description: - 'Exception rows with class, message, source location. Filter by exception class name, the request that owns the exception (via route/status), branch, or time window. Returns: appmap_name, event_id, exception_class, message, path, lineno.', + 'Exception rows with class, message, source location. Filter by exception class name, the request that owns the exception (via route/status), branch, or time window. Returns: appmap_id, appmap_name, event_id, exception_class, message, path, lineno. Pass with_logs=N to attach the last N log lines preceding each exception (chronological order) under recent_logs — usually the fastest way to see what the app reported before the failure.', inputSchema: { type: 'object', properties: { exception: { type: 'string', description: 'Exception class name (exact match).' 
}, + with_logs: { + type: 'integer', + description: 'Attach up to N preceding log lines per exception under recent_logs (chronological). Each entry has the same shape as a find_logs row.', + }, route: COMMON_FILTER_PROPERTIES.route, status: COMMON_FILTER_PROPERTIES.status, branch: COMMON_FILTER_PROPERTIES.branch, diff --git a/packages/cli/src/cmds/query/verbs/find.ts b/packages/cli/src/cmds/query/verbs/find.ts index 0635651687..7119c1e152 100644 --- a/packages/cli/src/cmds/query/verbs/find.ts +++ b/packages/cli/src/cmds/query/verbs/find.ts @@ -57,6 +57,10 @@ export const builder = (args: yargs.Argv) => { .option('exception', { type: 'string', describe: 'exception class (exceptions)' }) .option('logger', { type: 'string', describe: 'logger class (logs)' }) .option('message', { type: 'string', describe: 'log message substring (logs)' }) + .option('with-logs', { + type: 'number', + describe: 'attach the last N log lines preceding each row (exceptions)', + }) .option('limit', { type: 'number' }) .option('offset', { type: 'number' }) .option('json', { type: 'boolean', default: false }); @@ -70,12 +74,12 @@ type Argv = ReturnType extends yargs.Argv ? T : never; // types where they make sense; flagging them on the wrong type is an // error rather than a silent no-op. 
const REJECTED_FLAGS: Record = { - appmaps: ['class', 'method', 'label', 'table', 'exception', 'logger', 'message'], - requests: ['class', 'method', 'label', 'table', 'exception', 'logger', 'message'], - queries: ['label', 'exception', 'logger', 'message'], - calls: ['table', 'exception', 'logger', 'message'], + appmaps: ['class', 'method', 'label', 'table', 'exception', 'logger', 'message', 'with-logs'], + requests: ['class', 'method', 'label', 'table', 'exception', 'logger', 'message', 'with-logs'], + queries: ['label', 'exception', 'logger', 'message', 'with-logs'], + calls: ['table', 'exception', 'logger', 'message', 'with-logs'], exceptions: ['class', 'method', 'label', 'duration', 'table', 'logger', 'message'], - logs: ['class', 'method', 'label', 'route', 'status', 'duration', 'table', 'exception'], + logs: ['class', 'method', 'label', 'route', 'status', 'duration', 'table', 'exception', 'with-logs'], }; // Per-flag hints, attached to error messages when a rejected flag is used. @@ -136,6 +140,11 @@ export function buildFindFilter(argv: Record): ParsedFind { if (typeof argv.exception === 'string') filter.exception = argv.exception; if (typeof argv.logger === 'string') filter.logger = argv.logger; if (typeof argv.message === 'string') filter.message = argv.message; + // yargs camelCases --with-logs into argv.withLogs and also keeps the + // kebab-case key. Read both so direct test invocations don't have to + // depend on yargs's coercion. + const withLogs = argv.withLogs ?? argv['with-logs']; + if (typeof withLogs === 'number') filter.withLogs = withLogs; if (typeof argv.limit === 'number') filter.limit = argv.limit; if (typeof argv.offset === 'number') filter.offset = argv.offset; @@ -228,16 +237,21 @@ function renderTable(type: FindType, rows: unknown[]): string { r.return_value ?? 
'', ]) ); - case 'exceptions': + case 'exceptions': { + const exRows = rows as FindExceptionRow[]; + const withLogs = exRows.some((r) => r.recent_logs !== undefined); + const headers = withLogs + ? ['APPMAP', 'CLASS', 'MESSAGE', 'EVENT', 'LOGS'] + : ['APPMAP', 'CLASS', 'MESSAGE', 'EVENT']; return formatTable( - ['APPMAP', 'CLASS', 'MESSAGE', 'EVENT'], - (rows as FindExceptionRow[]).map((r) => [ - r.appmap_name, - r.exception_class, - r.message ?? '', - String(r.event_id), - ]) + headers, + exRows.map((r) => { + const base = [r.appmap_name, r.exception_class, r.message ?? '', String(r.event_id)]; + if (withLogs) base.push(String(r.recent_logs?.length ?? 0)); + return base; + }) ); + } case 'logs': return formatTable( ['APPMAP', 'LOGGER', 'METHOD', 'MESSAGE', 'EVENT'], diff --git a/packages/cli/tests/unit/cmds/query/queries/find.spec.ts b/packages/cli/tests/unit/cmds/query/queries/find.spec.ts index 4a9481ca0b..8e160cb14e 100644 --- a/packages/cli/tests/unit/cmds/query/queries/find.spec.ts +++ b/packages/cli/tests/unit/cmds/query/queries/find.spec.ts @@ -940,4 +940,135 @@ describe('findExceptions', () => { db.close(); } }); + + it('--with-logs attaches the last N preceding logs in chronological order', () => { + const db = freshDb(); + try { + seed(db, [ + { + name: 'a', + calls: [ + { + event_id: 1, + defined_class: 'Logger', + method_id: 'info', + labels: ['log'], + parameters: [{ name: 'message', class: 'String', value: 'starting up' }], + }, + { + event_id: 2, + defined_class: 'Logger', + method_id: 'warn', + labels: ['log'], + parameters: [{ name: 'message', class: 'String', value: 'connection slow' }], + }, + { + event_id: 3, + defined_class: 'Logger', + method_id: 'error', + labels: ['log'], + parameters: [{ name: 'message', class: 'String', value: 'connection refused' }], + }, + ], + // Exception lands at event 4, after the three logs. 
+ exceptions: [{ event_id: 4, exception_class: 'IOError', message: 'broken pipe' }], + }, + ]); + const rows = findExceptions(db, { withLogs: 2 }); + expect(rows).toHaveLength(1); + expect(rows[0].recent_logs).toBeDefined(); + // Last 2 in chronological order: the warn at event 2, then error at event 3. + const logs = rows[0].recent_logs!; + expect(logs).toHaveLength(2); + expect(logs[0].event_id).toBe(2); + expect(logs[1].event_id).toBe(3); + } finally { + db.close(); + } + }); + + it('--with-logs is omitted when not requested (recent_logs undefined)', () => { + const db = freshDb(); + try { + seed(db, [ + { + name: 'a', + calls: [ + { + event_id: 1, + defined_class: 'Logger', + method_id: 'info', + labels: ['log'], + parameters: [{ name: 'message', class: 'String', value: 'hi' }], + }, + ], + exceptions: [{ event_id: 2, exception_class: 'IOError' }], + }, + ]); + const rows = findExceptions(db, {}); + expect(rows[0].recent_logs).toBeUndefined(); + } finally { + db.close(); + } + }); + + it('--with-logs with no preceding logs returns an empty array', () => { + const db = freshDb(); + try { + seed(db, [ + { + name: 'a', + // Exception at event 1; no logs at all. 
+ exceptions: [{ event_id: 1, exception_class: 'IOError' }], + }, + ]); + const rows = findExceptions(db, { withLogs: 5 }); + expect(rows[0].recent_logs).toEqual([]); + } finally { + db.close(); + } + }); + + it('--with-logs only includes logs from the same recording', () => { + const db = freshDb(); + try { + seed(db, [ + { + name: 'a', + calls: [ + { + event_id: 1, + defined_class: 'Logger', + method_id: 'info', + labels: ['log'], + parameters: [{ name: 'message', class: 'String', value: 'recording-a log' }], + }, + ], + exceptions: [{ event_id: 2, exception_class: 'IOError' }], + }, + { + name: 'b', + calls: [ + { + event_id: 1, + defined_class: 'Logger', + method_id: 'info', + labels: ['log'], + parameters: [{ name: 'message', class: 'String', value: 'recording-b log' }], + }, + ], + exceptions: [{ event_id: 2, exception_class: 'IOError' }], + }, + ]); + const rows = findExceptions(db, { withLogs: 5 }); + expect(rows).toHaveLength(2); + // Each exception's recent_logs is scoped to its own recording. + for (const row of rows) { + expect(row.recent_logs).toHaveLength(1); + expect(row.recent_logs![0].appmap_name).toBe(row.appmap_name); + } + } finally { + db.close(); + } + }); }); diff --git a/packages/cli/tests/unit/cmds/query/queries/mcp.spec.ts b/packages/cli/tests/unit/cmds/query/queries/mcp.spec.ts index e88775d13b..13a00bc95c 100644 --- a/packages/cli/tests/unit/cmds/query/queries/mcp.spec.ts +++ b/packages/cli/tests/unit/cmds/query/queries/mcp.spec.ts @@ -270,6 +270,49 @@ describe('MCP handler', () => { } }); + it('find_exceptions with_logs attaches recent_logs', () => { + const db = freshDb(); + try { + seedMinimal(db); + // Give the seeded log call a message so recent_logs has content. + db.prepare( + `UPDATE function_calls + SET parameters_json = ? 
+ WHERE method_id = 'error'` + ).run( + JSON.stringify([{ name: 'message', class: 'String', value: 'connection refused' }]) + ); + const handler = buildMcpHandler(db); + + // No with_logs: recent_logs is absent. + const noLogs = call(handler, { + jsonrpc: '2.0', + id: 200, + method: 'tools/call', + params: { name: 'find_exceptions', arguments: {} }, + }); + const noLogsRows = JSON.parse((noLogs!.result as any).content[0].text); + expect(noLogsRows[0].recent_logs).toBeUndefined(); + // appmap_id is now exposed. + expect(typeof noLogsRows[0].appmap_id).toBe('number'); + + // with_logs=5: recent_logs is present and non-empty (the seed has + // a log call at event 2, exception at event 2 — the log shares the + // event_id with the exception so it doesn't qualify; verify the + // shape regardless). + const withLogsRes = call(handler, { + jsonrpc: '2.0', + id: 201, + method: 'tools/call', + params: { name: 'find_exceptions', arguments: { with_logs: 5 } }, + }); + const withLogsRows = JSON.parse((withLogsRes!.result as any).content[0].text); + expect(Array.isArray(withLogsRows[0].recent_logs)).toBe(true); + } finally { + db.close(); + } + }); + it('find_calls --label filters by the AppMap label', () => { const db = freshDb(); try { diff --git a/packages/cli/tests/unit/cmds/query/verbs/find.spec.ts b/packages/cli/tests/unit/cmds/query/verbs/find.spec.ts index fe61c0f5cf..998f8cfd31 100644 --- a/packages/cli/tests/unit/cmds/query/verbs/find.spec.ts +++ b/packages/cli/tests/unit/cmds/query/verbs/find.spec.ts @@ -98,6 +98,13 @@ describe('find verb flag validation', () => { expect(() => validateFlags(type, { message: 'x' })).toThrow(/--message/); } }); + + it('--with-logs is accepted only on find exceptions', () => { + expect(() => validateFlags('exceptions', { 'with-logs': 5 })).not.toThrow(); + for (const type of ['appmaps', 'requests', 'queries', 'calls', 'logs'] as const) { + expect(() => validateFlags(type, { 'with-logs': 5 })).toThrow(/--with-logs/); + } + }); }); 
describe('buildFindFilter', () => { @@ -132,6 +139,12 @@ describe('buildFindFilter', () => { expect(buildFindFilter({ type: 'appmaps' }).type).toBe('appmaps'); }); + it('plumbs --with-logs into filter.withLogs from either kebab or camel keys', () => { + expect(buildFindFilter({ type: 'exceptions', withLogs: 5 }).filter.withLogs).toBe(5); + expect(buildFindFilter({ type: 'exceptions', 'with-logs': 7 }).filter.withLogs).toBe(7); + expect(buildFindFilter({ type: 'exceptions' }).filter.withLogs).toBeUndefined(); + }); + it('plumbs --logger and --message into the filter for logs', () => { const { type, filter } = buildFindFilter({ type: 'logs', From fc160a92d6430e856081f87da6af91853ddb7456 Mon Sep 17 00:00:00 2001 From: kgilpin Date: Sun, 3 May 2026 12:05:27 -0400 Subject: [PATCH 27/30] feat(query): inline log calls in tree render Function calls whose linked code_object carries the `log` label are now promoted to a dedicated `log` node kind in the tree. Renderers emit them as `LOG Logger.method: ` at their event position, with the message extracted via the same projection helper used by find_logs (structured return_value first, then a message/msg-named parameter, then the first string-typed value). The tree CLI verb gains `--filter logs` to flatten the tree to just log lines. The MCP get_call_tree result shape gains the `log` kind in the discriminated union. projectLogMessage moves to lib/logMessage.ts so both the find verb and the tree renderer share one projection rule. 
Co-Authored-By: Claude Opus 4.7 (1M context) --- packages/cli/src/cmds/query/lib/logMessage.ts | 40 ++++++++++ packages/cli/src/cmds/query/lib/treeRender.ts | 12 +++ packages/cli/src/cmds/query/queries/tree.ts | 76 ++++++++++++++----- packages/cli/src/cmds/query/verbs/find.ts | 43 +---------- packages/cli/src/cmds/query/verbs/tree.ts | 26 ++++--- .../unit/cmds/query/lib/treeRender.spec.ts | 63 +++++++++++++++ .../unit/cmds/query/queries/tree.spec.ts | 57 ++++++++++++++ .../tests/unit/cmds/query/verbs/tree.spec.ts | 62 +++++++++++++++ 8 files changed, 312 insertions(+), 67 deletions(-) create mode 100644 packages/cli/src/cmds/query/lib/logMessage.ts create mode 100644 packages/cli/tests/unit/cmds/query/lib/treeRender.spec.ts create mode 100644 packages/cli/tests/unit/cmds/query/verbs/tree.spec.ts diff --git a/packages/cli/src/cmds/query/lib/logMessage.ts b/packages/cli/src/cmds/query/lib/logMessage.ts new file mode 100644 index 0000000000..e0c7e31313 --- /dev/null +++ b/packages/cli/src/cmds/query/lib/logMessage.ts @@ -0,0 +1,40 @@ +// Pick a displayable message from a log row's captured fields. +// 1. If return_value parses as JSON with a `message` field, use it +// (this is the structured-return contract). +// 2. Otherwise, look in parameters_json for a parameter whose `name` +// is `message` or `msg`; fall back to the first string-typed value. +// 3. Otherwise, return ''. +// Display-only — does not affect filtering. The `--message` SQL LIKE +// runs against the raw columns and may return rows whose projected +// message doesn't contain the substring (e.g., matched a class name); +// that's the documented FP-tolerant behavior. 
+export function projectLogMessage(
+  parametersJson: string | null,
+  returnValue: string | null
+): string {
+  if (returnValue) {
+    try {
+      const parsed = JSON.parse(returnValue) as Record<string, unknown>;
+      if (parsed && typeof parsed === 'object' && typeof parsed.message === 'string') {
+        return parsed.message;
+      }
+    } catch {
+      // not structured — fall through
+    }
+  }
+  if (parametersJson) {
+    try {
+      const params = JSON.parse(parametersJson) as { name?: string; class?: string; value?: unknown }[];
+      const named = params.find((p) => p.name === 'message' || p.name === 'msg');
+      if (named?.value != null) return String(named.value);
+      const firstString = params.find((p) => typeof p.value === 'string');
+      if (firstString) return String(firstString.value);
+      if (params.length > 0) return JSON.stringify(params.map((p) => p.value));
+    } catch {
+      return parametersJson;
+    }
+  }
+  // No structured message available. Return blank rather than the raw
+  // `return_value` (which is often noise like "true" / "None").
+ return ''; +} diff --git a/packages/cli/src/cmds/query/lib/treeRender.ts b/packages/cli/src/cmds/query/lib/treeRender.ts index 53084c5ff3..77dfb4532f 100644 --- a/packages/cli/src/cmds/query/lib/treeRender.ts +++ b/packages/cli/src/cmds/query/lib/treeRender.ts @@ -3,11 +3,13 @@ import { FunctionNode, HttpClientNode, HttpServerNode, + LogNode, SqlNode, TreeNode, TreeSummary, } from '../queries/tree'; import { formatCount, formatMs, formatTable } from './format'; +import { projectLogMessage } from './logMessage'; const INDENT = ' '; @@ -35,6 +37,8 @@ function renderTreeLine(node: TreeNode): string { return `${indent}SQL ${renderSql(node)}`; case 'exception': return `${indent}EXC ${renderException(node)}`; + case 'log': + return `${indent}LOG ${renderLog(node)}`; } } @@ -62,6 +66,12 @@ function renderSql(n: SqlNode): string { return `${truncate(n.sql_text, 120)} ${bracket(n.elapsed_ms)}`.trim(); } +function renderLog(n: LogNode): string { + const message = projectLogMessage(n.parameters_json, n.return_value); + const prefix = `${n.logger}.${n.method_id}`; + return message ? `${prefix}: ${truncate(message, 120)}` : prefix; +} + function bracket(ms: number | null): string { return ms == null ? '' : `[${formatMs(ms)}]`; } @@ -86,6 +96,8 @@ export function renderFlat(nodes: readonly TreeNode[]): string { return `CALL ${renderFunction(n)}`; case 'exception': return `EXC ${renderException(n)}`; + case 'log': + return `LOG ${renderLog(n)}`; } }) .join('\n'); diff --git a/packages/cli/src/cmds/query/queries/tree.ts b/packages/cli/src/cmds/query/queries/tree.ts index ef48cf9d29..768f957cfb 100644 --- a/packages/cli/src/cmds/query/queries/tree.ts +++ b/packages/cli/src/cmds/query/queries/tree.ts @@ -57,12 +57,29 @@ export interface ExceptionNode extends BaseNode { lineno: number | null; } +// Logging calls are function calls whose linked code_object carries the +// canonical `log` label. 
They share the function-call shape (same row in +// `function_calls`) but get their own kind so renderers can format the +// message inline rather than the bare call signature. +export interface LogNode extends BaseNode { + kind: 'log'; + fqid: string | null; + logger: string; // defined_class + method_id: string; // info / warn / error / etc. — best effort + path: string | null; + lineno: number | null; + elapsed_ms: number | null; + parameters_json: string | null; + return_value: string | null; +} + export type TreeNode = | HttpServerNode | HttpClientNode | SqlNode | FunctionNode - | ExceptionNode; + | ExceptionNode + | LogNode; export interface AppmapInfo { id: number; @@ -205,7 +222,11 @@ export function tree( `SELECT fc.event_id, fc.parent_event_id, fc.thread_id, co.fqid AS fqid, fc.defined_class, fc.method_id, fc.path, fc.lineno, - fc.is_static, fc.elapsed_ms, fc.parameters_json, fc.return_value + fc.is_static, fc.elapsed_ms, fc.parameters_json, fc.return_value, + EXISTS ( + SELECT 1 FROM labels l + WHERE l.code_object_id = fc.code_object_id AND l.label = 'log' + ) AS is_log FROM function_calls fc LEFT JOIN code_objects co ON co.id = fc.code_object_id WHERE fc.appmap_id = ?` @@ -223,23 +244,42 @@ export function tree( elapsed_ms: number | null; parameters_json: string | null; return_value: string | null; + is_log: number; }[]) { - events.push({ - kind: 'function', - event_id: r.event_id, - parent_event_id: r.parent_event_id, - thread_id: r.thread_id, - depth: 0, - fqid: r.fqid, - defined_class: r.defined_class, - method_id: r.method_id, - path: r.path, - lineno: r.lineno, - is_static: r.is_static === 1, - elapsed_ms: r.elapsed_ms, - parameters_json: r.parameters_json, - return_value: r.return_value, - }); + if (r.is_log === 1) { + events.push({ + kind: 'log', + event_id: r.event_id, + parent_event_id: r.parent_event_id, + thread_id: r.thread_id, + depth: 0, + fqid: r.fqid, + logger: r.defined_class, + method_id: r.method_id, + path: r.path, + lineno: 
r.lineno, + elapsed_ms: r.elapsed_ms, + parameters_json: r.parameters_json, + return_value: r.return_value, + }); + } else { + events.push({ + kind: 'function', + event_id: r.event_id, + parent_event_id: r.parent_event_id, + thread_id: r.thread_id, + depth: 0, + fqid: r.fqid, + defined_class: r.defined_class, + method_id: r.method_id, + path: r.path, + lineno: r.lineno, + is_static: r.is_static === 1, + elapsed_ms: r.elapsed_ms, + parameters_json: r.parameters_json, + return_value: r.return_value, + }); + } } for (const r of db diff --git a/packages/cli/src/cmds/query/verbs/find.ts b/packages/cli/src/cmds/query/verbs/find.ts index 7119c1e152..b0f1a9cfe3 100644 --- a/packages/cli/src/cmds/query/verbs/find.ts +++ b/packages/cli/src/cmds/query/verbs/find.ts @@ -19,6 +19,9 @@ import { FindRequestRow, } from '../queries/find'; import { formatMs, formatTable } from '../lib/format'; +import { projectLogMessage } from '../lib/logMessage'; + +export { projectLogMessage }; const TYPES: readonly FindType[] = ['appmaps', 'requests', 'queries', 'calls', 'exceptions', 'logs']; // 'recordings' is accepted as an alias for 'appmaps' to match the MCP @@ -266,46 +269,6 @@ function renderTable(type: FindType, rows: unknown[]): string { } } -// Pick a displayable message from a log row's captured fields. -// 1. If return_value parses as JSON with a `message` field, use it -// (this is the structured-return contract). -// 2. Otherwise, look in parameters_json for a parameter whose `name` -// is `message` or `msg`; fall back to the first string-typed value. -// 3. Otherwise, stringify whatever's available so the row isn't blank. -// Display-only — does not affect filtering. The `--message` SQL LIKE -// runs against the raw columns and may return rows whose projected -// message doesn't contain the substring (e.g., matched a class name); -// that's the documented FP-tolerant behavior. 
-export function projectLogMessage(
-  parametersJson: string | null,
-  returnValue: string | null
-): string {
-  if (returnValue) {
-    try {
-      const parsed = JSON.parse(returnValue) as Record<string, unknown>;
-      if (parsed && typeof parsed === 'object' && typeof parsed.message === 'string') {
-        return parsed.message;
-      }
-    } catch {
-      // not structured — fall through
-    }
-  }
-  if (parametersJson) {
-    try {
-      const params = JSON.parse(parametersJson) as { name?: string; class?: string; value?: unknown }[];
-      const named = params.find((p) => p.name === 'message' || p.name === 'msg');
-      if (named?.value != null) return String(named.value);
-      const firstString = params.find((p) => typeof p.value === 'string');
-      if (firstString) return String(firstString.value);
-      if (params.length > 0) return JSON.stringify(params.map((p) => p.value));
-    } catch {
-      return parametersJson;
-    }
-  }
-  // No structured message available. Return blank rather than the raw
-  // `return_value` (which is often noise like "true" / "None").
-  return '';
-}
 
 function formatParams(json: string | null): string {
   if (!json) return '';
diff --git a/packages/cli/src/cmds/query/verbs/tree.ts b/packages/cli/src/cmds/query/verbs/tree.ts
index 98473fdb2b..577e3e29eb 100644
--- a/packages/cli/src/cmds/query/verbs/tree.ts
+++ b/packages/cli/src/cmds/query/verbs/tree.ts
@@ -24,7 +24,7 @@ export const builder = (args: yargs.Argv) => {
     })
     .option('filter', {
       type: 'string',
-      choices: ['all', 'http', 'sql'] as const,
+      choices: ['all', 'http', 'sql', 'logs'] as const,
       default: 'all',
     })
     .option('focus-fn', {
@@ -75,7 +75,7 @@ export const handler = async (argvIn: yargs.ArgumentsCamelCase): Promis
   if (argv.format === 'summary') {
     // The summary aggregates over all event types; combining with
     // --filter would be ambiguous, so reject rather than silently drop.
- const f = argv.filter as 'all' | 'http' | 'sql'; + const f = argv.filter as TreeFilter; if (f !== 'all') { throw new Error( 'tree --format=summary does not accept --filter; remove one of them' @@ -97,11 +97,11 @@ export const handler = async (argvIn: yargs.ArgumentsCamelCase): Promis if (argv.minElapsedMs !== undefined) treeOptions.minElapsedMs = argv.minElapsedMs; const nodes = tree(db, ref, treeOptions); - const filtered = applyFilter(nodes, argv.filter as 'all' | 'http' | 'sql'); + const filtered = applyFilter(nodes, argv.filter as TreeFilter); if (argv.json) { log(JSON.stringify(filtered, null, 2)); } else { - const f = argv.filter as 'all' | 'http' | 'sql'; + const f = argv.filter as TreeFilter; log(f === 'all' ? renderTree(filtered) : renderFlat(filtered)); } } finally { @@ -109,9 +109,17 @@ export const handler = async (argvIn: yargs.ArgumentsCamelCase): Promis } }; -function applyFilter(nodes: readonly TreeNode[], filter: 'all' | 'http' | 'sql'): TreeNode[] { - if (filter === 'all') return [...nodes]; - if (filter === 'sql') return nodes.filter((n) => n.kind === 'sql'); - // 'http' — both inbound and outbound - return nodes.filter((n) => n.kind === 'http_server' || n.kind === 'http_client'); +type TreeFilter = 'all' | 'http' | 'sql' | 'logs'; + +export function applyFilter(nodes: readonly TreeNode[], filter: TreeFilter): TreeNode[] { + switch (filter) { + case 'all': + return [...nodes]; + case 'sql': + return nodes.filter((n) => n.kind === 'sql'); + case 'logs': + return nodes.filter((n) => n.kind === 'log'); + case 'http': + return nodes.filter((n) => n.kind === 'http_server' || n.kind === 'http_client'); + } } diff --git a/packages/cli/tests/unit/cmds/query/lib/treeRender.spec.ts b/packages/cli/tests/unit/cmds/query/lib/treeRender.spec.ts new file mode 100644 index 0000000000..a3769999f8 --- /dev/null +++ b/packages/cli/tests/unit/cmds/query/lib/treeRender.spec.ts @@ -0,0 +1,63 @@ +import { renderFlat, renderTree } from 
'../../../../../src/cmds/query/lib/treeRender'; +import { LogNode, TreeNode } from '../../../../../src/cmds/query/queries/tree'; + +function logNode(overrides: Partial = {}): LogNode { + return { + kind: 'log', + event_id: 5, + parent_event_id: 1, + thread_id: null, + depth: 1, + fqid: 'app/AppLogger#error', + logger: 'AppLogger', + method_id: 'error', + path: 'app/log.rb', + lineno: 12, + elapsed_ms: 0.1, + parameters_json: JSON.stringify([ + { name: 'message', class: 'String', value: 'connection refused' }, + ]), + return_value: null, + ...overrides, + }; +} + +describe('renderTree (log lines)', () => { + it('renders a log node with its projected message inline', () => { + const out = renderTree([logNode()] as TreeNode[]); + expect(out).toContain('LOG'); + expect(out).toContain('AppLogger.error'); + expect(out).toContain('connection refused'); + }); + + it('respects the indentation of the log node depth', () => { + const out = renderTree([logNode({ depth: 3 })] as TreeNode[]); + // 3 levels of two-space indent = 6 leading spaces. + expect(out.startsWith(' LOG')).toBe(true); + }); + + it('falls back to logger.method when no message can be projected', () => { + const out = renderTree([ + logNode({ parameters_json: null, return_value: null }), + ] as TreeNode[]); + // No trailing colon when message is empty. 
+ expect(out).toMatch(/LOG\s+AppLogger\.error\s*$/); + }); + + it('uses a structured return_value when present', () => { + const out = renderTree([ + logNode({ + parameters_json: null, + return_value: JSON.stringify({ level: 'error', message: 'from return' }), + }), + ] as TreeNode[]); + expect(out).toContain('AppLogger.error: from return'); + }); +}); + +describe('renderFlat (log lines)', () => { + it('emits a LOG row when filtering down to log nodes', () => { + const out = renderFlat([logNode()] as TreeNode[]); + expect(out).toMatch(/^LOG\s+AppLogger\.error: connection refused$/); + }); +}); diff --git a/packages/cli/tests/unit/cmds/query/queries/tree.spec.ts b/packages/cli/tests/unit/cmds/query/queries/tree.spec.ts index 00898c318b..304e30f485 100644 --- a/packages/cli/tests/unit/cmds/query/queries/tree.spec.ts +++ b/packages/cli/tests/unit/cmds/query/queries/tree.spec.ts @@ -416,6 +416,63 @@ describe('tree --filter', () => { }); }); +describe('log nodes in tree', () => { + function seedWithLogger(db: sqlite3.Database): number { + const am = db + .prepare( + `INSERT INTO appmaps (name, source_path) VALUES ('with_logger', '/tmp/with_logger.appmap.json')` + ) + .run(); + const id = am.lastInsertRowid; + db.prepare( + `INSERT INTO http_requests (appmap_id, event_id, parent_event_id, method, path, status_code, elapsed_ms) + VALUES (?, 1, NULL, 'POST', '/orders', 500, 100)` + ).run(id); + db.prepare( + `INSERT INTO function_calls (appmap_id, event_id, parent_event_id, defined_class, method_id, elapsed_ms) + VALUES (?, 2, 1, 'OrdersController', 'create', 90)` + ).run(id); + db.prepare( + `INSERT OR IGNORE INTO code_objects (fqid, package, classes, leaf_class, method, is_static) + VALUES ('app/AppLogger#error', 'app', '["AppLogger"]', 'AppLogger', 'error', 0)` + ).run(); + const logCo = (db + .prepare(`SELECT id FROM code_objects WHERE fqid = 'app/AppLogger#error'`) + .get() as { id: number }).id; + db.prepare(`INSERT OR IGNORE INTO labels (code_object_id, label) 
VALUES (?, 'log')`).run(logCo); + db.prepare( + `INSERT INTO function_calls (appmap_id, event_id, parent_event_id, code_object_id, + defined_class, method_id, elapsed_ms, parameters_json) + VALUES (?, 3, 2, ?, 'AppLogger', 'error', 0.05, ?)` + ).run( + id, + logCo, + JSON.stringify([{ name: 'message', class: 'String', value: 'connection refused' }]) + ); + return Number(id); + } + + it('promotes function calls with the `log` label to a log node kind', () => { + const db = freshDb(); + try { + seedWithLogger(db); + const nodes = tree(db, 'with_logger'); + const log = nodes.find((n) => n.kind === 'log'); + expect(log).toBeDefined(); + if (log?.kind !== 'log') throw new Error('expected log'); + expect(log.logger).toBe('AppLogger'); + expect(log.method_id).toBe('error'); + expect(log.event_id).toBe(3); + // The same function_call should NOT also appear as a function node. + expect(nodes.filter((n) => n.event_id === 3 && n.kind === 'function')).toEqual([]); + // Other function_calls remain function nodes. 
+ expect(nodes.find((n) => n.kind === 'function' && n.method_id === 'create')).toBeDefined(); + } finally { + db.close(); + } + }); +}); + describe('treeSummary', () => { it('counts SQL, surfaces entry/exception, and tallies labels', () => { const db = freshDb(); diff --git a/packages/cli/tests/unit/cmds/query/verbs/tree.spec.ts b/packages/cli/tests/unit/cmds/query/verbs/tree.spec.ts new file mode 100644 index 0000000000..6f2a31909b --- /dev/null +++ b/packages/cli/tests/unit/cmds/query/verbs/tree.spec.ts @@ -0,0 +1,62 @@ +import { applyFilter } from '../../../../../src/cmds/query/verbs/tree'; +import { + ExceptionNode, + FunctionNode, + HttpClientNode, + HttpServerNode, + LogNode, + SqlNode, + TreeNode, +} from '../../../../../src/cmds/query/queries/tree'; + +const baseFields = { + parent_event_id: null, + thread_id: null, + depth: 0, +}; + +const http: HttpServerNode = { + kind: 'http_server', event_id: 1, ...baseFields, + method: 'GET', route: '/x', status_code: 200, elapsed_ms: 1, +}; +const httpOut: HttpClientNode = { + kind: 'http_client', event_id: 2, ...baseFields, + method: 'GET', url: 'https://x', status_code: 200, elapsed_ms: 1, +}; +const sql: SqlNode = { + kind: 'sql', event_id: 3, ...baseFields, + sql_text: 'SELECT 1', database_type: null, elapsed_ms: 1, +}; +const fn: FunctionNode = { + kind: 'function', event_id: 4, ...baseFields, + fqid: 'app/X#m', defined_class: 'X', method_id: 'm', + path: null, lineno: null, is_static: false, + elapsed_ms: 1, parameters_json: null, return_value: null, +}; +const exc: ExceptionNode = { + kind: 'exception', event_id: 5, ...baseFields, + exception_class: 'IOError', message: null, path: null, lineno: null, +}; +const lg: LogNode = { + kind: 'log', event_id: 6, ...baseFields, + fqid: 'app/Logger#info', logger: 'Logger', method_id: 'info', + path: null, lineno: null, elapsed_ms: 0.1, + parameters_json: '[{"name":"message","value":"hi"}]', return_value: null, +}; + +const all: TreeNode[] = [http, httpOut, sql, fn, exc, 
lg]; + +describe('tree --filter', () => { + it('all returns every node', () => { + expect(applyFilter(all, 'all')).toHaveLength(6); + }); + it('http includes server and client requests', () => { + expect(applyFilter(all, 'http')).toEqual([http, httpOut]); + }); + it('sql returns only sql nodes', () => { + expect(applyFilter(all, 'sql')).toEqual([sql]); + }); + it('logs returns only log nodes', () => { + expect(applyFilter(all, 'logs')).toEqual([lg]); + }); +}); From 2280aa0ab8f15c2346a15af923f8c27ec4f31f39 Mon Sep 17 00:00:00 2001 From: kgilpin Date: Sun, 3 May 2026 12:10:01 -0400 Subject: [PATCH 28/30] feat(query): expose appmap://recording/{ref}/logs MCP resource MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Adds a templated resource that returns every log line from one recording, ordered by event_id. The {ref} placeholder accepts either a numeric appmap_id or a recording name/basename — the same forms find_recordings returns. Templates are listed via the standard resources/templates/list method; concrete URIs read via resources/read. Internally introduces a ResourceTemplateImpl shape and shared read-response helper so fixed and templated resources share one code path. Co-Authored-By: Claude Opus 4.7 (1M context) --- packages/cli/src/cmds/query/queries/mcp.ts | 113 +++++++++++++++--- .../tests/unit/cmds/query/queries/mcp.spec.ts | 83 ++++++++++++- 2 files changed, 176 insertions(+), 20 deletions(-) diff --git a/packages/cli/src/cmds/query/queries/mcp.ts b/packages/cli/src/cmds/query/queries/mcp.ts index 733eeba36f..df28ef59bd 100644 --- a/packages/cli/src/cmds/query/queries/mcp.ts +++ b/packages/cli/src/cmds/query/queries/mcp.ts @@ -70,6 +70,23 @@ interface ResourceImpl { read: (db: sqlite3.Database) => unknown; } +// Template-based resources expose a parameterized URI. The agent +// discovers them via resources/templates/list, then reads a concrete +// instance with resources/read by substituting the placeholder. 
+interface ResourceTemplateSpec {
+  uriTemplate: string; // RFC 6570 template
+  name: string;
+  description: string;
+  mimeType: string;
+}
+
+interface ResourceTemplateImpl {
+  spec: ResourceTemplateSpec;
+  // Returns the args object if the URI matches the template, else null.
+  match: (uri: string) => Record<string, string> | null;
+  read: (args: Record<string, string>, db: sqlite3.Database) => unknown;
+}
+
 const SERVER_INFO = { name: 'appmap-query', version: '1.0.0' };
 const PROTOCOL_VERSION = '2024-11-05';
 
@@ -541,6 +558,29 @@ const RESOURCES: ResourceImpl[] = [
   },
 ];
 
+const RESOURCE_TEMPLATES: ResourceTemplateImpl[] = [
+  {
+    spec: {
+      uriTemplate: 'appmap://recording/{ref}/logs',
+      name: 'recording_logs',
+      description:
+        'All log lines (functions labeled `log`) for one recording, ordered by event_id. {ref} is either the numeric appmap_id or the recording name/basename — same forms find_recordings returns. Each entry has the find_logs row shape.',
+      mimeType: 'application/json',
+    },
+    match: (uri) => {
+      const m = /^appmap:\/\/recording\/([^/]+)\/logs$/.exec(uri);
+      if (!m) return null;
+      // The {ref} segment may be percent-encoded (recording names can
+      // contain spaces, em-dashes, etc.).
+      return { ref: decodeURIComponent(m[1]) };
+    },
+    read: (args, db) => {
+      const info = resolveByIdOrRef(db, args.ref);
+      return find(db, 'logs', { appmap: info.name }) as FindLogRow[];
+    },
+  },
+];
+
 // --- handler -------------------------------------------------------------
 
 export type McpHandler = (msg: JsonRpcRequest) => JsonRpcResponse | null;
@@ -600,28 +640,38 @@ export function buildMcpHandler(db: sqlite3.Database): McpHandler {
     };
   }
 
+  if (method === 'resources/templates/list') {
+    return {
+      jsonrpc: '2.0',
+      id,
+      result: { resourceTemplates: RESOURCE_TEMPLATES.map((t) => t.spec) },
+    };
+  }
+
   if (method === 'resources/read') {
    const params = (msg.params ?? 
{}) as { uri?: string }; - const resource = RESOURCES.find((r) => r.spec.uri === params.uri); - if (!resource) return errorResponse(id, -32602, `unknown resource: ${params.uri}`); - try { - const result = resource.read(db); - return { - jsonrpc: '2.0', - id, - result: { - contents: [ - { - uri: resource.spec.uri, - mimeType: resource.spec.mimeType, - text: JSON.stringify(result, null, 2), - }, - ], - }, - }; - } catch (e) { - return errorResponse(id, -32000, (e as Error).message); + const uri = params.uri ?? ''; + const resource = RESOURCES.find((r) => r.spec.uri === uri); + if (resource) { + try { + const result = resource.read(db); + return readResponse(id, uri, resource.spec.mimeType, result); + } catch (e) { + return errorResponse(id, -32000, (e as Error).message); + } } + for (const tmpl of RESOURCE_TEMPLATES) { + const matched = tmpl.match(uri); + if (matched) { + try { + const result = tmpl.read(matched, db); + return readResponse(id, uri, tmpl.spec.mimeType, result); + } catch (e) { + return errorResponse(id, -32000, (e as Error).message); + } + } + } + return errorResponse(id, -32602, `unknown resource: ${uri}`); } return errorResponse(id, -32601, `method not found: ${method}`); @@ -636,6 +686,27 @@ function errorResponse( return { jsonrpc: '2.0', id, error: { code, message } }; } +function readResponse( + id: string | number | null, + uri: string, + mimeType: string, + result: unknown +): JsonRpcResponse { + return { + jsonrpc: '2.0', + id, + result: { + contents: [ + { + uri, + mimeType, + text: JSON.stringify(result, null, 2), + }, + ], + }, + }; +} + export function listTools(): readonly ToolSpec[] { return TOOLS.map((t) => t.spec); } @@ -643,3 +714,7 @@ export function listTools(): readonly ToolSpec[] { export function listResources(): readonly ResourceSpec[] { return RESOURCES.map((r) => r.spec); } + +export function listResourceTemplates(): readonly ResourceTemplateSpec[] { + return RESOURCE_TEMPLATES.map((t) => t.spec); +} diff --git 
a/packages/cli/tests/unit/cmds/query/queries/mcp.spec.ts b/packages/cli/tests/unit/cmds/query/queries/mcp.spec.ts index 13a00bc95c..cbbaccb05f 100644 --- a/packages/cli/tests/unit/cmds/query/queries/mcp.spec.ts +++ b/packages/cli/tests/unit/cmds/query/queries/mcp.spec.ts @@ -5,6 +5,7 @@ import { buildMcpHandler, JsonRpcRequest, listResources, + listResourceTemplates, listTools, } from '../../../../../src/cmds/query/queries/mcp'; @@ -385,8 +386,88 @@ describe('MCP handler', () => { } }); - it('listTools / listResources are stable for documentation use', () => { + it('listTools / listResources / listResourceTemplates are stable for documentation use', () => { expect(listTools().length).toBeGreaterThan(0); expect(listResources().length).toBeGreaterThan(0); + expect(listResourceTemplates().length).toBeGreaterThan(0); + }); + + it('resources/templates/list advertises the per-recording logs template', () => { + const db = freshDb(); + try { + const r = call(buildMcpHandler(db), { + jsonrpc: '2.0', + id: 300, + method: 'resources/templates/list', + }); + const templates = (r!.result as any).resourceTemplates as { uriTemplate: string }[]; + expect(templates.some((t) => t.uriTemplate === 'appmap://recording/{ref}/logs')).toBe( + true + ); + } finally { + db.close(); + } + }); + + it('resources/read on appmap://recording//logs returns the recording\'s log rows', () => { + const db = freshDb(); + try { + seedMinimal(db); + // Give the seeded log call a captured message. + db.prepare( + `UPDATE function_calls + SET parameters_json = ? 
+ WHERE method_id = 'error'` + ).run( + JSON.stringify([{ name: 'message', class: 'String', value: 'connection refused' }]) + ); + + const r = call(buildMcpHandler(db), { + jsonrpc: '2.0', + id: 301, + method: 'resources/read', + params: { uri: 'appmap://recording/rec/logs' }, + }); + const contents = (r!.result as any).contents; + expect(contents[0].uri).toBe('appmap://recording/rec/logs'); + const rows = JSON.parse(contents[0].text); + expect(rows).toHaveLength(1); + expect(rows[0].logger).toBe('Logger'); + expect(rows[0].method_id).toBe('error'); + } finally { + db.close(); + } + }); + + it('resources/read on a recording-logs URI with an unknown ref returns an error', () => { + const db = freshDb(); + try { + const r = call(buildMcpHandler(db), { + jsonrpc: '2.0', + id: 302, + method: 'resources/read', + params: { uri: 'appmap://recording/no-such-recording/logs' }, + }); + expect(r!.error).toBeDefined(); + expect(r!.error!.message).toMatch(/appmap not found/); + } finally { + db.close(); + } + }); + + it('resources/read on a URI that matches no resource or template returns an error', () => { + const db = freshDb(); + try { + const r = call(buildMcpHandler(db), { + jsonrpc: '2.0', + id: 303, + method: 'resources/read', + params: { uri: 'appmap://nope' }, + }); + expect(r!.error).toBeDefined(); + expect(r!.error!.message).toMatch(/unknown resource/); + } finally { + db.close(); + } }); }); From 4e72c6b153fdbc18dc3df20193ae9cb967323462 Mon Sep 17 00:00:00 2001 From: kgilpin Date: Sun, 3 May 2026 12:32:54 -0400 Subject: [PATCH 29/30] fix(query): with_logs neighborhood; project log message server-side Two MCP improvements surfaced by end-to-end log testing: - find_exceptions with_logs returned 0 entries when the relevant logs fired *inside* the throwing call. The exceptions table stored only the call-entry event_id, but logs inside the call have event_id > entry. 
Schema v5 adds return_event_id (the throw point in the event stream) and the with_logs SQL now bounds by it, so logs that fired inside the throwing call are included. The legacy call-event recorder shape leaves return_event_id null and falls back to event_id. - find_logs rows and get_call_tree LogNodes now carry a derived `message` field (using the existing projectLogMessage helper). Callers no longer have to parse parameters_json themselves. Recorder-introduced wrapping quotes (e.g. \"'hello'\") are stripped for display; raw parameters_json and return_value remain on the row. CLAUDE.md gains repo-specific notes about rebuilding the CLI after MCP source changes and the appmap-node cwd gotcha for monorepo TS projects (factored out of the user-agnostic skills). Co-Authored-By: Claude Opus 4.7 (1M context) --- CLAUDE.md | 8 +++ .../src/cmds/query/db/import/exceptions.ts | 10 +++- packages/cli/src/cmds/query/db/schema.ts | 9 ++- packages/cli/src/cmds/query/lib/logMessage.ts | 17 +++++- packages/cli/src/cmds/query/queries/find.ts | 40 +++++++++---- packages/cli/src/cmds/query/queries/mcp.ts | 4 +- packages/cli/src/cmds/query/queries/tree.ts | 6 ++ .../unit/cmds/query/lib/treeRender.spec.ts | 1 + .../tests/unit/cmds/query/queries/mcp.spec.ts | 59 +++++++++++++++++-- .../tests/unit/cmds/query/verbs/tree.spec.ts | 1 + 10 files changed, 132 insertions(+), 23 deletions(-) diff --git a/CLAUDE.md b/CLAUDE.md index 541d14ae26..ad1b28229a 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -29,3 +29,11 @@ If you touch a package that doesn't yet participate, add a `typecheck` script (` - ESLint errors that CI rejects: `array-type` (use `T[]` not `Array`), `no-unnecessary-type-assertion`, `prefer-function-type`, `prefer-optional-chain`, etc. Warnings (e.g. `no-unsafe-*`, `prefer-nullish-coalescing`) are suppressed by `--quiet`. - Type errors that `tsc --noEmit` finds, including yargs `CommandModule` assignability issues that require widening exported handler argv types. 
+ +# Driving the MCP after MCP-side changes + +The `appmap query mcp` server lives in `built/cli.js`. If you change anything under `packages/cli/src/cmds/query/queries/mcp.ts` (or anywhere it transitively imports), you must run `npx tsc` (or `yarn build`) inside `packages/cli` before launching `mcp` for ad-hoc testing. A stale binary will respond to `tools/list` with the old surface — symptom is usually `unknown tool: …` from a client driving a tool the source defines. + +# Recording with appmap-node from a monorepo + +`npx appmap-node@latest npx jest …` invoked from the repo root can fail to parse `.ts` test files with a babel SyntaxError, because the inner jest doesn't pick up `packages//jest.config.js`'s `ts-jest` preset. Run from the package directory whose preset matters — e.g. `cd packages/cli && npx appmap-node@latest npx jest …`. diff --git a/packages/cli/src/cmds/query/db/import/exceptions.ts b/packages/cli/src/cmds/query/db/import/exceptions.ts index 6f1a7cc716..aeab5cf1c9 100644 --- a/packages/cli/src/cmds/query/db/import/exceptions.ts +++ b/packages/cli/src/cmds/query/db/import/exceptions.ts @@ -24,9 +24,9 @@ export function importExceptions( parentEventMap: Map ): void { const stmt = db.prepare( - `INSERT INTO exceptions (appmap_id, event_id, thread_id, parent_event_id, - exception_class, message, path, lineno) - VALUES (?, ?, ?, ?, ?, ?, ?, ?)` + `INSERT INTO exceptions (appmap_id, event_id, return_event_id, thread_id, + parent_event_id, exception_class, message, path, lineno) + VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)` ); const seenCallIds = new Set(); @@ -39,6 +39,7 @@ export function importExceptions( if (typeof ev.parent_id !== 'number') continue; const callEventId = ev.parent_id; + const returnEventId = typeof ev.id === 'number' ? ev.id : null; seenCallIds.add(callEventId); const parentEventId = parentEventMap.get(callEventId) ?? null; @@ -46,6 +47,7 @@ export function importExceptions( stmt.run( appmapId, callEventId, + returnEventId, ev.thread_id ?? 
null, parentEventId, exc.class, @@ -57,6 +59,7 @@ export function importExceptions( } // Pass 2: legacy shape — exceptions on a call event we didn't already cover. + // No paired return event in this shape, so return_event_id stays null. for (const ev of events) { if (ev.event !== 'call') continue; const excs = ev.exceptions as ExceptionObject[] | undefined; @@ -69,6 +72,7 @@ export function importExceptions( stmt.run( appmapId, ev.id, + null, ev.thread_id ?? null, parentEventId, exc.class, diff --git a/packages/cli/src/cmds/query/db/schema.ts b/packages/cli/src/cmds/query/db/schema.ts index b4e48325a5..28d4a212f6 100644 --- a/packages/cli/src/cmds/query/db/schema.ts +++ b/packages/cli/src/cmds/query/db/schema.ts @@ -4,7 +4,7 @@ // queries an APM dashboard or LLM agent needs. Ported from appmap-apm // (server/db/schema.py); shape preserved unchanged. -export const SCHEMA_VERSION = 4; +export const SCHEMA_VERSION = 5; export const SCHEMA = ` CREATE TABLE IF NOT EXISTS appmaps ( @@ -115,7 +115,14 @@ CREATE TABLE IF NOT EXISTS function_calls ( CREATE TABLE IF NOT EXISTS exceptions ( id INTEGER PRIMARY KEY AUTOINCREMENT, appmap_id INTEGER NOT NULL REFERENCES appmaps(id) ON DELETE CASCADE, + -- event_id is the call event id (the call this exception belongs to); + -- return_event_id is the return event id (where the throw materialized in + -- the event stream). Use return_event_id for ordering — e.g. the + -- with_logs neighborhood query (what did the app log before the throw?) + -- needs to include logs that fired *inside* the throwing call, which all + -- have event_id greater than the call entry id but less than the return. 
event_id INTEGER, + return_event_id INTEGER, thread_id INTEGER, parent_event_id INTEGER, exception_class TEXT NOT NULL, diff --git a/packages/cli/src/cmds/query/lib/logMessage.ts b/packages/cli/src/cmds/query/lib/logMessage.ts index e0c7e31313..7bb6d79ae1 100644 --- a/packages/cli/src/cmds/query/lib/logMessage.ts +++ b/packages/cli/src/cmds/query/lib/logMessage.ts @@ -26,9 +26,9 @@ export function projectLogMessage( try { const params = JSON.parse(parametersJson) as { name?: string; class?: string; value?: unknown }[]; const named = params.find((p) => p.name === 'message' || p.name === 'msg'); - if (named?.value != null) return String(named.value); + if (named?.value != null) return stripWrappingQuotes(String(named.value)); const firstString = params.find((p) => typeof p.value === 'string'); - if (firstString) return String(firstString.value); + if (firstString) return stripWrappingQuotes(String(firstString.value)); if (params.length > 0) return JSON.stringify(params.map((p) => p.value)); } catch { return parametersJson; @@ -38,3 +38,16 @@ export function projectLogMessage( // `return_value` (which is often noise like "true" / "None"). return ''; } + +// Some recorders stringify String parameter values with the source-code +// quote characters preserved (e.g. `'hello'`). Strip a single matched +// pair of leading+trailing single or double quotes so the display text +// is the raw message. 
+function stripWrappingQuotes(s: string): string { + if (s.length >= 2) { + const first = s[0]; + const last = s[s.length - 1]; + if ((first === "'" || first === '"') && first === last) return s.slice(1, -1); + } + return s; +} diff --git a/packages/cli/src/cmds/query/queries/find.ts b/packages/cli/src/cmds/query/queries/find.ts index 8469e30950..06f9bef619 100644 --- a/packages/cli/src/cmds/query/queries/find.ts +++ b/packages/cli/src/cmds/query/queries/find.ts @@ -1,5 +1,6 @@ import sqlite3 from 'better-sqlite3'; +import { projectLogMessage } from '../lib/logMessage'; import type { NumberFilter } from '../lib/parseFilter'; import { appmapIdScope, @@ -86,6 +87,10 @@ export interface FindLogRow { method_id: string; path: string | null; lineno: number | null; + // Display-projected message derived from parameters_json / return_value + // (see lib/logMessage.projectLogMessage). '' when nothing usable was + // captured; the raw JSON columns remain for callers who need them. + message: string; parameters_json: string | null; return_value: string | null; } @@ -94,13 +99,19 @@ export interface FindExceptionRow { appmap_id: number; appmap_name: string; event_id: number; + // Return event id where the throw materialized. with_logs uses this as + // the upper bound so logs that fired *inside* the throwing call are + // included. Null only for the legacy "exceptions on a call event" + // recorder shape. + return_event_id: number | null; exception_class: string; message: string | null; path: string | null; lineno: number | null; // Populated only when filter.withLogs > 0. Ordered chronologically // (oldest first), capped at filter.withLogs entries. Each row has - // event_id < this exception's event_id. + // event_id < the exception's return_event_id (or event_id if + // return_event_id is null). 
recent_logs?: FindLogRow[]; } @@ -369,7 +380,8 @@ export function findLogs(db: sqlite3.Database, filter: FindFilter): FindLogRow[] ORDER BY a.source_path, fc.event_id `; sql = appendLimitOffset(sql, filter, params); - return db.prepare(sql).all(...params) as FindLogRow[]; + const rows = db.prepare(sql).all(...params) as Omit[]; + return rows.map((r) => ({ ...r, message: projectLogMessage(r.parameters_json, r.return_value) })); } export function findExceptions(db: sqlite3.Database, filter: FindFilter): FindExceptionRow[] { @@ -391,6 +403,7 @@ export function findExceptions(db: sqlite3.Database, filter: FindFilter): FindEx SELECT e.appmap_id AS appmap_id, a.name AS appmap_name, e.event_id AS event_id, + e.return_event_id AS return_event_id, e.exception_class AS exception_class, e.message AS message, e.path AS path, @@ -404,12 +417,14 @@ export function findExceptions(db: sqlite3.Database, filter: FindFilter): FindEx const rows = db.prepare(sql).all(...params) as FindExceptionRow[]; // Enrichment: for each exception, attach the last N log calls in the - // same recording with event_id strictly less than the exception's. We - // use event order rather than parent_event_id subtree walking — it's - // a strict subset of the call-tree relevant to most debugging - // questions ("what did the app log before it crashed?") and avoids a - // recursive CTE per row. Exceptions whose event_id is NULL (synthetic) - // can't be ordered, so they don't get logs attached. + // same recording with event_id strictly less than the exception's + // return_event_id (the throw point in the event stream). Falling back + // to event_id (the call entry) only handles the legacy recorder shape + // — which produces no preceding logs anyway, since logs inside the + // call have event_id > the call entry. 
We use event order rather than + // parent_event_id subtree walking to avoid recursive CTEs; this picks + // up logs that ran in the same thread before the throw, which is + // what "what did the app log before it crashed?" asks. if (filter.withLogs && filter.withLogs > 0) { const logStmt = db.prepare(` SELECT a.name AS appmap_name, @@ -432,12 +447,15 @@ export function findExceptions(db: sqlite3.Database, filter: FindFilter): FindEx LIMIT ? `); for (const row of rows) { - if (row.event_id == null) { + const upperBound = row.return_event_id ?? row.event_id; + if (upperBound == null) { row.recent_logs = []; continue; } - const logs = logStmt.all(row.appmap_id, row.event_id, filter.withLogs) as FindLogRow[]; - row.recent_logs = logs.reverse(); // chronological + const logs = logStmt.all(row.appmap_id, upperBound, filter.withLogs) as Omit[]; + row.recent_logs = logs + .reverse() // chronological + .map((l) => ({ ...l, message: projectLogMessage(l.parameters_json, l.return_value) })); } } diff --git a/packages/cli/src/cmds/query/queries/mcp.ts b/packages/cli/src/cmds/query/queries/mcp.ts index df28ef59bd..936f7df89b 100644 --- a/packages/cli/src/cmds/query/queries/mcp.ts +++ b/packages/cli/src/cmds/query/queries/mcp.ts @@ -384,7 +384,7 @@ const TOOLS: ToolImpl[] = [ spec: { name: 'find_logs', description: - 'Application log lines captured from functions labeled `log`. Filter by message substring (matches across the call\'s parameters and return value), logger class, recording, branch, or time window. Returns: appmap_name, event_id, parent_event_id, logger, method_id, path, lineno, parameters_json, return_value. The message is in parameters_json (a [{name, class, value}, ...] blob) — read the value of the parameter named `message`/`msg`, or the first string-typed parameter, or parse return_value as JSON if the recorder returns a structured `{level, message, ...}`. 
Use path:lineno to read the call site of the log statement.', + 'Application log lines captured from functions labeled `log`. Filter by message substring (matches across the call\'s parameters and return value), logger class, recording, branch, or time window. Returns: appmap_name, event_id, parent_event_id, logger, method_id, path, lineno, message, parameters_json, return_value. `message` is the display-projected log text (extracted from a structured return_value or from the parameter named message/msg, falling back to the first string parameter); use it directly. parameters_json and return_value remain available for the underlying captured values. Use path:lineno to read the call site of the log statement.', inputSchema: { type: 'object', properties: { @@ -415,7 +415,7 @@ const TOOLS: ToolImpl[] = [ spec: { name: 'find_exceptions', description: - 'Exception rows with class, message, source location. Filter by exception class name, the request that owns the exception (via route/status), branch, or time window. Returns: appmap_id, appmap_name, event_id, exception_class, message, path, lineno. Pass with_logs=N to attach the last N log lines preceding each exception (chronological order) under recent_logs — usually the fastest way to see what the app reported before the failure.', + 'Exception rows with class, message, source location. Filter by exception class name, the request that owns the exception (via route/status), branch, or time window. Returns: appmap_id, appmap_name, event_id (the throwing call\'s entry id), return_event_id (the throw point in the event stream), exception_class, message, path, lineno. Pass with_logs=N to attach the last N log lines preceding the throw (chronological order) under recent_logs — usually the fastest way to see what the app reported before the failure. 
recent_logs uses return_event_id as the upper bound, so logs that fired *inside* the throwing call are included.', inputSchema: { type: 'object', properties: { diff --git a/packages/cli/src/cmds/query/queries/tree.ts b/packages/cli/src/cmds/query/queries/tree.ts index 768f957cfb..71476c956b 100644 --- a/packages/cli/src/cmds/query/queries/tree.ts +++ b/packages/cli/src/cmds/query/queries/tree.ts @@ -1,5 +1,6 @@ import sqlite3 from 'better-sqlite3'; +import { projectLogMessage } from '../lib/logMessage'; import { appmapRefClause } from '../lib/scope'; // Discriminated union of tree nodes. Each node corresponds to one row in @@ -69,6 +70,10 @@ export interface LogNode extends BaseNode { path: string | null; lineno: number | null; elapsed_ms: number | null; + // Display-projected message derived from parameters_json / return_value + // (see lib/logMessage.projectLogMessage). '' when nothing usable was + // captured. + message: string; parameters_json: string | null; return_value: string | null; } @@ -259,6 +264,7 @@ export function tree( path: r.path, lineno: r.lineno, elapsed_ms: r.elapsed_ms, + message: projectLogMessage(r.parameters_json, r.return_value), parameters_json: r.parameters_json, return_value: r.return_value, }); diff --git a/packages/cli/tests/unit/cmds/query/lib/treeRender.spec.ts b/packages/cli/tests/unit/cmds/query/lib/treeRender.spec.ts index a3769999f8..7ad6ae0ad5 100644 --- a/packages/cli/tests/unit/cmds/query/lib/treeRender.spec.ts +++ b/packages/cli/tests/unit/cmds/query/lib/treeRender.spec.ts @@ -14,6 +14,7 @@ function logNode(overrides: Partial = {}): LogNode { path: 'app/log.rb', lineno: 12, elapsed_ms: 0.1, + message: 'connection refused', parameters_json: JSON.stringify([ { name: 'message', class: 'String', value: 'connection refused' }, ]), diff --git a/packages/cli/tests/unit/cmds/query/queries/mcp.spec.ts b/packages/cli/tests/unit/cmds/query/queries/mcp.spec.ts index cbbaccb05f..cb13373732 100644 --- 
a/packages/cli/tests/unit/cmds/query/queries/mcp.spec.ts +++ b/packages/cli/tests/unit/cmds/query/queries/mcp.spec.ts @@ -36,16 +36,22 @@ function seedMinimal(db: sqlite3.Database): void { .get() as { id: number }).id; db.prepare(`INSERT OR IGNORE INTO labels (code_object_id, label) VALUES (?, 'log')`).run(co); db.prepare( - `INSERT INTO function_calls (appmap_id, event_id, parent_event_id, code_object_id, defined_class, method_id, elapsed_ms) - VALUES (?, 2, 1, ?, 'Logger', 'error', 0.1)` + `INSERT INTO function_calls (appmap_id, event_id, parent_event_id, code_object_id, + defined_class, method_id, elapsed_ms, parameters_json, return_value) + VALUES (?, 2, 1, ?, 'Logger', 'error', 0.1, + '[{"name":"message","class":"String","value":"connection refused"}]', NULL)` ).run(id, co); db.prepare( `INSERT INTO sql_queries (appmap_id, event_id, parent_event_id, sql_text, elapsed_ms) VALUES (?, 3, 2, 'INSERT INTO orders (id) VALUES (?)', 14)` ).run(id); + // Exception's call entry is event_id=2 (the Logger.error call), and the + // throw materialised at the return event id=4. with_logs uses event_id=2 + // as the call boundary and event_id=4 as the throw boundary. 
db.prepare( - `INSERT INTO exceptions (appmap_id, event_id, parent_event_id, exception_class, message) - VALUES (?, 2, 1, 'IntegrityError', 'duplicate key')` + `INSERT INTO exceptions (appmap_id, event_id, return_event_id, parent_event_id, + exception_class, message) + VALUES (?, 2, 4, 1, 'IntegrityError', 'duplicate key')` ).run(id); } @@ -314,6 +320,51 @@ describe('MCP handler', () => { } }); + it('find_logs row carries a derived message field', () => { + const db = freshDb(); + try { + seedMinimal(db); + const r = call(buildMcpHandler(db), { + jsonrpc: '2.0', + id: 91, + method: 'tools/call', + params: { name: 'find_logs', arguments: {} }, + }); + const rows = JSON.parse((r!.result as any).content[0].text); + expect(rows).toHaveLength(1); + expect(rows[0].message).toBe('connection refused'); + expect(rows[0].logger).toBe('Logger'); + expect(rows[0].parameters_json).toContain('connection refused'); + } finally { + db.close(); + } + }); + + it('find_exceptions with_logs uses return_event_id for ordering (regression)', () => { + const db = freshDb(); + try { + seedMinimal(db); + const r = call(buildMcpHandler(db), { + jsonrpc: '2.0', + id: 92, + method: 'tools/call', + params: { name: 'find_exceptions', arguments: { with_logs: 5 } }, + }); + const rows = JSON.parse((r!.result as any).content[0].text); + expect(rows).toHaveLength(1); + expect(rows[0].return_event_id).toBe(4); + // Pre-fix the with_logs SQL filtered by `event_id < exception.event_id` + // (=2), which excluded the Logger.error log call at event_id=2 entirely. + // With return_event_id (=4) as the upper bound, the log call (event 2) + // is included — that's the regression we're guarding against. 
+ expect(rows[0].recent_logs).toHaveLength(1); + expect(rows[0].recent_logs[0].event_id).toBe(2); + expect(rows[0].recent_logs[0].message).toBe('connection refused'); + } finally { + db.close(); + } + }); + it('find_calls --label filters by the AppMap label', () => { const db = freshDb(); try { diff --git a/packages/cli/tests/unit/cmds/query/verbs/tree.spec.ts b/packages/cli/tests/unit/cmds/query/verbs/tree.spec.ts index 6f2a31909b..33a730a23d 100644 --- a/packages/cli/tests/unit/cmds/query/verbs/tree.spec.ts +++ b/packages/cli/tests/unit/cmds/query/verbs/tree.spec.ts @@ -41,6 +41,7 @@ const lg: LogNode = { kind: 'log', event_id: 6, ...baseFields, fqid: 'app/Logger#info', logger: 'Logger', method_id: 'info', path: null, lineno: null, elapsed_ms: 0.1, + message: 'hi', parameters_json: '[{"name":"message","value":"hi"}]', return_value: null, }; From 57174ce90d8d469e999c4f63b7d08e73d4bf3de3 Mon Sep 17 00:00:00 2001 From: kgilpin Date: Mon, 4 May 2026 11:08:27 -0400 Subject: [PATCH 30/30] feat(query): substring filters + Page pagination across list queries MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Search-style filters now match anything reasonable rather than requiring exact identifiers. The agent's frustration with `find_recordings` not finding "test_apply_high_id_first_then_low_id" inside a longer basename was the trigger; this generalizes the fix. - appmap: substring of name OR source_path. New appmapLikeClause used by appmapWhere; appmapRefClause stays exact for tree's resolveAppmap. - label / exception: LIKE %X% (was =). - route path: LIKE %X% (was exact); method still strict + case-insensitive. - class / method short forms ("Repo", "create") add a substring fallback on leaf_class / method / defined_class / method_id. Canonical fqid inputs (containing `/` or `::`) keep their previous strict matching so explicit disambiguation isn't broadened. Symmetric change in sqlCallerClassClauses / sqlCallerMethodClauses. 
All list-returning queries now return Page = {rows, total, limit, offset}: - Default limit 20 across find_*, hotspots, endpoints, related, compare. Pass limit=0 for unbounded. - paginate() helper at lib/page.ts wraps each base query with a COUNT(*) subquery for the total, then re-runs with LIMIT/OFFSET. - CLI verbs render rows as before plus a `(showing N–M of TOTAL …)` footer when truncated. --json emits the full Page object. - MCP tools return Page directly (text content is JSON.stringify'd already). Tool descriptions updated: `appmap`/`route`/`class`/ `method`/`label`/`exception` are documented as substring; `limit` documents the default 20 + 0-for-unbounded. Tests: 244 passing. yarn verify clean. Breaking change for any direct caller destructuring the array return — intentional per user request. Co-Authored-By: Claude Opus 4.7 (1M context) --- packages/cli/src/cmds/query/lib/page.ts | 56 ++++++++ packages/cli/src/cmds/query/lib/scope.ts | 121 ++++++++++++++---- .../cli/src/cmds/query/queries/compare.ts | 22 +++- .../cli/src/cmds/query/queries/endpoints.ts | 33 ++--- packages/cli/src/cmds/query/queries/find.ts | 88 ++++++------- .../cli/src/cmds/query/queries/hotspots.ts | 33 ++--- packages/cli/src/cmds/query/queries/mcp.ts | 108 +++++++++------- .../cli/src/cmds/query/queries/related.ts | 11 +- packages/cli/src/cmds/query/verbs/compare.ts | 13 +- .../cli/src/cmds/query/verbs/endpoints.ts | 13 +- packages/cli/src/cmds/query/verbs/find.ts | 9 +- packages/cli/src/cmds/query/verbs/hotspots.ts | 17 ++- packages/cli/src/cmds/query/verbs/related.ts | 13 +- .../unit/cmds/query/queries/compare.spec.ts | 8 +- .../unit/cmds/query/queries/endpoints.spec.ts | 32 ++--- .../unit/cmds/query/queries/find.spec.ts | 86 ++++++------- .../unit/cmds/query/queries/hotspots.spec.ts | 24 ++-- .../tests/unit/cmds/query/queries/mcp.spec.ts | 69 +++++----- .../unit/cmds/query/queries/related.spec.ts | 8 +- 19 files changed, 480 insertions(+), 284 deletions(-) create mode 100644 
packages/cli/src/cmds/query/lib/page.ts diff --git a/packages/cli/src/cmds/query/lib/page.ts b/packages/cli/src/cmds/query/lib/page.ts new file mode 100644 index 0000000000..342213becd --- /dev/null +++ b/packages/cli/src/cmds/query/lib/page.ts @@ -0,0 +1,56 @@ +import sqlite3 from 'better-sqlite3'; + +// Standard wrapper for any list-returning query. `rows` is the slice +// the caller asked for; `total` is the count of all matching rows +// (ignoring limit/offset). JSON consumers get truncation info for free; +// text renderers append a footer when total > offset + rows.length. +export interface Page { + rows: T[]; + total: number; + limit: number; + offset: number; +} + +// Default limit applied when filter.limit is undefined. Pass 0 or +// negative to mean unbounded. +export const DEFAULT_PAGE_LIMIT = 20; + +// Run a base query and return its paginated result + total count. +// `baseSql` is the SELECT (with WHERE / ORDER BY) without LIMIT/OFFSET; +// the function wraps it in a COUNT subquery for the total. Limit/offset +// are inlined as numbers — they come from typed filter fields, not +// arbitrary user input, so this is safe. +export function paginate( + db: sqlite3.Database, + baseSql: string, + params: readonly (string | number)[], + options: { limit?: number; offset?: number } = {} +): Page { + const limit = options.limit ?? DEFAULT_PAGE_LIMIT; + const offset = options.offset ?? 0; + + const countSql = `SELECT COUNT(*) AS n FROM (${baseSql})`; + const total = (db.prepare(countSql).get(...params) as { n: number }).n; + + let rowsSql = baseSql; + if (limit > 0) { + rowsSql += ` LIMIT ${limit | 0} OFFSET ${offset | 0}`; + } else if (offset > 0) { + rowsSql += ` LIMIT -1 OFFSET ${offset | 0}`; + } + const rows = db.prepare(rowsSql).all(...params) as T[]; + + return { rows, total, limit, offset }; +} + +// Format the truncation footer for text-mode renderers. Returns null +// when nothing was clipped (so the caller can choose whether to print +// it at all). 
+export function truncationFooter(page: Page): string | null { + const shown = page.rows.length; + if (shown === 0 && page.total === 0) return null; + const last = page.offset + shown; + if (last >= page.total) return null; + const first = page.offset + 1; + return `(showing ${first}–${last} of ${page.total}; use --limit / --offset to page)`; +} diff --git a/packages/cli/src/cmds/query/lib/scope.ts b/packages/cli/src/cmds/query/lib/scope.ts index cdefcedd5b..4687984149 100644 --- a/packages/cli/src/cmds/query/lib/scope.ts +++ b/packages/cli/src/cmds/query/lib/scope.ts @@ -36,7 +36,9 @@ export function parseRoute(s: string): RouteSpec { // - exact appmap.name // - source_path ending in `.appmap.json` (Unix or Windows sep) // - source_path ending in `` (non-`.appmap.json` stores) -// Used by find / tree / hotspots so the lookup behaves the same everywhere. +// Used by `tree` and other single-resolve operations that want +// exact-match-or-fail (with an ambiguity error). For find/list contexts +// where lenient matching is the right UX, use appmapLikeClause. export function appmapRefClause( ref: string, alias: string @@ -49,6 +51,22 @@ export function appmapRefClause( }; } +// Lenient appmap match for find/list contexts: ref is a substring of +// either the human-readable name or the source_path. SQLite LIKE is +// case-insensitive for ASCII by default. Used by appmapWhere so all the +// find_* tools surface a recording when any reasonable word from its +// name or path is provided. +export function appmapLikeClause( + ref: string, + alias: string +): { sql: string; params: string[] } { + const like = `%${ref}%`; + return { + sql: `(${alias}.name LIKE ? 
OR ${alias}.source_path LIKE ?)`, + params: [like, like], + }; +} + export interface Clauses { where: string[]; params: (string | number)[]; @@ -75,7 +93,7 @@ export function appmapWhere(filter: RecordingScope, alias: string): Clauses { params.push(filter.until); } if (filter.appmap) { - const ref = appmapRefClause(filter.appmap, alias); + const ref = appmapLikeClause(filter.appmap, alias); where.push(ref.sql); params.push(...ref.params); } @@ -91,8 +109,8 @@ export function httpScopeClauses(filter: RecordingScope, alias = 'h'): Clauses { const params: (string | number)[] = []; if (filter.route) { const route = parseRoute(filter.route); - where.push(`COALESCE(${alias}.normalized_path, ${alias}.path) = ?`); - params.push(route.path); + where.push(`COALESCE(${alias}.normalized_path, ${alias}.path) LIKE ?`); + params.push(`%${route.path}%`); if (route.method) { where.push(`${alias}.method = ?`); params.push(route.method); @@ -211,22 +229,51 @@ export function classFilterClauses(input: string, fcAlias: string): Clauses { coParams.push(parts.method); } - // Fallback for unlinked function_calls. + // Fallback for unlinked function_calls. Includes a substring match on + // defined_class so a search like "Repo" matches "UserRepository" even + // when the row isn't linked to a code_object. const fbWhere: string[] = [ `${fcAlias}.defined_class = ?`, `${fcAlias}.defined_class LIKE '%.' || ?`, `${fcAlias}.defined_class LIKE '%::' || ?`, + `${fcAlias}.defined_class LIKE ?`, ]; - const fbParams: (string | number)[] = [parts.class, parts.class, parts.class]; + const fbParams: (string | number)[] = [ + parts.class, + parts.class, + parts.class, + `%${parts.class}%`, + ]; + + // Lenient leaf_class substring lookup against code_objects. Applied + // only when the user supplied a SHORT form (no package, no chain) — + // a canonical input like "org/example/UserRepository#findById" is + // explicit disambiguation and should match strictly. 
So short-form + // "Repo" finds "UserRepository", but full canonical doesn't broaden. + const isShortForm = !parts.package && !parts.class.includes('::'); + if (!isShortForm) { + return { + where: [ + `((${fcAlias}.code_object_id IN ( + SELECT id FROM code_objects WHERE ${coWhere.join(' AND ')} + )) + OR (${fcAlias}.code_object_id IS NULL AND (${fbWhere.join(' OR ')})))`, + ], + params: [...coParams, ...fbParams], + }; + } return { where: [ - `(${fcAlias}.code_object_id IN ( - SELECT id FROM code_objects WHERE ${coWhere.join(' AND ')} - ) + `((${fcAlias}.code_object_id IN ( + SELECT id FROM code_objects WHERE ${coWhere.join(' AND ')} + )) + OR (${fcAlias}.code_object_id IN ( + SELECT id FROM code_objects WHERE leaf_class LIKE ? + )) OR (${fcAlias}.code_object_id IS NULL AND (${fbWhere.join(' OR ')})))`, ], - params: [...coParams, ...fbParams], + params: [...coParams, `%${parts.class}%`, ...fbParams], }; } @@ -234,14 +281,16 @@ export function classFilterClauses(input: string, fcAlias: string): Clauses { // code_objects.method column, with a fallback to function_calls.method_id // for rows that aren't linked to a code_object. export function methodFilterClauses(input: string, fcAlias: string): Clauses { + const like = `%${input}%`; return { where: [ `(${fcAlias}.code_object_id IN ( - SELECT id FROM code_objects WHERE method = ? + SELECT id FROM code_objects WHERE method = ? OR method LIKE ? ) - OR (${fcAlias}.code_object_id IS NULL AND ${fcAlias}.method_id = ?))`, + OR (${fcAlias}.code_object_id IS NULL + AND (${fcAlias}.method_id = ? OR ${fcAlias}.method_id LIKE ?)))`, ], - params: [input, input], + params: [input, like, input, like], }; } @@ -292,22 +341,48 @@ export function sqlCallerClassClauses(input: string, qAlias: string): Clauses { return { where: [coClause], params: coParams }; } - // Fallback: match the row's raw caller_class with suffix-aware logic. 
+ // Fallback: match the row's raw caller_class with suffix-aware logic + // plus a generic substring fallback so "Repo" finds "UserRepository". const fbConditions: string[] = [ `${qAlias}.caller_class = ?`, `${qAlias}.caller_class LIKE '%.' || ?`, `${qAlias}.caller_class LIKE '%::' || ?`, + `${qAlias}.caller_class LIKE ?`, + ]; + const fbParams: (string | number)[] = [ + parts.class, + parts.class, + parts.class, + `%${parts.class}%`, ]; - const fbParams: (string | number)[] = [parts.class, parts.class, parts.class]; const fbParts: string[] = [`(${fbConditions.join(' OR ')})`]; if (parts.method) { - fbParts.push(`${qAlias}.caller_method = ?`); - fbParams.push(parts.method); + fbParts.push(`(${qAlias}.caller_method = ? OR ${qAlias}.caller_method LIKE ?)`); + fbParams.push(parts.method, `%${parts.method}%`); + } + + // Substring leaf_class lookup against code_objects. As in + // classFilterClauses, only applied for short-form inputs ("Repo") + // so canonical fqids stay strict. (parts.package is filtered out + // earlier; the short-form check here is just on `::`.) + if (parts.class.includes('::')) { + return { + where: [`(${coClause} OR (${fbParts.join(' AND ')}))`], + params: [...coParams, ...fbParams], + }; } + const looseLeafClause = `${qAlias}.parent_event_id IN ( + SELECT fc.event_id FROM function_calls fc + WHERE fc.appmap_id = ${qAlias}.appmap_id + AND fc.code_object_id IN ( + SELECT id FROM code_objects WHERE leaf_class LIKE ? + ) + )`; + return { - where: [`(${coClause} OR (${fbParts.join(' AND ')}))`], - params: [...coParams, ...fbParams], + where: [`(${coClause} OR ${looseLeafClause} OR (${fbParts.join(' AND ')}))`], + params: [...coParams, `%${parts.class}%`, ...fbParams], }; } @@ -315,18 +390,20 @@ export function sqlCallerClassClauses(input: string, qAlias: string): Clauses { // code_object.method, with a fallback to caller_method for unlinked // parents. 
export function sqlCallerMethodClauses(input: string, qAlias: string): Clauses { + const like = `%${input}%`; return { where: [ `(${qAlias}.parent_event_id IN ( SELECT fc.event_id FROM function_calls fc WHERE fc.appmap_id = ${qAlias}.appmap_id AND fc.code_object_id IN ( - SELECT id FROM code_objects WHERE method = ? + SELECT id FROM code_objects WHERE method = ? OR method LIKE ? ) ) - OR ${qAlias}.caller_method = ?)`, + OR ${qAlias}.caller_method = ? + OR ${qAlias}.caller_method LIKE ?)`, ], - params: [input, input], + params: [input, like, input, like], }; } diff --git a/packages/cli/src/cmds/query/queries/compare.ts b/packages/cli/src/cmds/query/queries/compare.ts index db6fbda4c5..5ab2c45765 100644 --- a/packages/cli/src/cmds/query/queries/compare.ts +++ b/packages/cli/src/cmds/query/queries/compare.ts @@ -1,5 +1,6 @@ import sqlite3 from 'better-sqlite3'; +import { DEFAULT_PAGE_LIMIT, Page } from '../lib/page'; import { endpoints } from './endpoints'; export interface CompareRow { @@ -22,23 +23,32 @@ export interface CompareFilter { until?: string; sort?: CompareSort; limit?: number; + offset?: number; } // Computes per-route p95 for two branches and merges the results, exposing // delta = b_p95 / a_p95 alongside both sides' counts and p95s. Implementation // reuses endpoints() (which already does the SQL window-function p95) so the // p95 semantics match the endpoints verb exactly. -export function compare(db: sqlite3.Database, filter: CompareFilter): CompareRow[] { +export function compare( + db: sqlite3.Database, + filter: CompareFilter +): Page { + // Pull all endpoint rows for each branch (limit: 0 = unbounded). The + // outer pagination is on the merged compare rows, not on either side + // individually. 
const a = endpoints(db, { branch: filter.branch_a, since: filter.since, until: filter.until, - }); + limit: 0, + }).rows; const b = endpoints(db, { branch: filter.branch_b, since: filter.since, until: filter.until, - }); + limit: 0, + }).rows; const rows = new Map(); const key = (method: string, route: string) => `${method}\t${route}`; @@ -83,7 +93,11 @@ export function compare(db: sqlite3.Database, filter: CompareFilter): CompareRow const sortKey: CompareSort = filter.sort ?? 'delta'; result.sort(comparators[sortKey]); - return filter.limit !== undefined ? result.slice(0, filter.limit) : result; + const limit = filter.limit ?? DEFAULT_PAGE_LIMIT; + const offset = filter.offset ?? 0; + const total = result.length; + const sliced = limit > 0 ? result.slice(offset, offset + limit) : result.slice(offset); + return { rows: sliced, total, limit, offset }; } // "delta" sorts by absolute deviation from 1× — biggest changes (in diff --git a/packages/cli/src/cmds/query/queries/endpoints.ts b/packages/cli/src/cmds/query/queries/endpoints.ts index 05aa202b24..5105e7ace2 100644 --- a/packages/cli/src/cmds/query/queries/endpoints.ts +++ b/packages/cli/src/cmds/query/queries/endpoints.ts @@ -1,5 +1,6 @@ import sqlite3 from 'better-sqlite3'; +import { Page, paginate } from '../lib/page'; import type { Comparator, NumberFilter } from '../lib/parseFilter'; export interface EndpointRow { @@ -24,6 +25,7 @@ export interface EndpointsFilter { status?: NumberFilter; sort?: EndpointSort; limit?: number; + offset?: number; } // err_pct is fixed at "% of requests with status >= 500" (server errors), @@ -53,7 +55,7 @@ const VALID_OPS = new Set(['=', '>=', '<=', '>', '<']); export function endpoints( db: sqlite3.Database, filter: EndpointsFilter = {} -): EndpointRow[] { +): Page { const where: string[] = []; const params: (string | number)[] = []; @@ -89,7 +91,7 @@ export function endpoints( } const sortColumn = SORT_COLUMNS[sortKey]; - let sql = ` + const sql = ` WITH ranked AS ( SELECT 
h.method AS method, @@ -124,26 +126,25 @@ export function endpoints( ${havingSql} ORDER BY ${sortColumn} DESC NULLS LAST, method, route `; - if (filter.limit !== undefined) { - sql += ' LIMIT ?'; - params.push(filter.limit); - } - const rows = db.prepare(sql).all(...params) as { + const page = paginate<{ method: string; route: string; count: number; avg_ms: number | null; p95_ms: number | null; err_pct: number | null; - }[]; + }>(db, sql, params, { limit: filter.limit, offset: filter.offset }); - return rows.map((r) => ({ - method: r.method, - route: r.route, - count: r.count, - avg_ms: r.avg_ms, - p95_ms: r.p95_ms, - err_pct: r.err_pct ?? 0, - })); + return { + ...page, + rows: page.rows.map((r) => ({ + method: r.method, + route: r.route, + count: r.count, + avg_ms: r.avg_ms, + p95_ms: r.p95_ms, + err_pct: r.err_pct ?? 0, + })), + }; } diff --git a/packages/cli/src/cmds/query/queries/find.ts b/packages/cli/src/cmds/query/queries/find.ts index 06f9bef619..d84f8ebf1d 100644 --- a/packages/cli/src/cmds/query/queries/find.ts +++ b/packages/cli/src/cmds/query/queries/find.ts @@ -1,6 +1,7 @@ import sqlite3 from 'better-sqlite3'; import { projectLogMessage } from '../lib/logMessage'; +import { Page, paginate } from '../lib/page'; import type { NumberFilter } from '../lib/parseFilter'; import { appmapIdScope, @@ -132,26 +133,13 @@ function durationClause(filter: FindFilter, column: string): Clauses { return { where, params }; } -function appendLimitOffset(sql: string, filter: FindFilter, params: (string | number)[]): string { - let result = sql; - if (filter.limit !== undefined) { - result += ' LIMIT ?'; - params.push(filter.limit); - if (filter.offset !== undefined) { - result += ' OFFSET ?'; - params.push(filter.offset); - } - } else if (filter.offset !== undefined) { - // OFFSET without LIMIT: SQLite requires a LIMIT; use -1 (unbounded). 
- result += ' LIMIT -1 OFFSET ?'; - params.push(filter.offset); - } - return result; +function pageOptions(filter: FindFilter): { limit?: number; offset?: number } { + return { limit: filter.limit, offset: filter.offset }; } // --- per-type queries --- -export function findAppmaps(db: sqlite3.Database, filter: FindFilter): FindAppmapRow[] { +export function findAppmaps(db: sqlite3.Database, filter: FindFilter): Page { const a = appmapWhere(filter, 'a'); const h = httpScopeClauses(filter, 'h2'); const requireHttpMatch = h.where.length > 0; @@ -173,7 +161,7 @@ export function findAppmaps(db: sqlite3.Database, filter: FindFilter): FindAppma const params: (string | number)[] = [...h.params, ...a.params]; if (filter.duration) params.push(filter.duration.value); - let sql = ` + const sql = ` SELECT a.id AS appmap_id, a.name AS appmap_name, COALESCE(h.normalized_path, h.path) AS route, @@ -191,19 +179,18 @@ export function findAppmaps(db: sqlite3.Database, filter: FindFilter): FindAppma ${whereSql} ORDER BY a.timestamp, a.name `; - sql = appendLimitOffset(sql, filter, params); - return db.prepare(sql).all(...params) as FindAppmapRow[]; + return paginate(db, sql, params, pageOptions(filter)); } -export function findRequests(db: sqlite3.Database, filter: FindFilter): FindRequestRow[] { +export function findRequests(db: sqlite3.Database, filter: FindFilter): Page { const a = appmapWhere(filter, 'a'); const where: string[] = [...a.where]; const params: (string | number)[] = [...a.params]; if (filter.route) { const route = parseRoute(filter.route); - where.push(`COALESCE(h.normalized_path, h.path) = ?`); - params.push(route.path); + where.push(`COALESCE(h.normalized_path, h.path) LIKE ?`); + params.push(`%${route.path}%`); if (route.method) { where.push(`h.method = ?`); params.push(route.method); @@ -217,7 +204,7 @@ export function findRequests(db: sqlite3.Database, filter: FindFilter): FindRequ where.push(...dur.where); params.push(...dur.params); - let sql = ` + const sql = 
` SELECT a.name AS appmap_name, h.event_id AS event_id, h.method AS method, @@ -230,11 +217,10 @@ export function findRequests(db: sqlite3.Database, filter: FindFilter): FindRequ ${where.length > 0 ? `WHERE ${where.join(' AND ')}` : ''} ORDER BY a.source_path, h.event_id `; - sql = appendLimitOffset(sql, filter, params); - return db.prepare(sql).all(...params) as FindRequestRow[]; + return paginate(db, sql, params, pageOptions(filter)); } -export function findQueries(db: sqlite3.Database, filter: FindFilter): FindQueryRow[] { +export function findQueries(db: sqlite3.Database, filter: FindFilter): Page { const where: string[] = []; const params: (string | number)[] = []; @@ -266,7 +252,7 @@ export function findQueries(db: sqlite3.Database, filter: FindFilter): FindQuery where.push(...dur.where); params.push(...dur.params); - let sql = ` + const sql = ` SELECT a.name AS appmap_name, q.event_id AS event_id, q.elapsed_ms AS elapsed_ms, @@ -278,11 +264,10 @@ export function findQueries(db: sqlite3.Database, filter: FindFilter): FindQuery ${where.length > 0 ? 
`WHERE ${where.join(' AND ')}` : ''} ORDER BY a.source_path, q.event_id `; - sql = appendLimitOffset(sql, filter, params); - return db.prepare(sql).all(...params) as FindQueryRow[]; + return paginate(db, sql, params, pageOptions(filter)); } -export function findCalls(db: sqlite3.Database, filter: FindFilter): FindCallRow[] { +export function findCalls(db: sqlite3.Database, filter: FindFilter): Page { const where: string[] = []; const params: (string | number)[] = []; @@ -304,15 +289,15 @@ export function findCalls(db: sqlite3.Database, filter: FindFilter): FindCallRow } if (filter.label) { where.push( - `fc.code_object_id IN (SELECT l.code_object_id FROM labels l WHERE l.label = ?)` + `fc.code_object_id IN (SELECT l.code_object_id FROM labels l WHERE l.label LIKE ?)` ); - params.push(filter.label); + params.push(`%${filter.label}%`); } const dur = durationClause(filter, 'fc.elapsed_ms'); where.push(...dur.where); params.push(...dur.params); - let sql = ` + const sql = ` SELECT a.name AS appmap_name, fc.event_id AS event_id, co.fqid AS fqid, @@ -329,8 +314,7 @@ export function findCalls(db: sqlite3.Database, filter: FindFilter): FindCallRow ${where.length > 0 ? `WHERE ${where.join(' AND ')}` : ''} ORDER BY a.source_path, fc.event_id `; - sql = appendLimitOffset(sql, filter, params); - return db.prepare(sql).all(...params) as FindCallRow[]; + return paginate(db, sql, params, pageOptions(filter)); } // Log rows: function_calls whose linked code_object has the canonical @@ -340,7 +324,7 @@ export function findCalls(db: sqlite3.Database, filter: FindFilter): FindCallRow // false positives (matching a parameter name, a class name, or a JSON // punctuation byte) are accepted by design and can be tightened in // post-processing. 
-export function findLogs(db: sqlite3.Database, filter: FindFilter): FindLogRow[] { +export function findLogs(db: sqlite3.Database, filter: FindFilter): Page { const where: string[] = [ `fc.code_object_id IN (SELECT l.code_object_id FROM labels l WHERE l.label = 'log')`, ]; @@ -364,7 +348,7 @@ export function findLogs(db: sqlite3.Database, filter: FindFilter): FindLogRow[] params.push(like, like); } - let sql = ` + const sql = ` SELECT a.name AS appmap_name, fc.event_id AS event_id, fc.parent_event_id AS parent_event_id, @@ -379,12 +363,20 @@ export function findLogs(db: sqlite3.Database, filter: FindFilter): FindLogRow[] WHERE ${where.join(' AND ')} ORDER BY a.source_path, fc.event_id `; - sql = appendLimitOffset(sql, filter, params); - const rows = db.prepare(sql).all(...params) as Omit[]; - return rows.map((r) => ({ ...r, message: projectLogMessage(r.parameters_json, r.return_value) })); + const page = paginate>(db, sql, params, pageOptions(filter)); + return { + ...page, + rows: page.rows.map((r) => ({ + ...r, + message: projectLogMessage(r.parameters_json, r.return_value), + })), + }; } -export function findExceptions(db: sqlite3.Database, filter: FindFilter): FindExceptionRow[] { +export function findExceptions( + db: sqlite3.Database, + filter: FindFilter +): Page { const where: string[] = []; const params: (string | number)[] = []; @@ -395,11 +387,11 @@ export function findExceptions(db: sqlite3.Database, filter: FindFilter): FindEx } if (filter.exception) { - where.push(`e.exception_class = ?`); - params.push(filter.exception); + where.push(`e.exception_class LIKE ?`); + params.push(`%${filter.exception}%`); } - let sql = ` + const sql = ` SELECT e.appmap_id AS appmap_id, a.name AS appmap_name, e.event_id AS event_id, @@ -413,8 +405,8 @@ export function findExceptions(db: sqlite3.Database, filter: FindFilter): FindEx ${where.length > 0 ? 
`WHERE ${where.join(' AND ')}` : ''} ORDER BY a.source_path, e.event_id, e.exception_class `; - sql = appendLimitOffset(sql, filter, params); - const rows = db.prepare(sql).all(...params) as FindExceptionRow[]; + const page = paginate(db, sql, params, pageOptions(filter)); + const rows = page.rows; // Enrichment: for each exception, attach the last N log calls in the // same recording with event_id strictly less than the exception's @@ -459,7 +451,7 @@ export function findExceptions(db: sqlite3.Database, filter: FindFilter): FindEx } } - return rows; + return page; } // Dispatcher. @@ -467,7 +459,7 @@ export function find( db: sqlite3.Database, type: FindType, filter: FindFilter -): unknown[] { +): Page { switch (type) { case 'appmaps': return findAppmaps(db, filter); diff --git a/packages/cli/src/cmds/query/queries/hotspots.ts b/packages/cli/src/cmds/query/queries/hotspots.ts index 42cd76177c..2d2bd92d7a 100644 --- a/packages/cli/src/cmds/query/queries/hotspots.ts +++ b/packages/cli/src/cmds/query/queries/hotspots.ts @@ -1,5 +1,6 @@ import sqlite3 from 'better-sqlite3'; +import { Page, paginate } from '../lib/page'; import { appmapIdScope, classFilterClauses, RecordingScope } from '../lib/scope'; export type HotspotType = 'function' | 'sql'; @@ -8,6 +9,7 @@ export interface HotspotsFilter extends RecordingScope { type?: HotspotType; className?: string; // function mode only limit?: number; + offset?: number; } export interface FunctionHotspotRow { @@ -56,7 +58,7 @@ const CHILD_TIME_CTE = ` export function functionHotspots( db: sqlite3.Database, filter: HotspotsFilter -): FunctionHotspotRow[] { +): Page { const where: string[] = []; const params: (string | number)[] = []; @@ -71,7 +73,7 @@ export function functionHotspots( params.push(...c.params); } - let sql = ` + const sql = ` ${CHILD_TIME_CTE} SELECT co.fqid AS fqid, @@ -91,14 +93,16 @@ export function functionHotspots( GROUP BY fc.code_object_id, fc.defined_class, fc.method_id ORDER BY total_ms DESC `; - if 
(filter.limit !== undefined) { - sql += ' LIMIT ?'; - params.push(filter.limit); - } - return db.prepare(sql).all(...params) as FunctionHotspotRow[]; + return paginate(db, sql, params, { + limit: filter.limit, + offset: filter.offset, + }); } -export function sqlHotspots(db: sqlite3.Database, filter: HotspotsFilter): SqlHotspotRow[] { +export function sqlHotspots( + db: sqlite3.Database, + filter: HotspotsFilter +): Page { const where: string[] = []; const params: (string | number)[] = []; @@ -108,7 +112,7 @@ export function sqlHotspots(db: sqlite3.Database, filter: HotspotsFilter): SqlHo params.push(...scope.params); } - let sql = ` + const sql = ` SELECT COUNT(*) AS count, AVG(q.elapsed_ms) AS avg_ms, @@ -119,16 +123,15 @@ export function sqlHotspots(db: sqlite3.Database, filter: HotspotsFilter): SqlHo GROUP BY q.sql_text ORDER BY total_ms DESC `; - if (filter.limit !== undefined) { - sql += ' LIMIT ?'; - params.push(filter.limit); - } - return db.prepare(sql).all(...params) as SqlHotspotRow[]; + return paginate(db, sql, params, { + limit: filter.limit, + offset: filter.offset, + }); } export function hotspots( db: sqlite3.Database, filter: HotspotsFilter -): FunctionHotspotRow[] | SqlHotspotRow[] { +): Page | Page { return filter.type === 'sql' ? sqlHotspots(db, filter) : functionHotspots(db, filter); } diff --git a/packages/cli/src/cmds/query/queries/mcp.ts b/packages/cli/src/cmds/query/queries/mcp.ts index 936f7df89b..fdc69067d7 100644 --- a/packages/cli/src/cmds/query/queries/mcp.ts +++ b/packages/cli/src/cmds/query/queries/mcp.ts @@ -120,16 +120,27 @@ function maybeString(s: unknown): string | undefined { // Common filter shape shared by the find_* tools and the hotspots tools. const COMMON_FILTER_PROPERTIES: Record = { - route: { type: 'string', description: 'e.g. "POST /orders" or "/orders".' }, + route: { + type: 'string', + description: + 'Substring of the request path; optionally prefixed with an HTTP method ("POST /orders"). e.g. 
"orders" matches /orders, /api/orders/:id.', + }, status: { type: 'string', description: 'e.g. "500", ">=500", "<400".' }, duration: { type: 'string', description: 'e.g. ">1s", ">=500ms".' }, - branch: { type: 'string' }, - commit: { type: 'string' }, + branch: { type: 'string', description: 'Exact branch name.' }, + commit: { type: 'string', description: 'Exact commit SHA.' }, since: { type: 'string', description: 'ISO timestamp lower bound.' }, until: { type: 'string', description: 'ISO timestamp upper bound.' }, - appmap: { type: 'string', description: 'Restrict to one recording (name or basename).' }, - limit: { type: 'integer' }, - offset: { type: 'integer' }, + appmap: { + type: 'string', + description: + 'Substring of the recording name OR source_path. Any reasonable word from the basename, test method, route, etc. matches. Case-insensitive.', + }, + limit: { + type: 'integer', + description: 'Default 20. Pass 0 for unbounded. Response includes total count.', + }, + offset: { type: 'integer', description: 'Skip this many rows for pagination.' }, }; // Build a FindFilter from MCP tool args, parsing structured fields. @@ -166,7 +177,7 @@ const TOOLS: ToolImpl[] = [ spec: { name: 'list_endpoints', description: - 'Per-route summary table; the first call when orienting against an unfamiliar query database. Returns: method, route, count, avg_ms, p95_ms, err_pct.', + 'Per-route summary table; the first call when orienting against an unfamiliar query database. Returns Page<{method, route, count, avg_ms, p95_ms, err_pct}> = {rows, total, limit, offset}.', inputSchema: { type: 'object', properties: { @@ -199,16 +210,17 @@ const TOOLS: ToolImpl[] = [ spec: { name: 'function_hotspots', description: - 'Functions ranked by total elapsed time across recordings. Filter by route to scope to a specific entry point or by class to focus on one component. Returns: fqid, defined_class, method_id, path, lineno, calls, total_ms, self_ms. 
path/lineno are one representative call\'s source location — read directly to see the function. fqid examples: "app/Logger#error" (instance), "app/Util.parse" (static), "src/cmds/query/db/openQueryDb.openQueryDb" (module-level), "app/Outer::Inner#method" (nested classes).', + 'Functions ranked by total elapsed time across recordings. Filter by route to scope to a specific entry point or by class (substring match) to focus on one component. Returns Page<{fqid, defined_class, method_id, path, lineno, calls, total_ms, self_ms}> = {rows, total, limit, offset}. path/lineno are one representative call\'s source location — read directly to see the function. fqid examples: "app/Logger#error" (instance), "app/Util.parse" (static), "src/cmds/query/db/openQueryDb.openQueryDb" (module-level), "app/Outer::Inner#method" (nested classes).', inputSchema: { type: 'object', properties: { - route: { type: 'string' }, - class: { type: 'string', description: 'class identifier; accepts short or canonical fqid form.' }, - branch: { type: 'string' }, - since: { type: 'string' }, - until: { type: 'string' }, - limit: { type: 'integer' }, + route: COMMON_FILTER_PROPERTIES.route, + class: { type: 'string', description: 'Substring of class identifier; canonical fqid forms also accepted.' }, + branch: COMMON_FILTER_PROPERTIES.branch, + since: COMMON_FILTER_PROPERTIES.since, + until: COMMON_FILTER_PROPERTIES.until, + limit: COMMON_FILTER_PROPERTIES.limit, + offset: COMMON_FILTER_PROPERTIES.offset, }, }, }, @@ -220,7 +232,8 @@ const TOOLS: ToolImpl[] = [ branch: maybeString(args.branch), since: maybeTime(args.since), until: maybeTime(args.until), - limit: maybeNumber(args.limit) ?? 20, + limit: maybeNumber(args.limit), + offset: maybeNumber(args.offset), }), }, @@ -228,15 +241,16 @@ const TOOLS: ToolImpl[] = [ spec: { name: 'sql_hotspots', description: - 'SQL queries ranked by total elapsed time, deduplicated by text. 
Returns: count, avg_ms, total_ms, sql_text.', + 'SQL queries ranked by total elapsed time, deduplicated by text. Returns Page<{count, avg_ms, total_ms, sql_text}> = {rows, total, limit, offset}.', inputSchema: { type: 'object', properties: { - route: { type: 'string' }, - branch: { type: 'string' }, - since: { type: 'string' }, - until: { type: 'string' }, - limit: { type: 'integer' }, + route: COMMON_FILTER_PROPERTIES.route, + branch: COMMON_FILTER_PROPERTIES.branch, + since: COMMON_FILTER_PROPERTIES.since, + until: COMMON_FILTER_PROPERTIES.until, + limit: COMMON_FILTER_PROPERTIES.limit, + offset: COMMON_FILTER_PROPERTIES.offset, }, }, }, @@ -247,7 +261,8 @@ const TOOLS: ToolImpl[] = [ branch: maybeString(args.branch), since: maybeTime(args.since), until: maybeTime(args.until), - limit: maybeNumber(args.limit) ?? 20, + limit: maybeNumber(args.limit), + offset: maybeNumber(args.offset), }), }, @@ -278,7 +293,7 @@ const TOOLS: ToolImpl[] = [ spec: { name: 'find_recordings', description: - 'Recording-level rows matching filters. Each row is one .appmap.json file with its sample request, branch, and counts. Use to identify which recordings exercised a route, returned a particular status, or were taken on a branch. Returns: appmap_id, appmap_name, route, status_code, elapsed_ms, sql_count, branch, timestamp. Pass appmap_id (numeric) or appmap_name to get_call_tree / find_related.', + 'Recording-level rows matching filters. Each row is one .appmap.json file with its sample request, branch, and counts. Use to identify which recordings exercised a route, returned a particular status, or were taken on a branch. The `appmap` filter is a substring match against name and source_path — pass any reasonable word from the basename, test method, or route. Returns Page<{appmap_id, appmap_name, route, status_code, elapsed_ms, sql_count, branch, timestamp}> = {rows, total, limit, offset}. 
Pass appmap_id (numeric) or appmap_name to get_call_tree / find_related.', inputSchema: { type: 'object', properties: { @@ -302,7 +317,7 @@ const TOOLS: ToolImpl[] = [ spec: { name: 'find_requests', description: - 'Individual HTTP request rows with status, elapsed time, and the recording each came from. Filter by route, status, duration, branch, time window. Returns: appmap_name, event_id, method, route, status_code, elapsed_ms, branch.', + 'Individual HTTP request rows with status, elapsed time, and the recording each came from. Filter by route (substring), status, duration, branch, time window. Returns Page<{appmap_name, event_id, method, route, status_code, elapsed_ms, branch}> = {rows, total, limit, offset}.', inputSchema: { type: 'object', properties: { @@ -319,15 +334,14 @@ const TOOLS: ToolImpl[] = [ }, }, }, - handler: (args, db) => - find(db, 'requests', buildFindFilter(args)) as FindRequestRow[], + handler: (args, db) => find(db, 'requests', buildFindFilter(args)), }, { spec: { name: 'find_queries', description: - 'SQL query rows. Filter by table (matches sql_text substring), caller class/method, duration, route, branch. Use duration:">100ms" to find slow queries; use route to scope to a specific request. Returns: appmap_name, event_id, sql_text, elapsed_ms, caller_class, caller_method.', + 'SQL query rows. Filter by table (substring), caller class/method (substring), duration, route, branch. Use duration:">100ms" to find slow queries; use route to scope to a specific request. Returns Page<{appmap_name, event_id, sql_text, elapsed_ms, caller_class, caller_method}> = {rows, total, limit, offset}.', inputSchema: { type: 'object', properties: { @@ -347,23 +361,24 @@ const TOOLS: ToolImpl[] = [ }, }, }, - handler: (args, db) => find(db, 'queries', buildFindFilter(args)) as FindQueryRow[], + handler: (args, db) => find(db, 'queries', buildFindFilter(args)), }, { spec: { name: 'find_calls', description: - 'Function-call rows. 
Filter by class, method, label (e.g. "log", "security.authorization"), duration. Use --label=log to retrieve application log output, or --label=security.authorization to find authorization checks. Returns: appmap_name, event_id, fqid, defined_class, method_id, path, lineno, elapsed_ms, parameters_json, return_value. parameters_json and return_value are populated only for labeled functions; unlabeled rows return null. Use path:lineno to read the source.', + 'Function-call rows. Filter by class (substring), method (substring), label (substring; e.g. "log", "security.authorization"), duration. Use label="log" to retrieve application log output, or label="security.authorization" to find authorization checks. Returns Page<{appmap_name, event_id, fqid, defined_class, method_id, path, lineno, elapsed_ms, parameters_json, return_value}> = {rows, total, limit, offset}. parameters_json and return_value are populated only for labeled functions; unlabeled rows return null. Use path:lineno to read the source.', inputSchema: { type: 'object', properties: { class: { type: 'string', - description: 'Class identifier; accepts short ("UserRepository") or canonical fqid form ("app/services/UserRepository") or with method ("UserRepository#findById").', + description: + 'Substring of the class identifier; canonical forms ("UserRepository", "app/services/UserRepository", "UserRepository#findById") get exact-or-leaf-class matching, but a partial like "Repo" also matches "UserRepository".', }, - method: { type: 'string' }, - label: { type: 'string', description: 'AppMap label name (exact match).' }, + method: { type: 'string', description: 'Substring of the method name.' }, + label: { type: 'string', description: 'Substring of the label name.' 
}, duration: COMMON_FILTER_PROPERTIES.duration, route: COMMON_FILTER_PROPERTIES.route, status: COMMON_FILTER_PROPERTIES.status, @@ -377,14 +392,14 @@ const TOOLS: ToolImpl[] = [ }, }, }, - handler: (args, db) => find(db, 'calls', buildFindFilter(args)) as FindCallRow[], + handler: (args, db) => find(db, 'calls', buildFindFilter(args)), }, { spec: { name: 'find_logs', description: - 'Application log lines captured from functions labeled `log`. Filter by message substring (matches across the call\'s parameters and return value), logger class, recording, branch, or time window. Returns: appmap_name, event_id, parent_event_id, logger, method_id, path, lineno, message, parameters_json, return_value. `message` is the display-projected log text (extracted from a structured return_value or from the parameter named message/msg, falling back to the first string parameter); use it directly. parameters_json and return_value remain available for the underlying captured values. Use path:lineno to read the call site of the log statement.', + 'Application log lines captured from functions labeled `log`. Filter by message substring (matches across the call\'s parameters and return value), logger class (substring), recording, branch, or time window. Returns Page<{appmap_name, event_id, parent_event_id, logger, method_id, path, lineno, message, parameters_json, return_value}> = {rows, total, limit, offset}. `message` is the display-projected log text (extracted from a structured return_value or from the parameter named message/msg, falling back to the first string parameter); use it directly. parameters_json and return_value remain available for the underlying captured values. 
Use path:lineno to read the call site of the log statement.', inputSchema: { type: 'object', properties: { @@ -408,18 +423,18 @@ const TOOLS: ToolImpl[] = [ }, }, }, - handler: (args, db) => find(db, 'logs', buildFindFilter(args)) as FindLogRow[], + handler: (args, db) => find(db, 'logs', buildFindFilter(args)), }, { spec: { name: 'find_exceptions', description: - 'Exception rows with class, message, source location. Filter by exception class name, the request that owns the exception (via route/status), branch, or time window. Returns: appmap_id, appmap_name, event_id (the throwing call\'s entry id), return_event_id (the throw point in the event stream), exception_class, message, path, lineno. Pass with_logs=N to attach the last N log lines preceding the throw (chronological order) under recent_logs — usually the fastest way to see what the app reported before the failure. recent_logs uses return_event_id as the upper bound, so logs that fired *inside* the throwing call are included.', + 'Exception rows with class, message, source location. Filter by exception class name (substring), the request that owns the exception (via route/status), branch, or time window. Returns Page<{appmap_id, appmap_name, event_id, return_event_id, exception_class, message, path, lineno, recent_logs?}> = {rows, total, limit, offset}. event_id is the throwing call\'s entry id; return_event_id is the throw point in the event stream. Pass with_logs=N to attach the last N log lines preceding the throw (chronological order) under recent_logs — usually the fastest way to see what the app reported before the failure. recent_logs uses return_event_id as the upper bound, so logs that fired *inside* the throwing call are included.', inputSchema: { type: 'object', properties: { - exception: { type: 'string', description: 'Exception class name (exact match).' }, + exception: { type: 'string', description: 'Substring of the exception class name.' 
}, with_logs: { type: 'integer', description: 'Attach up to N preceding log lines per exception under recent_logs (chronological). Each entry has the same shape as a find_logs row.', @@ -436,8 +451,7 @@ const TOOLS: ToolImpl[] = [ }, }, }, - handler: (args, db) => - find(db, 'exceptions', buildFindFilter(args)) as FindExceptionRow[], + handler: (args, db) => find(db, 'exceptions', buildFindFilter(args)), }, // ----- per-recording / cross-recording -------------------------------- @@ -485,7 +499,7 @@ const TOOLS: ToolImpl[] = [ spec: { name: 'find_related', description: - 'Recordings ranked by similarity to a source recording. Score combines: same HTTP route (×5), shared SQL tables (×3 each), shared classes (×2 each). Primary use: pass a failing recording with status:succeeded to find a passing baseline for side-by-side comparison. Returns: appmap_name, score, method, route, status_code, elapsed_ms, shared (string array of contributing signals).', + 'Recordings ranked by similarity to a source recording. Score combines: same HTTP route (×5), shared SQL tables (×3 each), shared classes (×2 each). Primary use: pass a failing recording with status:succeeded to find a passing baseline for side-by-side comparison. Returns Page<{appmap_name, score, method, route, status_code, elapsed_ms, shared}> = {rows, total, limit, offset}. 
shared is a string array of contributing signals.', inputSchema: { type: 'object', properties: { @@ -496,6 +510,7 @@ const TOOLS: ToolImpl[] = [ since: COMMON_FILTER_PROPERTIES.since, until: COMMON_FILTER_PROPERTIES.until, limit: COMMON_FILTER_PROPERTIES.limit, + offset: COMMON_FILTER_PROPERTIES.offset, }, required: ['appmap'], }, @@ -509,6 +524,7 @@ const TOOLS: ToolImpl[] = [ filter.since = maybeTime(args.since); filter.until = maybeTime(args.until); filter.limit = maybeNumber(args.limit); + filter.offset = maybeNumber(args.offset); return related(db, am.name, filter); }, }, @@ -517,16 +533,17 @@ const TOOLS: ToolImpl[] = [ spec: { name: 'compare_branches', description: - 'Per-route p95 latency for two branches with a delta column. Use to surface regressions a feature branch introduces relative to a baseline. Returns: method, route, a_count, a_p95_ms, b_count, b_p95_ms, delta (b_p95/a_p95; null when either side has no measured durations).', + 'Per-route p95 latency for two branches with a delta column. Use to surface regressions a feature branch introduces relative to a baseline. Returns Page<{method, route, a_count, a_p95_ms, b_count, b_p95_ms, delta}> = {rows, total, limit, offset}. delta is b_p95/a_p95; null when either side has no measured durations.', inputSchema: { type: 'object', properties: { branch_a: { type: 'string', description: 'Baseline branch.' }, branch_b: { type: 'string', description: 'Comparison branch.' 
}, - since: { type: 'string' }, - until: { type: 'string' }, + since: COMMON_FILTER_PROPERTIES.since, + until: COMMON_FILTER_PROPERTIES.until, sort: { type: 'string', enum: ['delta', 'p95-a', 'p95-b'] }, - limit: { type: 'integer' }, + limit: COMMON_FILTER_PROPERTIES.limit, + offset: COMMON_FILTER_PROPERTIES.offset, }, required: ['branch_a', 'branch_b'], }, @@ -539,6 +556,7 @@ const TOOLS: ToolImpl[] = [ until: maybeTime(args.until), sort: maybeString(args.sort) as 'delta' | 'p95-a' | 'p95-b' | undefined, limit: maybeNumber(args.limit), + offset: maybeNumber(args.offset), }), }, ]; @@ -576,7 +594,7 @@ const RESOURCE_TEMPLATES: ResourceTemplateImpl[] = [ }, read: (args, db) => { const info = resolveByIdOrRef(db, args.ref); - return find(db, 'logs', { appmap: info.name }) as FindLogRow[]; + return find(db, 'logs', { appmap: info.name, limit: 0 }); }, }, ]; diff --git a/packages/cli/src/cmds/query/queries/related.ts b/packages/cli/src/cmds/query/queries/related.ts index dd6021f761..1e36963acd 100644 --- a/packages/cli/src/cmds/query/queries/related.ts +++ b/packages/cli/src/cmds/query/queries/related.ts @@ -1,5 +1,6 @@ import sqlite3 from 'better-sqlite3'; +import { DEFAULT_PAGE_LIMIT, Page } from '../lib/page'; import { appmapWhere, httpScopeClauses, RecordingScope } from '../lib/scope'; import { resolveAppmap } from './tree'; @@ -40,6 +41,7 @@ export interface RelatedRow { export interface RelatedFilter extends RecordingScope { limit?: number; + offset?: number; } interface AppmapSig { @@ -117,7 +119,7 @@ export function related( db: sqlite3.Database, sourceRef: string, filter: RelatedFilter = {} -): RelatedRow[] { +): Page { const source = resolveAppmap(db, sourceRef); const sourceSig = loadSignature(db, source.id); @@ -190,5 +192,10 @@ export function related( } scored.sort((a, b) => b.score - a.score); - return filter.limit !== undefined ? scored.slice(0, filter.limit) : scored; + + const limit = filter.limit ?? 
DEFAULT_PAGE_LIMIT; + const offset = filter.offset ?? 0; + const total = scored.length; + const sliced = limit > 0 ? scored.slice(offset, offset + limit) : scored.slice(offset); + return { rows: sliced, total, limit, offset }; } diff --git a/packages/cli/src/cmds/query/verbs/compare.ts b/packages/cli/src/cmds/query/verbs/compare.ts index a5a1709248..cc01c96db3 100644 --- a/packages/cli/src/cmds/query/verbs/compare.ts +++ b/packages/cli/src/cmds/query/verbs/compare.ts @@ -5,6 +5,7 @@ import { handleWorkingDirectory } from '../../../lib/handleWorkingDirectory'; import { locateAppMapDir } from '../../../lib/locateAppMapDir'; import { verbose } from '../../../utils'; import { openReadOnly } from '../lib/openReadOnly'; +import { truncationFooter } from '../lib/page'; import { parseTime } from '../lib/parseFilter'; import { compare, @@ -32,7 +33,8 @@ export const builder = (args: yargs.Argv) => { default: 'delta', }) .option('include-counts', { type: 'boolean', default: false }) - .option('limit', { type: 'number' }) + .option('limit', { type: 'number', describe: 'default 20; pass 0 for unbounded' }) + .option('offset', { type: 'number' }) .option('json', { type: 'boolean', default: false }); }; @@ -56,15 +58,18 @@ export const handler = async (argvIn: yargs.ArgumentsCamelCase): Promis if (argv.since) filter.since = parseTime(argv.since); if (argv.until) filter.until = parseTime(argv.until); if (argv.limit !== undefined) filter.limit = argv.limit; + if (argv.offset !== undefined) filter.offset = argv.offset; const db = openReadOnly(appmapDir, argv.queryDb); try { - const rows = compare(db, filter); + const page = compare(db, filter); if (argv.json) { - log(JSON.stringify(rows, null, 2)); + log(JSON.stringify(page, null, 2)); return; } - log(renderCompare(rows, branchA, branchB, !!argv.includeCounts)); + log(renderCompare(page.rows, branchA, branchB, !!argv.includeCounts)); + const footer = truncationFooter(page); + if (footer) log(footer); } finally { db.close(); } diff 
--git a/packages/cli/src/cmds/query/verbs/endpoints.ts b/packages/cli/src/cmds/query/verbs/endpoints.ts index 8352536377..d12bb9d258 100644 --- a/packages/cli/src/cmds/query/verbs/endpoints.ts +++ b/packages/cli/src/cmds/query/verbs/endpoints.ts @@ -5,6 +5,7 @@ import { handleWorkingDirectory } from '../../../lib/handleWorkingDirectory'; import { locateAppMapDir } from '../../../lib/locateAppMapDir'; import { verbose } from '../../../utils'; import { openReadOnly } from '../lib/openReadOnly'; +import { truncationFooter } from '../lib/page'; import { parseStatus, parseTime } from '../lib/parseFilter'; import { endpoints, @@ -34,7 +35,8 @@ export const builder = (args: yargs.Argv) => { choices: ['count', 'avg', 'p95', 'err'] as const, default: 'count', }) - .option('limit', { type: 'number' }) + .option('limit', { type: 'number', describe: 'default 20; pass 0 for unbounded' }) + .option('offset', { type: 'number' }) .option('json', { type: 'boolean', default: false }); }; @@ -55,18 +57,19 @@ export const handler = async (argvIn: yargs.ArgumentsCamelCase): Promis if (argv.branch) filter.branch = argv.branch; if (argv.status) filter.status = parseStatus(argv.status); if (argv.limit !== undefined) filter.limit = argv.limit; + if (argv.offset !== undefined) filter.offset = argv.offset; const db = openReadOnly(appmapDir, argv.queryDb); try { - const rows = endpoints(db, filter); + const page = endpoints(db, filter); if (argv.json) { - log(JSON.stringify(rows, null, 2)); + log(JSON.stringify(page, null, 2)); return; } log( formatTable( ['METHOD', 'ROUTE', 'COUNT', 'AVG', 'P95', 'ERR%'], - rows.map((r) => [ + page.rows.map((r) => [ r.method, r.route, formatCount(r.count), @@ -76,6 +79,8 @@ export const handler = async (argvIn: yargs.ArgumentsCamelCase): Promis ]) ) ); + const footer = truncationFooter(page); + if (footer) log(footer); } finally { db.close(); } diff --git a/packages/cli/src/cmds/query/verbs/find.ts b/packages/cli/src/cmds/query/verbs/find.ts index 
b0f1a9cfe3..6429415e34 100644 --- a/packages/cli/src/cmds/query/verbs/find.ts +++ b/packages/cli/src/cmds/query/verbs/find.ts @@ -5,6 +5,7 @@ import { handleWorkingDirectory } from '../../../lib/handleWorkingDirectory'; import { locateAppMapDir } from '../../../lib/locateAppMapDir'; import { verbose } from '../../../utils'; import { openReadOnly } from '../lib/openReadOnly'; +import { truncationFooter } from '../lib/page'; import { parseDuration, parseStatus, parseTime } from '../lib/parseFilter'; import { parseClassRef } from '../lib/scope'; import { @@ -180,12 +181,14 @@ export const handler = async (argvIn: yargs.ArgumentsCamelCase): Promis const db = openReadOnly(appmapDir, argv.queryDb); try { - const rows = find(db, type, filter); + const page = find(db, type, filter); if (argv.json) { - log(JSON.stringify(rows, null, 2)); + log(JSON.stringify(page, null, 2)); return; } - log(renderTable(type, rows)); + log(renderTable(type, page.rows)); + const footer = truncationFooter(page); + if (footer) log(footer); } finally { db.close(); } diff --git a/packages/cli/src/cmds/query/verbs/hotspots.ts b/packages/cli/src/cmds/query/verbs/hotspots.ts index b1caa3914b..f7f00f7fd7 100644 --- a/packages/cli/src/cmds/query/verbs/hotspots.ts +++ b/packages/cli/src/cmds/query/verbs/hotspots.ts @@ -5,6 +5,7 @@ import { handleWorkingDirectory } from '../../../lib/handleWorkingDirectory'; import { locateAppMapDir } from '../../../lib/locateAppMapDir'; import { verbose } from '../../../utils'; import { openReadOnly } from '../lib/openReadOnly'; +import { truncationFooter } from '../lib/page'; import { parseTime } from '../lib/parseFilter'; import { FunctionHotspotRow, @@ -36,7 +37,8 @@ export const builder = (args: yargs.Argv) => { .option('branch', { type: 'string' }) .option('since', { type: 'string' }) .option('until', { type: 'string' }) - .option('limit', { type: 'number' }) + .option('limit', { type: 'number', describe: 'default 20; pass 0 for unbounded' }) + .option('offset', { 
type: 'number' }) .option('json', { type: 'boolean', default: false }); }; @@ -81,15 +83,22 @@ export const handler = async (argvIn: yargs.ArgumentsCamelCase): Promis if (argv.since) filter.since = parseTime(argv.since); if (argv.until) filter.until = parseTime(argv.until); if (argv.limit !== undefined) filter.limit = argv.limit; + if (argv.offset !== undefined) filter.offset = argv.offset; const db = openReadOnly(appmapDir, argv.queryDb); try { - const rows = hotspots(db, filter); + const page = hotspots(db, filter); if (argv.json) { - log(JSON.stringify(rows, null, 2)); + log(JSON.stringify(page, null, 2)); return; } - log(filter.type === 'sql' ? renderSql(rows as SqlHotspotRow[]) : renderFunctions(rows as FunctionHotspotRow[])); + log( + filter.type === 'sql' + ? renderSql(page.rows as readonly SqlHotspotRow[]) + : renderFunctions(page.rows as readonly FunctionHotspotRow[]) + ); + const footer = truncationFooter(page); + if (footer) log(footer); } finally { db.close(); } diff --git a/packages/cli/src/cmds/query/verbs/related.ts b/packages/cli/src/cmds/query/verbs/related.ts index aeb1d0c3a9..49ae01ba38 100644 --- a/packages/cli/src/cmds/query/verbs/related.ts +++ b/packages/cli/src/cmds/query/verbs/related.ts @@ -5,6 +5,7 @@ import { handleWorkingDirectory } from '../../../lib/handleWorkingDirectory'; import { locateAppMapDir } from '../../../lib/locateAppMapDir'; import { verbose } from '../../../utils'; import { openReadOnly } from '../lib/openReadOnly'; +import { truncationFooter } from '../lib/page'; import { parseStatus, parseTime } from '../lib/parseFilter'; import { related, RelatedFilter, RelatedRow } from '../queries/related'; import { formatCount, formatMs, formatTable } from '../lib/format'; @@ -31,7 +32,8 @@ export const builder = (args: yargs.Argv) => { type: 'string', describe: 'e.g. 
"POST /orders" (path is exact match; method case-insensitive)', }) - .option('limit', { type: 'number' }) + .option('limit', { type: 'number', describe: 'default 20; pass 0 for unbounded' }) + .option('offset', { type: 'number' }) .option('json', { type: 'boolean', default: false }); }; @@ -55,15 +57,18 @@ export const handler = async (argvIn: yargs.ArgumentsCamelCase): Promis if (argv.status) filter.status = parseStatus(argv.status); if (argv.route) filter.route = argv.route; if (argv.limit !== undefined) filter.limit = argv.limit; + if (argv.offset !== undefined) filter.offset = argv.offset; const db = openReadOnly(appmapDir, argv.queryDb); try { - const rows = related(db, ref, filter); + const page = related(db, ref, filter); if (argv.json) { - log(JSON.stringify(rows, null, 2)); + log(JSON.stringify(page, null, 2)); return; } - log(renderRelated(rows)); + log(renderRelated(page.rows)); + const footer = truncationFooter(page); + if (footer) log(footer); } finally { db.close(); } diff --git a/packages/cli/tests/unit/cmds/query/queries/compare.spec.ts b/packages/cli/tests/unit/cmds/query/queries/compare.spec.ts index 1a260621ef..b9a1800be2 100644 --- a/packages/cli/tests/unit/cmds/query/queries/compare.spec.ts +++ b/packages/cli/tests/unit/cmds/query/queries/compare.spec.ts @@ -58,7 +58,7 @@ describe('compare', () => { { branch: 'feat', method: 'GET', path: '/reports', status: 200, elapsed_ms: 6000 }, { branch: 'feat', method: 'GET', path: '/reports', status: 200, elapsed_ms: 6100 }, ]); - const rows = compare(db, { branch_a: 'main', branch_b: 'feat' }); + const rows = compare(db, { branch_a: 'main', branch_b: 'feat' }).rows; expect(rows).toHaveLength(1); const r = rows[0]; expect(r.method).toBe('GET'); @@ -78,7 +78,7 @@ describe('compare', () => { { branch: 'main', method: 'GET', path: '/old', status: 200, elapsed_ms: 100 }, { branch: 'feat', method: 'GET', path: '/new', status: 200, elapsed_ms: 50 }, ]); - const rows = compare(db, { branch_a: 'main', branch_b: 
'feat' }); + const rows = compare(db, { branch_a: 'main', branch_b: 'feat' }).rows; const old = rows.find((r) => r.route === '/old')!; const fresh = rows.find((r) => r.route === '/new')!; expect(old.a_p95_ms).toBe(100); @@ -106,7 +106,7 @@ describe('compare', () => { { branch: 'main', method: 'GET', path: '/c', status: 200, elapsed_ms: 100 }, { branch: 'feat', method: 'GET', path: '/c', status: 200, elapsed_ms: 105 }, ]); - const rows = compare(db, { branch_a: 'main', branch_b: 'feat', sort: 'delta' }); + const rows = compare(db, { branch_a: 'main', branch_b: 'feat', sort: 'delta' }).rows; // /a (10×) and /b (1/5×) have the largest log-delta; /c last. expect(rows[rows.length - 1].route).toBe('/c'); } finally { @@ -124,7 +124,7 @@ describe('compare', () => { { branch: 'feat', method: 'GET', path: '/b', status: 200, elapsed_ms: 100 }, ]); expect( - compare(db, { branch_a: 'main', branch_b: 'feat', limit: 1 }) + compare(db, { branch_a: 'main', branch_b: 'feat', limit: 1 }).rows ).toHaveLength(1); } finally { db.close(); diff --git a/packages/cli/tests/unit/cmds/query/queries/endpoints.spec.ts b/packages/cli/tests/unit/cmds/query/queries/endpoints.spec.ts index 88160bd4a1..f166e69850 100644 --- a/packages/cli/tests/unit/cmds/query/queries/endpoints.spec.ts +++ b/packages/cli/tests/unit/cmds/query/queries/endpoints.spec.ts @@ -56,7 +56,7 @@ describe('endpoints', () => { it('returns an empty array when there are no requests', () => { const db = freshDb(); try { - expect(endpoints(db)).toEqual([]); + expect(endpoints(db).rows).toEqual([]); } finally { db.close(); } @@ -70,7 +70,7 @@ describe('endpoints', () => { { method: 'GET', path: '/x', status: 200, elapsed_ms: 200 }, { method: 'POST', path: '/x', status: 201, elapsed_ms: 150 }, ]); - const out = endpoints(db); + const out = endpoints(db).rows; const get = out.find((r) => r.method === 'GET'); const post = out.find((r) => r.method === 'POST'); expect(get?.count).toBe(2); @@ -89,7 +89,7 @@ describe('endpoints', () => { 
{ method: 'GET', path: '/orders/99', normalized_path: '/orders/:id', status: 200, elapsed_ms: 200 }, { method: 'GET', path: '/raw-only', status: 200, elapsed_ms: 50 }, ]); - const out = endpoints(db); + const out = endpoints(db).rows; expect(out.find((r) => r.route === '/orders/:id')?.count).toBe(2); expect(out.find((r) => r.route === '/raw-only')?.count).toBe(1); } finally { @@ -108,7 +108,7 @@ describe('endpoints', () => { reqs.push({ method: 'GET', path: '/x', status: 500, elapsed_ms: 1000 }); seed(db, reqs); - const row = endpoints(db).find((r) => r.route === '/x')!; + const row = endpoints(db).rows.find((r) => r.route === '/x')!; expect(row.count).toBe(10); expect(row.err_pct).toBeCloseTo(10); expect(row.avg_ms).toBeCloseTo((10 + 20 + 30 + 40 + 50 + 60 + 70 + 80 + 90 + 1000) / 10); @@ -129,7 +129,7 @@ describe('endpoints', () => { { method: 'POST', path: '/orders', status: 201, elapsed_ms: 110 }, { method: 'POST', path: '/orders', status: 500, elapsed_ms: 520 }, ]); - const out = endpoints(db, { status: { op: '>=', value: 500 } }); + const out = endpoints(db, { status: { op: '>=', value: 500 } }).rows; // /quiet has no 5xx → excluded. // /orders has one 5xx → included; count=3, err_pct=33%. 
expect(out).toHaveLength(1); @@ -149,8 +149,8 @@ describe('endpoints', () => { { method: 'GET', path: '/x', status: 200, elapsed_ms: 100, branch: 'main' }, { method: 'GET', path: '/y', status: 200, elapsed_ms: 100, branch: 'feature' }, ]); - expect(endpoints(db, { branch: 'main' })).toHaveLength(1); - expect(endpoints(db, { branch: 'main' })[0].route).toBe('/x'); + expect(endpoints(db, { branch: 'main' }).rows).toHaveLength(1); + expect(endpoints(db, { branch: 'main' }).rows[0].route).toBe('/x'); } finally { db.close(); } @@ -164,12 +164,12 @@ describe('endpoints', () => { { method: 'GET', path: '/x', status: 200, elapsed_ms: 100, timestamp: '2026-04-15T00:00:00.000Z' }, { method: 'GET', path: '/x', status: 200, elapsed_ms: 100, timestamp: '2026-04-30T00:00:00.000Z' }, ]); - expect(endpoints(db, { since: '2026-04-10T00:00:00.000Z' })[0].count).toBe(2); + expect(endpoints(db, { since: '2026-04-10T00:00:00.000Z' }).rows[0].count).toBe(2); expect( endpoints(db, { since: '2026-04-10T00:00:00.000Z', until: '2026-04-20T00:00:00.000Z', - })[0].count + }).rows[0].count ).toBe(1); } finally { db.close(); @@ -186,13 +186,13 @@ describe('endpoints', () => { { method: 'GET', path: '/c', status: 500, elapsed_ms: 20 }, { method: 'GET', path: '/d', status: 200, elapsed_ms: 200 }, ]); - const byCount = endpoints(db, { sort: 'count' }).map((r) => r.route); + const byCount = endpoints(db, { sort: 'count' }).rows.map((r) => r.route); expect(byCount[0]).toBe('/b'); // count 2 - const byErr = endpoints(db, { sort: 'err' }).map((r) => r.route); + const byErr = endpoints(db, { sort: 'err' }).rows.map((r) => r.route); expect(byErr[0]).toBe('/c'); // 100% err - const byAvg = endpoints(db, { sort: 'avg' }).map((r) => r.route); + const byAvg = endpoints(db, { sort: 'avg' }).rows.map((r) => r.route); expect(byAvg[0]).toBe('/d'); // 200ms avg - const byP95 = endpoints(db, { sort: 'p95' }).map((r) => r.route); + const byP95 = endpoints(db, { sort: 'p95' }).rows.map((r) => r.route); 
expect(byP95[0]).toBe('/d'); // 200ms p95 } finally { db.close(); @@ -206,7 +206,7 @@ describe('endpoints', () => { { method: 'GET', path: '/measured', status: 200, elapsed_ms: 0 }, { method: 'GET', path: '/unmeasured', status: 200, elapsed_ms: null }, ]); - const byP95 = endpoints(db, { sort: 'p95' }).map((r) => r.route); + const byP95 = endpoints(db, { sort: 'p95' }).rows.map((r) => r.route); expect(byP95).toEqual(['/measured', '/unmeasured']); } finally { db.close(); @@ -221,7 +221,7 @@ describe('endpoints', () => { { method: 'GET', path: '/b', status: 200, elapsed_ms: 100 }, { method: 'GET', path: '/c', status: 200, elapsed_ms: 100 }, ]); - expect(endpoints(db, { limit: 2 })).toHaveLength(2); + expect(endpoints(db, { limit: 2 }).rows).toHaveLength(2); } finally { db.close(); } @@ -234,7 +234,7 @@ describe('endpoints', () => { { method: 'GET', path: '/x', status: 200, elapsed_ms: 100 }, { method: 'GET', path: '/x', status: 200, elapsed_ms: null }, ]); - const row = endpoints(db)[0]; + const row = endpoints(db).rows[0]; expect(row.count).toBe(2); expect(row.avg_ms).toBe(100); expect(row.p95_ms).toBe(100); diff --git a/packages/cli/tests/unit/cmds/query/queries/find.spec.ts b/packages/cli/tests/unit/cmds/query/queries/find.spec.ts index 8e160cb14e..9966d15515 100644 --- a/packages/cli/tests/unit/cmds/query/queries/find.spec.ts +++ b/packages/cli/tests/unit/cmds/query/queries/find.spec.ts @@ -177,21 +177,21 @@ describe('findRequests', () => { ]); // Method-prefixed route - const r1 = findRequests(db, { route: 'POST /orders' }); + const r1 = findRequests(db, { route: 'POST /orders' }).rows; expect(r1).toHaveLength(2); expect(r1.every((r) => r.method === 'POST' && r.route === '/orders')).toBe(true); // Status filter - const r2 = findRequests(db, { status: { op: '>=', value: 500 } }); + const r2 = findRequests(db, { status: { op: '>=', value: 500 } }).rows; expect(r2).toHaveLength(2); // Duration filter - const r3 = findRequests(db, { duration: { op: '>', value: 550 } 
}); + const r3 = findRequests(db, { duration: { op: '>', value: 550 } }).rows; expect(r3).toHaveLength(1); expect(r3[0].appmap_name).toBe('b'); // Branch filter - const r4 = findRequests(db, { branch: 'feature' }); + const r4 = findRequests(db, { branch: 'feature' }).rows; expect(r4).toHaveLength(1); expect(r4[0].appmap_name).toBe('b'); } finally { @@ -212,8 +212,8 @@ describe('findRequests', () => { ], }, ]); - expect(findRequests(db, { limit: 2 })).toHaveLength(2); - expect(findRequests(db, { limit: 2, offset: 1 })[0].event_id).toBe(2); + expect(findRequests(db, { limit: 2 }).rows).toHaveLength(2); + expect(findRequests(db, { limit: 2, offset: 1 }).rows[0].event_id).toBe(2); } finally { db.close(); } @@ -235,7 +235,7 @@ describe('findAppmaps', () => { }, { name: 'b', branch: 'feature' }, ]); - const rows = findAppmaps(db, {}); + const rows = findAppmaps(db, {}).rows; expect(rows).toHaveLength(2); const a = rows.find((r) => r.appmap_name === 'a')!; expect(a.route).toBe('/x'); // first request by event_id @@ -260,7 +260,7 @@ describe('findAppmaps', () => { ], }, ]); - const rows = findAppmaps(db, { route: 'POST /orders' }); + const rows = findAppmaps(db, { route: 'POST /orders' }).rows; expect(rows).toHaveLength(1); expect(rows[0].elapsed_ms).toBe(100); // event_id=1 wins, not 2 } finally { @@ -281,7 +281,7 @@ describe('findAppmaps', () => { requests: [{ event_id: 1, method: 'GET', path: '/x', status: 200, elapsed_ms: 5000 }], }, ]); - const rows = findAppmaps(db, { duration: { op: '>', value: 1000 } }); + const rows = findAppmaps(db, { duration: { op: '>', value: 1000 } }).rows; expect(rows).toHaveLength(1); expect(rows[0].appmap_name).toBe('slow'); } finally { @@ -302,7 +302,7 @@ describe('findAppmaps', () => { }, { name: 'b', requests: [{ event_id: 1, method: 'GET', path: '/x', status: 200 }] }, ]); - const rows = findAppmaps(db, { route: 'POST /orders', status: { op: '>=', value: 500 } }); + const rows = findAppmaps(db, { route: 'POST /orders', status: { op: 
'>=', value: 500 } }).rows; expect(rows).toHaveLength(1); expect(rows[0].appmap_name).toBe('a'); expect(rows[0].route).toBe('/orders'); @@ -333,7 +333,7 @@ describe('findQueries', () => { queries: [{ event_id: 2, sql: 'INSERT INTO orders (...) VALUES (...)' }], }, ]); - const rows = findQueries(db, { table: 'orders', status: { op: '>=', value: 500 } }); + const rows = findQueries(db, { table: 'orders', status: { op: '>=', value: 500 } }).rows; expect(rows).toHaveLength(1); expect(rows[0].appmap_name).toBe('a'); expect(rows[0].sql_text).toContain('INSERT INTO orders'); @@ -359,7 +359,7 @@ describe('find filters: --commit, --since/--until, --duration', () => { requests: [{ event_id: 1, method: 'GET', path: '/y', status: 200 }], }, ]); - const rows = findRequests(db, { commit: 'abc123' }); + const rows = findRequests(db, { commit: 'abc123' }).rows; expect(rows).toHaveLength(1); expect(rows[0].appmap_name).toBe('a'); } finally { @@ -374,7 +374,7 @@ describe('find filters: --commit, --since/--until, --duration', () => { { name: 'a', commit: 'abc' }, { name: 'b', commit: 'def' }, ]); - expect(findAppmaps(db, { commit: 'abc' })).toHaveLength(1); + expect(findAppmaps(db, { commit: 'abc' }).rows).toHaveLength(1); } finally { db.close(); } @@ -403,7 +403,7 @@ describe('find filters: --commit, --since/--until, --duration', () => { const rows = findRequests(db, { since: '2026-04-10T00:00:00.000Z', until: '2026-04-20T00:00:00.000Z', - }); + }).rows; expect(rows).toHaveLength(1); expect(rows[0].appmap_name).toBe('b'); } finally { @@ -426,7 +426,7 @@ describe('find filters: --commit, --since/--until, --duration', () => { calls: [{ event_id: 1, defined_class: 'X', method_id: 'm' }], }, ]); - const rows = findCalls(db, { since: '2026-04-15T00:00:00.000Z' }); + const rows = findCalls(db, { since: '2026-04-15T00:00:00.000Z' }).rows; expect(rows).toHaveLength(1); expect(rows[0].appmap_name).toBe('new'); } finally { @@ -446,7 +446,7 @@ describe('find filters: --commit, --since/--until, 
--duration', () => { ], }, ]); - const rows = findCalls(db, { duration: { op: '>', value: 100 } }); + const rows = findCalls(db, { duration: { op: '>', value: 100 } }).rows; expect(rows).toHaveLength(1); expect(rows[0].method_id).toBe('slow'); } finally { @@ -466,7 +466,7 @@ describe('find filters: --commit, --since/--until, --duration', () => { ], }, ]); - const rows = findQueries(db, { duration: { op: '>=', value: 10 } }); + const rows = findQueries(db, { duration: { op: '>=', value: 10 } }).rows; expect(rows).toHaveLength(1); expect(rows[0].sql_text).toBe('SELECT 2'); } finally { @@ -505,13 +505,13 @@ describe('find filters: --commit, --since/--until, --duration', () => { ]); // Class part is read from code_objects (UserRepository), not from // the WrongClassName caller_class string. - expect(findQueries(db, { className: 'UserRepository' })).toHaveLength(1); + expect(findQueries(db, { className: 'UserRepository' }).rows).toHaveLength(1); // Full chain match also works. - expect(findQueries(db, { className: 'org/example/UserRepository' })).toHaveLength( + expect(findQueries(db, { className: 'org/example/UserRepository' }).rows).toHaveLength( 1 ); // Misspelled — no match. 
- expect(findQueries(db, { className: 'OtherRepository' })).toHaveLength(0); + expect(findQueries(db, { className: 'OtherRepository' }).rows).toHaveLength(0); } finally { db.close(); } @@ -546,13 +546,13 @@ describe('find filters: --commit, --since/--until, --duration', () => { }, ]); // Java dot-suffix - expect(findQueries(db, { className: 'UserRepository' })).toHaveLength(1); + expect(findQueries(db, { className: 'UserRepository' }).rows).toHaveLength(1); // Ruby ::-suffix - expect(findQueries(db, { className: 'Cipher' })).toHaveLength(1); + expect(findQueries(db, { className: 'Cipher' }).rows).toHaveLength(1); // Exact match also works - expect(findQueries(db, { className: 'OpenSSL::Cipher' })).toHaveLength(1); + expect(findQueries(db, { className: 'OpenSSL::Cipher' }).rows).toHaveLength(1); // Top-level - expect(findQueries(db, { className: 'Other' })).toHaveLength(1); + expect(findQueries(db, { className: 'Other' }).rows).toHaveLength(1); } finally { db.close(); } @@ -582,7 +582,7 @@ describe('findCalls', () => { className: 'IdempotencyKey', route: 'POST /orders', status: { op: '>=', value: 500 }, - }); + }).rows; expect(rows).toHaveLength(1); expect(rows[0].appmap_name).toBe('a'); expect(rows[0].fqid).toBe('app/IdempotencyKey.generate'); @@ -603,7 +603,7 @@ describe('findCalls', () => { ], }, ]); - const rows = findCalls(db, { label: 'log' }); + const rows = findCalls(db, { label: 'log' }).rows; expect(rows).toHaveLength(1); expect(rows[0].defined_class).toBe('Logger'); } finally { @@ -631,7 +631,7 @@ describe('findLogs', () => { ], }, ]); - const rows = findLogs(db, {}); + const rows = findLogs(db, {}).rows; expect(rows).toHaveLength(1); expect(rows[0].logger).toBe('Logger'); expect(rows[0].method_id).toBe('info'); @@ -664,7 +664,7 @@ describe('findLogs', () => { ], }, ]); - const rows = findLogs(db, { message: 'refused' }); + const rows = findLogs(db, { message: 'refused' }).rows; expect(rows).toHaveLength(1); expect(rows[0].event_id).toBe(1); } finally { 
@@ -697,7 +697,7 @@ describe('findLogs', () => { ], }, ]); - const rows = findLogs(db, { message: 'refused' }); + const rows = findLogs(db, { message: 'refused' }).rows; expect(rows).toHaveLength(1); expect(rows[0].event_id).toBe(1); } finally { @@ -730,7 +730,7 @@ describe('findLogs', () => { }, ]); // Suffix-aware short-form match: "AppLogger" hits "app.AppLogger". - const rows = findLogs(db, { logger: 'AppLogger' }); + const rows = findLogs(db, { logger: 'AppLogger' }).rows; expect(rows).toHaveLength(1); expect(rows[0].event_id).toBe(1); } finally { @@ -769,7 +769,7 @@ describe('findLogs', () => { ], }, ]); - const rows = findLogs(db, { message: 'refused', branch: 'feature' }); + const rows = findLogs(db, { message: 'refused', branch: 'feature' }).rows; expect(rows).toHaveLength(1); expect(rows[0].appmap_name).toBe('b'); } finally { @@ -797,7 +797,7 @@ describe('findLogs', () => { ], }, ]); - const rows = findLogs(db, { message: 'message' }); + const rows = findLogs(db, { message: 'message' }).rows; expect(rows).toHaveLength(1); } finally { db.close(); @@ -823,7 +823,7 @@ describe('findCalls --class / --method (fqid-aware)', () => { }, ]); // Canonical V3 fqid prefix (slash form, sans method) - const rows = findCalls(db, { className: 'org/example/UserRepository' }); + const rows = findCalls(db, { className: 'org/example/UserRepository' }).rows; expect(rows).toHaveLength(1); } finally { db.close(); @@ -852,7 +852,7 @@ describe('findCalls --class / --method (fqid-aware)', () => { ], }, ]); - const rows = findCalls(db, { className: 'UserRepository' }); + const rows = findCalls(db, { className: 'UserRepository' }).rows; expect(rows).toHaveLength(1); expect(rows[0].method_id).toBe('findById'); } finally { @@ -873,7 +873,7 @@ describe('findCalls --class / --method (fqid-aware)', () => { VALUES (?, 1, 'org.example.UserRepository', 'findById')` ).run(am.lastInsertRowid); - const rows = findCalls(db, { className: 'UserRepository' }); + const rows = findCalls(db, { 
className: 'UserRepository' }).rows; expect(rows).toHaveLength(1); } finally { db.close(); @@ -908,7 +908,7 @@ describe('findCalls --class / --method (fqid-aware)', () => { ], }, ]); - const rows = findCalls(db, { method: 'findById' }); + const rows = findCalls(db, { method: 'findById' }).rows; expect(rows).toHaveLength(2); expect(rows.every((r) => r.method_id === 'findById')).toBe(true); } finally { @@ -933,9 +933,9 @@ describe('findExceptions', () => { exceptions: [{ event_id: 2, exception_class: 'RecordNotFound' }], }, ]); - expect(findExceptions(db, { exception: 'IntegrityError' })).toHaveLength(1); - expect(findExceptions(db, { route: 'POST /orders' })).toHaveLength(1); - expect(findExceptions(db, { route: 'POST /orders' })[0].appmap_name).toBe('a'); + expect(findExceptions(db, { exception: 'IntegrityError' }).rows).toHaveLength(1); + expect(findExceptions(db, { route: 'POST /orders' }).rows).toHaveLength(1); + expect(findExceptions(db, { route: 'POST /orders' }).rows[0].appmap_name).toBe('a'); } finally { db.close(); } @@ -974,7 +974,7 @@ describe('findExceptions', () => { exceptions: [{ event_id: 4, exception_class: 'IOError', message: 'broken pipe' }], }, ]); - const rows = findExceptions(db, { withLogs: 2 }); + const rows = findExceptions(db, { withLogs: 2 }).rows; expect(rows).toHaveLength(1); expect(rows[0].recent_logs).toBeDefined(); // Last 2 in chronological order: the warn at event 2, then error at event 3. 
@@ -1005,7 +1005,7 @@ describe('findExceptions', () => { exceptions: [{ event_id: 2, exception_class: 'IOError' }], }, ]); - const rows = findExceptions(db, {}); + const rows = findExceptions(db, {}).rows; expect(rows[0].recent_logs).toBeUndefined(); } finally { db.close(); @@ -1022,7 +1022,7 @@ describe('findExceptions', () => { exceptions: [{ event_id: 1, exception_class: 'IOError' }], }, ]); - const rows = findExceptions(db, { withLogs: 5 }); + const rows = findExceptions(db, { withLogs: 5 }).rows; expect(rows[0].recent_logs).toEqual([]); } finally { db.close(); @@ -1060,7 +1060,7 @@ describe('findExceptions', () => { exceptions: [{ event_id: 2, exception_class: 'IOError' }], }, ]); - const rows = findExceptions(db, { withLogs: 5 }); + const rows = findExceptions(db, { withLogs: 5 }).rows; expect(rows).toHaveLength(2); // Each exception's recent_logs is scoped to its own recording. for (const row of rows) { diff --git a/packages/cli/tests/unit/cmds/query/queries/hotspots.spec.ts b/packages/cli/tests/unit/cmds/query/queries/hotspots.spec.ts index 47d9732524..ad91e0b7bd 100644 --- a/packages/cli/tests/unit/cmds/query/queries/hotspots.spec.ts +++ b/packages/cli/tests/unit/cmds/query/queries/hotspots.spec.ts @@ -84,7 +84,7 @@ describe('functionHotspots', () => { { event_id: 3, defined_class: 'Y', method_id: 'slow', fqid: 'app/Y#slow', elapsed_ms: 100 }, ], }); - const rows = functionHotspots(db, {}); + const rows = functionHotspots(db, {}).rows; expect(rows[0].fqid).toBe('app/Y#slow'); expect(rows[0].calls).toBe(1); expect(rows[0].total_ms).toBe(100); @@ -110,10 +110,10 @@ describe('functionHotspots', () => { ], queries: [{ event_id: 4, parent_event_id: 1, sql: 'SELECT 1', elapsed_ms: 1 }], }); - const outer = functionHotspots(db, {}).find((r) => r.fqid === 'app/X#outer')!; + const outer = functionHotspots(db, {}).rows.find((r) => r.fqid === 'app/X#outer')!; expect(outer.total_ms).toBe(10); expect(outer.self_ms).toBe(2); - const inner1 = functionHotspots(db, 
{}).find((r) => r.fqid === 'app/X#inner1')!; + const inner1 = functionHotspots(db, {}).rows.find((r) => r.fqid === 'app/X#inner1')!; expect(inner1.self_ms).toBe(3); // leaf — self equals total } finally { db.close(); @@ -131,7 +131,7 @@ describe('functionHotspots', () => { name: 'b', calls: [{ event_id: 1, defined_class: 'X', method_id: 'm', fqid: 'app/X#m', elapsed_ms: 20 }], }); - const rows = functionHotspots(db, {}); + const rows = functionHotspots(db, {}).rows; expect(rows).toHaveLength(1); expect(rows[0].calls).toBe(2); expect(rows[0].total_ms).toBe(30); @@ -153,7 +153,7 @@ describe('functionHotspots', () => { request: { event_id: 0, method: 'POST', path: '/orders', status: 200 }, calls: [{ event_id: 1, defined_class: 'O', method_id: 'create', fqid: 'app/O#create', elapsed_ms: 50 }], }); - const rows = functionHotspots(db, { route: 'GET /reports' }); + const rows = functionHotspots(db, { route: 'GET /reports' }).rows; expect(rows).toHaveLength(1); expect(rows[0].fqid).toBe('app/R#calc'); } finally { @@ -172,7 +172,7 @@ describe('functionHotspots', () => { { event_id: 3, defined_class: 'UsersController', method_id: 'show', elapsed_ms: 200 }, ], }); - const rows = functionHotspots(db, { className: 'OrdersController' }); + const rows = functionHotspots(db, { className: 'OrdersController' }).rows; expect(rows.map((r) => r.method_id).sort()).toEqual(['create', 'index']); } finally { db.close(); @@ -201,7 +201,7 @@ describe('functionHotspots', () => { }, ], }); - const rows = functionHotspots(db, { className: 'UserRepository' }); + const rows = functionHotspots(db, { className: 'UserRepository' }).rows; expect(rows).toHaveLength(1); expect(rows[0].fqid).toBe('org/example/UserRepository#findById'); } finally { @@ -220,7 +220,7 @@ describe('functionHotspots', () => { { event_id: 3, defined_class: 'X', method_id: 'c', elapsed_ms: 25 }, ], }); - expect(functionHotspots(db, { limit: 2 })).toHaveLength(2); + expect(functionHotspots(db, { limit: 2 
}).rows).toHaveLength(2); } finally { db.close(); } @@ -239,7 +239,7 @@ describe('sqlHotspots', () => { { event_id: 3, sql: 'SELECT * FROM tenants WHERE slug = ?', elapsed_ms: 80 }, ], }); - const rows = sqlHotspots(db, {}); + const rows = sqlHotspots(db, {}).rows; expect(rows[0].sql_text).toBe('SELECT * FROM tenants WHERE slug = ?'); expect(rows[0].count).toBe(1); expect(rows[0].avg_ms).toBeCloseTo(80); @@ -271,11 +271,11 @@ describe('sqlHotspots', () => { `INSERT INTO sql_queries (appmap_id, event_id, sql_text, elapsed_ms) VALUES (?, 1, 'SELECT b', 2)` ).run(newId); - const since = sqlHotspots(db, { since: '2026-04-15T00:00:00.000Z' }); + const since = sqlHotspots(db, { since: '2026-04-15T00:00:00.000Z' }).rows; expect(since).toHaveLength(1); expect(since[0].sql_text).toBe('SELECT b'); - const until = sqlHotspots(db, { until: '2026-04-15T00:00:00.000Z' }); + const until = sqlHotspots(db, { until: '2026-04-15T00:00:00.000Z' }).rows; expect(until).toHaveLength(1); expect(until[0].sql_text).toBe('SELECT a'); } finally { @@ -296,7 +296,7 @@ describe('sqlHotspots', () => { branch: 'feature', queries: [{ event_id: 1, sql: 'SELECT 2', elapsed_ms: 2 }], }); - const main = sqlHotspots(db, { branch: 'main' }); + const main = sqlHotspots(db, { branch: 'main' }).rows; expect(main).toHaveLength(1); expect(main[0].sql_text).toBe('SELECT 1'); } finally { diff --git a/packages/cli/tests/unit/cmds/query/queries/mcp.spec.ts b/packages/cli/tests/unit/cmds/query/queries/mcp.spec.ts index cb13373732..b0821c7419 100644 --- a/packages/cli/tests/unit/cmds/query/queries/mcp.spec.ts +++ b/packages/cli/tests/unit/cmds/query/queries/mcp.spec.ts @@ -165,8 +165,9 @@ describe('MCP handler', () => { expect(Array.isArray(content)).toBe(true); expect(content[0].type).toBe('text'); const parsed = JSON.parse(content[0].text); - expect(parsed).toHaveLength(1); - expect(parsed[0].exception_class).toBe('IntegrityError'); + expect(parsed.rows).toHaveLength(1); + expect(parsed.total).toBe(1); + 
expect(parsed.rows[0].exception_class).toBe('IntegrityError'); } finally { db.close(); } @@ -250,10 +251,10 @@ describe('MCP handler', () => { params: { name: 'find_logs', arguments: {} }, }); const allRows = JSON.parse((all!.result as any).content[0].text); - expect(allRows).toHaveLength(1); - expect(allRows[0].logger).toBe('Logger'); - expect(allRows[0].method_id).toBe('error'); - expect(allRows[0].parameters_json).toContain('connection refused'); + expect(allRows.rows).toHaveLength(1); + expect(allRows.rows[0].logger).toBe('Logger'); + expect(allRows.rows[0].method_id).toBe('error'); + expect(allRows.rows[0].parameters_json).toContain('connection refused'); // Substring filter against parameters_json. const matched = call(handler, { @@ -262,7 +263,7 @@ describe('MCP handler', () => { method: 'tools/call', params: { name: 'find_logs', arguments: { message: 'refused' } }, }); - expect(JSON.parse((matched!.result as any).content[0].text)).toHaveLength(1); + expect(JSON.parse((matched!.result as any).content[0].text).rows).toHaveLength(1); // Substring that doesn't appear: zero rows. const empty = call(handler, { @@ -271,7 +272,7 @@ describe('MCP handler', () => { method: 'tools/call', params: { name: 'find_logs', arguments: { message: 'this never appears' } }, }); - expect(JSON.parse((empty!.result as any).content[0].text)).toHaveLength(0); + expect(JSON.parse((empty!.result as any).content[0].text).rows).toHaveLength(0); } finally { db.close(); } @@ -299,9 +300,9 @@ describe('MCP handler', () => { params: { name: 'find_exceptions', arguments: {} }, }); const noLogsRows = JSON.parse((noLogs!.result as any).content[0].text); - expect(noLogsRows[0].recent_logs).toBeUndefined(); + expect(noLogsRows.rows[0].recent_logs).toBeUndefined(); // appmap_id is now exposed. 
- expect(typeof noLogsRows[0].appmap_id).toBe('number'); + expect(typeof noLogsRows.rows[0].appmap_id).toBe('number'); // with_logs=5: recent_logs is present and non-empty (the seed has // a log call at event 2, exception at event 2 — the log shares the @@ -314,7 +315,7 @@ describe('MCP handler', () => { params: { name: 'find_exceptions', arguments: { with_logs: 5 } }, }); const withLogsRows = JSON.parse((withLogsRes!.result as any).content[0].text); - expect(Array.isArray(withLogsRows[0].recent_logs)).toBe(true); + expect(Array.isArray(withLogsRows.rows[0].recent_logs)).toBe(true); } finally { db.close(); } @@ -330,11 +331,11 @@ describe('MCP handler', () => { method: 'tools/call', params: { name: 'find_logs', arguments: {} }, }); - const rows = JSON.parse((r!.result as any).content[0].text); - expect(rows).toHaveLength(1); - expect(rows[0].message).toBe('connection refused'); - expect(rows[0].logger).toBe('Logger'); - expect(rows[0].parameters_json).toContain('connection refused'); + const page = JSON.parse((r!.result as any).content[0].text); + expect(page.rows).toHaveLength(1); + expect(page.rows[0].message).toBe('connection refused'); + expect(page.rows[0].logger).toBe('Logger'); + expect(page.rows[0].parameters_json).toContain('connection refused'); } finally { db.close(); } @@ -350,16 +351,16 @@ describe('MCP handler', () => { method: 'tools/call', params: { name: 'find_exceptions', arguments: { with_logs: 5 } }, }); - const rows = JSON.parse((r!.result as any).content[0].text); - expect(rows).toHaveLength(1); - expect(rows[0].return_event_id).toBe(4); + const page = JSON.parse((r!.result as any).content[0].text); + expect(page.rows).toHaveLength(1); + expect(page.rows[0].return_event_id).toBe(4); // Pre-fix the with_logs SQL filtered by `event_id < exception.event_id` // (=2), which excluded the Logger.error log call at event_id=2 entirely. 
// With return_event_id (=4) as the upper bound, the log call (event 2) // is included — that's the regression we're guarding against. - expect(rows[0].recent_logs).toHaveLength(1); - expect(rows[0].recent_logs[0].event_id).toBe(2); - expect(rows[0].recent_logs[0].message).toBe('connection refused'); + expect(page.rows[0].recent_logs).toHaveLength(1); + expect(page.rows[0].recent_logs[0].event_id).toBe(2); + expect(page.rows[0].recent_logs[0].message).toBe('connection refused'); } finally { db.close(); } @@ -375,9 +376,9 @@ describe('MCP handler', () => { method: 'tools/call', params: { name: 'find_calls', arguments: { label: 'log' } }, }); - const rows = JSON.parse((r!.result as any).content[0].text); - expect(rows).toHaveLength(1); - expect(rows[0].method_id).toBe('error'); + const page = JSON.parse((r!.result as any).content[0].text); + expect(page.rows).toHaveLength(1); + expect(page.rows[0].method_id).toBe('error'); } finally { db.close(); } @@ -395,8 +396,8 @@ describe('MCP handler', () => { method: 'tools/call', params: { name: 'list_endpoints', arguments: {} }, }); - const epRows = JSON.parse((ep!.result as any).content[0].text); - expect(epRows[0].route).toBe('/orders'); + const epPage = JSON.parse((ep!.result as any).content[0].text); + expect(epPage.rows[0].route).toBe('/orders'); const fh = call(handler, { jsonrpc: '2.0', @@ -404,7 +405,7 @@ describe('MCP handler', () => { method: 'tools/call', params: { name: 'function_hotspots', arguments: { limit: 5 } }, }); - expect(JSON.parse((fh!.result as any).content[0].text).length).toBeGreaterThan(0); + expect(JSON.parse((fh!.result as any).content[0].text).rows.length).toBeGreaterThan(0); const sh = call(handler, { jsonrpc: '2.0', @@ -412,7 +413,7 @@ describe('MCP handler', () => { method: 'tools/call', params: { name: 'sql_hotspots', arguments: { limit: 5 } }, }); - expect(JSON.parse((sh!.result as any).content[0].text).length).toBeGreaterThan(0); + expect(JSON.parse((sh!.result as 
any).content[0].text).rows.length).toBeGreaterThan(0); } finally { db.close(); } @@ -431,7 +432,7 @@ describe('MCP handler', () => { const contents = (r!.result as any).contents; expect(contents[0].uri).toBe('appmap://endpoints'); const parsed = JSON.parse(contents[0].text); - expect(parsed[0].route).toBe('/orders'); + expect(parsed.rows[0].route).toBe('/orders'); } finally { db.close(); } @@ -481,10 +482,10 @@ describe('MCP handler', () => { }); const contents = (r!.result as any).contents; expect(contents[0].uri).toBe('appmap://recording/rec/logs'); - const rows = JSON.parse(contents[0].text); - expect(rows).toHaveLength(1); - expect(rows[0].logger).toBe('Logger'); - expect(rows[0].method_id).toBe('error'); + const page = JSON.parse(contents[0].text); + expect(page.rows).toHaveLength(1); + expect(page.rows[0].logger).toBe('Logger'); + expect(page.rows[0].method_id).toBe('error'); } finally { db.close(); } diff --git a/packages/cli/tests/unit/cmds/query/queries/related.spec.ts b/packages/cli/tests/unit/cmds/query/queries/related.spec.ts index 017fa441a0..19e330de1b 100644 --- a/packages/cli/tests/unit/cmds/query/queries/related.spec.ts +++ b/packages/cli/tests/unit/cmds/query/queries/related.spec.ts @@ -111,7 +111,7 @@ describe('related', () => { classes: ['HealthController'], }, ]); - const rows = related(db, 'source'); + const rows = related(db, 'source').rows; expect(rows.find((r) => r.appmap_name === 'source')).toBeUndefined(); expect(rows.find((r) => r.appmap_name === 'unrelated')).toBeUndefined(); const best = rows.find((r) => r.appmap_name === 'best')!; @@ -154,7 +154,7 @@ describe('related', () => { classes: ['Foo'], }, ]); - const rows = related(db, 'src', { branch: 'main' }); + const rows = related(db, 'src', { branch: 'main' }).rows; expect(rows.map((r) => r.appmap_name)).toEqual(['main_match']); } finally { db.close(); @@ -181,7 +181,7 @@ describe('related', () => { classes: ['Foo'], }, ]); - const rows = related(db, 'src', { status: { op: '<', value: 
400 } }); + const rows = related(db, 'src', { status: { op: '<', value: 400 } }).rows; expect(rows.map((r) => r.appmap_name)).toEqual(['succeeded']); } finally { db.close(); @@ -197,7 +197,7 @@ describe('related', () => { { name: 'b', request: { method: 'GET', path: '/x', status: 200 }, classes: ['A'] }, { name: 'c', request: { method: 'GET', path: '/x', status: 200 }, classes: ['B'] }, ]); - expect(related(db, 'src', { limit: 2 })).toHaveLength(2); + expect(related(db, 'src', { limit: 2 }).rows).toHaveLength(2); } finally { db.close(); }