diff --git a/CLAUDE.md b/CLAUDE.md new file mode 100644 index 0000000000..ad1b28229a --- /dev/null +++ b/CLAUDE.md @@ -0,0 +1,39 @@ +# Verifying changes + +This is a Yarn 3 monorepo (`packages/*`). CI rejects pushes for both lint errors and tsc errors, with several-minute round-trip costs per CI run. Verify locally before committing. + +## When to run verify + +After any **substantial batch of changes** — multiple files touched, new functions added, tests added, refactors — run `yarn verify` from the repo root before reporting work complete or asking to commit. A one-line typo fix doesn't need it; a multi-file change does. + +## How to run verify + +```sh +yarn verify # check working-tree changes (staged + unstaged + untracked) +yarn verify:staged # check only staged changes +``` + +`scripts/verify.mjs`: + +1. Reads modified files from git. +2. Groups them by `packages//`. +3. For each affected package: runs ESLint (`--quiet`, errors only) on the changed lintable files, then `tsc --noEmit` on the whole package (since TS is project-wide, you can't typecheck a single file). + +Typical run on one package: ~5–7s. CI's full lint+typecheck takes ~30s; scoped is ~3× faster. + +## Adding verify to a package + +If you touch a package that doesn't yet participate, add a `typecheck` script (`tsc --noEmit`) to its `package.json` so `verify.mjs` includes it. The existing `lint` script is enough for ESLint coverage. + +## What verify catches + +- ESLint errors that CI rejects: `array-type` (use `T[]` not `Array`), `no-unnecessary-type-assertion`, `prefer-function-type`, `prefer-optional-chain`, etc. Warnings (e.g. `no-unsafe-*`, `prefer-nullish-coalescing`) are suppressed by `--quiet`. +- Type errors that `tsc --noEmit` finds, including yargs `CommandModule` assignability issues that require widening exported handler argv types. + +# Driving the MCP after MCP-side changes + +The `appmap query mcp` server lives in `built/cli.js`. 
If you change anything under `packages/cli/src/cmds/query/queries/mcp.ts` (or anywhere it transitively imports), you must run `npx tsc` (or `yarn build`) inside `packages/cli` before launching `mcp` for ad-hoc testing. A stale binary will respond to `tools/list` with the old surface — symptom is usually `unknown tool: …` from a client driving a tool the source defines. + +# Recording with appmap-node from a monorepo + +`npx appmap-node@latest npx jest …` invoked from the repo root can fail to parse `.ts` test files with a babel SyntaxError, because the inner jest doesn't pick up `packages//jest.config.js`'s `ts-jest` preset. Run from the package directory whose preset matters — e.g. `cd packages/cli && npx appmap-node@latest npx jest …`. diff --git a/package.json b/package.json index 8b0e76759d..92d534e07b 100644 --- a/package.json +++ b/package.json @@ -6,6 +6,8 @@ ], "scripts": { "lint": "yarn workspaces foreach --exclude root -v run lint", + "verify": "node scripts/verify.mjs", + "verify:staged": "node scripts/verify.mjs --staged", "test": "yarn workspaces foreach --exclude '{root}' -v run test", "build": "yarn workspaces foreach -t --exclude root -v run build", "build-native": "yarn workspaces foreach -t --exclude root -v run build-native", diff --git a/packages/cli/package.json b/packages/cli/package.json index 62270c6a22..d23acb232a 100644 --- a/packages/cli/package.json +++ b/packages/cli/package.json @@ -16,6 +16,8 @@ "scripts": { "lint": "eslint src tests", "lint:fix": "eslint src tests --fix", + "typecheck": "tsc --noEmit", + "verify": "yarn lint && yarn typecheck", "pre-commit": "lint-staged", "test": "jest --filter=./tests/testFilter.js", "test:binary": "jest -c tests/binary/jest.config.js", diff --git a/packages/cli/scripts/demo-query.sh b/packages/cli/scripts/demo-query.sh new file mode 100755 index 0000000000..6c1d189631 --- /dev/null +++ b/packages/cli/scripts/demo-query.sh @@ -0,0 +1,103 @@ +#!/usr/bin/env bash +# +# Quick demo of the `appmap 
query` verbs against a fixture set. +# +# Usage: +# ./scripts/demo-query.sh # uses appmap-apm fixtures if present, +# # else bundled ruby fixtures +# ./scripts/demo-query.sh /path/to/your/appmaps # any directory of *.appmap.json files +# +# Side effects: copies the fixture set to a temp dir, builds and imports a +# query.db there, leaves the originals untouched. Cleans up on exit. + +set -euo pipefail + +cd "$(dirname "$0")/.." # → packages/cli + +# Pick the richest fixture set available. +DEFAULT="$HOME/source/appland/appmap-apm/tests/fixtures/tmp/appmap" +[ -d "$DEFAULT" ] || DEFAULT="$(pwd)/tests/unit/fixtures/ruby" +SRC="${1:-$DEFAULT}" +[ -d "$SRC" ] || { echo "fixture dir not found: $SRC" >&2; exit 2; } + +# Temp work area: copy the fixtures so `appmap index` can write fingerprint +# sidecars without touching the originals. +TMP="$(mktemp -d -t appmap-demo)" +DATA="$TMP/data" +DB="$TMP/query.db" +mkdir -p "$DATA" +cp -r "$SRC"/. "$DATA"/ +export NODE_NO_WARNINGS=1 +trap 'rm -rf "$TMP"' EXIT + +CLI=( node "$(pwd)/built/cli.js" ) + +echo "Building CLI…" >&2 +npx tsc 2>&1 | grep -v 'navie-local' >&2 || true + +# Filter out diagnostic noise from @appland/models that the verbs themselves +# don't emit (kept loose so we don't suppress real errors). 
+NOISE='\[DEBUG ' + +banner() { + echo + echo "── \$ appmap $*" +} +run() { + banner "$@" + "${CLI[@]}" "$@" 2>&1 | grep -vE "$NOISE" || true +} +run_quiet() { + banner "$@" + "${CLI[@]}" "$@" 2>&1 | grep -vE "$NOISE" | tail -5 || true +} + +cat < --query-db " +"${CLI[@]}" index --appmap-dir "$DATA" --query-db "$DB" >/dev/null 2>&1 +COUNT=$(node -e " + const db = require('better-sqlite3')('$DB', { readonly: true }); + process.stdout.write(String(db.prepare('SELECT COUNT(*) AS n FROM appmaps').get().n)); +") +echo "indexed $COUNT recordings" + +run query endpoints --query-db "$DB" --sort p95 --limit 5 +run query find queries --query-db "$DB" --table users --limit 3 || true +run query find exceptions --query-db "$DB" --limit 5 || true +run query hotspots --query-db "$DB" --limit 5 +run query hotspots --query-db "$DB" --type=sql --limit 3 + +# related: find passing baselines for a recording (with whatever data exists) +RELATED_SOURCE="$(node -e " + const db = require('better-sqlite3')('$DB', { readonly: true }); + const r = db.prepare(\"SELECT name FROM appmaps WHERE name LIKE '%oups%' LIMIT 1\").get(); + process.stdout.write(r ? r.name : ''); +")" +if [ -n "$RELATED_SOURCE" ]; then + run query related "$RELATED_SOURCE" --query-db "$DB" --limit 5 +fi + +# Pick the recording with the most events for the tree demos. +APPMAP="$(node -e " + const db = require('better-sqlite3')('$DB', { readonly: true }); + const r = db.prepare( + 'SELECT name FROM appmaps WHERE event_count > 0 ORDER BY event_count DESC LIMIT 1' + ).get(); + process.stdout.write(r ? r.name : ''); +")" + +if [ -n "$APPMAP" ]; then + run query tree "$APPMAP" --query-db "$DB" --format=summary + run query tree "$APPMAP" --query-db "$DB" --filter=sql +fi + +echo +echo "Done." 
diff --git a/packages/cli/src/cli.ts b/packages/cli/src/cli.ts index 630ad5edb1..fe4a3b69fe 100755 --- a/packages/cli/src/cli.ts +++ b/packages/cli/src/cli.ts @@ -40,6 +40,7 @@ import * as RpcCommand from './cmds/index/rpc'; import * as RpcClientCommand from './cmds/rpcClient'; import * as NavieCommand from './cmds/navie'; import * as ApplyCommand from './cmds/apply'; +import * as QueryCommand from './cmds/query/query'; import * as RunTestCommand from './cmds/runTest'; import TelemetryTestCommand from './cmds/testTelemetry'; import { default as sqlErrorLog } from './lib/sqlErrorLog'; @@ -156,6 +157,7 @@ yargs(process.argv.slice(2)) .command(RpcClientCommand) .command(NavieCommand) .command(ApplyCommand) + .command(QueryCommand) .command(RunTestCommand) .command(TelemetryTestCommand) .option('verbose', { diff --git a/packages/cli/src/cmds/index/index.ts b/packages/cli/src/cmds/index/index.ts index 4621bdf44a..77abf4e022 100644 --- a/packages/cli/src/cmds/index/index.ts +++ b/packages/cli/src/cmds/index/index.ts @@ -10,6 +10,8 @@ import { configureRpcDirectories, handleWorkingDirectory } from '../../lib/handl import { locateAppMapDir } from '../../lib/locateAppMapDir'; import { verbose } from '../../utils'; import { log, warn } from 'console'; +import { openQueryDb } from '../query/db'; +import { QueryDbIndexer } from '../query/db/import/QueryDbIndexer'; import { numProcessed } from '../../rpc/index/numProcessed'; import { search } from '../../rpc/search/search'; import appmapFilter from '../../rpc/appmap/filter'; @@ -51,6 +53,10 @@ export const builder = (args: yargs.Argv) => { type: 'number', alias: 'p', }); + args.option('query-db', { + describe: 'path to query.db (overrides default ~/.appmap/data//query.db)', + type: 'string', + }); args.option('navie-provider', { describe: 'navie provider to use', type: 'string', @@ -78,11 +84,19 @@ export const handler = async (argv) => { const runServer = watch || port !== undefined; if (port && !watch) warn(`Note: --port 
option implies --watch`); + const queryDb = openQueryDb(appmapDir, argv.queryDb as string | undefined); + const indexer = new QueryDbIndexer(queryDb.db); + log( + `Query DB at ${queryDb.path} (schema v${queryDb.version}${ + queryDb.rebuilt ? ', rebuilt' : '' + })` + ); + if (runServer) { void checkLicense(false); log(`Running indexer in watch mode`); - const cmd = new FingerprintWatchCommand(appmapDir); + const cmd = new FingerprintWatchCommand(appmapDir, indexer); await cmd.execute(); if (port !== undefined) { @@ -149,7 +163,8 @@ export const handler = async (argv) => { } } } else { - const cmd = new FingerprintDirectoryCommand(appmapDir); + const cmd = new FingerprintDirectoryCommand(appmapDir, indexer); await cmd.execute(); + indexer.close(); } }; diff --git a/packages/cli/src/cmds/query/db/import/QueryDbIndexer.ts b/packages/cli/src/cmds/query/db/import/QueryDbIndexer.ts new file mode 100644 index 0000000000..8977ee1f23 --- /dev/null +++ b/packages/cli/src/cmds/query/db/import/QueryDbIndexer.ts @@ -0,0 +1,69 @@ +import { resolve } from 'path'; + +import sqlite3 from 'better-sqlite3'; + +import { findFiles } from '../../../../utils'; +import { deleteAppmap, importAppmap } from './importAppmap'; + +// Subscribes to fingerprint pipeline events and routes per-file work into +// the query DB. Owns no policy beyond "import on index, delete on unlink"; +// callers wire it up to whichever queue/watcher fits the command shape. +// +// Failure handling: per-file errors (bad JSON, missing fields) are logged +// and skipped; the walk does not abort. DB-level errors still propagate — +// those indicate a real bug, not bad data. 
+ +interface IndexEmitter { + on(event: 'index', listener: (ev: { path: string }) => void): unknown; +} + +export class QueryDbIndexer { + private imported = 0; + private failed = 0; + + constructor(private readonly db: sqlite3.Database) {} + + // Subscribe to a FingerprintQueue (or anything matching its 'index' event + // shape) so each successfully fingerprinted file is also imported. + attach(queue: IndexEmitter): void { + queue.on('index', (ev) => this.onIndexed(ev.path)); + } + + // Walk a directory and import any .appmap.json that doesn't already have + // a row in the appmaps table. Bridges the gap when query.db is fresh but + // fingerprints already exist (so the fingerprinter skips them and never + // emits an 'index' event for the importer to catch). + async syncDirectory(directory: string): Promise { + const present = this.db.prepare('SELECT 1 FROM appmaps WHERE source_path = ?'); + await findFiles(directory, '.appmap.json', (file) => { + const absolutePath = resolve(file); + if (!present.get(absolutePath)) this.onIndexed(absolutePath); + }); + } + + onIndexed(file: string): void { + try { + importAppmap(this.db, file); + this.imported += 1; + } catch (err) { + this.failed += 1; + console.warn(`query db: failed to import ${file}: ${(err as Error).message}`); + } + } + + onRemoved(file: string): void { + try { + deleteAppmap(this.db, file); + } catch (err) { + console.warn(`query db: failed to delete ${file}: ${(err as Error).message}`); + } + } + + stats(): { imported: number; failed: number } { + return { imported: this.imported, failed: this.failed }; + } + + close(): void { + this.db.close(); + } +} diff --git a/packages/cli/src/cmds/query/db/import/appmapRecord.ts b/packages/cli/src/cmds/query/db/import/appmapRecord.ts new file mode 100644 index 0000000000..77ad40de59 --- /dev/null +++ b/packages/cli/src/cmds/query/db/import/appmapRecord.ts @@ -0,0 +1,98 @@ +import { statSync } from 'fs'; +import { basename } from 'path'; + +import sqlite3 from 
'better-sqlite3'; + +export interface AppmapMetadata { + name?: string; + language?: { name?: string }; + frameworks?: { name?: string }[]; + recorder?: { type?: string }; + git?: { repository?: string; branch?: string; commit?: string }; + timestamp?: number; + labels?: unknown; +} + +export interface ParsedAppmap { + events?: Record[]; + metadata?: AppmapMetadata; + classMap?: unknown; +} + +export interface AppmapRecordResult { + appmapId: number; + timestampIso: string; +} + +// Insert the top-level appmaps row and return its id and resolved timestamp. +// +// Total elapsed is taken from the first return event carrying an +// http_server_response. If metadata.timestamp is missing, falls back to the +// file's mtime so time-range queries still work. +export function insertAppmapRecord( + db: sqlite3.Database, + absolutePath: string, + appmap: ParsedAppmap +): AppmapRecordResult { + const events = appmap.events ?? []; + const metadata = appmap.metadata ?? {}; + + let totalElapsedMs: number | null = null; + for (const ev of events) { + if (ev.event === 'return' && 'http_server_response' in ev) { + const elapsed = ev.elapsed; + if (typeof elapsed === 'number') totalElapsedMs = elapsed * 1000; + break; + } + } + + let sqlQueryCount = 0; + let httpRequestCount = 0; + for (const ev of events) { + if ('sql_query' in ev) sqlQueryCount += 1; + if ('http_server_request' in ev) httpRequestCount += 1; + } + + const language = metadata.language?.name ?? null; + const framework = metadata.frameworks?.[0]?.name ?? null; + const recorderType = metadata.recorder?.type ?? null; + const git = metadata.git ?? {}; + + let timestampIso: string; + if (typeof metadata.timestamp === 'number') { + timestampIso = new Date(metadata.timestamp * 1000).toISOString(); + } else { + timestampIso = statSync(absolutePath).mtime.toISOString(); + } + + const labels = metadata.labels; + const metadataLabelsJson = labels ? JSON.stringify(labels) : null; + const name = metadata.name ?? 
basename(absolutePath); + + const info = db + .prepare( + `INSERT INTO appmaps (name, source_path, language, framework, recorder_type, + git_repository, git_branch, git_commit, timestamp, + event_count, sql_query_count, http_request_count, elapsed_ms, + metadata_labels) + VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)` + ) + .run( + name, + absolutePath, + language, + framework, + recorderType, + git.repository ?? null, + git.branch ?? null, + git.commit ?? null, + timestampIso, + events.length, + sqlQueryCount, + httpRequestCount, + totalElapsedMs, + metadataLabelsJson + ); + + return { appmapId: Number(info.lastInsertRowid), timestampIso }; +} diff --git a/packages/cli/src/cmds/query/db/import/codeObjects.ts b/packages/cli/src/cmds/query/db/import/codeObjects.ts new file mode 100644 index 0000000000..b93689e4c1 --- /dev/null +++ b/packages/cli/src/cmds/query/db/import/codeObjects.ts @@ -0,0 +1,123 @@ +import sqlite3 from 'better-sqlite3'; + +import type { CodeObjectType, Label } from '@appland/models'; + +// Minimal classMap node shape used by the walk. Stays loose to avoid +// coupling to @appland/models per V3 ("no @appland/models for ingestion"); +// only leaf type names are imported (type-only). +export interface ClassMapNode { + type?: CodeObjectType; + name?: string; + static?: boolean; + location?: string; + labels?: Label[]; + children?: ClassMapNode[]; +} + +// Walk the classMap tree, insert one code_objects row per function node, +// insert its labels, and return a map of "{location}|{method}" → +// code_object_id. The method component disambiguates classMap entries +// that share a path:lineno (e.g. Spring Data proxy methods), preventing +// the function_calls linker from binding events to the wrong code_object. +// +// Each function is decomposed into: +// - package : slash-joined package path (e.g. "app/services/idempotency") +// - class : ::-joined class chain (e.g. "Outer::Inner") +// - method : leaf method name (e.g. 
"generate") +// - is_static : 1 for static, 0 for instance +// +// fqid is derived from these and matches @appland/models' codeObjectId.js: +// - between package and class: '/' +// - between class and child class: '::' +// - between class and function method: '.' (static) or '#' (instance) +// +// Behavior preserved from the Python prototype: +// - Function node names with an auxtype suffix like " (get)" are trimmed. +// - Functions without a location are skipped (e.g., C-extensions). +// - When descending from a package, the class chain resets (a class +// directly under a package starts a fresh chain). +export function importCodeObjects( + db: sqlite3.Database, + classMap: readonly ClassMapNode[] +): Map { + const lookup = new Map(); + + const insertCodeObject = db.prepare( + `INSERT OR IGNORE INTO code_objects (fqid, package, classes, leaf_class, method, is_static) + VALUES (?, ?, ?, ?, ?, ?)` + ); + const selectCodeObjectId = db.prepare(`SELECT id FROM code_objects WHERE fqid = ?`); + const insertLabel = db.prepare( + `INSERT OR IGNORE INTO labels (code_object_id, label) VALUES (?, ?)` + ); + + function buildFqid( + packageTokens: readonly string[], + classTokens: readonly string[], + method: string, + isStatic: boolean + ): string { + const pkg = packageTokens.join('/'); + const cls = classTokens.join('::'); + const methodSep = isStatic ? '.' : '#'; + if (pkg && cls) return `${pkg}/${cls}${methodSep}${method}`; + if (pkg) return `${pkg}${methodSep}${method}`; + if (cls) return `${cls}${methodSep}${method}`; + return `${methodSep}${method}`; + } + + function walk( + node: ClassMapNode, + packageTokens: readonly string[], + classTokens: readonly string[] + ): void { + const nodeType = node.type; + const name = node.name ?? ''; + + if (nodeType === 'function') { + const location = node.location; + if (!location) return; + + const parenIdx = name.indexOf(' ('); + const methodName = parenIdx >= 0 ? 
name.slice(0, parenIdx) : name; + const isStatic = !!node.static; + + const fqid = buildFqid(packageTokens, classTokens, methodName, isStatic); + + const leafClass = classTokens.length > 0 ? classTokens[classTokens.length - 1] : ''; + insertCodeObject.run( + fqid, + packageTokens.join('/'), + JSON.stringify([...classTokens]), + leafClass, + methodName, + isStatic ? 1 : 0 + ); + const row = selectCodeObjectId.get(fqid) as { id: number }; + // Key includes the method name so multiple functions sharing a + // path:lineno (e.g. Spring Data proxy methods) each map to their + // own code_object instead of clobbering one another. + lookup.set(`${location}|${methodName}`, row.id); + + const labels = node.labels ?? []; + for (const label of labels) insertLabel.run(row.id, label); + return; + } + + let nextPackageTokens = packageTokens; + let nextClassTokens = classTokens; + if (nodeType === 'package') { + nextPackageTokens = [...packageTokens, name]; + nextClassTokens = []; // package descent resets the class chain + } else if (nodeType === 'class') { + nextClassTokens = [...classTokens, name]; + } + + const children = node.children ?? []; + for (const child of children) walk(child, nextPackageTokens, nextClassTokens); + } + + for (const root of classMap) walk(root, [], []); + + return lookup; +} diff --git a/packages/cli/src/cmds/query/db/import/exceptions.ts b/packages/cli/src/cmds/query/db/import/exceptions.ts new file mode 100644 index 0000000000..aeab5cf1c9 --- /dev/null +++ b/packages/cli/src/cmds/query/db/import/exceptions.ts @@ -0,0 +1,85 @@ +import sqlite3 from 'better-sqlite3'; + +import type { ExceptionObject } from '@appland/models'; + +// Import exceptions into the exceptions table. +// +// In the AppMap event stream, `exceptions` lives on **return** events: the +// return event terminates a call, and any thrown exception that propagated +// is attached there. 
The row's `event_id` and `parent_event_id` should +// describe the **call** the exception belongs to, not the return event: +// +// - event_id = call event id (= ev.parent_id on the return) +// - parent_event_id = parent of that call in the per-thread stack +// (= parentEventMap.get(callEventId)) +// +// Some recorders also place `exceptions` directly on the call event itself. +// We accept that legacy shape but de-dup against the canonical return-event +// source: if a call id was already covered via its return event, we skip +// the call-event source for the same id. +export function importExceptions( + db: sqlite3.Database, + appmapId: number, + events: readonly Record[], + parentEventMap: Map +): void { + const stmt = db.prepare( + `INSERT INTO exceptions (appmap_id, event_id, return_event_id, thread_id, + parent_event_id, exception_class, message, path, lineno) + VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)` + ); + + const seenCallIds = new Set(); + + // Pass 1: return events carrying exceptions (canonical case). + for (const ev of events) { + if (ev.event !== 'return') continue; + const excs = ev.exceptions as ExceptionObject[] | undefined; + if (!Array.isArray(excs)) continue; + if (typeof ev.parent_id !== 'number') continue; + + const callEventId = ev.parent_id; + const returnEventId = typeof ev.id === 'number' ? ev.id : null; + seenCallIds.add(callEventId); + const parentEventId = parentEventMap.get(callEventId) ?? null; + + for (const exc of excs) { + stmt.run( + appmapId, + callEventId, + returnEventId, + ev.thread_id ?? null, + parentEventId, + exc.class, + exc.message ?? null, + exc.path ?? null, + exc.lineno ?? null + ); + } + } + + // Pass 2: legacy shape — exceptions on a call event we didn't already cover. + // No paired return event in this shape, so return_event_id stays null. 
+ for (const ev of events) { + if (ev.event !== 'call') continue; + const excs = ev.exceptions as ExceptionObject[] | undefined; + if (!Array.isArray(excs)) continue; + if (typeof ev.id !== 'number' || seenCallIds.has(ev.id)) continue; + + const parentEventId = parentEventMap.get(ev.id) ?? null; + + for (const exc of excs) { + stmt.run( + appmapId, + ev.id, + null, + ev.thread_id ?? null, + parentEventId, + exc.class, + exc.message ?? null, + exc.path ?? null, + exc.lineno ?? null + ); + } + } +} diff --git a/packages/cli/src/cmds/query/db/import/functionCalls.ts b/packages/cli/src/cmds/query/db/import/functionCalls.ts new file mode 100644 index 0000000000..43f1035857 --- /dev/null +++ b/packages/cli/src/cmds/query/db/import/functionCalls.ts @@ -0,0 +1,88 @@ +import sqlite3 from 'better-sqlite3'; + +import type { ReturnEventLike } from './returnEventMap'; + +// Import function call events. Each event is linked to a code_object via +// (path, lineno) → classMap location, and gains parameter / return-value +// capture iff the linked code_object has any labels. +export function importFunctionCalls( + db: sqlite3.Database, + appmapId: number, + events: readonly Record[], + returnEvents: Map, + parentEventMap: Map, + codeObjectLookup: Map +): void { + // Set of code_object_ids that have labels — narrows param capture to the + // functions an investigator cares about (log, security.*, dao.*, …). 
+ const labeledCoIds = new Set(); + if (codeObjectLookup.size > 0) { + const placeholders = new Array(codeObjectLookup.size).fill('?').join(','); + const ids = [...codeObjectLookup.values()]; + const rows = db + .prepare( + `SELECT DISTINCT code_object_id FROM labels WHERE code_object_id IN (${placeholders})` + ) + .all(...ids) as { code_object_id: number }[]; + for (const r of rows) labeledCoIds.add(r.code_object_id); + } + + const stmt = db.prepare( + `INSERT INTO function_calls (appmap_id, event_id, thread_id, parent_event_id, + code_object_id, defined_class, method_id, path, lineno, is_static, + elapsed_ms, parameters_json, return_value) + VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)` + ); + + for (const ev of events) { + if (ev.event !== 'call') continue; + if (!ev.defined_class || !ev.method_id) continue; + if ('http_server_request' in ev || 'sql_query' in ev) continue; + + const ret = returnEvents.get(ev.id) ?? {}; + const elapsed = ret.elapsed; + + let coId: number | null = null; + const evPath = ev.path; + const evLineno = ev.lineno; + if (evPath != null && evLineno != null) { + // Lookup key matches the importer's: ":|". + // The method component disambiguates classMap entries that share + // a path:lineno, so two events at the same source location bind to + // their own code_object rather than colliding. + coId = codeObjectLookup.get(`${evPath}:${evLineno}|${ev.method_id}`) ?? null; + } + + let paramsJson: string | null = null; + let returnVal: string | null = null; + if (coId !== null && labeledCoIds.has(coId)) { + const params = ev.parameters; + if (Array.isArray(params) && params.length > 0) { + paramsJson = JSON.stringify( + params.map((p: any) => ({ name: p?.name, class: p?.class, value: p?.value })) + ); + } + const rv = (ret as any).return_value; + if (rv && typeof rv === 'object') { + const value = rv.value; + returnVal = value == null ? null : String(value); + } + } + + stmt.run( + appmapId, + ev.id, + ev.thread_id ?? 
null, + parentEventMap.get(ev.id) ?? null, + coId, + ev.defined_class, + ev.method_id, + evPath ?? null, + evLineno ?? null, + ev.static ? 1 : 0, + typeof elapsed === 'number' ? elapsed * 1000 : null, + paramsJson, + returnVal + ); + } +} diff --git a/packages/cli/src/cmds/query/db/import/httpClientRequests.ts b/packages/cli/src/cmds/query/db/import/httpClientRequests.ts new file mode 100644 index 0000000000..4f338f00b3 --- /dev/null +++ b/packages/cli/src/cmds/query/db/import/httpClientRequests.ts @@ -0,0 +1,42 @@ +import sqlite3 from 'better-sqlite3'; + +import type { HttpClientRequest, HttpClientResponse } from '@appland/models'; + +import type { ReturnEventLike } from './returnEventMap'; + +// Mirror httpRequests.ts: recordings carry a status_code field alongside +// what @appland/models declares, and an empty {} when no return was emitted. +type RawHttpClientResponse = Partial & { status_code?: number }; + +export function importHttpClientRequests( + db: sqlite3.Database, + appmapId: number, + events: readonly Record[], + returnEvents: Map, + parentEventMap: Map +): void { + const stmt = db.prepare( + `INSERT INTO http_client_requests (appmap_id, event_id, thread_id, parent_event_id, + method, url, status_code, elapsed_ms) + VALUES (?, ?, ?, ?, ?, ?, ?, ?)` + ); + + for (const ev of events) { + const req = ev.http_client_request as HttpClientRequest | undefined; + if (!req) continue; + const ret = returnEvents.get(ev.id) ?? {}; + const resp = (ret.http_client_response ?? {}) as RawHttpClientResponse; + const elapsed = ret.elapsed; + + stmt.run( + appmapId, + ev.id, + ev.thread_id ?? null, + parentEventMap.get(ev.id) ?? null, + req.request_method ?? 'GET', + req.url ?? '', + resp.status_code ?? null, + typeof elapsed === 'number' ? 
elapsed * 1000 : null + ); + } +} diff --git a/packages/cli/src/cmds/query/db/import/httpRequests.ts b/packages/cli/src/cmds/query/db/import/httpRequests.ts new file mode 100644 index 0000000000..b54a293a61 --- /dev/null +++ b/packages/cli/src/cmds/query/db/import/httpRequests.ts @@ -0,0 +1,51 @@ +import sqlite3 from 'better-sqlite3'; + +import type { HttpServerRequest, HttpServerResponse } from '@appland/models'; + +import type { ReturnEventLike } from './returnEventMap'; + +// Some recorders carry status_code/mime_type alongside the @appland/models +// HttpServerResponse fields, and we may also see an empty object when no +// return event was emitted. Capture the JSON-as-found here without changing +// the upstream type definitions. +type RawHttpServerResponse = Partial & { + status_code?: number; + mime_type?: string; +}; + +export function importHttpRequests( + db: sqlite3.Database, + appmapId: number, + events: readonly Record[], + returnEvents: Map, + parentEventMap: Map +): void { + const stmt = db.prepare( + `INSERT INTO http_requests (appmap_id, event_id, thread_id, parent_event_id, + method, path, normalized_path, protocol, status_code, mime_type, + elapsed_ms) + VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)` + ); + + for (const ev of events) { + const req = ev.http_server_request as HttpServerRequest | undefined; + if (!req) continue; + const ret = returnEvents.get(ev.id) ?? {}; + const resp = (ret.http_server_response ?? {}) as RawHttpServerResponse; + const elapsed = ret.elapsed; + + stmt.run( + appmapId, + ev.id, + ev.thread_id ?? null, + parentEventMap.get(ev.id) ?? null, + req.request_method, + req.path_info, + req.normalized_path_info ?? null, + req.protocol ?? null, + resp.status_code ?? 0, + resp.mime_type ?? null, + typeof elapsed === 'number' ? 
elapsed * 1000 : null + ); + } +} diff --git a/packages/cli/src/cmds/query/db/import/importAppmap.ts b/packages/cli/src/cmds/query/db/import/importAppmap.ts new file mode 100644 index 0000000000..47443c910e --- /dev/null +++ b/packages/cli/src/cmds/query/db/import/importAppmap.ts @@ -0,0 +1,67 @@ +import { readFileSync } from 'fs'; +import { resolve } from 'path'; + +import sqlite3 from 'better-sqlite3'; + +import { insertAppmapRecord, ParsedAppmap } from './appmapRecord'; +import { importCodeObjects, ClassMapNode } from './codeObjects'; +import { importHttpRequests } from './httpRequests'; +import { importHttpClientRequests } from './httpClientRequests'; +import { importSqlQueries } from './sqlQueries'; +import { importFunctionCalls } from './functionCalls'; +import { importExceptions } from './exceptions'; +import { buildParentEventMap } from './parentEventMap'; +import { buildReturnEventMap } from './returnEventMap'; + +export interface ImportResult { + appmapId: number; + eventCount: number; + sqlCount: number; + httpCount: number; +} + +// Idempotency: existing rows for this source_path are dropped (FK cascade +// clears child rows) before re-inserting. The whole import runs in one +// transaction — partial state is never visible to readers. +export function importAppmap(db: sqlite3.Database, filePath: string): ImportResult { + const absolutePath = resolve(filePath); + const raw = readFileSync(absolutePath, 'utf8'); + const parsed = JSON.parse(raw) as ParsedAppmap & { classMap?: ClassMapNode[] }; + const events = parsed.events ?? []; + const classMap = parsed.classMap ?? 
[]; + + const tx = db.transaction((): ImportResult => { + db.prepare('DELETE FROM appmaps WHERE source_path = ?').run(absolutePath); + + const { appmapId } = insertAppmapRecord(db, absolutePath, parsed); + const codeObjectLookup = importCodeObjects(db, classMap); + + const returnEvents = buildReturnEventMap(events); + const parentEventMap = buildParentEventMap(events); + + importHttpRequests(db, appmapId, events, returnEvents, parentEventMap); + importHttpClientRequests(db, appmapId, events, returnEvents, parentEventMap); + importSqlQueries(db, appmapId, events, returnEvents, parentEventMap); + importFunctionCalls(db, appmapId, events, returnEvents, parentEventMap, codeObjectLookup); + importExceptions(db, appmapId, events, parentEventMap); + + let sqlCount = 0; + let httpCount = 0; + for (const ev of events) { + if ('sql_query' in ev) sqlCount += 1; + if ('http_server_request' in ev) httpCount += 1; + } + + return { appmapId, eventCount: events.length, sqlCount, httpCount }; + }); + + return tx(); +} + +// Drop all rows for the given recording. ON DELETE CASCADE removes child +// rows from http_requests, sql_queries, function_calls, exceptions, etc. +export function deleteAppmap(db: sqlite3.Database, filePath: string): boolean { + const absolutePath = resolve(filePath); + const info = db.prepare('DELETE FROM appmaps WHERE source_path = ?').run(absolutePath); + return info.changes > 0; +} diff --git a/packages/cli/src/cmds/query/db/import/parentEventMap.ts b/packages/cli/src/cmds/query/db/import/parentEventMap.ts new file mode 100644 index 0000000000..07c81c6d94 --- /dev/null +++ b/packages/cli/src/cmds/query/db/import/parentEventMap.ts @@ -0,0 +1,40 @@ +// Minimal shape of an AppMap event used during parent-link reconstruction. +// We only inspect a few fields, so we keep this loose rather than coupling +// to @appland/models. 
+export interface AppMapEventLike { + id?: number; + thread_id?: number; + event?: string; +} + +// Walk the event stream once and return a map of event_id → parent_event_id +// using per-thread call stacks. Each 'call' event's parent is the top of its +// thread's stack at the moment of the call; each 'return' pops the stack. +// +// Events with no thread_id or no id are skipped. Threads are independent: +// events on different threads never become each other's parents. +export function buildParentEventMap(events: readonly AppMapEventLike[]): Map { + const parentMap = new Map(); + const threadStacks = new Map(); + + for (const ev of events) { + const tid = ev.thread_id; + const eid = ev.id; + if (tid === undefined || eid === undefined) continue; + + if (ev.event === 'call') { + let stack = threadStacks.get(tid); + if (!stack) { + stack = []; + threadStacks.set(tid, stack); + } + if (stack.length > 0) parentMap.set(eid, stack[stack.length - 1]); + stack.push(eid); + } else if (ev.event === 'return') { + const stack = threadStacks.get(tid); + if (stack && stack.length > 0) stack.pop(); + } + } + + return parentMap; +} diff --git a/packages/cli/src/cmds/query/db/import/parseLocation.ts b/packages/cli/src/cmds/query/db/import/parseLocation.ts new file mode 100644 index 0000000000..0348ea9130 --- /dev/null +++ b/packages/cli/src/cmds/query/db/import/parseLocation.ts @@ -0,0 +1,26 @@ +// Parse a classMap location string into [path, lineno]. +// +// Handles: +// "app/views.py:10" → ["app/views.py", 10] +// "/abs/path/file.rb:511" → ["/abs/path/file.rb", 511] +// "File.java:-1" → ["File.java", -1] +// "OpenSSL::Cipher#decrypt" → [null, null] (C-extension; no file) +// "" → [null, null] +// +// Splits on the rightmost ':' so paths containing colons (Windows drive +// letters, namespaced classes) are handled correctly. 
+export function parseLocation(location: string | undefined | null): [string | null, number | null] { + if (!location) return [null, null]; + + const idx = location.lastIndexOf(':'); + if (idx <= 0) return [null, null]; + + const pathPart = location.slice(0, idx); + const linenoPart = location.slice(idx + 1); + + if (linenoPart.length === 0) return [null, null]; + // Integer parse — accept leading minus, reject anything non-numeric. + if (!/^-?\d+$/.test(linenoPart)) return [null, null]; + + return [pathPart, Number.parseInt(linenoPart, 10)]; +} diff --git a/packages/cli/src/cmds/query/db/import/returnEventMap.ts b/packages/cli/src/cmds/query/db/import/returnEventMap.ts new file mode 100644 index 0000000000..fa9ca7c8db --- /dev/null +++ b/packages/cli/src/cmds/query/db/import/returnEventMap.ts @@ -0,0 +1,26 @@ +// Index return events by their associated call event id, so call events can +// look up their elapsed time + return-specific payload (http_server_response, +// http_client_response, return_value). +// +// In the AppMap event stream, each return event carries `parent_id` pointing +// at the id of the call event it terminates. 
+export interface ReturnEventLike { + event?: string; + parent_id?: number; + elapsed?: number; + http_server_response?: Record; + http_client_response?: Record; + return_value?: Record; +} + +export function buildReturnEventMap( + events: readonly ReturnEventLike[] +): Map { + const map = new Map(); + for (const ev of events) { + if (ev.event === 'return' && typeof ev.parent_id === 'number') { + map.set(ev.parent_id, ev); + } + } + return map; +} diff --git a/packages/cli/src/cmds/query/db/import/sqlQueries.ts b/packages/cli/src/cmds/query/db/import/sqlQueries.ts new file mode 100644 index 0000000000..22492012c9 --- /dev/null +++ b/packages/cli/src/cmds/query/db/import/sqlQueries.ts @@ -0,0 +1,62 @@ +import sqlite3 from 'better-sqlite3'; + +import type { SqlQuery } from '@appland/models'; + +import type { ReturnEventLike } from './returnEventMap'; + +// Import sql_query events. The caller class/method is taken from the event +// itself when present; otherwise it's derived from the parent call event in +// the per-thread call stack (matches the Python prototype). +export function importSqlQueries( + db: sqlite3.Database, + appmapId: number, + events: readonly Record[], + returnEvents: Map, + parentEventMap: Map +): void { + // event_id → call event, for parent-callsite lookup. + const callEvents = new Map>(); + for (const ev of events) { + if (ev.event === 'call' && typeof ev.id === 'number') callEvents.set(ev.id, ev); + } + + const stmt = db.prepare( + `INSERT INTO sql_queries (appmap_id, event_id, thread_id, parent_event_id, + sql_text, database_type, server_version, caller_class, caller_method, + elapsed_ms) + VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)` + ); + + for (const ev of events) { + const sq = ev.sql_query as SqlQuery | undefined; + if (!sq) continue; + const ret = returnEvents.get(ev.id) ?? {}; + const elapsed = ret.elapsed; + + let callerClass: string | null = ev.defined_class ?? null; + let callerMethod: string | null = ev.method_id ?? 
null; + if (!callerClass) { + const parentEid = parentEventMap.get(ev.id); + if (parentEid !== undefined) { + const parent = callEvents.get(parentEid); + if (parent) { + callerClass = parent.defined_class ?? null; + callerMethod = parent.method_id ?? null; + } + } + } + + stmt.run( + appmapId, + ev.id, + ev.thread_id ?? null, + parentEventMap.get(ev.id) ?? null, + sq.sql, + sq.database_type ?? null, + sq.server_version ?? null, + callerClass, + callerMethod, + typeof elapsed === 'number' ? elapsed * 1000 : null + ); + } +} diff --git a/packages/cli/src/cmds/query/db/index.ts b/packages/cli/src/cmds/query/db/index.ts new file mode 100644 index 0000000000..7a3acb61d0 --- /dev/null +++ b/packages/cli/src/cmds/query/db/index.ts @@ -0,0 +1,3 @@ +export { SCHEMA, SCHEMA_VERSION, SCHEMA_TABLES } from './schema'; +export { queryDbPath, QUERY_DB_FILENAME } from './path'; +export { openQueryDb, OpenQueryDbResult } from './openQueryDb'; diff --git a/packages/cli/src/cmds/query/db/openQueryDb.ts b/packages/cli/src/cmds/query/db/openQueryDb.ts new file mode 100644 index 0000000000..f5c4117161 --- /dev/null +++ b/packages/cli/src/cmds/query/db/openQueryDb.ts @@ -0,0 +1,55 @@ +import { mkdirSync } from 'fs'; +import { dirname } from 'path'; + +import sqlite3 from 'better-sqlite3'; + +import { SCHEMA, SCHEMA_TABLES, SCHEMA_VERSION } from './schema'; +import { queryDbPath } from './path'; + +export interface OpenQueryDbResult { + db: sqlite3.Database; + path: string; + version: number; + rebuilt: boolean; +} + +// Open the query DB for the given appmap directory, ensuring its schema is +// at SCHEMA_VERSION. Creates the parent directory and the file if missing. +// Drops and rebuilds all schema tables if the on-disk version doesn't match. +// +// `dbPath` overrides path derivation (used by tests). +export function openQueryDb(appmapDir: string, dbPath?: string): OpenQueryDbResult { + const path = dbPath ?? 
queryDbPath(appmapDir); + mkdirSync(dirname(path), { recursive: true }); + + const db = sqlite3(path); + db.pragma('journal_mode = WAL'); + db.pragma('foreign_keys = ON'); + db.pragma('busy_timeout = 5000'); + + const currentVersion = db.pragma('user_version', { simple: true }) as number; + let rebuilt = false; + + if (currentVersion === 0) { + db.exec(SCHEMA); + db.pragma(`user_version = ${SCHEMA_VERSION}`); + } else if (currentVersion !== SCHEMA_VERSION) { + rebuildSchema(db); + rebuilt = true; + } + + return { db, path, version: SCHEMA_VERSION, rebuilt }; +} + +function rebuildSchema(db: sqlite3.Database): void { + const tx = db.transaction(() => { + db.pragma('foreign_keys = OFF'); + for (const table of SCHEMA_TABLES) { + db.exec(`DROP TABLE IF EXISTS ${table}`); + } + db.exec(SCHEMA); + db.pragma(`user_version = ${SCHEMA_VERSION}`); + db.pragma('foreign_keys = ON'); + }); + tx(); +} diff --git a/packages/cli/src/cmds/query/db/path.ts b/packages/cli/src/cmds/query/db/path.ts new file mode 100644 index 0000000000..2cae528308 --- /dev/null +++ b/packages/cli/src/cmds/query/db/path.ts @@ -0,0 +1,20 @@ +import { createHash } from 'crypto'; +import { homedir } from 'os'; +import { join, resolve } from 'path'; + +export const QUERY_DB_FILENAME = 'query.db'; + +// Derive the on-disk path for the query DB that corresponds to the given +// appmap directory. The path is rooted at `~/.appmap/data//query.db`, +// where `` is the first 12 hex characters of the SHA-256 digest of +// the resolved directory path. +// +// Pure: returns the path without creating any directories. +// +// To use a different path (tests, CI, demo scripts), call openQueryDb / +// openReadOnly with an explicit `dbPath` argument; the corresponding CLI +// flag is `--db`. 
+export function queryDbPath(appmapDir: string): string { + const id = createHash('sha256').update(resolve(appmapDir)).digest('hex').slice(0, 12); + return join(homedir(), '.appmap', 'data', id, QUERY_DB_FILENAME); +} diff --git a/packages/cli/src/cmds/query/db/schema.ts b/packages/cli/src/cmds/query/db/schema.ts new file mode 100644 index 0000000000..28d4a212f6 --- /dev/null +++ b/packages/cli/src/cmds/query/db/schema.ts @@ -0,0 +1,175 @@ +// SQLite schema for AppMap APM data. +// +// Denormalizes AppMap events into APM-oriented tables optimized for the +// queries an APM dashboard or LLM agent needs. Ported from appmap-apm +// (server/db/schema.py); shape preserved unchanged. + +export const SCHEMA_VERSION = 5; + +export const SCHEMA = ` +CREATE TABLE IF NOT EXISTS appmaps ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + name TEXT NOT NULL, + source_path TEXT NOT NULL UNIQUE, + language TEXT, + framework TEXT, + recorder_type TEXT, + git_repository TEXT, + git_branch TEXT, + git_commit TEXT, + timestamp TEXT, + event_count INTEGER NOT NULL DEFAULT 0, + sql_query_count INTEGER NOT NULL DEFAULT 0, + http_request_count INTEGER NOT NULL DEFAULT 0, + elapsed_ms REAL, + metadata_labels TEXT -- JSON array of metadata-level labels +); + +-- Code objects from classMap entries (one per unique instrumented function). +-- A lookup table for stable fqids. Components are stored separately so +-- filters can match exactly without parsing the fqid string: +-- package slash-joined package path ('' for top-level fn) +-- classes JSON array of class names ('[]' for package-level fn) +-- leaf_class last element of classes, denormalized for fast lookup +-- on short-form filters like --class Cipher (matches both +-- top-level Cipher and OpenSSL::Cipher). +-- method leaf method name +-- is_static 1 for static / class methods, 0 for instance methods +-- The fqid is always derivable from these (package + cls-chain + (#|.) 
+ +-- method); we keep it as a stored column for output ergonomics and +-- uniqueness enforcement. +-- +-- This table intentionally does NOT store path/lineno/location, because +-- those vary across recordings of the same code (different branches or +-- revisions). Per-recording location data lives on function_calls. +CREATE TABLE IF NOT EXISTS code_objects ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + fqid TEXT NOT NULL UNIQUE, + package TEXT NOT NULL, + classes TEXT NOT NULL, + leaf_class TEXT NOT NULL, + method TEXT NOT NULL, + is_static INTEGER NOT NULL DEFAULT 0 +); + +CREATE TABLE IF NOT EXISTS http_requests ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + appmap_id INTEGER NOT NULL REFERENCES appmaps(id) ON DELETE CASCADE, + event_id INTEGER NOT NULL, + thread_id INTEGER, + parent_event_id INTEGER, + method TEXT NOT NULL, + path TEXT NOT NULL, + normalized_path TEXT, + protocol TEXT, + status_code INTEGER NOT NULL, + mime_type TEXT, + elapsed_ms REAL + -- Note: no per-row timestamp. Time range queries JOIN to appmaps and + -- filter on appmaps.timestamp (the recording-level value). 
+); + +CREATE TABLE IF NOT EXISTS http_client_requests ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + appmap_id INTEGER NOT NULL REFERENCES appmaps(id) ON DELETE CASCADE, + event_id INTEGER NOT NULL, + thread_id INTEGER, + parent_event_id INTEGER, + method TEXT NOT NULL, + url TEXT NOT NULL, + status_code INTEGER, + elapsed_ms REAL +); + +CREATE TABLE IF NOT EXISTS sql_queries ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + appmap_id INTEGER NOT NULL REFERENCES appmaps(id) ON DELETE CASCADE, + event_id INTEGER NOT NULL, + thread_id INTEGER, + parent_event_id INTEGER, + sql_text TEXT NOT NULL, + database_type TEXT, + server_version TEXT, + caller_class TEXT, + caller_method TEXT, + elapsed_ms REAL +); + +CREATE TABLE IF NOT EXISTS function_calls ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + appmap_id INTEGER NOT NULL REFERENCES appmaps(id) ON DELETE CASCADE, + event_id INTEGER NOT NULL, + thread_id INTEGER, + parent_event_id INTEGER, + code_object_id INTEGER REFERENCES code_objects(id), + defined_class TEXT NOT NULL, + method_id TEXT NOT NULL, + path TEXT, + lineno INTEGER, + is_static INTEGER NOT NULL DEFAULT 0, + elapsed_ms REAL, + parameters_json TEXT, -- JSON of parameter values (for labeled/log functions) + return_value TEXT -- string repr of return value (for labeled/log functions) +); + +CREATE TABLE IF NOT EXISTS exceptions ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + appmap_id INTEGER NOT NULL REFERENCES appmaps(id) ON DELETE CASCADE, + -- event_id is the call event id (the call this exception belongs to); + -- return_event_id is the return event id (where the throw materialized in + -- the event stream). Use return_event_id for ordering — e.g. the + -- with_logs neighborhood query (what did the app log before the throw?) + -- needs to include logs that fired *inside* the throwing call, which all + -- have event_id greater than the call entry id but less than the return. 
+ event_id INTEGER, + return_event_id INTEGER, + thread_id INTEGER, + parent_event_id INTEGER, + exception_class TEXT NOT NULL, + message TEXT, + path TEXT, + lineno INTEGER +); + +-- Labels from classMap function entries (log, security.*, etc.) +CREATE TABLE IF NOT EXISTS labels ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + code_object_id INTEGER NOT NULL REFERENCES code_objects(id), + label TEXT NOT NULL, + UNIQUE(code_object_id, label) +); + +-- Indexes for common APM queries +CREATE INDEX IF NOT EXISTS idx_http_requests_appmap ON http_requests(appmap_id); +CREATE INDEX IF NOT EXISTS idx_http_requests_path ON http_requests(normalized_path, method); +CREATE INDEX IF NOT EXISTS idx_http_requests_status ON http_requests(status_code); +CREATE INDEX IF NOT EXISTS idx_http_client_requests_appmap ON http_client_requests(appmap_id); +CREATE INDEX IF NOT EXISTS idx_sql_queries_appmap ON sql_queries(appmap_id); +CREATE INDEX IF NOT EXISTS idx_sql_queries_elapsed ON sql_queries(elapsed_ms DESC); +CREATE INDEX IF NOT EXISTS idx_function_calls_appmap ON function_calls(appmap_id); +CREATE INDEX IF NOT EXISTS idx_function_calls_class_method ON function_calls(defined_class, method_id); +CREATE INDEX IF NOT EXISTS idx_function_calls_code_object ON function_calls(code_object_id); +CREATE INDEX IF NOT EXISTS idx_function_calls_parent ON function_calls(appmap_id, parent_event_id); +CREATE INDEX IF NOT EXISTS idx_exceptions_appmap ON exceptions(appmap_id); +CREATE INDEX IF NOT EXISTS idx_exceptions_class ON exceptions(exception_class); +CREATE INDEX IF NOT EXISTS idx_code_objects_fqid ON code_objects(fqid); +CREATE INDEX IF NOT EXISTS idx_code_objects_leaf_class ON code_objects(leaf_class); +CREATE INDEX IF NOT EXISTS idx_code_objects_method ON code_objects(method); +CREATE INDEX IF NOT EXISTS idx_code_objects_leaf_method ON code_objects(leaf_class, method); +CREATE INDEX IF NOT EXISTS idx_labels_label ON labels(label); +CREATE INDEX IF NOT EXISTS idx_labels_code_object ON 
labels(code_object_id); +CREATE INDEX IF NOT EXISTS idx_appmaps_timestamp ON appmaps(timestamp); +CREATE INDEX IF NOT EXISTS idx_appmaps_branch ON appmaps(git_branch); +`; + +// Names of all schema tables (used by the version-mismatch teardown path). +export const SCHEMA_TABLES = [ + 'appmaps', + 'code_objects', + 'http_requests', + 'http_client_requests', + 'sql_queries', + 'function_calls', + 'exceptions', + 'labels', +]; diff --git a/packages/cli/src/cmds/query/lib/format.ts b/packages/cli/src/cmds/query/lib/format.ts new file mode 100644 index 0000000000..d8effde2ba --- /dev/null +++ b/packages/cli/src/cmds/query/lib/format.ts @@ -0,0 +1,37 @@ +// Render a row of cells as a tab-aligned line. Per V3, tabular output never +// wraps — long fqids extend past terminal width rather than break (so grep +// over output stays usable). Pipe through `less -S` or use --json instead. +// +// `widths` is a per-column minimum width; cells longer than the minimum +// extend the column. +export function formatTable( + headers: readonly string[], + rows: readonly (readonly string[])[] +): string { + const widths = headers.map((h, i) => + Math.max(h.length, ...rows.map((r) => (r[i] ?? '').length)) + ); + const lines: string[] = []; + lines.push(headers.map((h, i) => h.padEnd(widths[i])).join(' ')); + for (const row of rows) { + lines.push(row.map((c, i) => (c ?? '').padEnd(widths[i])).join(' ')); + } + return lines.join('\n'); +} + +// Format a duration in ms as "12ms" / "480ms" / "3.4s" / "1.2s". +export function formatMs(ms: number | null | undefined): string { + if (ms == null) return '—'; + if (ms < 1000) return `${Math.round(ms)}ms`; + return `${(ms / 1000).toFixed(1)}s`; +} + +// Format a non-negative integer with thousands separators ("1,891"). +export function formatCount(n: number): string { + return n.toLocaleString('en-US'); +} + +// Format a percentage like "4.6%". 
+export function formatPct(pct: number): string { + return `${pct.toFixed(1)}%`; +} diff --git a/packages/cli/src/cmds/query/lib/logMessage.ts b/packages/cli/src/cmds/query/lib/logMessage.ts new file mode 100644 index 0000000000..7bb6d79ae1 --- /dev/null +++ b/packages/cli/src/cmds/query/lib/logMessage.ts @@ -0,0 +1,53 @@ +// Pick a displayable message from a log row's captured fields. +// 1. If return_value parses as JSON with a `message` field, use it +// (this is the structured-return contract). +// 2. Otherwise, look in parameters_json for a parameter whose `name` +// is `message` or `msg`; fall back to the first string-typed value. +// 3. Otherwise, return ''. +// Display-only — does not affect filtering. The `--message` SQL LIKE +// runs against the raw columns and may return rows whose projected +// message doesn't contain the substring (e.g., matched a class name); +// that's the documented FP-tolerant behavior. +export function projectLogMessage( + parametersJson: string | null, + returnValue: string | null +): string { + if (returnValue) { + try { + const parsed = JSON.parse(returnValue) as Record; + if (parsed && typeof parsed === 'object' && typeof parsed.message === 'string') { + return parsed.message; + } + } catch { + // not structured — fall through + } + } + if (parametersJson) { + try { + const params = JSON.parse(parametersJson) as { name?: string; class?: string; value?: unknown }[]; + const named = params.find((p) => p.name === 'message' || p.name === 'msg'); + if (named?.value != null) return stripWrappingQuotes(String(named.value)); + const firstString = params.find((p) => typeof p.value === 'string'); + if (firstString) return stripWrappingQuotes(String(firstString.value)); + if (params.length > 0) return JSON.stringify(params.map((p) => p.value)); + } catch { + return parametersJson; + } + } + // No structured message available. Return blank rather than the raw + // `return_value` (which is often noise like "true" / "None"). 
+ return ''; +} + +// Some recorders stringify String parameter values with the source-code +// quote characters preserved (e.g. `'hello'`). Strip a single matched +// pair of leading+trailing single or double quotes so the display text +// is the raw message. +function stripWrappingQuotes(s: string): string { + if (s.length >= 2) { + const first = s[0]; + const last = s[s.length - 1]; + if ((first === "'" || first === '"') && first === last) return s.slice(1, -1); + } + return s; +} diff --git a/packages/cli/src/cmds/query/lib/openReadOnly.ts b/packages/cli/src/cmds/query/lib/openReadOnly.ts new file mode 100644 index 0000000000..cb68998c36 --- /dev/null +++ b/packages/cli/src/cmds/query/lib/openReadOnly.ts @@ -0,0 +1,18 @@ +import { existsSync } from 'fs'; + +import sqlite3 from 'better-sqlite3'; + +import { queryDbPath } from '../db/path'; + +// Open the query DB read-only for the given appmap directory. +// Errors if the DB doesn't exist, prompting the user to run `appmap index`. +// `dbPath` overrides path derivation (used by tests and the --query-db flag). +export function openReadOnly(appmapDir: string, dbPath?: string): sqlite3.Database { + const path = dbPath ?? queryDbPath(appmapDir); + if (!existsSync(path)) { + throw new Error( + `query DB not found at ${path}\nRun \`appmap index\` first to build it.` + ); + } + return sqlite3(path, { readonly: true }); +} diff --git a/packages/cli/src/cmds/query/lib/page.ts b/packages/cli/src/cmds/query/lib/page.ts new file mode 100644 index 0000000000..342213becd --- /dev/null +++ b/packages/cli/src/cmds/query/lib/page.ts @@ -0,0 +1,56 @@ +import sqlite3 from 'better-sqlite3'; + +// Standard wrapper for any list-returning query. `rows` is the slice +// the caller asked for; `total` is the count of all matching rows +// (ignoring limit/offset). JSON consumers get truncation info for free; +// text renderers append a footer when total > offset + rows.length. 
+export interface Page { + rows: T[]; + total: number; + limit: number; + offset: number; +} + +// Default limit applied when filter.limit is undefined. Pass 0 or +// negative to mean unbounded. +export const DEFAULT_PAGE_LIMIT = 20; + +// Run a base query and return its paginated result + total count. +// `baseSql` is the SELECT (with WHERE / ORDER BY) without LIMIT/OFFSET; +// the function wraps it in a COUNT subquery for the total. Limit/offset +// are inlined as numbers — they come from typed filter fields, not +// arbitrary user input, so this is safe. +export function paginate( + db: sqlite3.Database, + baseSql: string, + params: readonly (string | number)[], + options: { limit?: number; offset?: number } = {} +): Page { + const limit = options.limit ?? DEFAULT_PAGE_LIMIT; + const offset = options.offset ?? 0; + + const countSql = `SELECT COUNT(*) AS n FROM (${baseSql})`; + const total = (db.prepare(countSql).get(...params) as { n: number }).n; + + let rowsSql = baseSql; + if (limit > 0) { + rowsSql += ` LIMIT ${limit | 0} OFFSET ${offset | 0}`; + } else if (offset > 0) { + rowsSql += ` LIMIT -1 OFFSET ${offset | 0}`; + } + const rows = db.prepare(rowsSql).all(...params) as T[]; + + return { rows, total, limit, offset }; +} + +// Format the truncation footer for text-mode renderers. Returns null +// when nothing was clipped (so the caller can choose whether to print +// it at all). 
+export function truncationFooter(page: Page): string | null { + const shown = page.rows.length; + if (shown === 0 && page.total === 0) return null; + const last = page.offset + shown; + if (last >= page.total) return null; + const first = page.offset + 1; + return `(showing ${first}–${last} of ${page.total}; use --limit / --offset to page)`; +} diff --git a/packages/cli/src/cmds/query/lib/parseFilter.ts b/packages/cli/src/cmds/query/lib/parseFilter.ts new file mode 100644 index 0000000000..00ea5eb5d8 --- /dev/null +++ b/packages/cli/src/cmds/query/lib/parseFilter.ts @@ -0,0 +1,76 @@ +// Shared parsers for CLI filter flags. +// +// These convert user-typed strings into canonical structured forms that +// query functions can apply uniformly. Verbs share these so a flag like +// --since "7d ago" means the same thing everywhere. + +export type Comparator = '=' | '>=' | '<=' | '>' | '<'; + +export interface NumberFilter { + op: Comparator; + value: number; +} + +// Parse a status filter: "500", "=500", ">=500", ">500", "<400", "<=399". +// Whitespace around the operator is tolerated. +export function parseStatus(input: string): NumberFilter { + const m = input.trim().match(/^(>=|<=|>|<|=)?\s*(\d+)$/); + if (!m) throw new Error(`invalid --status filter: ${input}`); + const op = (m[1] ?? '=') as Comparator; + return { op, value: Number.parseInt(m[2], 10) }; +} + +// Parse a duration filter: ">1s", ">=500ms", "<2m". The numeric form is +// returned in milliseconds for direct comparison against elapsed_ms. +export function parseDuration(input: string): NumberFilter { + const m = input.trim().match(/^(>=|<=|>|<|=)?\s*(\d+(?:\.\d+)?)\s*(ms|s|m|h)?$/); + if (!m) throw new Error(`invalid --duration filter: ${input}`); + const op = (m[1] ?? '=') as Comparator; + const n = Number.parseFloat(m[2]); + const unit = m[3] ?? 'ms'; + const ms = + unit === 'ms' + ? n + : unit === 's' + ? n * 1000 + : unit === 'm' + ? 
n * 60_000 + : n * 3_600_000; + return { op, value: ms }; +} + +// Parse a time spec: ISO date/timestamp ("2026-04-29", "2026-04-29T14:21:08Z") +// or a relative offset ("7d ago", "30m ago", "2h ago", "45s ago"). Returns +// an ISO 8601 string suitable for direct text comparison against the +// timestamp column (which is also ISO 8601). +export function parseTime(input: string, now: Date = new Date()): string { + const trimmed = input.trim(); + + const rel = trimmed.match(/^(\d+)\s*([smhd])\s+ago$/); + if (rel) { + const n = Number.parseInt(rel[1], 10); + const unit = rel[2]; + const ms = + unit === 's' + ? n * 1_000 + : unit === 'm' + ? n * 60_000 + : unit === 'h' + ? n * 3_600_000 + : n * 86_400_000; + return new Date(now.getTime() - ms).toISOString(); + } + + const ms = Date.parse(trimmed); + if (Number.isNaN(ms)) throw new Error(`invalid time filter: ${input}`); + return new Date(ms).toISOString(); +} + +// Apply a NumberFilter as a WHERE-clause fragment. Returns the SQL fragment +// (with a `?` placeholder) and the value to bind. Throws on unknown op. +export function numberFilterSql( + column: string, + filter: NumberFilter +): { sql: string; value: number } { + return { sql: `${column} ${filter.op} ?`, value: filter.value }; +} diff --git a/packages/cli/src/cmds/query/lib/scope.ts b/packages/cli/src/cmds/query/lib/scope.ts new file mode 100644 index 0000000000..4687984149 --- /dev/null +++ b/packages/cli/src/cmds/query/lib/scope.ts @@ -0,0 +1,439 @@ +import type { NumberFilter } from './parseFilter'; + +// Shared recording-scope filter shape — the subset of CLI filter flags that +// constrain *which recordings* a verb considers (as opposed to row-level +// filters like --duration or --label that constrain rows within a recording). +// +// Verb-specific filter types should extend this; helpers in this file accept +// any shape with the relevant fields. 
+export interface RecordingScope { + branch?: string; + commit?: string; + since?: string; + until?: string; + appmap?: string; + // HTTP-level filters that scope to "the recording must contain ≥1 + // matching server request": + route?: string; // "POST /orders" or "/orders" + status?: NumberFilter; +} + +export interface RouteSpec { + method?: string; + path: string; +} + +const HTTP_METHODS = /^(GET|POST|PUT|PATCH|DELETE|HEAD|OPTIONS)\s+(.+)$/i; + +export function parseRoute(s: string): RouteSpec { + const m = s.match(HTTP_METHODS); + if (m) return { method: m[1].toUpperCase(), path: m[2] }; + return { path: s }; +} + +// Returns SQL clauses (and params) that match an --appmap reference against +// the appmaps table, accepting any of: +// - exact appmap.name +// - source_path ending in `.appmap.json` (Unix or Windows sep) +// - source_path ending in `` (non-`.appmap.json` stores) +// Used by `tree` and other single-resolve operations that want +// exact-match-or-fail (with an ambiguity error). For find/list contexts +// where lenient matching is the right UX, use appmapLikeClause. +export function appmapRefClause( + ref: string, + alias: string +): { sql: string; params: string[] } { + return { + sql: `(${alias}.name = ? + OR ${alias}.source_path GLOB '*[/\\\\]' || ? || '.appmap.json' + OR ${alias}.source_path GLOB '*[/\\\\]' || ?)`, + params: [ref, ref, ref], + }; +} + +// Lenient appmap match for find/list contexts: ref is a substring of +// either the human-readable name or the source_path. SQLite LIKE is +// case-insensitive for ASCII by default. Used by appmapWhere so all the +// find_* tools surface a recording when any reasonable word from its +// name or path is provided. +export function appmapLikeClause( + ref: string, + alias: string +): { sql: string; params: string[] } { + const like = `%${ref}%`; + return { + sql: `(${alias}.name LIKE ? 
OR ${alias}.source_path LIKE ?)`, + params: [like, like], + }; +} + +export interface Clauses { + where: string[]; + params: (string | number)[]; +} + +// Recording-level filters that go on the appmaps row directly. +export function appmapWhere(filter: RecordingScope, alias: string): Clauses { + const where: string[] = []; + const params: (string | number)[] = []; + if (filter.branch) { + where.push(`${alias}.git_branch = ?`); + params.push(filter.branch); + } + if (filter.commit) { + where.push(`${alias}.git_commit = ?`); + params.push(filter.commit); + } + if (filter.since) { + where.push(`${alias}.timestamp >= ?`); + params.push(filter.since); + } + if (filter.until) { + where.push(`${alias}.timestamp <= ?`); + params.push(filter.until); + } + if (filter.appmap) { + const ref = appmapLikeClause(filter.appmap, alias); + where.push(ref.sql); + params.push(...ref.params); + } + return { where, params }; +} + +// HTTP-level filters that scope to "the recording must contain ≥1 matching +// server request." Used as a subquery for non-request finds. The alias +// defaults to `h`; override when emitting clauses inside a nested subquery +// where the outer alias is taken. 
+export function httpScopeClauses(filter: RecordingScope, alias = 'h'): Clauses { + const where: string[] = []; + const params: (string | number)[] = []; + if (filter.route) { + const route = parseRoute(filter.route); + where.push(`COALESCE(${alias}.normalized_path, ${alias}.path) LIKE ?`); + params.push(`%${route.path}%`); + if (route.method) { + where.push(`${alias}.method = ?`); + params.push(route.method); + } + } + if (filter.status) { + where.push(`${alias}.status_code ${filter.status.op} ?`); + params.push(filter.status.value); + } + return { where, params }; +} + +// Parse a --class flag value into (package, class, method) components, per +// V3's accepted forms: +// short : "UserRepository" / "Cls1::Cls2" +// class+method : "UserRepository#findById" / "Cls1::Cls2#m" +// pkg+class : "app/services/UserRepository" +// full fqid : "app/services/UserRepository#findById" +// "app/Outer::Inner.parse" (static method) +// +// Class chains use `::` as the inner separator (canonical V3, also Ruby / +// C++ idiomatic); we treat that as part of the class name, not a split +// point. The method separator (`#` or `.`) only applies when the input is +// in canonical form (contains `/`) or uses `#` explicitly. A short-form +// dot like "org.example.Foo" is kept whole — Java/Python dot-form class +// names match the defined_class fallback, not the normalized columns. +export interface ClassMethodParts { + package?: string; + class?: string; + method?: string; +} + +export function parseClassRef(input: string): ClassMethodParts { + const slashIdx = input.lastIndexOf('/'); + + if (slashIdx < 0) { + // Short form. `#` is the only unambiguous method separator here; `.` + // is left in place because it could be part of a Java/Python class name + // ("org.example.Foo") rather than a method separator. 
+ const hashIdx = input.lastIndexOf('#'); + if (hashIdx > 0) { + return { + class: input.slice(0, hashIdx) || undefined, + method: input.slice(hashIdx + 1), + }; + } + return { class: input.length > 0 ? input : undefined }; + } + + // Canonical fqid (slash present). The method separator is the rightmost + // `#` or `.` AFTER the last `/`. (Inner `.` characters in the package + // path don't apply — packages are slash-separated.) + let methodSepIdx = -1; + for (let i = input.length - 1; i > slashIdx; i--) { + const ch = input[i]; + if (ch === '#' || ch === '.') { + methodSepIdx = i; + break; + } + } + + let classPart = input; + let methodPart: string | undefined; + if (methodSepIdx > 0) { + classPart = input.slice(0, methodSepIdx); + methodPart = input.slice(methodSepIdx + 1); + } + + const classSlashIdx = classPart.lastIndexOf('/'); + const pkg = classPart.slice(0, classSlashIdx); + const cls = classPart.slice(classSlashIdx + 1); + return { + package: pkg.length > 0 ? pkg : undefined, + class: cls.length > 0 ? cls : undefined, + method: methodPart, + }; +} + +// Match a --class input against function_calls via the normalized +// code_objects columns. The `class` part of the user input is interpreted +// as either: +// - A `::`-separated chain ("Outer::Inner") → match `classes` exactly +// (canonical JSON form). +// - A single-segment short form ("Cipher") → match `leaf_class` +// exactly (hits any chain ending in that class — top-level Cipher +// and OpenSSL::Cipher both qualify). +// Adding a package narrows further: `... AND package = ?`. +// +// Falls back to function_calls.defined_class for rows that aren't linked +// to a code_object (sparse classMap recordings). The fallback recognizes +// `.` (Java/Python), `::` (Ruby/C++) as suffix separators for short-form +// matching against the raw event field. 
+export function classFilterClauses(input: string, fcAlias: string): Clauses { + const parts = parseClassRef(input); + if (!parts.class) { + return { where: ['1 = 0'], params: [] }; + } + + const coWhere: string[] = []; + const coParams: (string | number)[] = []; + if (parts.class.includes('::')) { + // Full chain — match classes JSON exactly. + coWhere.push('classes = ?'); + coParams.push(JSON.stringify(parts.class.split('::'))); + } else { + // Single segment — match the leaf. + coWhere.push('leaf_class = ?'); + coParams.push(parts.class); + } + if (parts.package) { + coWhere.push('package = ?'); + coParams.push(parts.package); + } + if (parts.method) { + coWhere.push('method = ?'); + coParams.push(parts.method); + } + + // Fallback for unlinked function_calls. Includes a substring match on + // defined_class so a search like "Repo" matches "UserRepository" even + // when the row isn't linked to a code_object. + const fbWhere: string[] = [ + `${fcAlias}.defined_class = ?`, + `${fcAlias}.defined_class LIKE '%.' || ?`, + `${fcAlias}.defined_class LIKE '%::' || ?`, + `${fcAlias}.defined_class LIKE ?`, + ]; + const fbParams: (string | number)[] = [ + parts.class, + parts.class, + parts.class, + `%${parts.class}%`, + ]; + + // Lenient leaf_class substring lookup against code_objects. Applied + // only when the user supplied a SHORT form (no package, no chain) — + // a canonical input like "org/example/UserRepository#findById" is + // explicit disambiguation and should match strictly. So short-form + // "Repo" finds "UserRepository", but full canonical doesn't broaden. 
+ const isShortForm = !parts.package && !parts.class.includes('::'); + if (!isShortForm) { + return { + where: [ + `((${fcAlias}.code_object_id IN ( + SELECT id FROM code_objects WHERE ${coWhere.join(' AND ')} + )) + OR (${fcAlias}.code_object_id IS NULL AND (${fbWhere.join(' OR ')})))`, + ], + params: [...coParams, ...fbParams], + }; + } + + return { + where: [ + `((${fcAlias}.code_object_id IN ( + SELECT id FROM code_objects WHERE ${coWhere.join(' AND ')} + )) + OR (${fcAlias}.code_object_id IN ( + SELECT id FROM code_objects WHERE leaf_class LIKE ? + )) + OR (${fcAlias}.code_object_id IS NULL AND (${fbWhere.join(' OR ')})))`, + ], + params: [...coParams, `%${parts.class}%`, ...fbParams], + }; +} + +// Match a --method input against function_calls via the normalized +// code_objects.method column, with a fallback to function_calls.method_id +// for rows that aren't linked to a code_object. +export function methodFilterClauses(input: string, fcAlias: string): Clauses { + const like = `%${input}%`; + return { + where: [ + `(${fcAlias}.code_object_id IN ( + SELECT id FROM code_objects WHERE method = ? OR method LIKE ? + ) + OR (${fcAlias}.code_object_id IS NULL + AND (${fcAlias}.method_id = ? OR ${fcAlias}.method_id LIKE ?)))`, + ], + params: [input, like, input, like], + }; +} + +// Match a --class input against a sql_query by following its +// parent_event_id back to the function_call that issued the query, then +// looking up that call's code_object — same canonical path as +// classFilterClauses uses for direct function_calls. Falls back to the +// row's denormalized caller_class string when the parent function_call +// has no code_object link. +// +// When the user supplies --package, only the code_object path is used +// (the caller_class string has no package component to match against). 
+export function sqlCallerClassClauses(input: string, qAlias: string): Clauses { + const parts = parseClassRef(input); + if (!parts.class) { + return { where: ['1 = 0'], params: [] }; + } + + const coWhere: string[] = []; + const coParams: (string | number)[] = []; + if (parts.class.includes('::')) { + coWhere.push('classes = ?'); + coParams.push(JSON.stringify(parts.class.split('::'))); + } else { + coWhere.push('leaf_class = ?'); + coParams.push(parts.class); + } + if (parts.package) { + coWhere.push('package = ?'); + coParams.push(parts.package); + } + if (parts.method) { + coWhere.push('method = ?'); + coParams.push(parts.method); + } + + const coClause = `${qAlias}.parent_event_id IN ( + SELECT fc.event_id FROM function_calls fc + WHERE fc.appmap_id = ${qAlias}.appmap_id + AND fc.code_object_id IN ( + SELECT id FROM code_objects WHERE ${coWhere.join(' AND ')} + ) + )`; + + // If the user gave an explicit package, only the code_object path can + // honor it (caller_class has no package column to match). + if (parts.package) { + return { where: [coClause], params: coParams }; + } + + // Fallback: match the row's raw caller_class with suffix-aware logic + // plus a generic substring fallback so "Repo" finds "UserRepository". + const fbConditions: string[] = [ + `${qAlias}.caller_class = ?`, + `${qAlias}.caller_class LIKE '%.' || ?`, + `${qAlias}.caller_class LIKE '%::' || ?`, + `${qAlias}.caller_class LIKE ?`, + ]; + const fbParams: (string | number)[] = [ + parts.class, + parts.class, + parts.class, + `%${parts.class}%`, + ]; + const fbParts: string[] = [`(${fbConditions.join(' OR ')})`]; + if (parts.method) { + fbParts.push(`(${qAlias}.caller_method = ? OR ${qAlias}.caller_method LIKE ?)`); + fbParams.push(parts.method, `%${parts.method}%`); + } + + // Substring leaf_class lookup against code_objects. As in + // classFilterClauses, only applied for short-form inputs ("Repo") + // so canonical fqids stay strict. 
(parts.package is filtered out + // earlier; the short-form check here is just on `::`.) + if (parts.class.includes('::')) { + return { + where: [`(${coClause} OR (${fbParts.join(' AND ')}))`], + params: [...coParams, ...fbParams], + }; + } + + const looseLeafClause = `${qAlias}.parent_event_id IN ( + SELECT fc.event_id FROM function_calls fc + WHERE fc.appmap_id = ${qAlias}.appmap_id + AND fc.code_object_id IN ( + SELECT id FROM code_objects WHERE leaf_class LIKE ? + ) + )`; + + return { + where: [`(${coClause} OR ${looseLeafClause} OR (${fbParts.join(' AND ')}))`], + params: [...coParams, `%${parts.class}%`, ...fbParams], + }; +} + +// Match a --method input against a sql_query via its parent function_call's +// code_object.method, with a fallback to caller_method for unlinked +// parents. +export function sqlCallerMethodClauses(input: string, qAlias: string): Clauses { + const like = `%${input}%`; + return { + where: [ + `(${qAlias}.parent_event_id IN ( + SELECT fc.event_id FROM function_calls fc + WHERE fc.appmap_id = ${qAlias}.appmap_id + AND fc.code_object_id IN ( + SELECT id FROM code_objects WHERE method = ? OR method LIKE ? + ) + ) + OR ${qAlias}.caller_method = ? + OR ${qAlias}.caller_method LIKE ?)`, + ], + params: [input, like, input, like], + }; +} + +// Build ".appmap_id IN (SELECT a.id …)" for tables where filtering at +// the appmap-id level is the right shape (sql_queries, function_calls, +// exceptions, http_client_requests). Returns null if no recording-level +// filtering is needed. 
+export function appmapIdScope( + filter: RecordingScope, + rowAlias: string +): { sql: string; params: (string | number)[] } | null { + const a = appmapWhere(filter, 'a'); + const h = httpScopeClauses(filter); + if (a.where.length === 0 && h.where.length === 0) return null; + + if (h.where.length > 0) { + const all = [...a.where, ...h.where].join(' AND '); + return { + sql: `${rowAlias}.appmap_id IN ( + SELECT DISTINCT a.id FROM appmaps a + JOIN http_requests h ON h.appmap_id = a.id + WHERE ${all} + )`, + params: [...a.params, ...h.params], + }; + } + return { + sql: `${rowAlias}.appmap_id IN ( + SELECT a.id FROM appmaps a WHERE ${a.where.join(' AND ')} + )`, + params: a.params, + }; +} diff --git a/packages/cli/src/cmds/query/lib/treeRender.ts b/packages/cli/src/cmds/query/lib/treeRender.ts new file mode 100644 index 0000000000..77dfb4532f --- /dev/null +++ b/packages/cli/src/cmds/query/lib/treeRender.ts @@ -0,0 +1,149 @@ +import { + ExceptionNode, + FunctionNode, + HttpClientNode, + HttpServerNode, + LogNode, + SqlNode, + TreeNode, + TreeSummary, +} from '../queries/tree'; +import { formatCount, formatMs, formatTable } from './format'; +import { projectLogMessage } from './logMessage'; + +const INDENT = ' '; + +// Render the full tree. Each event is one line; depth maps to indentation. +// Format mirrors V3: +// HTTP→ POST /orders → HTTP 500 [520ms] +// CALL app/.../OrdersController#create [519ms] +// SQL INSERT INTO orders (...) 
[14ms] +// EXC IntegrityError: duplicate key +// HTTP← GET https://api.example/v1 → 200 [40ms] +export function renderTree(nodes: readonly TreeNode[]): string { + return nodes.map(renderTreeLine).join('\n'); +} + +function renderTreeLine(node: TreeNode): string { + const indent = INDENT.repeat(node.depth); + switch (node.kind) { + case 'http_server': + return `${indent}HTTP→ ${renderHttpServer(node)}`; + case 'http_client': + return `${indent}HTTP← ${renderHttpClient(node)}`; + case 'function': + return `${indent}CALL ${renderFunction(node)}`; + case 'sql': + return `${indent}SQL ${renderSql(node)}`; + case 'exception': + return `${indent}EXC ${renderException(node)}`; + case 'log': + return `${indent}LOG ${renderLog(node)}`; + } +} + +function renderException(n: ExceptionNode): string { + const where = n.path ? ` @ ${n.path}${n.lineno != null ? `:${n.lineno}` : ''}` : ''; + return `${n.exception_class}${n.message ? `: ${n.message}` : ''}${where}`; +} + +function renderHttpServer(n: HttpServerNode): string { + return `${n.method} ${n.route} → HTTP ${n.status_code} ${bracket(n.elapsed_ms)}`.trim(); +} + +function renderHttpClient(n: HttpClientNode): string { + const status = n.status_code != null ? ` → ${n.status_code}` : ''; + return `${n.method} ${n.url}${status} ${bracket(n.elapsed_ms)}`.trim(); +} + +function renderFunction(n: FunctionNode): string { + const id = n.fqid ?? `${n.defined_class}${n.is_static ? '.' : '#'}${n.method_id}`; + const ret = n.return_value != null ? ` → ${n.return_value}` : ''; + return `${id} ${bracket(n.elapsed_ms)}${ret}`.trim(); +} + +function renderSql(n: SqlNode): string { + return `${truncate(n.sql_text, 120)} ${bracket(n.elapsed_ms)}`.trim(); +} + +function renderLog(n: LogNode): string { + const message = projectLogMessage(n.parameters_json, n.return_value); + const prefix = `${n.logger}.${n.method_id}`; + return message ? 
`${prefix}: ${truncate(message, 120)}` : prefix; +} + +function bracket(ms: number | null): string { + return ms == null ? '' : `[${formatMs(ms)}]`; +} + +function truncate(s: string, n: number): string { + return s.length <= n ? s : s.slice(0, n - 1) + '…'; +} + +// Flat rendering — used by --filter=sql and --filter=http (no indentation, +// just the matching events in order). +export function renderFlat(nodes: readonly TreeNode[]): string { + return nodes + .map((n) => { + switch (n.kind) { + case 'http_server': + return `HTTP→ ${renderHttpServer(n)}`; + case 'http_client': + return `HTTP← ${renderHttpClient(n)}`; + case 'sql': + return `SQL ${renderSql(n)}`; + case 'function': + return `CALL ${renderFunction(n)}`; + case 'exception': + return `EXC ${renderException(n)}`; + case 'log': + return `LOG ${renderLog(n)}`; + } + }) + .join('\n'); +} + +// Summary format: per V3, a one-screen overview without the tree. +// ENTRY POST /orders → 500 [520ms] +// SQL 3 queries, 19ms total +// EXCEPTION IntegrityError +// LABELS log×2, dao×3, security.idempotency×1 +export function renderSummary(s: TreeSummary): string { + const rows: [string, string][] = []; + + if (s.entry) { + rows.push([ + 'ENTRY', + `${s.entry.method} ${s.entry.route} → ${s.entry.status_code} ${bracket(s.entry.elapsed_ms)}`, + ]); + } + + if (s.sql.count > 0) { + rows.push([ + 'SQL', + `${formatCount(s.sql.count)} quer${s.sql.count === 1 ? 'y' : 'ies'}, ${formatMs(s.sql.total_ms)} total`, + ]); + } + + if (s.http_client.count > 0) { + rows.push([ + 'HTTP→OUT', + `${formatCount(s.http_client.count)} request${s.http_client.count === 1 ? '' : 's'}, ${formatMs(s.http_client.total_ms)} total`, + ]); + } + + for (const e of s.exceptions) { + const where = e.path ? ` @ ${e.path}${e.lineno != null ? `:${e.lineno}` : ''}` : ''; + rows.push([ + 'EXCEPTION', + e.exception_class + (e.message ? 
`: ${e.message}` : '') + where, + ]); + } + + if (s.labels.length > 0) { + const text = s.labels.map((l) => `${l.label}×${l.count}`).join(', '); + rows.push(['LABELS', text]); + } + + return formatTable(['', ''], rows); +} diff --git a/packages/cli/src/cmds/query/queries/compare.ts b/packages/cli/src/cmds/query/queries/compare.ts new file mode 100644 index 0000000000..5ab2c45765 --- /dev/null +++ b/packages/cli/src/cmds/query/queries/compare.ts @@ -0,0 +1,121 @@ +import sqlite3 from 'better-sqlite3'; + +import { DEFAULT_PAGE_LIMIT, Page } from '../lib/page'; +import { endpoints } from './endpoints'; + +export interface CompareRow { + method: string; + route: string; + a_count: number; + a_p95_ms: number | null; + b_count: number; + b_p95_ms: number | null; + // b_p95 / a_p95 — undefined when either side has no measured durations. + delta: number | null; +} + +export type CompareSort = 'delta' | 'p95-a' | 'p95-b'; + +export interface CompareFilter { + branch_a: string; + branch_b: string; + since?: string; + until?: string; + sort?: CompareSort; + limit?: number; + offset?: number; +} + +// Computes per-route p95 for two branches and merges the results, exposing +// delta = b_p95 / a_p95 alongside both sides' counts and p95s. Implementation +// reuses endpoints() (which already does the SQL window-function p95) so the +// p95 semantics match the endpoints verb exactly. +export function compare( + db: sqlite3.Database, + filter: CompareFilter +): Page { + // Pull all endpoint rows for each branch (limit: 0 = unbounded). The + // outer pagination is on the merged compare rows, not on either side + // individually. 
+ const a = endpoints(db, { + branch: filter.branch_a, + since: filter.since, + until: filter.until, + limit: 0, + }).rows; + const b = endpoints(db, { + branch: filter.branch_b, + since: filter.since, + until: filter.until, + limit: 0, + }).rows; + + const rows = new Map(); + const key = (method: string, route: string) => `${method}\t${route}`; + + for (const r of a) { + rows.set(key(r.method, r.route), { + method: r.method, + route: r.route, + a_count: r.count, + a_p95_ms: r.p95_ms, + b_count: 0, + b_p95_ms: null, + delta: null, + }); + } + for (const r of b) { + const k = key(r.method, r.route); + const existing = rows.get(k); + if (existing) { + existing.b_count = r.count; + existing.b_p95_ms = r.p95_ms; + } else { + rows.set(k, { + method: r.method, + route: r.route, + a_count: 0, + a_p95_ms: null, + b_count: r.count, + b_p95_ms: r.p95_ms, + delta: null, + }); + } + } + + for (const row of rows.values()) { + if (row.a_p95_ms != null && row.a_p95_ms > 0 && row.b_p95_ms != null) { + row.delta = row.b_p95_ms / row.a_p95_ms; + } + } + + const result = [...rows.values()]; + const sortKey: CompareSort = filter.sort ?? 'delta'; + result.sort(comparators[sortKey]); + + const limit = filter.limit ?? DEFAULT_PAGE_LIMIT; + const offset = filter.offset ?? 0; + const total = result.length; + const sliced = limit > 0 ? result.slice(offset, offset + limit) : result.slice(offset); + return { rows: sliced, total, limit, offset }; +} + +// "delta" sorts by absolute deviation from 1× — biggest changes (in +// either direction) at the top. "p95-a" / "p95-b" sort by the named side +// descending. All keys put nulls last. +function descNullsLast(a: number | null, b: number | null): number { + if (a == null && b == null) return 0; + if (a == null) return 1; + if (b == null) return -1; + return b - a; +} + +const comparators: Record number> = { + delta: (x, y) => { + const xd = x.delta == null ? null : Math.abs(Math.log(x.delta)); + const yd = y.delta == null ? 
null : Math.abs(Math.log(y.delta)); + return descNullsLast(xd, yd); + }, + 'p95-a': (x, y) => descNullsLast(x.a_p95_ms, y.a_p95_ms), + 'p95-b': (x, y) => descNullsLast(x.b_p95_ms, y.b_p95_ms), +}; diff --git a/packages/cli/src/cmds/query/queries/endpoints.ts b/packages/cli/src/cmds/query/queries/endpoints.ts new file mode 100644 index 0000000000..5105e7ace2 --- /dev/null +++ b/packages/cli/src/cmds/query/queries/endpoints.ts @@ -0,0 +1,150 @@ +import sqlite3 from 'better-sqlite3'; + +import { Page, paginate } from '../lib/page'; +import type { Comparator, NumberFilter } from '../lib/parseFilter'; + +export interface EndpointRow { + method: string; + route: string; + count: number; + avg_ms: number | null; + p95_ms: number | null; + err_pct: number; +} + +export type EndpointSort = 'count' | 'avg' | 'p95' | 'err'; + +export interface EndpointsFilter { + // ISO timestamp (use parseTime to build). + since?: string; + until?: string; + branch?: string; + // --status N (or comparator). Acts as a HAVING-style filter on routes: + // a route is shown iff at least one of its requests matches. Counts / + // averages / p95 / err_pct remain over all of that route's requests. + status?: NumberFilter; + sort?: EndpointSort; + limit?: number; + offset?: number; +} + +// err_pct is fixed at "% of requests with status >= 500" (server errors), +// independent of any --status filter. +const ERR_THRESHOLD = 500; + +const SORT_COLUMNS: Record = { + count: 'count', + avg: 'avg_ms', + p95: 'p95_ms', + err: 'err_pct', +}; + +const VALID_OPS = new Set(['=', '>=', '<=', '>', '<']); + +// Aggregation runs entirely in SQL. Per route: +// count COUNT(*) over the partition +// avg_ms AVG(elapsed_ms) over non-null values +// p95_ms elapsed_ms at rank ceil(0.95 * measured_count) within +// the partition (computed with a ROW_NUMBER() window). 
+// err_pct 100 * SUM(status >= 500) / COUNT(*) +// --status acts as a HAVING filter: route is shown iff ≥1 of its rows +// matches; aggregates remain over all rows. +// +// SQL injection surface: filter.sort and filter.status.op are validated +// against fixed allow-lists before being interpolated. +export function endpoints( + db: sqlite3.Database, + filter: EndpointsFilter = {} +): Page { + const where: string[] = []; + const params: (string | number)[] = []; + + if (filter.branch) { + where.push('a.git_branch = ?'); + params.push(filter.branch); + } + // --since/--until filter on the recording's timestamp (a.timestamp) — + // the canonical recording-level attribute. find verbs use the same + // column. + if (filter.since) { + where.push('a.timestamp >= ?'); + params.push(filter.since); + } + if (filter.until) { + where.push('a.timestamp <= ?'); + params.push(filter.until); + } + const whereSql = where.length > 0 ? `WHERE ${where.join(' AND ')}` : ''; + + let havingSql = ''; + if (filter.status) { + if (!VALID_OPS.has(filter.status.op)) { + throw new Error(`invalid status op: ${filter.status.op}`); + } + havingSql = `HAVING SUM(CASE WHEN status_code ${filter.status.op} ? THEN 1 ELSE 0 END) > 0`; + params.push(filter.status.value); + } + + const sortKey = filter.sort ?? 
'count'; + if (!(sortKey in SORT_COLUMNS)) { + throw new Error(`invalid sort key: ${sortKey}`); + } + const sortColumn = SORT_COLUMNS[sortKey]; + + const sql = ` + WITH ranked AS ( + SELECT + h.method AS method, + COALESCE(h.normalized_path, h.path) AS route, + h.elapsed_ms AS elapsed_ms, + h.status_code AS status_code, + ROW_NUMBER() OVER ( + PARTITION BY h.method, COALESCE(h.normalized_path, h.path) + ORDER BY h.elapsed_ms NULLS LAST + ) AS rn, + SUM(CASE WHEN h.elapsed_ms IS NOT NULL THEN 1 ELSE 0 END) OVER ( + PARTITION BY h.method, COALESCE(h.normalized_path, h.path) + ) AS measured_count + FROM http_requests h + JOIN appmaps a ON a.id = h.appmap_id + ${whereSql} + ) + SELECT + method, + route, + COUNT(*) AS count, + AVG(CASE WHEN elapsed_ms IS NOT NULL THEN elapsed_ms END) AS avg_ms, + MAX(CASE + WHEN measured_count > 0 + AND rn = (measured_count * 19 + 19) / 20 + THEN elapsed_ms + END) AS p95_ms, + CAST(SUM(CASE WHEN status_code >= ${ERR_THRESHOLD} THEN 1 ELSE 0 END) AS REAL) + * 100.0 / COUNT(*) AS err_pct + FROM ranked + GROUP BY method, route + ${havingSql} + ORDER BY ${sortColumn} DESC NULLS LAST, method, route + `; + + const page = paginate<{ + method: string; + route: string; + count: number; + avg_ms: number | null; + p95_ms: number | null; + err_pct: number | null; + }>(db, sql, params, { limit: filter.limit, offset: filter.offset }); + + return { + ...page, + rows: page.rows.map((r) => ({ + method: r.method, + route: r.route, + count: r.count, + avg_ms: r.avg_ms, + p95_ms: r.p95_ms, + err_pct: r.err_pct ?? 
0, + })), + }; +} diff --git a/packages/cli/src/cmds/query/queries/find.ts b/packages/cli/src/cmds/query/queries/find.ts new file mode 100644 index 0000000000..d84f8ebf1d --- /dev/null +++ b/packages/cli/src/cmds/query/queries/find.ts @@ -0,0 +1,477 @@ +import sqlite3 from 'better-sqlite3'; + +import { projectLogMessage } from '../lib/logMessage'; +import { Page, paginate } from '../lib/page'; +import type { NumberFilter } from '../lib/parseFilter'; +import { + appmapIdScope, + appmapWhere, + classFilterClauses, + httpScopeClauses, + methodFilterClauses, + parseRoute, + sqlCallerClassClauses, + sqlCallerMethodClauses, +} from '../lib/scope'; + +export type FindType = 'appmaps' | 'requests' | 'queries' | 'calls' | 'exceptions' | 'logs'; + +export interface FindFilter { + route?: string; // "POST /orders" or "/orders" + className?: string; // --class (TS reserved word workaround) + method?: string; // --method (method_id, not HTTP method) + label?: string; // --label + branch?: string; + commit?: string; + status?: NumberFilter; // --status N / >=N + duration?: NumberFilter; // --duration ">1s" → ms + since?: string; + until?: string; + appmap?: string; // appmap name (or basename of source_path) + table?: string; // SQL table name (find queries) + exception?: string; // exception class (find exceptions) + logger?: string; // --logger (find logs); class of the logging fn + message?: string; // --message (find logs); substring of the log line + withLogs?: number; // --with-logs N (find exceptions); attach N preceding logs + limit?: number; + offset?: number; +} + +export interface FindAppmapRow { + appmap_id: number; + appmap_name: string; + route: string | null; + status_code: number | null; + elapsed_ms: number | null; + sql_count: number; + branch: string | null; + timestamp: string | null; +} + +export interface FindRequestRow { + appmap_name: string; + event_id: number; + method: string; + route: string; + status_code: number; + elapsed_ms: number | null; + 
branch: string | null; +} + +export interface FindQueryRow { + appmap_name: string; + event_id: number; + elapsed_ms: number | null; + caller_class: string | null; + caller_method: string | null; + sql_text: string; +} + +export interface FindCallRow { + appmap_name: string; + event_id: number; + fqid: string | null; + defined_class: string; + method_id: string; + path: string | null; + lineno: number | null; + elapsed_ms: number | null; + parameters_json: string | null; + return_value: string | null; +} + +export interface FindLogRow { + appmap_name: string; + event_id: number; + parent_event_id: number | null; + logger: string; // defined_class of the logging fn + method_id: string; + path: string | null; + lineno: number | null; + // Display-projected message derived from parameters_json / return_value + // (see lib/logMessage.projectLogMessage). '' when nothing usable was + // captured; the raw JSON columns remain for callers who need them. + message: string; + parameters_json: string | null; + return_value: string | null; +} + +export interface FindExceptionRow { + appmap_id: number; + appmap_name: string; + event_id: number; + // Return event id where the throw materialized. with_logs uses this as + // the upper bound so logs that fired *inside* the throwing call are + // included. Null only for the legacy "exceptions on a call event" + // recorder shape. + return_event_id: number | null; + exception_class: string; + message: string | null; + path: string | null; + lineno: number | null; + // Populated only when filter.withLogs > 0. Ordered chronologically + // (oldest first), capped at filter.withLogs entries. Each row has + // event_id < the exception's return_event_id (or event_id if + // return_event_id is null). 
+ recent_logs?: FindLogRow[]; +} + +// --- internal helpers (find-specific) --- + +interface Clauses { + where: string[]; + params: (string | number)[]; +} + +function durationClause(filter: FindFilter, column: string): Clauses { + const where: string[] = []; + const params: (string | number)[] = []; + if (filter.duration) { + where.push(`${column} ${filter.duration.op} ?`); + params.push(filter.duration.value); + } + return { where, params }; +} + +function pageOptions(filter: FindFilter): { limit?: number; offset?: number } { + return { limit: filter.limit, offset: filter.offset }; +} + +// --- per-type queries --- + +export function findAppmaps(db: sqlite3.Database, filter: FindFilter): Page { + const a = appmapWhere(filter, 'a'); + const h = httpScopeClauses(filter, 'h2'); + const requireHttpMatch = h.where.length > 0; + + // Pick a deterministic "sample" request per appmap via a correlated + // subquery: the http_request with the smallest event_id among those + // matching --route / --status (or any request if no http filter). This + // avoids the non-determinism of GROUP BY a.id with non-aggregated h.*. + const innerHttpFilter = requireHttpMatch ? ` AND ${h.where.join(' AND ')}` : ''; + + const whereParts: string[] = [...a.where]; + if (requireHttpMatch) whereParts.push('h.id IS NOT NULL'); + if (filter.duration) { + whereParts.push(`a.elapsed_ms ${filter.duration.op} ?`); + } + const whereSql = whereParts.length > 0 ? `WHERE ${whereParts.join(' AND ')}` : ''; + + // Param order: inner subquery http filters → outer WHERE (appmap, then duration). 
+ const params: (string | number)[] = [...h.params, ...a.params]; + if (filter.duration) params.push(filter.duration.value); + + const sql = ` + SELECT a.id AS appmap_id, + a.name AS appmap_name, + COALESCE(h.normalized_path, h.path) AS route, + h.status_code AS status_code, + COALESCE(h.elapsed_ms, a.elapsed_ms) AS elapsed_ms, + a.sql_query_count AS sql_count, + a.git_branch AS branch, + a.timestamp AS timestamp + FROM appmaps a + LEFT JOIN http_requests h ON h.id = ( + SELECT h2.id FROM http_requests h2 + WHERE h2.appmap_id = a.id${innerHttpFilter} + ORDER BY h2.event_id LIMIT 1 + ) + ${whereSql} + ORDER BY a.timestamp, a.name + `; + return paginate(db, sql, params, pageOptions(filter)); +} + +export function findRequests(db: sqlite3.Database, filter: FindFilter): Page { + const a = appmapWhere(filter, 'a'); + const where: string[] = [...a.where]; + const params: (string | number)[] = [...a.params]; + + if (filter.route) { + const route = parseRoute(filter.route); + where.push(`COALESCE(h.normalized_path, h.path) LIKE ?`); + params.push(`%${route.path}%`); + if (route.method) { + where.push(`h.method = ?`); + params.push(route.method); + } + } + if (filter.status) { + where.push(`h.status_code ${filter.status.op} ?`); + params.push(filter.status.value); + } + const dur = durationClause(filter, 'h.elapsed_ms'); + where.push(...dur.where); + params.push(...dur.params); + + const sql = ` + SELECT a.name AS appmap_name, + h.event_id AS event_id, + h.method AS method, + COALESCE(h.normalized_path, h.path) AS route, + h.status_code AS status_code, + h.elapsed_ms AS elapsed_ms, + a.git_branch AS branch + FROM http_requests h + JOIN appmaps a ON a.id = h.appmap_id + ${where.length > 0 ? 
`WHERE ${where.join(' AND ')}` : ''} + ORDER BY a.source_path, h.event_id + `; + return paginate(db, sql, params, pageOptions(filter)); +} + +export function findQueries(db: sqlite3.Database, filter: FindFilter): Page { + const where: string[] = []; + const params: (string | number)[] = []; + + const scope = appmapIdScope(filter, 'q'); + if (scope) { + where.push(scope.sql); + params.push(...scope.params); + } + + if (filter.table) { + where.push(`q.sql_text LIKE ?`); + params.push(`%${filter.table}%`); + } + if (filter.className) { + // The caller of a sql_query is the function_call referenced by + // q.parent_event_id, which has its own code_object link. Use that + // canonical path; fall back to the denormalized caller_class string + // when the parent function_call has no code_object link. + const c = sqlCallerClassClauses(filter.className, 'q'); + where.push(...c.where); + params.push(...c.params); + } + if (filter.method) { + const m = sqlCallerMethodClauses(filter.method, 'q'); + where.push(...m.where); + params.push(...m.params); + } + const dur = durationClause(filter, 'q.elapsed_ms'); + where.push(...dur.where); + params.push(...dur.params); + + const sql = ` + SELECT a.name AS appmap_name, + q.event_id AS event_id, + q.elapsed_ms AS elapsed_ms, + q.caller_class AS caller_class, + q.caller_method AS caller_method, + q.sql_text AS sql_text + FROM sql_queries q + JOIN appmaps a ON a.id = q.appmap_id + ${where.length > 0 ? 
`WHERE ${where.join(' AND ')}` : ''} + ORDER BY a.source_path, q.event_id + `; + return paginate(db, sql, params, pageOptions(filter)); +} + +export function findCalls(db: sqlite3.Database, filter: FindFilter): Page { + const where: string[] = []; + const params: (string | number)[] = []; + + const scope = appmapIdScope(filter, 'fc'); + if (scope) { + where.push(scope.sql); + params.push(...scope.params); + } + + if (filter.className) { + const c = classFilterClauses(filter.className, 'fc'); + where.push(...c.where); + params.push(...c.params); + } + if (filter.method) { + const m = methodFilterClauses(filter.method, 'fc'); + where.push(...m.where); + params.push(...m.params); + } + if (filter.label) { + where.push( + `fc.code_object_id IN (SELECT l.code_object_id FROM labels l WHERE l.label LIKE ?)` + ); + params.push(`%${filter.label}%`); + } + const dur = durationClause(filter, 'fc.elapsed_ms'); + where.push(...dur.where); + params.push(...dur.params); + + const sql = ` + SELECT a.name AS appmap_name, + fc.event_id AS event_id, + co.fqid AS fqid, + fc.defined_class AS defined_class, + fc.method_id AS method_id, + fc.path AS path, + fc.lineno AS lineno, + fc.elapsed_ms AS elapsed_ms, + fc.parameters_json AS parameters_json, + fc.return_value AS return_value + FROM function_calls fc + JOIN appmaps a ON a.id = fc.appmap_id + LEFT JOIN code_objects co ON co.id = fc.code_object_id + ${where.length > 0 ? `WHERE ${where.join(' AND ')}` : ''} + ORDER BY a.source_path, fc.event_id + `; + return paginate(db, sql, params, pageOptions(filter)); +} + +// Log rows: function_calls whose linked code_object has the canonical +// 'log' label. The label is the contract — it tells the importer to +// capture parameters_json + return_value, and tells us which calls are +// loggers. 
--message is a SQL LIKE substring against both columns; +// false positives (matching a parameter name, a class name, or a JSON +// punctuation byte) are accepted by design and can be tightened in +// post-processing. +export function findLogs(db: sqlite3.Database, filter: FindFilter): Page { + const where: string[] = [ + `fc.code_object_id IN (SELECT l.code_object_id FROM labels l WHERE l.label = 'log')`, + ]; + const params: (string | number)[] = []; + + const scope = appmapIdScope(filter, 'fc'); + if (scope) { + where.push(scope.sql); + params.push(...scope.params); + } + + if (filter.logger) { + const c = classFilterClauses(filter.logger, 'fc'); + where.push(...c.where); + params.push(...c.params); + } + + if (filter.message) { + where.push(`(fc.parameters_json LIKE ? OR fc.return_value LIKE ?)`); + const like = `%${filter.message}%`; + params.push(like, like); + } + + const sql = ` + SELECT a.name AS appmap_name, + fc.event_id AS event_id, + fc.parent_event_id AS parent_event_id, + fc.defined_class AS logger, + fc.method_id AS method_id, + fc.path AS path, + fc.lineno AS lineno, + fc.parameters_json AS parameters_json, + fc.return_value AS return_value + FROM function_calls fc + JOIN appmaps a ON a.id = fc.appmap_id + WHERE ${where.join(' AND ')} + ORDER BY a.source_path, fc.event_id + `; + const page = paginate>(db, sql, params, pageOptions(filter)); + return { + ...page, + rows: page.rows.map((r) => ({ + ...r, + message: projectLogMessage(r.parameters_json, r.return_value), + })), + }; +} + +export function findExceptions( + db: sqlite3.Database, + filter: FindFilter +): Page { + const where: string[] = []; + const params: (string | number)[] = []; + + const scope = appmapIdScope(filter, 'e'); + if (scope) { + where.push(scope.sql); + params.push(...scope.params); + } + + if (filter.exception) { + where.push(`e.exception_class LIKE ?`); + params.push(`%${filter.exception}%`); + } + + const sql = ` + SELECT e.appmap_id AS appmap_id, + a.name AS appmap_name, + 
e.event_id AS event_id, + e.return_event_id AS return_event_id, + e.exception_class AS exception_class, + e.message AS message, + e.path AS path, + e.lineno AS lineno + FROM exceptions e + JOIN appmaps a ON a.id = e.appmap_id + ${where.length > 0 ? `WHERE ${where.join(' AND ')}` : ''} + ORDER BY a.source_path, e.event_id, e.exception_class + `; + const page = paginate(db, sql, params, pageOptions(filter)); + const rows = page.rows; + + // Enrichment: for each exception, attach the last N log calls in the + // same recording with event_id strictly less than the exception's + // return_event_id (the throw point in the event stream). Falling back + // to event_id (the call entry) only handles the legacy recorder shape + // — which produces no preceding logs anyway, since logs inside the + // call have event_id > the call entry. We use event order rather than + // parent_event_id subtree walking to avoid recursive CTEs; this picks + // up logs that ran in the same thread before the throw, which is + // what "what did the app log before it crashed?" asks. + if (filter.withLogs && filter.withLogs > 0) { + const logStmt = db.prepare(` + SELECT a.name AS appmap_name, + fc.event_id AS event_id, + fc.parent_event_id AS parent_event_id, + fc.defined_class AS logger, + fc.method_id AS method_id, + fc.path AS path, + fc.lineno AS lineno, + fc.parameters_json AS parameters_json, + fc.return_value AS return_value + FROM function_calls fc + JOIN appmaps a ON a.id = fc.appmap_id + WHERE fc.appmap_id = ? + AND fc.event_id < ? + AND fc.code_object_id IN ( + SELECT l.code_object_id FROM labels l WHERE l.label = 'log' + ) + ORDER BY fc.event_id DESC + LIMIT ? + `); + for (const row of rows) { + const upperBound = row.return_event_id ?? 
row.event_id; + if (upperBound == null) { + row.recent_logs = []; + continue; + } + const logs = logStmt.all(row.appmap_id, upperBound, filter.withLogs) as Omit[]; + row.recent_logs = logs + .reverse() // chronological + .map((l) => ({ ...l, message: projectLogMessage(l.parameters_json, l.return_value) })); + } + } + + return page; +} + +// Dispatcher. +export function find( + db: sqlite3.Database, + type: FindType, + filter: FindFilter +): Page { + switch (type) { + case 'appmaps': + return findAppmaps(db, filter); + case 'requests': + return findRequests(db, filter); + case 'queries': + return findQueries(db, filter); + case 'calls': + return findCalls(db, filter); + case 'exceptions': + return findExceptions(db, filter); + case 'logs': + return findLogs(db, filter); + } +} diff --git a/packages/cli/src/cmds/query/queries/hotspots.ts b/packages/cli/src/cmds/query/queries/hotspots.ts new file mode 100644 index 0000000000..2d2bd92d7a --- /dev/null +++ b/packages/cli/src/cmds/query/queries/hotspots.ts @@ -0,0 +1,137 @@ +import sqlite3 from 'better-sqlite3'; + +import { Page, paginate } from '../lib/page'; +import { appmapIdScope, classFilterClauses, RecordingScope } from '../lib/scope'; + +export type HotspotType = 'function' | 'sql'; + +export interface HotspotsFilter extends RecordingScope { + type?: HotspotType; + className?: string; // function mode only + limit?: number; + offset?: number; +} + +export interface FunctionHotspotRow { + fqid: string | null; + defined_class: string; + method_id: string; + // Representative source location: one call's path/lineno from the + // aggregated set. Useful for "show me the source of this hotspot" + // without a follow-up lookup. 
+ path: string | null; + lineno: number | null; + calls: number; + total_ms: number; + self_ms: number; +} + +export interface SqlHotspotRow { + count: number; + avg_ms: number; + total_ms: number; + sql_text: string; +} + +// SELF_MS = elapsed_ms - sum of immediate children's elapsed_ms, where a +// child is any function_call / sql_query / http_client_request whose +// parent_event_id points at this call. Computed via a one-pass CTE that +// pre-aggregates per-event child time, so the join is O(rows) regardless of +// nesting depth. +const CHILD_TIME_CTE = ` + WITH child_events AS ( + SELECT appmap_id, parent_event_id, elapsed_ms FROM function_calls + UNION ALL + SELECT appmap_id, parent_event_id, elapsed_ms FROM sql_queries + UNION ALL + SELECT appmap_id, parent_event_id, elapsed_ms FROM http_client_requests + ), + child_time AS ( + SELECT appmap_id, parent_event_id AS event_id, + SUM(COALESCE(elapsed_ms, 0)) AS sum_children + FROM child_events + WHERE parent_event_id IS NOT NULL + GROUP BY appmap_id, parent_event_id + ) +`; + +export function functionHotspots( + db: sqlite3.Database, + filter: HotspotsFilter +): Page { + const where: string[] = []; + const params: (string | number)[] = []; + + const scope = appmapIdScope(filter, 'fc'); + if (scope) { + where.push(scope.sql); + params.push(...scope.params); + } + if (filter.className) { + const c = classFilterClauses(filter.className, 'fc'); + where.push(...c.where); + params.push(...c.params); + } + + const sql = ` + ${CHILD_TIME_CTE} + SELECT + co.fqid AS fqid, + fc.defined_class AS defined_class, + fc.method_id AS method_id, + MIN(fc.path) AS path, + MIN(fc.lineno) AS lineno, + COUNT(*) AS calls, + SUM(COALESCE(fc.elapsed_ms, 0)) AS total_ms, + SUM(COALESCE(fc.elapsed_ms, 0) + - COALESCE(ct.sum_children, 0)) AS self_ms + FROM function_calls fc + LEFT JOIN child_time ct + ON ct.appmap_id = fc.appmap_id AND ct.event_id = fc.event_id + LEFT JOIN code_objects co ON co.id = fc.code_object_id + ${where.length > 0 ? 
`WHERE ${where.join(' AND ')}` : ''} + GROUP BY fc.code_object_id, fc.defined_class, fc.method_id + ORDER BY total_ms DESC + `; + return paginate(db, sql, params, { + limit: filter.limit, + offset: filter.offset, + }); +} + +export function sqlHotspots( + db: sqlite3.Database, + filter: HotspotsFilter +): Page { + const where: string[] = []; + const params: (string | number)[] = []; + + const scope = appmapIdScope(filter, 'q'); + if (scope) { + where.push(scope.sql); + params.push(...scope.params); + } + + const sql = ` + SELECT + COUNT(*) AS count, + AVG(q.elapsed_ms) AS avg_ms, + SUM(COALESCE(q.elapsed_ms, 0)) AS total_ms, + q.sql_text AS sql_text + FROM sql_queries q + ${where.length > 0 ? `WHERE ${where.join(' AND ')}` : ''} + GROUP BY q.sql_text + ORDER BY total_ms DESC + `; + return paginate(db, sql, params, { + limit: filter.limit, + offset: filter.offset, + }); +} + +export function hotspots( + db: sqlite3.Database, + filter: HotspotsFilter +): Page | Page { + return filter.type === 'sql' ? sqlHotspots(db, filter) : functionHotspots(db, filter); +} diff --git a/packages/cli/src/cmds/query/queries/mcp.ts b/packages/cli/src/cmds/query/queries/mcp.ts new file mode 100644 index 0000000000..fdc69067d7 --- /dev/null +++ b/packages/cli/src/cmds/query/queries/mcp.ts @@ -0,0 +1,738 @@ +// MCP (Model Context Protocol) handler. Exposes the V3 query surface as +// MCP tools and resources. +// +// Tool names are LLM-readable and uniquely identify what each tool +// returns: `find_*` returns row-level matches, `function_hotspots` / +// `sql_hotspots` return rankings, `get_call_tree` returns one recording's +// tree, etc. Mirrors V3's CLI verbs but with descriptive names rather +// than the terse single-noun forms the CLI uses. +// +// Wire format: newline-delimited JSON-RPC 2.0 over stdio. This module +// implements the message dispatch logic only; the stdio loop lives in +// the verb so this file stays testable without process I/O. 
+ +import sqlite3 from 'better-sqlite3'; + +import { compare } from './compare'; +import { endpoints, EndpointSort, EndpointsFilter } from './endpoints'; +import { + FindCallRow, + FindExceptionRow, + FindFilter, + FindLogRow, + FindQueryRow, + FindRequestRow, + find, +} from './find'; +import { hotspots } from './hotspots'; +import { related, RelatedFilter } from './related'; +import { resolveAppmap, tree, AppmapInfo, TreeOptions } from './tree'; +import { parseDuration, parseStatus, parseTime } from '../lib/parseFilter'; + +export interface JsonRpcRequest { + jsonrpc: '2.0'; + id?: string | number | null; + method: string; + params?: Record; +} + +export interface JsonRpcResponse { + jsonrpc: '2.0'; + id: string | number | null; + result?: unknown; + error?: { code: number; message: string; data?: unknown }; +} + +interface ToolSpec { + name: string; + description: string; + inputSchema: { + type: 'object'; + properties: Record; + required?: string[]; + }; +} + +interface ToolImpl { + spec: ToolSpec; + handler: (args: Record, db: sqlite3.Database) => unknown; +} + +interface ResourceSpec { + uri: string; + name: string; + description: string; + mimeType: string; +} + +interface ResourceImpl { + spec: ResourceSpec; + read: (db: sqlite3.Database) => unknown; +} + +// Template-based resources expose a parameterized URI. The agent +// discovers them via resources/templates/list, then reads a concrete +// instance with resources/read by substituting the placeholder. +interface ResourceTemplateSpec { + uriTemplate: string; // RFC 6570 template + name: string; + description: string; + mimeType: string; +} + +interface ResourceTemplateImpl { + spec: ResourceTemplateSpec; + // Returns the args object if the URI matches the template, else null. 
+ match: (uri: string) => Record | null; + read: (args: Record, db: sqlite3.Database) => unknown; +} + +const SERVER_INFO = { name: 'appmap-query', version: '1.0.0' }; +const PROTOCOL_VERSION = '2024-11-05'; + +// --- helpers ------------------------------------------------------------ + +// Accept either a numeric appmap.id or a name/basename ref. +function resolveByIdOrRef(db: sqlite3.Database, idOrRef: unknown): AppmapInfo { + const s = String(idOrRef); + if (/^\d+$/.test(s)) { + const row = db + .prepare(`SELECT id, name, source_path FROM appmaps WHERE id = ?`) + .get(Number(s)) as AppmapInfo | undefined; + if (row) return row; + } + return resolveAppmap(db, s); +} + +function maybeTime(s: unknown): string | undefined { + return typeof s === 'string' && s.length > 0 ? parseTime(s) : undefined; +} + +function maybeNumber(n: unknown): number | undefined { + if (typeof n === 'number' && Number.isFinite(n)) return n; + if (typeof n === 'string' && /^-?\d+(\.\d+)?$/.test(n)) return Number(n); + return undefined; +} + +function maybeString(s: unknown): string | undefined { + return typeof s === 'string' && s.length > 0 ? s : undefined; +} + +// Common filter shape shared by the find_* tools and the hotspots tools. +const COMMON_FILTER_PROPERTIES: Record = { + route: { + type: 'string', + description: + 'Substring of the request path; optionally prefixed with an HTTP method ("POST /orders"). e.g. "orders" matches /orders, /api/orders/:id.', + }, + status: { type: 'string', description: 'e.g. "500", ">=500", "<400".' }, + duration: { type: 'string', description: 'e.g. ">1s", ">=500ms".' }, + branch: { type: 'string', description: 'Exact branch name.' }, + commit: { type: 'string', description: 'Exact commit SHA.' }, + since: { type: 'string', description: 'ISO timestamp lower bound.' }, + until: { type: 'string', description: 'ISO timestamp upper bound.' }, + appmap: { + type: 'string', + description: + 'Substring of the recording name OR source_path. 
Any reasonable word from the basename, test method, route, etc. matches. Case-insensitive.', + }, + limit: { + type: 'integer', + description: 'Default 20. Pass 0 for unbounded. Response includes total count.', + }, + offset: { type: 'integer', description: 'Skip this many rows for pagination.' }, +}; + +// Build a FindFilter from MCP tool args, parsing structured fields. +function buildFindFilter(args: Record): FindFilter { + const f: FindFilter = {}; + if (typeof args.route === 'string') f.route = args.route; + if (typeof args.class === 'string') f.className = args.class; + if (typeof args.method === 'string') f.method = args.method; + if (typeof args.label === 'string') f.label = args.label; + if (typeof args.branch === 'string') f.branch = args.branch; + if (typeof args.commit === 'string') f.commit = args.commit; + if (typeof args.status === 'string') f.status = parseStatus(args.status); + if (typeof args.duration === 'string') f.duration = parseDuration(args.duration); + if (typeof args.appmap === 'string') f.appmap = args.appmap; + if (typeof args.table === 'string') f.table = args.table; + if (typeof args.exception === 'string') f.exception = args.exception; + if (typeof args.logger === 'string') f.logger = args.logger; + if (typeof args.message === 'string') f.message = args.message; + const withLogs = maybeNumber(args.with_logs); + if (withLogs !== undefined) f.withLogs = withLogs; + f.since = maybeTime(args.since); + f.until = maybeTime(args.until); + f.limit = maybeNumber(args.limit); + f.offset = maybeNumber(args.offset); + return f; +} + +// --- tools -------------------------------------------------------------- + +const TOOLS: ToolImpl[] = [ + // ----- aggregations ---------------------------------------------------- + + { + spec: { + name: 'list_endpoints', + description: + 'Per-route summary table; the first call when orienting against an unfamiliar query database. 
Returns Page<{method, route, count, avg_ms, p95_ms, err_pct}> = {rows, total, limit, offset}.', + inputSchema: { + type: 'object', + properties: { + branch: { type: 'string' }, + since: { type: 'string' }, + until: { type: 'string' }, + status: { + type: 'string', + description: + 'Route filter — e.g. ">=500". A route is shown if any request matches; aggregates remain over all of that route\'s requests.', + }, + sort: { type: 'string', enum: ['count', 'avg', 'p95', 'err'] }, + limit: { type: 'integer' }, + }, + }, + }, + handler: (args, db) => { + const f: EndpointsFilter = {}; + f.branch = maybeString(args.branch); + f.since = maybeTime(args.since); + f.until = maybeTime(args.until); + if (typeof args.status === 'string') f.status = parseStatus(args.status); + if (typeof args.sort === 'string') f.sort = args.sort as EndpointSort; + f.limit = maybeNumber(args.limit); + return endpoints(db, f); + }, + }, + + { + spec: { + name: 'function_hotspots', + description: + 'Functions ranked by total elapsed time across recordings. Filter by route to scope to a specific entry point or by class (substring match) to focus on one component. Returns Page<{fqid, defined_class, method_id, path, lineno, calls, total_ms, self_ms}> = {rows, total, limit, offset}. path/lineno are one representative call\'s source location — read directly to see the function. fqid examples: "app/Logger#error" (instance), "app/Util.parse" (static), "src/cmds/query/db/openQueryDb.openQueryDb" (module-level), "app/Outer::Inner#method" (nested classes).', + inputSchema: { + type: 'object', + properties: { + route: COMMON_FILTER_PROPERTIES.route, + class: { type: 'string', description: 'Substring of class identifier; canonical fqid forms also accepted.' 
}, + branch: COMMON_FILTER_PROPERTIES.branch, + since: COMMON_FILTER_PROPERTIES.since, + until: COMMON_FILTER_PROPERTIES.until, + limit: COMMON_FILTER_PROPERTIES.limit, + offset: COMMON_FILTER_PROPERTIES.offset, + }, + }, + }, + handler: (args, db) => + hotspots(db, { + type: 'function', + route: maybeString(args.route), + className: maybeString(args.class), + branch: maybeString(args.branch), + since: maybeTime(args.since), + until: maybeTime(args.until), + limit: maybeNumber(args.limit), + offset: maybeNumber(args.offset), + }), + }, + + { + spec: { + name: 'sql_hotspots', + description: + 'SQL queries ranked by total elapsed time, deduplicated by text. Returns Page<{count, avg_ms, total_ms, sql_text}> = {rows, total, limit, offset}.', + inputSchema: { + type: 'object', + properties: { + route: COMMON_FILTER_PROPERTIES.route, + branch: COMMON_FILTER_PROPERTIES.branch, + since: COMMON_FILTER_PROPERTIES.since, + until: COMMON_FILTER_PROPERTIES.until, + limit: COMMON_FILTER_PROPERTIES.limit, + offset: COMMON_FILTER_PROPERTIES.offset, + }, + }, + }, + handler: (args, db) => + hotspots(db, { + type: 'sql', + route: maybeString(args.route), + branch: maybeString(args.branch), + since: maybeTime(args.since), + until: maybeTime(args.until), + limit: maybeNumber(args.limit), + offset: maybeNumber(args.offset), + }), + }, + + { + spec: { + name: 'list_labels', + description: + 'AppMap labels present in the database, ranked by usage. Use to discover what semantic anchors exist (canonical: "log", "secret", "security.authentication", "security.authorization", "deserialize", "system.exec", "job.create", "http.session.clear") and any project-specific or investigation labels (e.g. "bug.", "repro"). Pass a returned label to find_calls --label to retrieve its calls. 
Returns: label, count (distinct code objects bearing it), sample_fqid (one representative function).', + inputSchema: { type: 'object', properties: {} }, + }, + handler: (_args, db) => + db + .prepare( + `SELECT l.label AS label, + COUNT(DISTINCT co.id) AS count, + MIN(co.fqid) AS sample_fqid + FROM labels l + JOIN code_objects co ON co.id = l.code_object_id + GROUP BY l.label + ORDER BY count DESC, l.label` + ) + .all(), + }, + + // ----- row-level finders ---------------------------------------------- + + { + spec: { + name: 'find_recordings', + description: + 'Recording-level rows matching filters. Each row is one .appmap.json file with its sample request, branch, and counts. Use to identify which recordings exercised a route, returned a particular status, or were taken on a branch. The `appmap` filter is a substring match against name and source_path — pass any reasonable word from the basename, test method, or route. Returns Page<{appmap_id, appmap_name, route, status_code, elapsed_ms, sql_count, branch, timestamp}> = {rows, total, limit, offset}. Pass appmap_id (numeric) or appmap_name to get_call_tree / find_related.', + inputSchema: { + type: 'object', + properties: { + route: COMMON_FILTER_PROPERTIES.route, + status: COMMON_FILTER_PROPERTIES.status, + branch: COMMON_FILTER_PROPERTIES.branch, + commit: COMMON_FILTER_PROPERTIES.commit, + since: COMMON_FILTER_PROPERTIES.since, + until: COMMON_FILTER_PROPERTIES.until, + duration: COMMON_FILTER_PROPERTIES.duration, + appmap: COMMON_FILTER_PROPERTIES.appmap, + limit: COMMON_FILTER_PROPERTIES.limit, + offset: COMMON_FILTER_PROPERTIES.offset, + }, + }, + }, + handler: (args, db) => find(db, 'appmaps', buildFindFilter(args)), + }, + + { + spec: { + name: 'find_requests', + description: + 'Individual HTTP request rows with status, elapsed time, and the recording each came from. Filter by route (substring), status, duration, branch, time window. 
Returns Page<{appmap_name, event_id, method, route, status_code, elapsed_ms, branch}> = {rows, total, limit, offset}.', + inputSchema: { + type: 'object', + properties: { + route: COMMON_FILTER_PROPERTIES.route, + status: COMMON_FILTER_PROPERTIES.status, + duration: COMMON_FILTER_PROPERTIES.duration, + branch: COMMON_FILTER_PROPERTIES.branch, + commit: COMMON_FILTER_PROPERTIES.commit, + since: COMMON_FILTER_PROPERTIES.since, + until: COMMON_FILTER_PROPERTIES.until, + appmap: COMMON_FILTER_PROPERTIES.appmap, + limit: COMMON_FILTER_PROPERTIES.limit, + offset: COMMON_FILTER_PROPERTIES.offset, + }, + }, + }, + handler: (args, db) => find(db, 'requests', buildFindFilter(args)), + }, + + { + spec: { + name: 'find_queries', + description: + 'SQL query rows. Filter by table (substring), caller class/method (substring), duration, route, branch. Use duration:">100ms" to find slow queries; use route to scope to a specific request. Returns Page<{appmap_name, event_id, sql_text, elapsed_ms, caller_class, caller_method}> = {rows, total, limit, offset}.', + inputSchema: { + type: 'object', + properties: { + table: { type: 'string', description: 'SQL table name (matches sql_text substring).' }, + class: { type: 'string', description: 'Caller class identifier.' }, + method: { type: 'string', description: 'Caller method name.' }, + duration: COMMON_FILTER_PROPERTIES.duration, + route: COMMON_FILTER_PROPERTIES.route, + status: COMMON_FILTER_PROPERTIES.status, + branch: COMMON_FILTER_PROPERTIES.branch, + commit: COMMON_FILTER_PROPERTIES.commit, + since: COMMON_FILTER_PROPERTIES.since, + until: COMMON_FILTER_PROPERTIES.until, + appmap: COMMON_FILTER_PROPERTIES.appmap, + limit: COMMON_FILTER_PROPERTIES.limit, + offset: COMMON_FILTER_PROPERTIES.offset, + }, + }, + }, + handler: (args, db) => find(db, 'queries', buildFindFilter(args)), + }, + + { + spec: { + name: 'find_calls', + description: + 'Function-call rows. Filter by class (substring), method (substring), label (substring; e.g. 
"log", "security.authorization"), duration. Use label="log" to retrieve application log output, or label="security.authorization" to find authorization checks. Returns Page<{appmap_name, event_id, fqid, defined_class, method_id, path, lineno, elapsed_ms, parameters_json, return_value}> = {rows, total, limit, offset}. parameters_json and return_value are populated only for labeled functions; unlabeled rows return null. Use path:lineno to read the source.', + inputSchema: { + type: 'object', + properties: { + class: { + type: 'string', + description: + 'Substring of the class identifier; canonical forms ("UserRepository", "app/services/UserRepository", "UserRepository#findById") get exact-or-leaf-class matching, but a partial like "Repo" also matches "UserRepository".', + }, + method: { type: 'string', description: 'Substring of the method name.' }, + label: { type: 'string', description: 'Substring of the label name.' }, + duration: COMMON_FILTER_PROPERTIES.duration, + route: COMMON_FILTER_PROPERTIES.route, + status: COMMON_FILTER_PROPERTIES.status, + branch: COMMON_FILTER_PROPERTIES.branch, + commit: COMMON_FILTER_PROPERTIES.commit, + since: COMMON_FILTER_PROPERTIES.since, + until: COMMON_FILTER_PROPERTIES.until, + appmap: COMMON_FILTER_PROPERTIES.appmap, + limit: COMMON_FILTER_PROPERTIES.limit, + offset: COMMON_FILTER_PROPERTIES.offset, + }, + }, + }, + handler: (args, db) => find(db, 'calls', buildFindFilter(args)), + }, + + { + spec: { + name: 'find_logs', + description: + 'Application log lines captured from functions labeled `log`. Filter by message substring (matches across the call\'s parameters and return value), logger class (substring), recording, branch, or time window. Returns Page<{appmap_name, event_id, parent_event_id, logger, method_id, path, lineno, message, parameters_json, return_value}> = {rows, total, limit, offset}. 
`message` is the display-projected log text (extracted from a structured return_value or from the parameter named message/msg, falling back to the first string parameter); use it directly. parameters_json and return_value remain available for the underlying captured values. Use path:lineno to read the call site of the log statement.', + inputSchema: { + type: 'object', + properties: { + message: { + type: 'string', + description: + 'Substring to look for inside the captured log call. Matches a SQL LIKE against parameters_json and return_value — false positives (e.g. matching a class or parameter name) are accepted; tighten in post-processing if needed.', + }, + logger: { + type: 'string', + description: + 'Class of the logging function (defined_class). Accepts short or canonical fqid form, same as find_calls --class.', + }, + branch: COMMON_FILTER_PROPERTIES.branch, + commit: COMMON_FILTER_PROPERTIES.commit, + since: COMMON_FILTER_PROPERTIES.since, + until: COMMON_FILTER_PROPERTIES.until, + appmap: COMMON_FILTER_PROPERTIES.appmap, + limit: COMMON_FILTER_PROPERTIES.limit, + offset: COMMON_FILTER_PROPERTIES.offset, + }, + }, + }, + handler: (args, db) => find(db, 'logs', buildFindFilter(args)), + }, + + { + spec: { + name: 'find_exceptions', + description: + 'Exception rows with class, message, source location. Filter by exception class name (substring), the request that owns the exception (via route/status), branch, or time window. Returns Page<{appmap_id, appmap_name, event_id, return_event_id, exception_class, message, path, lineno, recent_logs?}> = {rows, total, limit, offset}. event_id is the throwing call\'s entry id; return_event_id is the throw point in the event stream. Pass with_logs=N to attach the last N log lines preceding the throw (chronological order) under recent_logs — usually the fastest way to see what the app reported before the failure. 
recent_logs uses return_event_id as the upper bound, so logs that fired *inside* the throwing call are included.', + inputSchema: { + type: 'object', + properties: { + exception: { type: 'string', description: 'Substring of the exception class name.' }, + with_logs: { + type: 'integer', + description: 'Attach up to N preceding log lines per exception under recent_logs (chronological). Each entry has the same shape as a find_logs row.', + }, + route: COMMON_FILTER_PROPERTIES.route, + status: COMMON_FILTER_PROPERTIES.status, + branch: COMMON_FILTER_PROPERTIES.branch, + commit: COMMON_FILTER_PROPERTIES.commit, + since: COMMON_FILTER_PROPERTIES.since, + until: COMMON_FILTER_PROPERTIES.until, + appmap: COMMON_FILTER_PROPERTIES.appmap, + limit: COMMON_FILTER_PROPERTIES.limit, + offset: COMMON_FILTER_PROPERTIES.offset, + }, + }, + }, + handler: (args, db) => find(db, 'exceptions', buildFindFilter(args)), + }, + + // ----- per-recording / cross-recording -------------------------------- + + { + spec: { + name: 'get_call_tree', + description: + 'Call tree of one recording. Without focus, returns every event. With focus_type + focus_value, narrows to the neighborhood of matching events: focus_type ∈ {function, sql_query, http_server_request, http_client_request}, focus_value is the matching identifier (fqid / SQL substring / normalized_path / URL substring). Use min_elapsed_ms to prune fast leaves. The appmap argument accepts a numeric appmap_id or an appmap_name (both surfaced by find_recordings). Returns ordered nodes: each has depth, kind ∈ {function, sql, http_server, http_client, exception}, event_id, parent_event_id, elapsed_ms, plus kind-specific fields (function: fqid/defined_class/method_id/path/lineno/parameters_json/return_value; sql: sql_text; http_server: method/route/status_code; http_client: method/url/status_code; exception: exception_class/message/path/lineno). function nodes\' parameters_json and return_value are populated only for labeled functions. 
Use path:lineno on function and exception nodes to read the source. fqid examples: "app/Logger#error" (instance), "app/Util.parse" (static).', + inputSchema: { + type: 'object', + properties: { + appmap: { type: 'string', description: 'Recording id (numeric) or name.' }, + focus_type: { + type: 'string', + enum: ['function', 'sql_query', 'http_server_request', 'http_client_request'], + }, + focus_value: { type: 'string' }, + parent_depth: { type: 'integer', description: 'Ancestor levels (default 5).' }, + child_depth: { type: 'integer', description: 'Descendant levels (default 3).' }, + min_elapsed_ms: { type: 'number' }, + }, + required: ['appmap'], + }, + }, + handler: (args, db) => { + const am = resolveByIdOrRef(db, args.appmap); + const opts: TreeOptions = {}; + const focusType = maybeString(args.focus_type); + const focusValue = maybeString(args.focus_value); + if (focusType && focusValue) { + if (focusType === 'function') opts.focusFn = focusValue; + else if (focusType === 'sql_query') opts.focusSql = focusValue; + else if (focusType === 'http_server_request') opts.focusRoute = focusValue; + else if (focusType === 'http_client_request') opts.focusUrl = focusValue; + } + opts.ancestors = maybeNumber(args.parent_depth); + opts.descendants = maybeNumber(args.child_depth); + opts.minElapsedMs = maybeNumber(args.min_elapsed_ms); + return tree(db, am.name, opts); + }, + }, + + { + spec: { + name: 'find_related', + description: + 'Recordings ranked by similarity to a source recording. Score combines: same HTTP route (×5), shared SQL tables (×3 each), shared classes (×2 each). Primary use: pass a failing recording with status:succeeded to find a passing baseline for side-by-side comparison. Returns Page<{appmap_name, score, method, route, status_code, elapsed_ms, shared}> = {rows, total, limit, offset}. 
shared is a string array of contributing signals.', + inputSchema: { + type: 'object', + properties: { + appmap: { type: 'string', description: 'Source recording (id or name).' }, + status: COMMON_FILTER_PROPERTIES.status, + route: COMMON_FILTER_PROPERTIES.route, + branch: COMMON_FILTER_PROPERTIES.branch, + since: COMMON_FILTER_PROPERTIES.since, + until: COMMON_FILTER_PROPERTIES.until, + limit: COMMON_FILTER_PROPERTIES.limit, + offset: COMMON_FILTER_PROPERTIES.offset, + }, + required: ['appmap'], + }, + }, + handler: (args, db) => { + const am = resolveByIdOrRef(db, args.appmap); + const filter: RelatedFilter = {}; + if (typeof args.status === 'string') filter.status = parseStatus(args.status); + filter.route = maybeString(args.route); + filter.branch = maybeString(args.branch); + filter.since = maybeTime(args.since); + filter.until = maybeTime(args.until); + filter.limit = maybeNumber(args.limit); + filter.offset = maybeNumber(args.offset); + return related(db, am.name, filter); + }, + }, + + { + spec: { + name: 'compare_branches', + description: + 'Per-route p95 latency for two branches with a delta column. Use to surface regressions a feature branch introduces relative to a baseline. Returns Page<{method, route, a_count, a_p95_ms, b_count, b_p95_ms, delta}> = {rows, total, limit, offset}. delta is b_p95/a_p95; null when either side has no measured durations.', + inputSchema: { + type: 'object', + properties: { + branch_a: { type: 'string', description: 'Baseline branch.' }, + branch_b: { type: 'string', description: 'Comparison branch.' 
}, + since: COMMON_FILTER_PROPERTIES.since, + until: COMMON_FILTER_PROPERTIES.until, + sort: { type: 'string', enum: ['delta', 'p95-a', 'p95-b'] }, + limit: COMMON_FILTER_PROPERTIES.limit, + offset: COMMON_FILTER_PROPERTIES.offset, + }, + required: ['branch_a', 'branch_b'], + }, + }, + handler: (args, db) => + compare(db, { + branch_a: String(args.branch_a), + branch_b: String(args.branch_b), + since: maybeTime(args.since), + until: maybeTime(args.until), + sort: maybeString(args.sort) as 'delta' | 'p95-a' | 'p95-b' | undefined, + limit: maybeNumber(args.limit), + offset: maybeNumber(args.offset), + }), + }, +]; + +// --- resources ----------------------------------------------------------- + +const RESOURCES: ResourceImpl[] = [ + { + spec: { + uri: 'appmap://endpoints', + name: 'endpoints', + description: + 'All HTTP endpoints with request count, average latency, p95, and error rate.', + mimeType: 'application/json', + }, + read: (db) => endpoints(db, { limit: 200 }), + }, +]; + +const RESOURCE_TEMPLATES: ResourceTemplateImpl[] = [ + { + spec: { + uriTemplate: 'appmap://recording/{ref}/logs', + name: 'recording_logs', + description: + 'All log lines (functions labeled `log`) for one recording, ordered by event_id. {ref} is either the numeric appmap_id or the recording name/basename — same forms find_recordings returns. Each entry has the find_logs row shape.', + mimeType: 'application/json', + }, + match: (uri) => { + const m = /^appmap:\/\/recording\/([^/]+)\/logs$/.exec(uri); + if (!m) return null; + // The {ref} segment may be percent-encoded (recording names can + // contain spaces, em-dashes, etc.). 
+ return { ref: decodeURIComponent(m[1]) }; + }, + read: (args, db) => { + const info = resolveByIdOrRef(db, args.ref); + return find(db, 'logs', { appmap: info.name, limit: 0 }); + }, + }, +]; + +// --- handler ------------------------------------------------------------- + +export type McpHandler = (msg: JsonRpcRequest) => JsonRpcResponse | null; + +export function buildMcpHandler(db: sqlite3.Database): McpHandler { + return (msg: JsonRpcRequest): JsonRpcResponse | null => { + const id = msg.id ?? null; + const method = msg.method; + + if (method.startsWith('notifications/')) return null; + + if (method === 'initialize') { + return { + jsonrpc: '2.0', + id, + result: { + protocolVersion: PROTOCOL_VERSION, + serverInfo: SERVER_INFO, + capabilities: { tools: {}, resources: {} }, + }, + }; + } + + if (method === 'tools/list') { + return { + jsonrpc: '2.0', + id, + result: { tools: TOOLS.map((t) => t.spec) }, + }; + } + + if (method === 'tools/call') { + const params = (msg.params ?? {}) as { name?: string; arguments?: Record }; + const name = params.name; + const args = params.arguments ?? {}; + const tool = TOOLS.find((t) => t.spec.name === name); + if (!tool) return errorResponse(id, -32601, `unknown tool: ${name}`); + try { + const result = tool.handler(args, db); + return { + jsonrpc: '2.0', + id, + result: { + content: [{ type: 'text', text: JSON.stringify(result, null, 2) }], + }, + }; + } catch (e) { + return errorResponse(id, -32000, (e as Error).message); + } + } + + if (method === 'resources/list') { + return { + jsonrpc: '2.0', + id, + result: { resources: RESOURCES.map((r) => r.spec) }, + }; + } + + if (method === 'resources/templates/list') { + return { + jsonrpc: '2.0', + id, + result: { resourceTemplates: RESOURCE_TEMPLATES.map((t) => t.spec) }, + }; + } + + if (method === 'resources/read') { + const params = (msg.params ?? {}) as { uri?: string }; + const uri = params.uri ?? 
''; + const resource = RESOURCES.find((r) => r.spec.uri === uri); + if (resource) { + try { + const result = resource.read(db); + return readResponse(id, uri, resource.spec.mimeType, result); + } catch (e) { + return errorResponse(id, -32000, (e as Error).message); + } + } + for (const tmpl of RESOURCE_TEMPLATES) { + const matched = tmpl.match(uri); + if (matched) { + try { + const result = tmpl.read(matched, db); + return readResponse(id, uri, tmpl.spec.mimeType, result); + } catch (e) { + return errorResponse(id, -32000, (e as Error).message); + } + } + } + return errorResponse(id, -32602, `unknown resource: ${uri}`); + } + + return errorResponse(id, -32601, `method not found: ${method}`); + }; +} + +function errorResponse( + id: string | number | null, + code: number, + message: string +): JsonRpcResponse { + return { jsonrpc: '2.0', id, error: { code, message } }; +} + +function readResponse( + id: string | number | null, + uri: string, + mimeType: string, + result: unknown +): JsonRpcResponse { + return { + jsonrpc: '2.0', + id, + result: { + contents: [ + { + uri, + mimeType, + text: JSON.stringify(result, null, 2), + }, + ], + }, + }; +} + +export function listTools(): readonly ToolSpec[] { + return TOOLS.map((t) => t.spec); +} + +export function listResources(): readonly ResourceSpec[] { + return RESOURCES.map((r) => r.spec); +} + +export function listResourceTemplates(): readonly ResourceTemplateSpec[] { + return RESOURCE_TEMPLATES.map((t) => t.spec); +} diff --git a/packages/cli/src/cmds/query/queries/related.ts b/packages/cli/src/cmds/query/queries/related.ts new file mode 100644 index 0000000000..1e36963acd --- /dev/null +++ b/packages/cli/src/cmds/query/queries/related.ts @@ -0,0 +1,201 @@ +import sqlite3 from 'better-sqlite3'; + +import { DEFAULT_PAGE_LIMIT, Page } from '../lib/page'; +import { appmapWhere, httpScopeClauses, RecordingScope } from '../lib/scope'; +import { resolveAppmap } from './tree'; + +// Score weights, from V3: +// same HTTP route 
×5 (binary)
// same SQL tables ×3 (per shared table)
// same classes ×2 (per shared class)
const ROUTE_WEIGHT = 5;
const TABLE_WEIGHT = 3;
const CLASS_WEIGHT = 2;

// Heuristic table-name extraction. Matches identifiers following
// FROM/JOIN/INTO/UPDATE; strips a single leading schema qualifier and
// lowercases for case-insensitive matching. Imperfect (won't handle
// nested subqueries / unusual quoting cleanly) but adequate for the
// similarity score, which is itself a heuristic.
const TABLE_PATTERN = /\b(?:FROM|JOIN|INTO|UPDATE)\s+["`]?(?:\w+\.)?(\w+)["`]?/gi;

// Collect the lowercased table names referenced by one SQL statement.
export function extractTables(sqlText: string): Set<string> {
  // TABLE_PATTERN is a shared /g regex: reset its stateful lastIndex in
  // case an earlier consumer left it mid-string. matchAll iterates on an
  // internal clone, so the shared regex is not advanced by this call.
  TABLE_PATTERN.lastIndex = 0;
  const names = [...sqlText.matchAll(TABLE_PATTERN)].map((match) => match[1].toLowerCase());
  return new Set(names);
}

export interface RelatedRow {
  appmap_name: string;
  score: number;
  method: string | null;
  route: string | null;
  status_code: number | null;
  elapsed_ms: number | null;
  shared: string[];
}

export interface RelatedFilter extends RecordingScope {
  limit?: number;
  offset?: number;
}

interface AppmapSig {
  id: number;
  name: string;
  method: string | null;
  route: string | null;
  status_code: number | null;
  elapsed_ms: number | null;
  tables: Set<string>;
  classes: Set<string>;
}

// Extract the trailing (leaf) class name from a defined_class string.
// Handles Java/Python dot-form ("org.example.X" → "X") and Ruby/C++
// chain ("Foo::Bar" → "Bar").
function leafFromDefinedClass(s: string): string {
  const ddIdx = s.lastIndexOf('::');
  const dotIdx = s.lastIndexOf('.');
  const idx = Math.max(ddIdx, dotIdx);
  return idx >= 0 ? s.slice(idx + (s[idx] === ':' ?
2 : 1)) : s;
}

// Load the similarity signature of one recording: its first HTTP request
// (method / route / status), the set of SQL table names its queries
// touched, and the set of leaf class names its function calls exercised.
// These fields feed the weighted scoring in related().
function loadSignature(db: sqlite3.Database, appmapId: number): AppmapSig {
  // One appmaps row; each correlated subquery picks the recording's first
  // http_requests row (lowest event_id). NOTE(review): assumes the first
  // request is representative when a recording holds several — confirm.
  const meta = db
    .prepare(
      `SELECT a.id, a.name, a.elapsed_ms,
         (SELECT h.method FROM http_requests h WHERE h.appmap_id = a.id
          ORDER BY h.event_id LIMIT 1) AS method,
         (SELECT COALESCE(h.normalized_path, h.path) FROM http_requests h
          WHERE h.appmap_id = a.id ORDER BY h.event_id LIMIT 1) AS route,
         (SELECT h.status_code FROM http_requests h
          WHERE h.appmap_id = a.id ORDER BY h.event_id LIMIT 1) AS status_code
       FROM appmaps a WHERE a.id = ?`
    )
    .get(appmapId) as {
    id: number;
    name: string;
    elapsed_ms: number | null;
    method: string | null;
    route: string | null;
    status_code: number | null;
  };

  // Union of heuristically extracted table names over every SQL query in
  // the recording (extractTables lowercases them).
  const sqlRows = db
    .prepare(`SELECT sql_text FROM sql_queries WHERE appmap_id = ?`)
    .all(appmapId) as { sql_text: string }[];
  const tables = new Set();
  for (const r of sqlRows) for (const t of extractTables(r.sql_text)) tables.add(t);

  // Preferred source of class names: the linked code_objects row's
  // pre-computed leaf_class.
  const classes = new Set();
  for (const r of db
    .prepare(
      `SELECT DISTINCT co.leaf_class AS name FROM code_objects co
       JOIN function_calls fc ON fc.code_object_id = co.id
       WHERE fc.appmap_id = ?`
    )
    .all(appmapId) as { name: string }[]) {
    if (r.name) classes.add(r.name);
  }
  // Fall back to defined_class for unlinked rows so sparsely-linked
  // recordings still contribute classes to the score.
  for (const r of db
    .prepare(
      `SELECT DISTINCT fc.defined_class AS name FROM function_calls fc
       WHERE fc.appmap_id = ?
AND fc.code_object_id IS NULL`
    )
    .all(appmapId) as { name: string }[]) {
    if (r.name) classes.add(leafFromDefinedClass(r.name));
  }

  return { ...meta, tables, classes };
}

// Rank other recordings by similarity to `sourceRef`, using the weighted
// signature overlap declared at the top of this file (route ×5, shared
// table ×3, shared class ×2). Candidates that score 0 are omitted
// entirely; results are paged.
export function related(
  db: sqlite3.Database,
  sourceRef: string,
  filter: RelatedFilter = {}
): Page {
  // Throws (with candidates listed) if the ref is missing or ambiguous.
  const source = resolveAppmap(db, sourceRef);
  const sourceSig = loadSignature(db, source.id);

  // Candidate pool: appmaps matching recording-level / http filters,
  // excluding the source itself.
  const a = appmapWhere(filter, 'a');
  const h = httpScopeClauses(filter);

  // WHERE fragments and their bind parameters are appended in lockstep so
  // positional `?` binding stays aligned.
  const whereParts: string[] = ['a.id != ?'];
  const params: (string | number)[] = [source.id];

  whereParts.push(...a.where);
  params.push(...a.params);

  // HTTP-level filters qualify a candidate if ANY of its requests match.
  if (h.where.length > 0) {
    whereParts.push(`EXISTS (
      SELECT 1 FROM http_requests h WHERE h.appmap_id = a.id AND ${h.where.join(' AND ')}
    )`);
    params.push(...h.params);
  }

  const candidates = db
    .prepare(`SELECT a.id FROM appmaps a WHERE ${whereParts.join(' AND ')} ORDER BY a.id`)
    .all(...params) as { id: number }[];

  // Scoring happens in JS; one signature load (several queries) per
  // candidate — O(candidates), acceptable for recording-sized corpora.
  const scored: RelatedRow[] = [];
  for (const c of candidates) {
    const sig = loadSignature(db, c.id);

    let score = 0;
    const shared: string[] = [];

    // Route match (binary). Method is part of the comparison only if the
    // source has one — recordings without an http_server_request are
    // matched purely on path.
    if (
      sourceSig.route &&
      sig.route === sourceSig.route &&
      (!sourceSig.method || sig.method === sourceSig.method)
    ) {
      score += ROUTE_WEIGHT;
      shared.push('route');
    }

    // +TABLE_WEIGHT for each table both recordings touch.
    for (const t of sig.tables) {
      if (sourceSig.tables.has(t)) {
        score += TABLE_WEIGHT;
        shared.push(t);
      }
    }

    // +CLASS_WEIGHT for each class both exercise. Note `shared` mixes the
    // literal 'route' marker, table names, and class names — entries are
    // display labels, not typed values.
    for (const cls of sig.classes) {
      if (sourceSig.classes.has(cls)) {
        score += CLASS_WEIGHT;
        shared.push(cls);
      }
    }

    // Zero-score candidates are dropped rather than reported.
    if (score > 0) {
      scored.push({
        appmap_name: sig.name,
        score,
        method: sig.method,
        route: sig.route,
        status_code: sig.status_code,
        elapsed_ms: sig.elapsed_ms,
        shared,
      });
    }
  }

  // Descending score. Array.prototype.sort is stable (ES2019+), so ties
  // keep the candidate-id order imposed by the SQL ORDER BY above.
  scored.sort((a, b) => b.score - a.score);

  // limit 0 (or negative) means "unbounded from offset".
  const limit = filter.limit ?? DEFAULT_PAGE_LIMIT;
  const offset = filter.offset ?? 0;
  const total = scored.length;
  const sliced = limit > 0 ? scored.slice(offset, offset + limit) : scored.slice(offset);
  return { rows: sliced, total, limit, offset };
}
diff --git a/packages/cli/src/cmds/query/queries/tree.ts b/packages/cli/src/cmds/query/queries/tree.ts
new file mode 100644
index 0000000000..71476c956b
--- /dev/null
+++ b/packages/cli/src/cmds/query/queries/tree.ts
@@ -0,0 +1,566 @@
import sqlite3 from 'better-sqlite3';

import { projectLogMessage } from '../lib/logMessage';
import { appmapRefClause } from '../lib/scope';

// Discriminated union of tree nodes. Each node corresponds to one row in
// one of the per-event tables; `depth` is computed from parent_event_id
// chains within the same recording.
+ +interface BaseNode { + event_id: number; + parent_event_id: number | null; + thread_id: number | null; + depth: number; +} + +export interface HttpServerNode extends BaseNode { + kind: 'http_server'; + method: string; + route: string; + status_code: number; + elapsed_ms: number | null; +} + +export interface HttpClientNode extends BaseNode { + kind: 'http_client'; + method: string; + url: string; + status_code: number | null; + elapsed_ms: number | null; +} + +export interface SqlNode extends BaseNode { + kind: 'sql'; + sql_text: string; + database_type: string | null; + elapsed_ms: number | null; +} + +export interface FunctionNode extends BaseNode { + kind: 'function'; + fqid: string | null; + defined_class: string; + method_id: string; + path: string | null; + lineno: number | null; + is_static: boolean; + elapsed_ms: number | null; + parameters_json: string | null; + return_value: string | null; +} + +export interface ExceptionNode extends BaseNode { + kind: 'exception'; + exception_class: string; + message: string | null; + path: string | null; + lineno: number | null; +} + +// Logging calls are function calls whose linked code_object carries the +// canonical `log` label. They share the function-call shape (same row in +// `function_calls`) but get their own kind so renderers can format the +// message inline rather than the bare call signature. +export interface LogNode extends BaseNode { + kind: 'log'; + fqid: string | null; + logger: string; // defined_class + method_id: string; // info / warn / error / etc. — best effort + path: string | null; + lineno: number | null; + elapsed_ms: number | null; + // Display-projected message derived from parameters_json / return_value + // (see lib/logMessage.projectLogMessage). '' when nothing usable was + // captured. 
+ message: string; + parameters_json: string | null; + return_value: string | null; +} + +export type TreeNode = + | HttpServerNode + | HttpClientNode + | SqlNode + | FunctionNode + | ExceptionNode + | LogNode; + +export interface AppmapInfo { + id: number; + name: string; + source_path: string; +} + +// Resolve a user-supplied appmap reference (name or source-path basename) to +// the row in `appmaps`. Throws on miss or ambiguity (returns candidates in +// the message so the user can disambiguate). +export function resolveAppmap(db: sqlite3.Database, ref: string): AppmapInfo { + const m = appmapRefClause(ref, 'a'); + const rows = db + .prepare( + `SELECT a.id, a.name, a.source_path FROM appmaps a + WHERE ${m.sql} + ORDER BY a.source_path` + ) + .all(...m.params) as AppmapInfo[]; + if (rows.length === 0) throw new Error(`appmap not found: ${ref}`); + if (rows.length > 1) { + const list = rows.map((r) => ` - ${r.source_path}`).join('\n'); + throw new Error(`appmap "${ref}" is ambiguous; matches:\n${list}`); + } + return rows[0]; +} + +export interface TreeOptions { + // Focus criteria — multiple may be supplied; results are the union of + // matches' neighborhoods. Without any focus, the full tree is returned. + focusFn?: string; // exact code_object fqid + focusSql?: string; // case-insensitive substring of sql_text + focusRoute?: string; // normalized_path (or raw path) of a server request + focusUrl?: string; // case-insensitive substring of an outbound URL + + // Depth budgets, in effect only when focus is active. + ancestors?: number; // ancestor levels to keep above each match (default 5) + descendants?: number; // descendant levels below each match (default 3) + + // Prune subtrees whose maximum elapsed time is below this threshold — + // useful for trimming traces dominated by fast leaf calls. + minElapsedMs?: number; +} + +const DEFAULT_ANCESTORS = 5; +const DEFAULT_DESCENDANTS = 3; + +// Build the flat-but-depth-annotated tree for a recording. 
Events are +// returned in event_id order; consumers can render with indentation. +export function tree( + db: sqlite3.Database, + appmapRef: string, + options: TreeOptions = {} +): TreeNode[] { + const am = resolveAppmap(db, appmapRef); + const events: TreeNode[] = []; + + for (const r of db + .prepare( + `SELECT event_id, parent_event_id, thread_id, method, + COALESCE(normalized_path, path) AS route, status_code, elapsed_ms + FROM http_requests WHERE appmap_id = ?` + ) + .all(am.id) as { + event_id: number; + parent_event_id: number | null; + thread_id: number | null; + method: string; + route: string; + status_code: number; + elapsed_ms: number | null; + }[]) { + events.push({ + kind: 'http_server', + event_id: r.event_id, + parent_event_id: r.parent_event_id, + thread_id: r.thread_id, + depth: 0, + method: r.method, + route: r.route, + status_code: r.status_code, + elapsed_ms: r.elapsed_ms, + }); + } + + for (const r of db + .prepare( + `SELECT event_id, parent_event_id, thread_id, method, url, status_code, elapsed_ms + FROM http_client_requests WHERE appmap_id = ?` + ) + .all(am.id) as { + event_id: number; + parent_event_id: number | null; + thread_id: number | null; + method: string; + url: string; + status_code: number | null; + elapsed_ms: number | null; + }[]) { + events.push({ + kind: 'http_client', + event_id: r.event_id, + parent_event_id: r.parent_event_id, + thread_id: r.thread_id, + depth: 0, + method: r.method, + url: r.url, + status_code: r.status_code, + elapsed_ms: r.elapsed_ms, + }); + } + + for (const r of db + .prepare( + `SELECT event_id, parent_event_id, thread_id, sql_text, database_type, elapsed_ms + FROM sql_queries WHERE appmap_id = ?` + ) + .all(am.id) as { + event_id: number; + parent_event_id: number | null; + thread_id: number | null; + sql_text: string; + database_type: string | null; + elapsed_ms: number | null; + }[]) { + events.push({ + kind: 'sql', + event_id: r.event_id, + parent_event_id: r.parent_event_id, + thread_id: 
r.thread_id, + depth: 0, + sql_text: r.sql_text, + database_type: r.database_type, + elapsed_ms: r.elapsed_ms, + }); + } + + for (const r of db + .prepare( + `SELECT fc.event_id, fc.parent_event_id, fc.thread_id, + co.fqid AS fqid, fc.defined_class, fc.method_id, + fc.path, fc.lineno, + fc.is_static, fc.elapsed_ms, fc.parameters_json, fc.return_value, + EXISTS ( + SELECT 1 FROM labels l + WHERE l.code_object_id = fc.code_object_id AND l.label = 'log' + ) AS is_log + FROM function_calls fc + LEFT JOIN code_objects co ON co.id = fc.code_object_id + WHERE fc.appmap_id = ?` + ) + .all(am.id) as { + event_id: number; + parent_event_id: number | null; + thread_id: number | null; + fqid: string | null; + defined_class: string; + method_id: string; + path: string | null; + lineno: number | null; + is_static: number; + elapsed_ms: number | null; + parameters_json: string | null; + return_value: string | null; + is_log: number; + }[]) { + if (r.is_log === 1) { + events.push({ + kind: 'log', + event_id: r.event_id, + parent_event_id: r.parent_event_id, + thread_id: r.thread_id, + depth: 0, + fqid: r.fqid, + logger: r.defined_class, + method_id: r.method_id, + path: r.path, + lineno: r.lineno, + elapsed_ms: r.elapsed_ms, + message: projectLogMessage(r.parameters_json, r.return_value), + parameters_json: r.parameters_json, + return_value: r.return_value, + }); + } else { + events.push({ + kind: 'function', + event_id: r.event_id, + parent_event_id: r.parent_event_id, + thread_id: r.thread_id, + depth: 0, + fqid: r.fqid, + defined_class: r.defined_class, + method_id: r.method_id, + path: r.path, + lineno: r.lineno, + is_static: r.is_static === 1, + elapsed_ms: r.elapsed_ms, + parameters_json: r.parameters_json, + return_value: r.return_value, + }); + } + } + + for (const r of db + .prepare( + `SELECT event_id, parent_event_id, thread_id, exception_class, message, + path, lineno + FROM exceptions WHERE appmap_id = ?` + ) + .all(am.id) as { + event_id: number; + parent_event_id: 
number | null; + thread_id: number | null; + exception_class: string; + message: string | null; + path: string | null; + lineno: number | null; + }[]) { + events.push({ + kind: 'exception', + event_id: r.event_id, + parent_event_id: r.parent_event_id, + thread_id: r.thread_id, + depth: 0, + exception_class: r.exception_class, + message: r.message, + path: r.path, + lineno: r.lineno, + }); + } + + events.sort((a, b) => a.event_id - b.event_id); + computeDepths(events); + + let result = events; + if (hasFocus(options)) { + result = applyFocus(result, options); + } + if (options.minElapsedMs && options.minElapsedMs > 0) { + result = pruneByElapsed(result, options.minElapsedMs); + } + // Re-anchor depth to the highest included ancestor in the surviving + // set, so the rendered indentation starts at column 0 instead of + // floating wherever the original absolute depth happened to be. + if (result !== events) recomputeDepthsRelative(result); + + return result; +} + +function computeDepths(events: TreeNode[]): void { + // Events are sorted by event_id; parents always precede children, so a + // single forward pass suffices. 
+ const depthByEventId = new Map(); + for (const ev of events) { + let depth = 0; + if (ev.parent_event_id !== null) { + const p = depthByEventId.get(ev.parent_event_id); + if (p !== undefined) depth = p + 1; + } + ev.depth = depth; + depthByEventId.set(ev.event_id, depth); + } +} + +function recomputeDepthsRelative(events: readonly TreeNode[]): void { + const includedIds = new Set(events.map((e) => e.event_id)); + const eventsByEventId = new Map(); + for (const e of events) { + if (!eventsByEventId.has(e.event_id)) eventsByEventId.set(e.event_id, e); + } + for (const e of events) { + let d = 0; + let pid = e.parent_event_id; + while (pid !== null && includedIds.has(pid)) { + d += 1; + const parent = eventsByEventId.get(pid); + if (!parent) break; + pid = parent.parent_event_id; + } + e.depth = d; + } +} + +function hasFocus(options: TreeOptions): boolean { + return !!(options.focusFn || options.focusSql || options.focusRoute || options.focusUrl); +} + +function matchesFocus(node: TreeNode, options: TreeOptions): boolean { + if (options.focusFn && node.kind === 'function') { + return node.fqid === options.focusFn; + } + if (options.focusSql && node.kind === 'sql') { + return node.sql_text.toLowerCase().includes(options.focusSql.toLowerCase()); + } + if (options.focusRoute && node.kind === 'http_server') { + return node.route === options.focusRoute; + } + if (options.focusUrl && node.kind === 'http_client') { + return node.url.toLowerCase().includes(options.focusUrl.toLowerCase()); + } + return false; +} + +// Filter `events` to a neighborhood around the focus matches: +// - the matches themselves +// - up to `ancestors` parent levels above each match +// - the direct children of each ancestor (so siblings of the match are visible) +// - up to `descendants` levels below each match +function applyFocus(events: readonly TreeNode[], options: TreeOptions): TreeNode[] { + const ancestorBudget = options.ancestors ?? 
DEFAULT_ANCESTORS; + const descendantBudget = options.descendants ?? DEFAULT_DESCENDANTS; + + // Build helpers. Multiple TreeNodes can share an event_id (e.g. a call + // event can also have an exception attached), so children are keyed + // by event_id and we dedupe. + const nodeByEventId = new Map(); + for (const e of events) { + if (!nodeByEventId.has(e.event_id)) nodeByEventId.set(e.event_id, e); + } + const childrenByParent = new Map>(); + for (const e of events) { + if (e.parent_event_id !== null) { + let bucket = childrenByParent.get(e.parent_event_id); + if (!bucket) { + bucket = new Set(); + childrenByParent.set(e.parent_event_id, bucket); + } + bucket.add(e.event_id); + } + } + + const focusIds = new Set(); + for (const e of events) { + if (matchesFocus(e, options)) focusIds.add(e.event_id); + } + if (focusIds.size === 0) return []; + + const included = new Set(); + for (const fid of focusIds) { + included.add(fid); + + // Walk up to `ancestorBudget` ancestors; record them and remember the + // path so we can include their direct children. + const ancestorIds: number[] = []; + let cur = fid; + for (let i = 0; i < ancestorBudget; i++) { + const node = nodeByEventId.get(cur); + if (node?.parent_event_id == null) break; + const parentId = node.parent_event_id; + if (!nodeByEventId.has(parentId)) break; + ancestorIds.push(parentId); + included.add(parentId); + cur = parentId; + } + // Direct children of every ancestor (so the focus's siblings — and + // siblings of every node on the path to root — are visible). + for (const aid of ancestorIds) { + const kids = childrenByParent.get(aid); + if (kids) for (const k of kids) included.add(k); + } + + // Descendants up to `descendantBudget` levels (BFS). 
+ const queue: { id: number; depth: number }[] = [{ id: fid, depth: 0 }]; + while (queue.length > 0) { + const next = queue.shift(); + if (!next) break; + if (next.depth >= descendantBudget) continue; + const kids = childrenByParent.get(next.id); + if (!kids) continue; + for (const k of kids) { + if (!included.has(k)) { + included.add(k); + queue.push({ id: k, depth: next.depth + 1 }); + } + } + } + } + + return events.filter((e) => included.has(e.event_id)); +} + +// Prune subtrees whose entire branch's maximum elapsed_ms is below the +// threshold. A node is kept iff it (or any of its descendants) has an +// elapsed_ms ≥ threshold. Events without elapsed (exceptions, http +// requests with no return) are kept iff their owning subtree qualifies. +function pruneByElapsed(events: readonly TreeNode[], minMs: number): TreeNode[] { + const childrenByParent = new Map>(); + for (const e of events) { + if (e.parent_event_id !== null) { + let bucket = childrenByParent.get(e.parent_event_id); + if (!bucket) { + bucket = new Set(); + childrenByParent.set(e.parent_event_id, bucket); + } + bucket.add(e.event_id); + } + } + const elapsedById = new Map(); + for (const e of events) { + const cur = elapsedById.get(e.event_id) ?? 0; + const here = 'elapsed_ms' in e && typeof e.elapsed_ms === 'number' ? e.elapsed_ms : 0; + if (here > cur) elapsedById.set(e.event_id, here); + } + const maxByEventId = new Map(); + function maxFor(id: number): number { + const cached = maxByEventId.get(id); + if (cached !== undefined) return cached; + let m = elapsedById.get(id) ?? 
0; + const kids = childrenByParent.get(id); + if (kids) for (const k of kids) m = Math.max(m, maxFor(k)); + maxByEventId.set(id, m); + return m; + } + return events.filter((e) => maxFor(e.event_id) >= minMs); +} + +export interface TreeSummary { + appmap_name: string; + source_path: string; + entry: { method: string; route: string; status_code: number; elapsed_ms: number | null } | null; + sql: { count: number; total_ms: number }; + http_client: { count: number; total_ms: number }; + exceptions: { + exception_class: string; + message: string | null; + path: string | null; + lineno: number | null; + }[]; + labels: { label: string; count: number }[]; +} + +export function treeSummary(db: sqlite3.Database, appmapRef: string): TreeSummary { + const am = resolveAppmap(db, appmapRef); + const nodes = tree(db, appmapRef); + + const httpServer = nodes.find((n): n is HttpServerNode => n.kind === 'http_server'); + const sqls = nodes.filter((n): n is SqlNode => n.kind === 'sql'); + const httpClients = nodes.filter((n): n is HttpClientNode => n.kind === 'http_client'); + const excs = nodes.filter((n): n is ExceptionNode => n.kind === 'exception'); + + const labelRows = db + .prepare( + `SELECT l.label, COUNT(*) AS n + FROM function_calls fc + JOIN labels l ON l.code_object_id = fc.code_object_id + WHERE fc.appmap_id = ? + GROUP BY l.label + ORDER BY n DESC, l.label` + ) + .all(am.id) as { label: string; n: number }[]; + + return { + appmap_name: am.name, + source_path: am.source_path, + entry: httpServer + ? { + method: httpServer.method, + route: httpServer.route, + status_code: httpServer.status_code, + elapsed_ms: httpServer.elapsed_ms, + } + : null, + sql: { + count: sqls.length, + total_ms: sqls.reduce((s, q) => s + (q.elapsed_ms ?? 0), 0), + }, + http_client: { + count: httpClients.length, + total_ms: httpClients.reduce((s, c) => s + (c.elapsed_ms ?? 
0), 0), + }, + exceptions: excs.map((e) => ({ + exception_class: e.exception_class, + message: e.message, + path: e.path, + lineno: e.lineno, + })), + labels: labelRows.map((r) => ({ label: r.label, count: r.n })), + }; +} diff --git a/packages/cli/src/cmds/query/query.ts b/packages/cli/src/cmds/query/query.ts new file mode 100644 index 0000000000..8bad243e44 --- /dev/null +++ b/packages/cli/src/cmds/query/query.ts @@ -0,0 +1,29 @@ +import yargs from 'yargs'; + +import * as CompareVerb from './verbs/compare'; +import * as EndpointsVerb from './verbs/endpoints'; +import * as FindVerb from './verbs/find'; +import * as HotspotsVerb from './verbs/hotspots'; +import * as McpVerb from './verbs/mcp'; +import * as RelatedVerb from './verbs/related'; +import * as TreeVerb from './verbs/tree'; + +export const command = 'query'; +export const describe = + 'Query AppMap recordings (endpoints, find, tree, related, hotspots, compare, mcp)'; + +export const builder = (args: yargs.Argv) => + args + .command(CompareVerb) + .command(EndpointsVerb) + .command(FindVerb) + .command(HotspotsVerb) + .command(McpVerb) + .command(RelatedVerb) + .command(TreeVerb) + .demandCommand(1, 'specify a query verb') + .strict(); + +export const handler = (): void => { + // Dispatched by subcommand. Yargs will print help if no verb is given. 
+}; diff --git a/packages/cli/src/cmds/query/verbs/compare.ts b/packages/cli/src/cmds/query/verbs/compare.ts new file mode 100644 index 0000000000..cc01c96db3 --- /dev/null +++ b/packages/cli/src/cmds/query/verbs/compare.ts @@ -0,0 +1,120 @@ +import yargs from 'yargs'; +import { log } from 'console'; + +import { handleWorkingDirectory } from '../../../lib/handleWorkingDirectory'; +import { locateAppMapDir } from '../../../lib/locateAppMapDir'; +import { verbose } from '../../../utils'; +import { openReadOnly } from '../lib/openReadOnly'; +import { truncationFooter } from '../lib/page'; +import { parseTime } from '../lib/parseFilter'; +import { + compare, + CompareFilter, + CompareRow, + CompareSort, +} from '../queries/compare'; +import { formatCount, formatMs, formatTable } from '../lib/format'; + +export const command = 'compare '; +export const describe = 'Per-route latency delta between two branches'; + +export const builder = (args: yargs.Argv) => { + return args + .positional('branch-a', { type: 'string', describe: 'baseline branch' }) + .positional('branch-b', { type: 'string', describe: 'comparison branch' }) + .option('directory', { type: 'string', alias: 'd' }) + .option('appmap-dir', { type: 'string' }) + .option('query-db', { type: 'string', describe: 'path to query.db (overrides default)' }) + .option('since', { type: 'string' }) + .option('until', { type: 'string' }) + .option('sort', { + type: 'string', + choices: ['delta', 'p95-a', 'p95-b'] as const, + default: 'delta', + }) + .option('include-counts', { type: 'boolean', default: false }) + .option('limit', { type: 'number', describe: 'default 20; pass 0 for unbounded' }) + .option('offset', { type: 'number' }) + .option('json', { type: 'boolean', default: false }); +}; + +type Argv = ReturnType extends yargs.Argv ? 
T : never; + +export const handler = async (argvIn: yargs.ArgumentsCamelCase): Promise => { + const argv = argvIn as yargs.ArgumentsCamelCase; + verbose(argv.verbose as boolean | undefined); + handleWorkingDirectory(argv.directory); + const appmapDir = argv.queryDb ? '' : await locateAppMapDir(argv.appmapDir); + + const branchA = argv.branchA; + const branchB = argv.branchB; + if (!branchA || !branchB) throw new Error(' and are required'); + + const filter: CompareFilter = { + branch_a: branchA, + branch_b: branchB, + sort: argv.sort as CompareSort, + }; + if (argv.since) filter.since = parseTime(argv.since); + if (argv.until) filter.until = parseTime(argv.until); + if (argv.limit !== undefined) filter.limit = argv.limit; + if (argv.offset !== undefined) filter.offset = argv.offset; + + const db = openReadOnly(appmapDir, argv.queryDb); + try { + const page = compare(db, filter); + if (argv.json) { + log(JSON.stringify(page, null, 2)); + return; + } + log(renderCompare(page.rows, branchA, branchB, !!argv.includeCounts)); + const footer = truncationFooter(page); + if (footer) log(footer); + } finally { + db.close(); + } +}; + +// Format a delta ratio (b_p95 / a_p95) for display: +// ~ if 0.8 ≤ ratio ≤ 1.25 (no meaningful change) +// +Nx / -Nx for ≥2× or ≤0.5 +// +N% / -N% otherwise +// ? if delta is null +function formatDelta(d: number | null): string { + if (d == null) return '?'; + if (d >= 0.8 && d <= 1.25) return '~'; + if (d >= 2) return `+${d.toFixed(1)}×`; + if (d <= 0.5) return `-${(1 / d).toFixed(1)}×`; + const pct = (d - 1) * 100; + return `${pct > 0 ? 
'+' : ''}${pct.toFixed(0)}%`; +} + +function renderCompare( + rows: readonly CompareRow[], + branchA: string, + branchB: string, + includeCounts: boolean +): string { + if (includeCounts) { + return formatTable( + ['ROUTE', `${branchA}_p95`, `${branchA}_n`, `${branchB}_p95`, `${branchB}_n`, 'Δ'], + rows.map((r) => [ + `${r.method} ${r.route}`, + formatMs(r.a_p95_ms), + formatCount(r.a_count), + formatMs(r.b_p95_ms), + formatCount(r.b_count), + formatDelta(r.delta), + ]) + ); + } + return formatTable( + ['ROUTE', `${branchA}_p95`, `${branchB}_p95`, 'Δ'], + rows.map((r) => [ + `${r.method} ${r.route}`, + formatMs(r.a_p95_ms), + formatMs(r.b_p95_ms), + formatDelta(r.delta), + ]) + ); +} diff --git a/packages/cli/src/cmds/query/verbs/endpoints.ts b/packages/cli/src/cmds/query/verbs/endpoints.ts new file mode 100644 index 0000000000..d12bb9d258 --- /dev/null +++ b/packages/cli/src/cmds/query/verbs/endpoints.ts @@ -0,0 +1,87 @@ +import yargs from 'yargs'; +import { log } from 'console'; + +import { handleWorkingDirectory } from '../../../lib/handleWorkingDirectory'; +import { locateAppMapDir } from '../../../lib/locateAppMapDir'; +import { verbose } from '../../../utils'; +import { openReadOnly } from '../lib/openReadOnly'; +import { truncationFooter } from '../lib/page'; +import { parseStatus, parseTime } from '../lib/parseFilter'; +import { + endpoints, + EndpointSort, + EndpointsFilter, +} from '../queries/endpoints'; +import { formatCount, formatMs, formatPct, formatTable } from '../lib/format'; + +export const command = 'endpoints'; +export const describe = 'Per-route summary table (orient verb)'; + +export const builder = (args: yargs.Argv) => { + return args + .option('directory', { type: 'string', alias: 'd', describe: 'program working directory' }) + .option('appmap-dir', { type: 'string', describe: 'directory of recordings' }) + .option('query-db', { type: 'string', describe: 'path to query.db (overrides default)' }) + .option('since', { type: 'string', 
describe: 'ISO timestamp or "Nd ago"' }) + .option('until', { type: 'string', describe: 'ISO timestamp or "Nd ago"' }) + .option('branch', { type: 'string' }) + .option('status', { + type: 'string', + describe: + 'route filter — e.g. 500, ">=500" (route is shown if any request matches; aggregates still cover all of that route\'s requests)', + }) + .option('sort', { + type: 'string', + choices: ['count', 'avg', 'p95', 'err'] as const, + default: 'count', + }) + .option('limit', { type: 'number', describe: 'default 20; pass 0 for unbounded' }) + .option('offset', { type: 'number' }) + .option('json', { type: 'boolean', default: false }); +}; + +type Argv = ReturnType extends yargs.Argv ? T : never; + +// Widened at the export so this module is assignable to CommandModule. +export const handler = async (argvIn: yargs.ArgumentsCamelCase): Promise => { + const argv = argvIn as yargs.ArgumentsCamelCase; + verbose(argv.verbose as boolean | undefined); + handleWorkingDirectory(argv.directory); + // When --query-db is supplied, the appmap dir is irrelevant — the user has + // already named a query.db. Otherwise, derive it from the appmap dir. + const appmapDir = argv.queryDb ? 
'' : await locateAppMapDir(argv.appmapDir); + + const filter: EndpointsFilter = { sort: argv.sort as EndpointSort }; + if (argv.since) filter.since = parseTime(argv.since); + if (argv.until) filter.until = parseTime(argv.until); + if (argv.branch) filter.branch = argv.branch; + if (argv.status) filter.status = parseStatus(argv.status); + if (argv.limit !== undefined) filter.limit = argv.limit; + if (argv.offset !== undefined) filter.offset = argv.offset; + + const db = openReadOnly(appmapDir, argv.queryDb); + try { + const page = endpoints(db, filter); + if (argv.json) { + log(JSON.stringify(page, null, 2)); + return; + } + log( + formatTable( + ['METHOD', 'ROUTE', 'COUNT', 'AVG', 'P95', 'ERR%'], + page.rows.map((r) => [ + r.method, + r.route, + formatCount(r.count), + formatMs(r.avg_ms), + formatMs(r.p95_ms), + formatPct(r.err_pct), + ]) + ) + ); + const footer = truncationFooter(page); + if (footer) log(footer); + } finally { + db.close(); + } +}; diff --git a/packages/cli/src/cmds/query/verbs/find.ts b/packages/cli/src/cmds/query/verbs/find.ts new file mode 100644 index 0000000000..6429415e34 --- /dev/null +++ b/packages/cli/src/cmds/query/verbs/find.ts @@ -0,0 +1,286 @@ +import yargs from 'yargs'; +import { log } from 'console'; + +import { handleWorkingDirectory } from '../../../lib/handleWorkingDirectory'; +import { locateAppMapDir } from '../../../lib/locateAppMapDir'; +import { verbose } from '../../../utils'; +import { openReadOnly } from '../lib/openReadOnly'; +import { truncationFooter } from '../lib/page'; +import { parseDuration, parseStatus, parseTime } from '../lib/parseFilter'; +import { parseClassRef } from '../lib/scope'; +import { + find, + FindFilter, + FindType, + FindAppmapRow, + FindCallRow, + FindExceptionRow, + FindLogRow, + FindQueryRow, + FindRequestRow, +} from '../queries/find'; +import { formatMs, formatTable } from '../lib/format'; +import { projectLogMessage } from '../lib/logMessage'; + +export { projectLogMessage }; + +const TYPES: 
readonly FindType[] = ['appmaps', 'requests', 'queries', 'calls', 'exceptions', 'logs']; +// 'recordings' is accepted as an alias for 'appmaps' to match the MCP +// naming (find_recordings) and the user-facing concept of a "recording". +const TYPE_CHOICES: readonly string[] = ['appmaps', 'recordings', ...TYPES.filter((t) => t !== 'appmaps')]; + +function normalizeType(input: string): FindType { + return (input === 'recordings' ? 'appmaps' : input) as FindType; +} + +export const command = 'find '; +export const describe = 'Row-level search across recordings'; + +export const builder = (args: yargs.Argv) => { + return args + .positional('type', { type: 'string', choices: TYPE_CHOICES }) + .option('directory', { type: 'string', alias: 'd' }) + .option('appmap-dir', { type: 'string' }) + .option('query-db', { type: 'string', describe: 'path to query.db (overrides default)' }) + .option('route', { + type: 'string', + describe: + 'e.g. "POST /orders" or "/orders" (path is exact match; method case-insensitive)', + }) + .option('class', { type: 'string', describe: 'defined_class or fqid Class part' }) + .option('method', { type: 'string', describe: 'method_id (not HTTP method)' }) + .option('label', { type: 'string' }) + .option('branch', { type: 'string' }) + .option('commit', { type: 'string' }) + .option('status', { type: 'string', describe: 'e.g. 500, ">=500"' }) + .option('duration', { type: 'string', describe: 'e.g. 
">1s", ">=500ms"' }) + .option('since', { type: 'string' }) + .option('until', { type: 'string' }) + .option('appmap', { type: 'string', describe: 'appmap name' }) + .option('table', { type: 'string', describe: 'SQL table name (queries)' }) + .option('exception', { type: 'string', describe: 'exception class (exceptions)' }) + .option('logger', { type: 'string', describe: 'logger class (logs)' }) + .option('message', { type: 'string', describe: 'log message substring (logs)' }) + .option('with-logs', { + type: 'number', + describe: 'attach the last N log lines preceding each row (exceptions)', + }) + .option('limit', { type: 'number' }) + .option('offset', { type: 'number' }) + .option('json', { type: 'boolean', default: false }); +}; + +type Argv = ReturnType extends yargs.Argv ? T : never; + +// Per-type flag rejection list. Universal flags (--branch, --commit, +// --since, --until, --appmap) are accepted everywhere, as are output flags +// (--limit, --offset, --json). Other filter flags are accepted only on +// types where they make sense; flagging them on the wrong type is an +// error rather than a silent no-op. +const REJECTED_FLAGS: Record = { + appmaps: ['class', 'method', 'label', 'table', 'exception', 'logger', 'message', 'with-logs'], + requests: ['class', 'method', 'label', 'table', 'exception', 'logger', 'message', 'with-logs'], + queries: ['label', 'exception', 'logger', 'message', 'with-logs'], + calls: ['table', 'exception', 'logger', 'message', 'with-logs'], + exceptions: ['class', 'method', 'label', 'duration', 'table', 'logger', 'message'], + logs: ['class', 'method', 'label', 'route', 'status', 'duration', 'table', 'exception', 'with-logs'], +}; + +// Per-flag hints, attached to error messages when a rejected flag is used. +// Useful for nudging users toward the right flag (e.g., HTTP method +// belongs in --route, not --method, which is a function-method-name flag). 
+const REJECTED_HINTS: Partial>>> = { + requests: { + method: 'to filter by HTTP method, use --route "METHOD /path"', + }, + logs: { + class: 'to filter logs by logger class, use --logger', + label: '--label is implied (logs always means label=log)', + }, +}; + +// Exported for tests. Operates on a generic flag map so unit tests don't +// need a full yargs argv. +export function validateFlags(type: FindType, flags: Record): void { + const used: string[] = []; + const hints: string[] = []; + for (const flag of REJECTED_FLAGS[type]) { + if (flags[flag] != null) { + used.push(`--${flag}`); + const hint = REJECTED_HINTS[type]?.[flag]; + if (hint) hints.push(` --${flag}: ${hint}`); + } + } + if (used.length === 0) return; + const verb = used.length === 1 ? 'is' : 'are'; + let message = `find ${type}: ${used.join(', ')} ${verb} not supported for this type`; + if (hints.length > 0) message += `\n${hints.join('\n')}`; + throw new Error(message); +} + +// Build a FindFilter from a yargs argv. Exported for testing — also makes +// the verb-layer transformations (e.g. splitting Class#method off of +// --class so the method composes via filter.method) directly assertable. 
+export interface ParsedFind { + type: FindType; + filter: FindFilter; +} + +export function buildFindFilter(argv: Record): ParsedFind { + const type = normalizeType(argv.type as string); + validateFlags(type, argv); + + const filter: FindFilter = {}; + if (typeof argv.route === 'string') filter.route = argv.route; + if (typeof argv.label === 'string') filter.label = argv.label; + if (typeof argv.branch === 'string') filter.branch = argv.branch; + if (typeof argv.commit === 'string') filter.commit = argv.commit; + if (typeof argv.status === 'string') filter.status = parseStatus(argv.status); + if (typeof argv.duration === 'string') filter.duration = parseDuration(argv.duration); + if (typeof argv.since === 'string') filter.since = parseTime(argv.since); + if (typeof argv.until === 'string') filter.until = parseTime(argv.until); + if (typeof argv.appmap === 'string') filter.appmap = argv.appmap; + if (typeof argv.table === 'string') filter.table = argv.table; + if (typeof argv.exception === 'string') filter.exception = argv.exception; + if (typeof argv.logger === 'string') filter.logger = argv.logger; + if (typeof argv.message === 'string') filter.message = argv.message; + // yargs camelCases --with-logs into argv.withLogs and also keeps the + // kebab-case key. Read both so direct test invocations don't have to + // depend on yargs's coercion. + const withLogs = argv.withLogs ?? argv['with-logs']; + if (typeof withLogs === 'number') filter.withLogs = withLogs; + if (typeof argv.limit === 'number') filter.limit = argv.limit; + if (typeof argv.offset === 'number') filter.offset = argv.offset; + + // The documented --class form is "[pkg/]Class[#method]". Split the + // method off here so it composes through filter.method even when the + // user only supplied --class. 
Internal helpers (classFilterClauses / + // sqlCallerClassClauses) also re-parse, but doing it at the verb gives + // us a uniform contract: filter.className is "[pkg/]Class" only; + // method, if any, lives on filter.method (and an explicit --method + // wins over a method embedded in --class). + let methodFilter = typeof argv.method === 'string' ? argv.method : undefined; + if (typeof argv.class === 'string') { + const parsed = parseClassRef(argv.class); + if (parsed.method && !methodFilter) methodFilter = parsed.method; + filter.className = argv.class; + } + if (methodFilter) filter.method = methodFilter; + + return { type, filter }; +} + +// Widened at the export so this module is assignable to CommandModule. +export const handler = async (argvIn: yargs.ArgumentsCamelCase): Promise => { + const argv = argvIn as yargs.ArgumentsCamelCase; + verbose(argv.verbose as boolean | undefined); + handleWorkingDirectory(argv.directory); + const appmapDir = argv.queryDb ? '' : await locateAppMapDir(argv.appmapDir); + + const { type, filter } = buildFindFilter(argv as Record); + + const db = openReadOnly(appmapDir, argv.queryDb); + try { + const page = find(db, type, filter); + if (argv.json) { + log(JSON.stringify(page, null, 2)); + return; + } + log(renderTable(type, page.rows)); + const footer = truncationFooter(page); + if (footer) log(footer); + } finally { + db.close(); + } +}; + +function renderTable(type: FindType, rows: unknown[]): string { + switch (type) { + case 'appmaps': + return formatTable( + ['APPMAP', 'ROUTE', 'STATUS', 'ELAPSED', 'SQL', 'BRANCH', 'TIMESTAMP'], + (rows as FindAppmapRow[]).map((r) => [ + r.appmap_name, + r.route ?? '', + r.status_code != null ? String(r.status_code) : '', + formatMs(r.elapsed_ms), + String(r.sql_count), + r.branch ?? '', + r.timestamp ?? 
'', + ]) + ); + case 'requests': + return formatTable( + ['APPMAP', 'METHOD', 'ROUTE', 'STATUS', 'ELAPSED', 'BRANCH'], + (rows as FindRequestRow[]).map((r) => [ + r.appmap_name, + r.method, + r.route, + String(r.status_code), + formatMs(r.elapsed_ms), + r.branch ?? '', + ]) + ); + case 'queries': + return formatTable( + ['APPMAP', 'ELAPSED', 'CALLER', 'SQL'], + (rows as FindQueryRow[]).map((r) => [ + r.appmap_name, + formatMs(r.elapsed_ms), + r.caller_class && r.caller_method ? `${r.caller_class}#${r.caller_method}` : '', + r.sql_text, + ]) + ); + case 'calls': + return formatTable( + ['APPMAP', 'FQID', 'LOCATION', 'ELAPSED', 'PARAMS', 'RETURN'], + (rows as FindCallRow[]).map((r) => [ + r.appmap_name, + r.fqid ?? `${r.defined_class}#${r.method_id}`, + r.path != null ? `${r.path}${r.lineno != null ? `:${r.lineno}` : ''}` : '', + formatMs(r.elapsed_ms), + formatParams(r.parameters_json), + r.return_value ?? '', + ]) + ); + case 'exceptions': { + const exRows = rows as FindExceptionRow[]; + const withLogs = exRows.some((r) => r.recent_logs !== undefined); + const headers = withLogs + ? ['APPMAP', 'CLASS', 'MESSAGE', 'EVENT', 'LOGS'] + : ['APPMAP', 'CLASS', 'MESSAGE', 'EVENT']; + return formatTable( + headers, + exRows.map((r) => { + const base = [r.appmap_name, r.exception_class, r.message ?? '', String(r.event_id)]; + if (withLogs) base.push(String(r.recent_logs?.length ?? 0)); + return base; + }) + ); + } + case 'logs': + return formatTable( + ['APPMAP', 'LOGGER', 'METHOD', 'MESSAGE', 'EVENT'], + (rows as FindLogRow[]).map((r) => [ + r.appmap_name, + r.logger, + r.method_id, + projectLogMessage(r.parameters_json, r.return_value), + String(r.event_id), + ]) + ); + } +} + + +function formatParams(json: string | null): string { + if (!json) return ''; + try { + const parsed = JSON.parse(json) as { name?: string; value?: unknown }[]; + return parsed + .map((p) => `${p.name ?? '?'}=${typeof p.value === 'string' ? 
p.value : JSON.stringify(p.value)}`) + .join(', '); + } catch { + return json; + } +} diff --git a/packages/cli/src/cmds/query/verbs/hotspots.ts b/packages/cli/src/cmds/query/verbs/hotspots.ts new file mode 100644 index 0000000000..f7f00f7fd7 --- /dev/null +++ b/packages/cli/src/cmds/query/verbs/hotspots.ts @@ -0,0 +1,129 @@ +import yargs from 'yargs'; +import { log } from 'console'; + +import { handleWorkingDirectory } from '../../../lib/handleWorkingDirectory'; +import { locateAppMapDir } from '../../../lib/locateAppMapDir'; +import { verbose } from '../../../utils'; +import { openReadOnly } from '../lib/openReadOnly'; +import { truncationFooter } from '../lib/page'; +import { parseTime } from '../lib/parseFilter'; +import { + FunctionHotspotRow, + HotspotsFilter, + HotspotType, + SqlHotspotRow, + hotspots, +} from '../queries/hotspots'; +import { formatCount, formatMs, formatTable } from '../lib/format'; + +export const command = 'hotspots'; +export const describe = 'Rank functions or SQL queries by cumulative elapsed'; + +export const builder = (args: yargs.Argv) => { + return args + .option('directory', { type: 'string', alias: 'd' }) + .option('appmap-dir', { type: 'string' }) + .option('query-db', { type: 'string', describe: 'path to query.db (overrides default)' }) + .option('type', { + type: 'string', + choices: ['function', 'sql'] as const, + default: 'function', + }) + .option('route', { + type: 'string', + describe: 'e.g. "GET /reports" (path is exact match; method case-insensitive)', + }) + .option('class', { type: 'string', describe: 'class filter (function mode only)' }) + .option('branch', { type: 'string' }) + .option('since', { type: 'string' }) + .option('until', { type: 'string' }) + .option('limit', { type: 'number', describe: 'default 20; pass 0 for unbounded' }) + .option('offset', { type: 'number' }) + .option('json', { type: 'boolean', default: false }); +}; + +type Argv = ReturnType extends yargs.Argv ? 
T : never; + +// Per-type flag rejection list. Same shape as find's: a small allow-list +// surfaces user mistakes (e.g. --class on --type=sql) instead of silently +// dropping them, and pre-empts future filter additions that only one +// type can honor. +const REJECTED_FLAGS: Record = { + function: [], + sql: ['class'], +}; + +// Exported for tests. +export function validateFlags(type: HotspotType, flags: Record): void { + const used: string[] = []; + for (const flag of REJECTED_FLAGS[type]) { + if (flags[flag] != null) used.push(`--${flag}`); + } + if (used.length === 0) return; + const verb = used.length === 1 ? 'is' : 'are'; + throw new Error( + `hotspots --type=${type}: ${used.join(', ')} ${verb} not supported for this type` + ); +} + +// Widened at the export so this module is assignable to CommandModule. +export const handler = async (argvIn: yargs.ArgumentsCamelCase): Promise => { + const argv = argvIn as yargs.ArgumentsCamelCase; + verbose(argv.verbose as boolean | undefined); + handleWorkingDirectory(argv.directory); + const appmapDir = argv.queryDb ? '' : await locateAppMapDir(argv.appmapDir); + + const type = argv.type as HotspotType; + validateFlags(type, argv as Record); + + const filter: HotspotsFilter = { type }; + if (argv.route) filter.route = argv.route; + if (argv.class) filter.className = argv.class; + if (argv.branch) filter.branch = argv.branch; + if (argv.since) filter.since = parseTime(argv.since); + if (argv.until) filter.until = parseTime(argv.until); + if (argv.limit !== undefined) filter.limit = argv.limit; + if (argv.offset !== undefined) filter.offset = argv.offset; + + const db = openReadOnly(appmapDir, argv.queryDb); + try { + const page = hotspots(db, filter); + if (argv.json) { + log(JSON.stringify(page, null, 2)); + return; + } + log( + filter.type === 'sql' + ? 
renderSql(page.rows as readonly SqlHotspotRow[]) + : renderFunctions(page.rows as readonly FunctionHotspotRow[]) + ); + const footer = truncationFooter(page); + if (footer) log(footer); + } finally { + db.close(); + } +}; + +function renderFunctions(rows: readonly FunctionHotspotRow[]): string { + return formatTable( + ['FQID', 'CALLS', 'TOTAL_MS', 'SELF_MS'], + rows.map((r) => [ + r.fqid ?? `${r.defined_class}#${r.method_id}`, + formatCount(r.calls), + formatMs(r.total_ms), + formatMs(r.self_ms), + ]) + ); +} + +function renderSql(rows: readonly SqlHotspotRow[]): string { + return formatTable( + ['COUNT', 'AVG', 'TOTAL', 'SQL'], + rows.map((r) => [ + formatCount(r.count), + formatMs(r.avg_ms), + formatMs(r.total_ms), + r.sql_text, + ]) + ); +} diff --git a/packages/cli/src/cmds/query/verbs/mcp.ts b/packages/cli/src/cmds/query/verbs/mcp.ts new file mode 100644 index 0000000000..fcabfcd2b1 --- /dev/null +++ b/packages/cli/src/cmds/query/verbs/mcp.ts @@ -0,0 +1,81 @@ +import { createInterface } from 'readline'; + +import yargs from 'yargs'; + +import { handleWorkingDirectory } from '../../../lib/handleWorkingDirectory'; +import { locateAppMapDir } from '../../../lib/locateAppMapDir'; +import { verbose } from '../../../utils'; +import { openReadOnly } from '../lib/openReadOnly'; +import { + buildMcpHandler, + JsonRpcRequest, + JsonRpcResponse, +} from '../queries/mcp'; + +export const command = 'mcp'; +export const describe = + 'Run an MCP (Model Context Protocol) server on stdio that exposes the query verbs as tools'; + +export const builder = (args: yargs.Argv) => { + return args + .option('directory', { type: 'string', alias: 'd' }) + .option('appmap-dir', { type: 'string' }) + .option('query-db', { + type: 'string', + describe: 'path to query.db (overrides default)', + }); +}; + +type Argv = ReturnType extends yargs.Argv ? T : never; + +// Widened at the export so this module is assignable to CommandModule. 
+export const handler = async (argvIn: yargs.ArgumentsCamelCase): Promise => { + const argv = argvIn as yargs.ArgumentsCamelCase; + verbose(argv.verbose as boolean | undefined); + handleWorkingDirectory(argv.directory); + const appmapDir = argv.queryDb ? '' : await locateAppMapDir(argv.appmapDir); + + const db = openReadOnly(appmapDir, argv.queryDb); + const handle = buildMcpHandler(db); + + // MCP transport: newline-delimited JSON-RPC 2.0 over stdio. Logging + // goes to stderr only — anything on stdout corrupts the protocol stream. + process.stderr.write(`appmap mcp listening on stdio\n`); + + const rl = createInterface({ input: process.stdin }); + rl.on('line', (line) => { + const trimmed = line.trim(); + if (trimmed.length === 0) return; + let msg: JsonRpcRequest; + try { + msg = JSON.parse(trimmed) as JsonRpcRequest; + } catch (err) { + writeResponse({ + jsonrpc: '2.0', + id: null, + error: { code: -32700, message: `parse error: ${(err as Error).message}` }, + }); + return; + } + let response: JsonRpcResponse | null; + try { + response = handle(msg); + } catch (err) { + response = { + jsonrpc: '2.0', + id: msg.id ?? 
null, + error: { code: -32603, message: (err as Error).message }, + }; + } + if (response) writeResponse(response); + }); + + rl.on('close', () => { + db.close(); + process.exit(0); + }); +}; + +function writeResponse(response: JsonRpcResponse): void { + process.stdout.write(`${JSON.stringify(response)}\n`); +} diff --git a/packages/cli/src/cmds/query/verbs/related.ts b/packages/cli/src/cmds/query/verbs/related.ts new file mode 100644 index 0000000000..49ae01ba38 --- /dev/null +++ b/packages/cli/src/cmds/query/verbs/related.ts @@ -0,0 +1,89 @@ +import yargs from 'yargs'; +import { log } from 'console'; + +import { handleWorkingDirectory } from '../../../lib/handleWorkingDirectory'; +import { locateAppMapDir } from '../../../lib/locateAppMapDir'; +import { verbose } from '../../../utils'; +import { openReadOnly } from '../lib/openReadOnly'; +import { truncationFooter } from '../lib/page'; +import { parseStatus, parseTime } from '../lib/parseFilter'; +import { related, RelatedFilter, RelatedRow } from '../queries/related'; +import { formatCount, formatMs, formatTable } from '../lib/format'; + +export const command = 'related '; +export const describe = 'Rank recordings similar to '; + +export const builder = (args: yargs.Argv) => { + return args + .positional('appmap', { type: 'string', describe: 'source appmap (name or basename)' }) + .option('directory', { type: 'string', alias: 'd' }) + .option('appmap-dir', { type: 'string' }) + .option('query-db', { type: 'string', describe: 'path to query.db (overrides default)' }) + .option('branch', { type: 'string' }) + .option('commit', { type: 'string' }) + .option('since', { type: 'string' }) + .option('until', { type: 'string' }) + .option('status', { + type: 'string', + describe: + 'route filter — e.g. 200, ">=500" (route is shown if any request matches)', + }) + .option('route', { + type: 'string', + describe: 'e.g. 
"POST /orders" (path is exact match; method case-insensitive)', + }) + .option('limit', { type: 'number', describe: 'default 20; pass 0 for unbounded' }) + .option('offset', { type: 'number' }) + .option('json', { type: 'boolean', default: false }); +}; + +type Argv = ReturnType extends yargs.Argv ? T : never; + +// Widened at the export so this module is assignable to CommandModule. +export const handler = async (argvIn: yargs.ArgumentsCamelCase): Promise => { + const argv = argvIn as yargs.ArgumentsCamelCase; + verbose(argv.verbose as boolean | undefined); + handleWorkingDirectory(argv.directory); + const appmapDir = argv.queryDb ? '' : await locateAppMapDir(argv.appmapDir); + + const ref = argv.appmap; + if (!ref) throw new Error(' is required'); + + const filter: RelatedFilter = {}; + if (argv.branch) filter.branch = argv.branch; + if (argv.commit) filter.commit = argv.commit; + if (argv.since) filter.since = parseTime(argv.since); + if (argv.until) filter.until = parseTime(argv.until); + if (argv.status) filter.status = parseStatus(argv.status); + if (argv.route) filter.route = argv.route; + if (argv.limit !== undefined) filter.limit = argv.limit; + if (argv.offset !== undefined) filter.offset = argv.offset; + + const db = openReadOnly(appmapDir, argv.queryDb); + try { + const page = related(db, ref, filter); + if (argv.json) { + log(JSON.stringify(page, null, 2)); + return; + } + log(renderRelated(page.rows)); + const footer = truncationFooter(page); + if (footer) log(footer); + } finally { + db.close(); + } +}; + +function renderRelated(rows: readonly RelatedRow[]): string { + return formatTable( + ['APPMAP', 'SCORE', 'ROUTE', 'STATUS', 'ELAPSED', 'SHARED'], + rows.map((r) => [ + r.appmap_name, + formatCount(r.score), + r.method && r.route ? `${r.method} ${r.route}` : r.route ?? '', + r.status_code != null ? 
String(r.status_code) : '', + formatMs(r.elapsed_ms), + r.shared.join(', '), + ]) + ); +} diff --git a/packages/cli/src/cmds/query/verbs/tree.ts b/packages/cli/src/cmds/query/verbs/tree.ts new file mode 100644 index 0000000000..577e3e29eb --- /dev/null +++ b/packages/cli/src/cmds/query/verbs/tree.ts @@ -0,0 +1,125 @@ +import yargs from 'yargs'; +import { log } from 'console'; + +import { handleWorkingDirectory } from '../../../lib/handleWorkingDirectory'; +import { locateAppMapDir } from '../../../lib/locateAppMapDir'; +import { verbose } from '../../../utils'; +import { openReadOnly } from '../lib/openReadOnly'; +import { tree, treeSummary, TreeNode, TreeOptions } from '../queries/tree'; +import { renderFlat, renderSummary, renderTree } from '../lib/treeRender'; + +export const command = 'tree '; +export const describe = 'Render the call tree of one recording'; + +export const builder = (args: yargs.Argv) => { + return args + .positional('appmap', { type: 'string', describe: 'appmap name (or basename of source path)' }) + .option('directory', { type: 'string', alias: 'd' }) + .option('appmap-dir', { type: 'string' }) + .option('query-db', { type: 'string', describe: 'path to query.db' }) + .option('format', { + type: 'string', + choices: ['tree', 'summary'] as const, + default: 'tree', + }) + .option('filter', { + type: 'string', + choices: ['all', 'http', 'sql', 'logs'] as const, + default: 'all', + }) + .option('focus-fn', { + type: 'string', + describe: 'centre on function calls matching this fqid', + }) + .option('focus-sql', { + type: 'string', + describe: 'centre on SQL queries containing this substring', + }) + .option('focus-route', { + type: 'string', + describe: 'centre on a server request matching this normalized path', + }) + .option('focus-url', { + type: 'string', + describe: 'centre on an outbound HTTP call whose URL contains this substring', + }) + .option('ancestors', { + type: 'number', + describe: 'ancestor levels to keep above each focus match 
(default 5)', + }) + .option('descendants', { + type: 'number', + describe: 'descendant levels below each focus match (default 3)', + }) + .option('min-elapsed-ms', { + type: 'number', + describe: 'prune subtrees whose max elapsed is below this threshold', + }) + .option('json', { type: 'boolean', default: false }); +}; + +type Argv = ReturnType extends yargs.Argv ? T : never; + +// Widened at the export so this module is assignable to CommandModule. +export const handler = async (argvIn: yargs.ArgumentsCamelCase): Promise => { + const argv = argvIn as yargs.ArgumentsCamelCase; + verbose(argv.verbose as boolean | undefined); + handleWorkingDirectory(argv.directory); + const appmapDir = argv.queryDb ? '' : await locateAppMapDir(argv.appmapDir); + + const ref = argv.appmap; + if (!ref) throw new Error(' is required'); + + const db = openReadOnly(appmapDir, argv.queryDb); + try { + if (argv.format === 'summary') { + // The summary aggregates over all event types; combining with + // --filter would be ambiguous, so reject rather than silently drop. 
+ const f = argv.filter as TreeFilter; + if (f !== 'all') { + throw new Error( + 'tree --format=summary does not accept --filter; remove one of them' + ); + } + const s = treeSummary(db, ref); + if (argv.json) log(JSON.stringify(s, null, 2)); + else log(renderSummary(s)); + return; + } + + const treeOptions: TreeOptions = {}; + if (argv.focusFn) treeOptions.focusFn = argv.focusFn; + if (argv.focusSql) treeOptions.focusSql = argv.focusSql; + if (argv.focusRoute) treeOptions.focusRoute = argv.focusRoute; + if (argv.focusUrl) treeOptions.focusUrl = argv.focusUrl; + if (argv.ancestors !== undefined) treeOptions.ancestors = argv.ancestors; + if (argv.descendants !== undefined) treeOptions.descendants = argv.descendants; + if (argv.minElapsedMs !== undefined) treeOptions.minElapsedMs = argv.minElapsedMs; + + const nodes = tree(db, ref, treeOptions); + const filtered = applyFilter(nodes, argv.filter as TreeFilter); + if (argv.json) { + log(JSON.stringify(filtered, null, 2)); + } else { + const f = argv.filter as TreeFilter; + log(f === 'all' ? 
renderTree(filtered) : renderFlat(filtered)); + } + } finally { + db.close(); + } +}; + +type TreeFilter = 'all' | 'http' | 'sql' | 'logs'; + +export function applyFilter(nodes: readonly TreeNode[], filter: TreeFilter): TreeNode[] { + switch (filter) { + case 'all': + return [...nodes]; + case 'sql': + return nodes.filter((n) => n.kind === 'sql'); + case 'logs': + return nodes.filter((n) => n.kind === 'log'); + case 'http': + return nodes.filter((n) => n.kind === 'http_server' || n.kind === 'http_client'); + } +} diff --git a/packages/cli/src/fingerprint/fingerprintDirectoryCommand.ts b/packages/cli/src/fingerprint/fingerprintDirectoryCommand.ts index a7a172b0a6..46ea8fd113 100644 --- a/packages/cli/src/fingerprint/fingerprintDirectoryCommand.ts +++ b/packages/cli/src/fingerprint/fingerprintDirectoryCommand.ts @@ -2,13 +2,17 @@ import type { Metadata } from '@appland/models'; import { findFiles, verbose } from '../utils'; import FingerprintQueue from './fingerprintQueue'; import writeUsage, { collectUsageData } from '../lib/emitUsage'; +import type { QueryDbIndexer } from '../cmds/query/db/import/QueryDbIndexer'; class FingerprintDirectoryCommand { private appmaps = 0; private events = 0; private metadata?: Metadata; - constructor(private readonly directory: string) {} + constructor( + private readonly directory: string, + private readonly indexer?: QueryDbIndexer + ) {} async execute() { if (verbose()) { @@ -21,6 +25,7 @@ class FingerprintDirectoryCommand { this.events += numEvents; this.metadata = metadata; }); + if (this.indexer) this.indexer.attach(fpQueue); let count = 0; await this.files((file) => { @@ -29,6 +34,8 @@ class FingerprintDirectoryCommand { }); if (count > 0) await fpQueue.process(); + if (this.indexer) await this.indexer.syncDirectory(this.directory); + const usageData = await collectUsageData( this.directory, this.events, diff --git a/packages/cli/src/fingerprint/fingerprintWatchCommand.ts 
b/packages/cli/src/fingerprint/fingerprintWatchCommand.ts index 25b01d598b..a2868cfb15 100644 --- a/packages/cli/src/fingerprint/fingerprintWatchCommand.ts +++ b/packages/cli/src/fingerprint/fingerprintWatchCommand.ts @@ -11,6 +11,7 @@ import { FingerprintEvent } from './fingerprinter'; import { Metadata } from '@appland/models'; import { rm } from 'fs/promises'; import AppMapIndex from './appmapIndex'; +import type { QueryDbIndexer } from '../cmds/query/db/import/QueryDbIndexer'; export default class FingerprintWatchCommand { private pidfilePath: string | undefined; @@ -31,7 +32,7 @@ export default class FingerprintWatchCommand { this._numProcessed = value; } - constructor(private directory: string) { + constructor(private directory: string, private readonly indexer?: QueryDbIndexer) { this.pidfilePath = process.env.APPMAP_WRITE_PIDFILE && join(this.directory, 'index.pid'); this.fpQueue = new FingerprintQueue(); this.eventAggregator = new EventAggregator(async (events) => { @@ -39,6 +40,7 @@ export default class FingerprintWatchCommand { this.numProcessed += events.length; }); this.eventAggregator.attach(this.fpQueue, 'index'); + if (this.indexer) this.indexer.attach(this.fpQueue); } removePidfile() { @@ -198,6 +200,8 @@ export default class FingerprintWatchCommand { this.poller.start(); await pollReady; + if (this.indexer) await this.indexer.syncDirectory(this.directory); + this.ready(); } @@ -229,6 +233,7 @@ export default class FingerprintWatchCommand { const { indexDir } = new AppMapIndex(file); rm(indexDir, { force: true, recursive: true }); + if (this.indexer) this.indexer.onRemoved(file); } ready() { diff --git a/packages/cli/tests/unit/cmds/query/db/import/QueryDbIndexer.spec.ts b/packages/cli/tests/unit/cmds/query/db/import/QueryDbIndexer.spec.ts new file mode 100644 index 0000000000..6c381fe098 --- /dev/null +++ b/packages/cli/tests/unit/cmds/query/db/import/QueryDbIndexer.spec.ts @@ -0,0 +1,104 @@ +import { EventEmitter } from 'events'; +import { 
mkdtempSync, rmSync, writeFileSync } from 'fs'; +import { tmpdir } from 'os'; +import { join, resolve } from 'path'; + +import { QueryDbIndexer } from '../../../../../../src/cmds/query/db/import/QueryDbIndexer'; +import { freshDb } from './helpers'; + +function writeAppmap(dir: string, name: string, body: object): string { + const p = join(dir, name); + writeFileSync(p, JSON.stringify(body)); + return p; +} + +function minimalAppmap(): object { + return { + metadata: { timestamp: 1700000000 }, + events: [ + { id: 1, event: 'call', http_server_request: { request_method: 'GET', path_info: '/x' } }, + { id: 2, event: 'return', parent_id: 1, http_server_response: { status_code: 200 } }, + ], + }; +} + +describe('QueryDbIndexer', () => { + let tmp: string; + + beforeEach(() => { + tmp = mkdtempSync(join(tmpdir(), 'qdb-indexer-')); + }); + + afterEach(() => { + rmSync(tmp, { recursive: true, force: true }); + }); + + it('attach() routes index events into importAppmap', () => { + const db = freshDb(); + const indexer = new QueryDbIndexer(db); + try { + const queue = new EventEmitter(); + indexer.attach(queue as any); + const path = writeAppmap(tmp, 'a.appmap.json', minimalAppmap()); + queue.emit('index', { path }); + expect((db.prepare('SELECT COUNT(*) AS n FROM appmaps').get() as any).n).toBe(1); + expect(indexer.stats()).toEqual({ imported: 1, failed: 0 }); + } finally { + // not closing db here — the indexer owns close + indexer.close(); + } + }); + + it('logs and counts failures without throwing on per-file errors', () => { + const db = freshDb(); + const indexer = new QueryDbIndexer(db); + const warn = jest.spyOn(console, 'warn').mockImplementation(() => undefined); + try { + const path = join(tmp, 'broken.appmap.json'); + writeFileSync(path, '{not json'); + indexer.onIndexed(path); + expect(indexer.stats()).toEqual({ imported: 0, failed: 1 }); + expect(warn).toHaveBeenCalledWith(expect.stringContaining('failed to import')); + } finally { + indexer.close(); + 
warn.mockRestore(); + } + }); + + it('syncDirectory imports only files not yet in the DB', async () => { + const db = freshDb(); + const indexer = new QueryDbIndexer(db); + try { + const a = writeAppmap(tmp, 'a.appmap.json', minimalAppmap()); + const b = writeAppmap(tmp, 'b.appmap.json', minimalAppmap()); + indexer.onIndexed(a); // pre-import a + const beforeFailed = indexer.stats().failed; + const beforeImported = indexer.stats().imported; + await indexer.syncDirectory(tmp); + // a was already in DB (skip); b was new (imported). + expect(indexer.stats().imported).toBe(beforeImported + 1); + expect(indexer.stats().failed).toBe(beforeFailed); + const rows = db + .prepare('SELECT source_path FROM appmaps ORDER BY source_path') + .all() + .map((r: any) => r.source_path); + expect(rows).toEqual([resolve(a), resolve(b)].sort()); + } finally { + indexer.close(); + } + }); + + it('onRemoved deletes by source_path', () => { + const db = freshDb(); + const indexer = new QueryDbIndexer(db); + try { + const path = writeAppmap(tmp, 'a.appmap.json', minimalAppmap()); + indexer.onIndexed(path); + expect((db.prepare('SELECT COUNT(*) AS n FROM appmaps').get() as any).n).toBe(1); + indexer.onRemoved(path); + expect((db.prepare('SELECT COUNT(*) AS n FROM appmaps').get() as any).n).toBe(0); + } finally { + indexer.close(); + } + }); +}); diff --git a/packages/cli/tests/unit/cmds/query/db/import/appmapRecord.spec.ts b/packages/cli/tests/unit/cmds/query/db/import/appmapRecord.spec.ts new file mode 100644 index 0000000000..d160ac163c --- /dev/null +++ b/packages/cli/tests/unit/cmds/query/db/import/appmapRecord.spec.ts @@ -0,0 +1,106 @@ +import { mkdtempSync, rmSync, writeFileSync } from 'fs'; +import { tmpdir } from 'os'; +import { join } from 'path'; + +import { insertAppmapRecord } from '../../../../../../src/cmds/query/db/import/appmapRecord'; +import { freshDb } from './helpers'; + +describe('insertAppmapRecord', () => { + let tmp: string; + + beforeEach(() => { + tmp = 
mkdtempSync(join(tmpdir(), 'appmap-record-')); + }); + + afterEach(() => { + rmSync(tmp, { recursive: true, force: true }); + }); + + it('inserts a row with derived metadata, counts, and elapsed', () => { + const db = freshDb(); + try { + const path = join(tmp, 'test.appmap.json'); + writeFileSync(path, '{}'); + + const result = insertAppmapRecord(db, path, { + events: [ + { id: 1, event: 'call', http_server_request: { request_method: 'GET' } }, + { id: 2, event: 'return', parent_id: 1, http_server_response: { status_code: 200 }, elapsed: 0.42 }, + { id: 3, event: 'call', sql_query: { sql: 'SELECT 1' } }, + { id: 4, event: 'return', parent_id: 3 }, + ], + metadata: { + name: 'demo', + language: { name: 'ruby' }, + frameworks: [{ name: 'rails' }], + recorder: { type: 'rspec' }, + git: { repository: 'r', branch: 'main', commit: 'abc' }, + timestamp: 1700000000, + labels: ['lab1', 'lab2'], + }, + }); + + expect(result.appmapId).toBe(1); + expect(result.timestampIso).toBe(new Date(1700000000 * 1000).toISOString()); + + const row = db.prepare('SELECT * FROM appmaps WHERE id = ?').get(result.appmapId) as any; + expect(row.name).toBe('demo'); + expect(row.source_path).toBe(path); + expect(row.language).toBe('ruby'); + expect(row.framework).toBe('rails'); + expect(row.recorder_type).toBe('rspec'); + expect(row.git_repository).toBe('r'); + expect(row.git_branch).toBe('main'); + expect(row.git_commit).toBe('abc'); + expect(row.event_count).toBe(4); + expect(row.sql_query_count).toBe(1); + expect(row.http_request_count).toBe(1); + expect(row.elapsed_ms).toBeCloseTo(420); + expect(JSON.parse(row.metadata_labels)).toEqual(['lab1', 'lab2']); + } finally { + db.close(); + } + }); + + it('falls back to file mtime when metadata has no timestamp', () => { + const db = freshDb(); + try { + const path = join(tmp, 'no-ts.appmap.json'); + writeFileSync(path, '{}'); + const result = insertAppmapRecord(db, path, { events: [], metadata: {} }); + // Just assert it parses as a valid date. 
+ expect(Number.isNaN(Date.parse(result.timestampIso))).toBe(false); + } finally { + db.close(); + } + }); + + it('uses the file basename as the name when metadata.name is missing', () => { + const db = freshDb(); + try { + const path = join(tmp, 'unnamed.appmap.json'); + writeFileSync(path, '{}'); + insertAppmapRecord(db, path, { events: [], metadata: {} }); + const row = db.prepare('SELECT name FROM appmaps').get() as any; + expect(row.name).toBe('unnamed.appmap.json'); + } finally { + db.close(); + } + }); + + it('leaves elapsed_ms null when there is no http_server_response return', () => { + const db = freshDb(); + try { + const path = join(tmp, 'no-http.appmap.json'); + writeFileSync(path, '{}'); + insertAppmapRecord(db, path, { + events: [{ id: 1, event: 'call' }], + metadata: { timestamp: 1700000000 }, + }); + const row = db.prepare('SELECT elapsed_ms FROM appmaps').get() as any; + expect(row.elapsed_ms).toBeNull(); + } finally { + db.close(); + } + }); +}); diff --git a/packages/cli/tests/unit/cmds/query/db/import/codeObjects.spec.ts b/packages/cli/tests/unit/cmds/query/db/import/codeObjects.spec.ts new file mode 100644 index 0000000000..8845f61423 --- /dev/null +++ b/packages/cli/tests/unit/cmds/query/db/import/codeObjects.spec.ts @@ -0,0 +1,361 @@ +import sqlite3 from 'better-sqlite3'; + +import { openQueryDb } from '../../../../../../src/cmds/query/db/openQueryDb'; +import { + ClassMapNode, + importCodeObjects, +} from '../../../../../../src/cmds/query/db/import/codeObjects'; + +function freshDb(): sqlite3.Database { + return openQueryDb('/tmp/ignored', ':memory:').db; +} + +describe('importCodeObjects', () => { + it('returns an empty map and writes nothing for an empty classMap', () => { + const db = freshDb(); + try { + const lookup = importCodeObjects(db, []); + expect(lookup.size).toBe(0); + expect((db.prepare('SELECT COUNT(*) AS n FROM code_objects').get() as any).n).toBe(0); + } finally { + db.close(); + } + }); + + it('inserts a single instance 
method with the canonical fqid', () => { + const db = freshDb(); + try { + const tree: ClassMapNode[] = [ + { + type: 'package', + name: 'app', + children: [ + { + type: 'class', + name: 'User', + children: [ + { + type: 'function', + name: 'save', + static: false, + location: 'app/models/user.rb:10', + }, + ], + }, + ], + }, + ]; + const lookup = importCodeObjects(db, tree); + + const row = db + .prepare('SELECT fqid, package, classes, leaf_class, method, is_static FROM code_objects') + .get() as any; + expect(row.fqid).toBe('app/User#save'); + expect(row.package).toBe('app'); + expect(JSON.parse(row.classes)).toEqual(['User']); + expect(row.leaf_class).toBe('User'); + expect(row.method).toBe('save'); + expect(row.is_static).toBe(0); + expect(lookup.get('app/models/user.rb:10|save')).toBe(1); + } finally { + db.close(); + } + }); + + it('uses "." for static methods', () => { + const db = freshDb(); + try { + const tree: ClassMapNode[] = [ + { + type: 'package', + name: 'core', + children: [ + { + type: 'class', + name: 'Date', + children: [ + { + type: 'function', + name: 'parse', + static: true, + location: 'core/date.rb:1', + }, + ], + }, + ], + }, + ]; + importCodeObjects(db, tree); + const fqid = (db.prepare('SELECT fqid FROM code_objects').get() as any).fqid; + expect(fqid).toBe('core/Date.parse'); + } finally { + db.close(); + } + }); + + it('strips an auxtype suffix from the method name', () => { + const db = freshDb(); + try { + const tree: ClassMapNode[] = [ + { + type: 'package', + name: 'app', + children: [ + { + type: 'class', + name: 'User', + children: [ + { + type: 'function', + name: 'is_authenticated (get)', + location: 'app/models/user.rb:1', + }, + ], + }, + ], + }, + ]; + importCodeObjects(db, tree); + const row = db.prepare('SELECT fqid, method FROM code_objects').get() as any; + expect(row.method).toBe('is_authenticated'); + expect(row.fqid).toBe('app/User#is_authenticated'); + } finally { + db.close(); + } + }); + + it('skips function nodes 
without a location', () => { + const db = freshDb(); + try { + const tree: ClassMapNode[] = [ + { + type: 'package', + name: 'lib', + children: [ + { + type: 'class', + name: 'Cipher', + children: [{ type: 'function', name: 'decrypt' /* no location */ }], + }, + ], + }, + ]; + const lookup = importCodeObjects(db, tree); + expect(lookup.size).toBe(0); + expect((db.prepare('SELECT COUNT(*) AS n FROM code_objects').get() as any).n).toBe(0); + } finally { + db.close(); + } + }); + + it('inserts labels for the function and dedups them', () => { + const db = freshDb(); + try { + const tree: ClassMapNode[] = [ + { + type: 'package', + name: 'app', + children: [ + { + type: 'class', + name: 'Logger', + children: [ + { + type: 'function', + name: 'error', + location: 'app/lib/logger.rb:5', + labels: ['log', 'log'], // duplicate in source + }, + ], + }, + ], + }, + ]; + importCodeObjects(db, tree); + const labels = db + .prepare('SELECT label FROM labels ORDER BY label') + .all() + .map((r: any) => r.label); + expect(labels).toEqual(['log']); + } finally { + db.close(); + } + }); + + it('builds nested-package fqids correctly', () => { + const db = freshDb(); + try { + const tree: ClassMapNode[] = [ + { + type: 'package', + name: 'app', + children: [ + { + type: 'package', + name: 'controllers', + children: [ + { + type: 'package', + name: 'orders', + children: [ + { + type: 'class', + name: 'OrdersController', + children: [ + { + type: 'function', + name: 'create', + location: 'app/controllers/orders/orders_controller.rb:42', + }, + ], + }, + ], + }, + ], + }, + ], + }, + ]; + importCodeObjects(db, tree); + const fqid = (db.prepare('SELECT fqid FROM code_objects').get() as any).fqid; + expect(fqid).toBe('app/controllers/orders/OrdersController#create'); + } finally { + db.close(); + } + }); + + it('uses :: between nested class names (matches @appland/models codeObjectId)', () => { + const db = freshDb(); + try { + const tree: ClassMapNode[] = [ + { + type: 'package', + name: 
'app', + children: [ + { + type: 'class', + name: 'Outer', + children: [ + { + type: 'class', + name: 'Inner', + children: [ + { type: 'function', name: 'foo', location: 'app/outer.rb:1' }, + ], + }, + ], + }, + ], + }, + ]; + importCodeObjects(db, tree); + const fqid = (db.prepare('SELECT fqid FROM code_objects').get() as any).fqid; + expect(fqid).toBe('app/Outer::Inner#foo'); + } finally { + db.close(); + } + }); + + it('uses :: when a class is the immediate child of another class with a static method', () => { + const db = freshDb(); + try { + const tree: ClassMapNode[] = [ + { + type: 'package', + name: 'lib', + children: [ + { + type: 'class', + name: 'Outer', + children: [ + { + type: 'class', + name: 'Inner', + children: [ + { type: 'function', name: 'parse', static: true, location: 'lib/x.rb:1' }, + ], + }, + ], + }, + ], + }, + ]; + importCodeObjects(db, tree); + const fqid = (db.prepare('SELECT fqid FROM code_objects').get() as any).fqid; + expect(fqid).toBe('lib/Outer::Inner.parse'); + } finally { + db.close(); + } + }); + + it('disambiguates two functions sharing the same path:lineno by method name', () => { + const db = freshDb(); + try { + // Spring Data proxy-style: two distinct methods at the same + // synthetic source location. + const tree: ClassMapNode[] = [ + { + type: 'package', + name: 'org/example', + children: [ + { + type: 'class', + name: 'OwnerRepository', + children: [ + { type: 'function', name: 'findById', location: 'Proxy.java:0' }, + { type: 'function', name: 'findPetTypes', location: 'Proxy.java:0' }, + ], + }, + ], + }, + ]; + const lookup = importCodeObjects(db, tree); + + // Both code_objects exist... + expect( + (db.prepare('SELECT COUNT(*) AS n FROM code_objects').get() as any).n + ).toBe(2); + + // ...and the lookup keys disambiguate them by method. 
+ const findByIdId = lookup.get('Proxy.java:0|findById'); + const findPetTypesId = lookup.get('Proxy.java:0|findPetTypes'); + expect(findByIdId).toBeDefined(); + expect(findPetTypesId).toBeDefined(); + expect(findByIdId).not.toBe(findPetTypesId); + } finally { + db.close(); + } + }); + + it('is idempotent on re-import (INSERT OR IGNORE)', () => { + const db = freshDb(); + try { + const tree: ClassMapNode[] = [ + { + type: 'package', + name: 'app', + children: [ + { + type: 'class', + name: 'User', + children: [ + { + type: 'function', + name: 'save', + location: 'app/models/user.rb:10', + labels: ['dao'], + }, + ], + }, + ], + }, + ]; + importCodeObjects(db, tree); + importCodeObjects(db, tree); + expect((db.prepare('SELECT COUNT(*) AS n FROM code_objects').get() as any).n).toBe(1); + expect((db.prepare('SELECT COUNT(*) AS n FROM labels').get() as any).n).toBe(1); + } finally { + db.close(); + } + }); +}); diff --git a/packages/cli/tests/unit/cmds/query/db/import/exceptions.spec.ts b/packages/cli/tests/unit/cmds/query/db/import/exceptions.spec.ts new file mode 100644 index 0000000000..572119b80e --- /dev/null +++ b/packages/cli/tests/unit/cmds/query/db/import/exceptions.spec.ts @@ -0,0 +1,180 @@ +import { importExceptions } from '../../../../../../src/cmds/query/db/import/exceptions'; +import { buildParentEventMap } from '../../../../../../src/cmds/query/db/import/parentEventMap'; +import { freshDb } from './helpers'; + +function seedAppmap(db: any): number { + const info = db + .prepare("INSERT INTO appmaps (name, source_path) VALUES ('rec', '/tmp/rec.appmap.json')") + .run(); + return Number(info.lastInsertRowid); +} + +describe('importExceptions', () => { + it('inserts one row per exception entry, preserving class/message/path/lineno', () => { + const db = freshDb(); + try { + const appmapId = seedAppmap(db); + const events = [ + { id: 1, event: 'call', thread_id: 7 }, + { + id: 2, + event: 'return', + parent_id: 1, + thread_id: 7, + exceptions: [ + { + class: 
'IntegrityError', + message: 'duplicate key', + path: 'app/models/order.rb', + lineno: 42, + }, + ], + }, + ]; + importExceptions(db, appmapId, events, buildParentEventMap(events)); + const row = db.prepare('SELECT * FROM exceptions').get() as any; + expect(row.exception_class).toBe('IntegrityError'); + expect(row.message).toBe('duplicate key'); + expect(row.path).toBe('app/models/order.rb'); + expect(row.lineno).toBe(42); + expect(row.thread_id).toBe(7); + } finally { + db.close(); + } + }); + + it('expands multiple exceptions on the same return event into multiple rows', () => { + const db = freshDb(); + try { + const appmapId = seedAppmap(db); + const events = [ + { id: 1, event: 'call', thread_id: 1 }, + { + id: 2, + event: 'return', + parent_id: 1, + exceptions: [{ class: 'A' }, { class: 'B' }], + }, + ]; + importExceptions(db, appmapId, events, buildParentEventMap(events)); + const rows = db + .prepare('SELECT exception_class FROM exceptions ORDER BY id') + .all() + .map((r: any) => r.exception_class); + expect(rows).toEqual(['A', 'B']); + } finally { + db.close(); + } + }); + + it('skips events without an exceptions array', () => { + const db = freshDb(); + try { + const appmapId = seedAppmap(db); + importExceptions(db, appmapId, [{ id: 1, event: 'call' }], new Map()); + expect((db.prepare('SELECT COUNT(*) AS n FROM exceptions').get() as any).n).toBe(0); + } finally { + db.close(); + } + }); + + it('uses the call event id (not the return event id) for event_id and derives parent_event_id from the call', () => { + const db = freshDb(); + try { + const appmapId = seedAppmap(db); + const events = [ + { id: 10, event: 'call', thread_id: 1 }, // outer + { id: 11, event: 'call', thread_id: 1 }, // inner — parent is 10 + { + id: 12, + event: 'return', + parent_id: 11, + thread_id: 1, + exceptions: [{ class: 'BoomError' }], + }, + ]; + importExceptions(db, appmapId, events, buildParentEventMap(events)); + const row = db.prepare('SELECT event_id, parent_event_id FROM 
exceptions').get() as any; + expect(row.event_id).toBe(11); // call id, not return id + expect(row.parent_event_id).toBe(10); // parent of the call + } finally { + db.close(); + } + }); + + it('leaves parent_event_id NULL when the failing call is at the top of its thread', () => { + const db = freshDb(); + try { + const appmapId = seedAppmap(db); + const events = [ + { id: 1, event: 'call', thread_id: 1 }, // top-level call + { + id: 2, + event: 'return', + parent_id: 1, + thread_id: 1, + exceptions: [{ class: 'TopLevelError' }], + }, + ]; + importExceptions(db, appmapId, events, buildParentEventMap(events)); + const row = db.prepare('SELECT event_id, parent_event_id FROM exceptions').get() as any; + expect(row.event_id).toBe(1); + expect(row.parent_event_id).toBeNull(); + } finally { + db.close(); + } + }); + + it('still imports legacy recordings that place exceptions on the call event', () => { + const db = freshDb(); + try { + const appmapId = seedAppmap(db); + const events = [ + { id: 1, event: 'call', thread_id: 1 }, // outer + { + id: 2, + event: 'call', + thread_id: 1, // inner — exceptions attached here directly + exceptions: [{ class: 'LegacyError' }], + }, + { id: 3, event: 'return', parent_id: 2, thread_id: 1 }, + ]; + importExceptions(db, appmapId, events, buildParentEventMap(events)); + const row = db.prepare('SELECT event_id, parent_event_id FROM exceptions').get() as any; + expect(row.event_id).toBe(2); + expect(row.parent_event_id).toBe(1); + } finally { + db.close(); + } + }); + + it('de-dups when the same call has exceptions on both call and return events (return wins)', () => { + const db = freshDb(); + try { + const appmapId = seedAppmap(db); + const events = [ + { + id: 1, + event: 'call', + thread_id: 1, + exceptions: [{ class: 'OnCall' }], + }, + { + id: 2, + event: 'return', + parent_id: 1, + thread_id: 1, + exceptions: [{ class: 'OnReturn' }], + }, + ]; + importExceptions(db, appmapId, events, buildParentEventMap(events)); + const rows = db 
+ .prepare('SELECT exception_class FROM exceptions ORDER BY id') + .all() + .map((r: any) => r.exception_class); + expect(rows).toEqual(['OnReturn']); + } finally { + db.close(); + } + }); +}); diff --git a/packages/cli/tests/unit/cmds/query/db/import/functionCalls.spec.ts b/packages/cli/tests/unit/cmds/query/db/import/functionCalls.spec.ts new file mode 100644 index 0000000000..9b4c295e9e --- /dev/null +++ b/packages/cli/tests/unit/cmds/query/db/import/functionCalls.spec.ts @@ -0,0 +1,206 @@ +import { + importCodeObjects, + ClassMapNode, +} from '../../../../../../src/cmds/query/db/import/codeObjects'; +import { importFunctionCalls } from '../../../../../../src/cmds/query/db/import/functionCalls'; +import { buildReturnEventMap } from '../../../../../../src/cmds/query/db/import/returnEventMap'; +import { buildParentEventMap } from '../../../../../../src/cmds/query/db/import/parentEventMap'; +import { freshDb } from './helpers'; + +function seedAppmap(db: any): number { + const info = db + .prepare("INSERT INTO appmaps (name, source_path) VALUES ('rec', '/tmp/rec.appmap.json')") + .run(); + return Number(info.lastInsertRowid); +} + +describe('importFunctionCalls', () => { + it('inserts call events, links code_object via path:lineno, and records elapsed', () => { + const db = freshDb(); + try { + const appmapId = seedAppmap(db); + const classMap: ClassMapNode[] = [ + { + type: 'package', + name: 'app', + children: [ + { + type: 'class', + name: 'User', + children: [ + { type: 'function', name: 'save', location: 'app/models/user.rb:10' }, + ], + }, + ], + }, + ]; + const lookup = importCodeObjects(db, classMap); + const events = [ + { + id: 1, + event: 'call', + thread_id: 1, + defined_class: 'User', + method_id: 'save', + path: 'app/models/user.rb', + lineno: 10, + }, + { id: 2, event: 'return', parent_id: 1, elapsed: 0.001 }, + ]; + importFunctionCalls( + db, + appmapId, + events, + buildReturnEventMap(events), + buildParentEventMap(events), + lookup + ); + const 
row = db.prepare('SELECT * FROM function_calls').get() as any; + expect(row.code_object_id).toBe(1); + expect(row.defined_class).toBe('User'); + expect(row.method_id).toBe('save'); + expect(row.path).toBe('app/models/user.rb'); + expect(row.lineno).toBe(10); + expect(row.elapsed_ms).toBeCloseTo(1); + expect(row.parameters_json).toBeNull(); + expect(row.return_value).toBeNull(); + } finally { + db.close(); + } + }); + + it('captures parameters and return value for labeled functions', () => { + const db = freshDb(); + try { + const appmapId = seedAppmap(db); + const classMap: ClassMapNode[] = [ + { + type: 'package', + name: 'app', + children: [ + { + type: 'class', + name: 'IdempotencyKey', + children: [ + { + type: 'function', + name: 'generate', + static: true, + location: 'app/services/idempotency.rb:12', + labels: ['security.idempotency'], + }, + ], + }, + ], + }, + ]; + const lookup = importCodeObjects(db, classMap); + const events = [ + { + id: 1, + event: 'call', + thread_id: 1, + defined_class: 'IdempotencyKey', + method_id: 'generate', + path: 'app/services/idempotency.rb', + lineno: 12, + static: true, + parameters: [{ name: 'request_id', class: 'String', value: "'req-9281'" }], + }, + { + id: 2, + event: 'return', + parent_id: 1, + return_value: { class: 'String', value: "'k-9281'" }, + }, + ]; + importFunctionCalls( + db, + appmapId, + events, + buildReturnEventMap(events), + buildParentEventMap(events), + lookup + ); + const row = db.prepare('SELECT * FROM function_calls').get() as any; + expect(JSON.parse(row.parameters_json)).toEqual([ + { name: 'request_id', class: 'String', value: "'req-9281'" }, + ]); + expect(row.return_value).toBe("'k-9281'"); + expect(row.is_static).toBe(1); + } finally { + db.close(); + } + }); + + it('skips calls that are http_server_request or sql_query carriers', () => { + const db = freshDb(); + try { + const appmapId = seedAppmap(db); + const events = [ + { + id: 1, + event: 'call', + defined_class: 'Foo', + method_id: 
'bar', + http_server_request: { request_method: 'GET', path_info: '/x' }, + }, + { + id: 2, + event: 'call', + defined_class: 'Foo', + method_id: 'bar', + sql_query: { sql: 'SELECT 1' }, + }, + { + id: 3, + event: 'call', + defined_class: 'Foo', + method_id: 'bar', + }, + ]; + importFunctionCalls( + db, + appmapId, + events, + buildReturnEventMap(events), + buildParentEventMap(events), + new Map() + ); + expect((db.prepare('SELECT COUNT(*) AS n FROM function_calls').get() as any).n).toBe(1); + const row = db.prepare('SELECT event_id FROM function_calls').get() as any; + expect(row.event_id).toBe(3); + } finally { + db.close(); + } + }); + + it('leaves code_object_id null when path:lineno does not match any classMap location', () => { + const db = freshDb(); + try { + const appmapId = seedAppmap(db); + const events = [ + { + id: 1, + event: 'call', + defined_class: 'Foo', + method_id: 'bar', + path: 'unknown.rb', + lineno: 1, + }, + ]; + importFunctionCalls( + db, + appmapId, + events, + buildReturnEventMap(events), + buildParentEventMap(events), + new Map() + ); + const row = db.prepare('SELECT code_object_id FROM function_calls').get() as any; + expect(row.code_object_id).toBeNull(); + } finally { + db.close(); + } + }); +}); diff --git a/packages/cli/tests/unit/cmds/query/db/import/helpers.ts b/packages/cli/tests/unit/cmds/query/db/import/helpers.ts new file mode 100644 index 0000000000..3617e9ae96 --- /dev/null +++ b/packages/cli/tests/unit/cmds/query/db/import/helpers.ts @@ -0,0 +1,8 @@ +import sqlite3 from 'better-sqlite3'; + +import { openQueryDb } from '../../../../../../src/cmds/query/db/openQueryDb'; + +// Open an in-memory query DB with the schema applied. 
+export function freshDb(): sqlite3.Database { + return openQueryDb('/tmp/ignored', ':memory:').db; +} diff --git a/packages/cli/tests/unit/cmds/query/db/import/httpClientRequests.spec.ts b/packages/cli/tests/unit/cmds/query/db/import/httpClientRequests.spec.ts new file mode 100644 index 0000000000..d2b05faaee --- /dev/null +++ b/packages/cli/tests/unit/cmds/query/db/import/httpClientRequests.spec.ts @@ -0,0 +1,72 @@ +import { importHttpClientRequests } from '../../../../../../src/cmds/query/db/import/httpClientRequests'; +import { buildReturnEventMap } from '../../../../../../src/cmds/query/db/import/returnEventMap'; +import { buildParentEventMap } from '../../../../../../src/cmds/query/db/import/parentEventMap'; +import { freshDb } from './helpers'; + +function seedAppmap(db: any): number { + const info = db + .prepare("INSERT INTO appmaps (name, source_path) VALUES ('rec', '/tmp/rec.appmap.json')") + .run(); + return Number(info.lastInsertRowid); +} + +describe('importHttpClientRequests', () => { + it('inserts one row per http_client_request', () => { + const db = freshDb(); + try { + const appmapId = seedAppmap(db); + const events = [ + { + id: 1, + event: 'call', + thread_id: 12, + http_client_request: { request_method: 'GET', url: 'https://api.example/v1' }, + }, + { + id: 2, + event: 'return', + parent_id: 1, + http_client_response: { status_code: 503 }, + elapsed: 0.04, + }, + ]; + importHttpClientRequests( + db, + appmapId, + events, + buildReturnEventMap(events), + buildParentEventMap(events) + ); + const row = db.prepare('SELECT * FROM http_client_requests').get() as any; + expect(row.method).toBe('GET'); + expect(row.url).toBe('https://api.example/v1'); + expect(row.status_code).toBe(503); + expect(row.elapsed_ms).toBeCloseTo(40); + expect(row.thread_id).toBe(12); + } finally { + db.close(); + } + }); + + it('defaults missing method to GET and missing url to ""', () => { + const db = freshDb(); + try { + const appmapId = seedAppmap(db); + const events = 
[ + { id: 1, event: 'call', http_client_request: {} }, + ]; + importHttpClientRequests( + db, + appmapId, + events, + buildReturnEventMap(events), + buildParentEventMap(events) + ); + const row = db.prepare('SELECT method, url FROM http_client_requests').get() as any; + expect(row.method).toBe('GET'); + expect(row.url).toBe(''); + } finally { + db.close(); + } + }); +}); diff --git a/packages/cli/tests/unit/cmds/query/db/import/httpRequests.spec.ts b/packages/cli/tests/unit/cmds/query/db/import/httpRequests.spec.ts new file mode 100644 index 0000000000..570c686582 --- /dev/null +++ b/packages/cli/tests/unit/cmds/query/db/import/httpRequests.spec.ts @@ -0,0 +1,97 @@ +import { importHttpRequests } from '../../../../../../src/cmds/query/db/import/httpRequests'; +import { buildReturnEventMap } from '../../../../../../src/cmds/query/db/import/returnEventMap'; +import { buildParentEventMap } from '../../../../../../src/cmds/query/db/import/parentEventMap'; +import { freshDb } from './helpers'; + +function seedAppmap(db: any): number { + const info = db + .prepare("INSERT INTO appmaps (name, source_path) VALUES ('rec', '/tmp/rec.appmap.json')") + .run(); + return Number(info.lastInsertRowid); +} + +describe('importHttpRequests', () => { + it('inserts one row per http_server_request, joining the matching return for status + elapsed', () => { + const db = freshDb(); + try { + const appmapId = seedAppmap(db); + const events = [ + { + id: 1, + event: 'call', + thread_id: 99, + http_server_request: { + request_method: 'POST', + path_info: '/orders', + normalized_path_info: '/orders', + protocol: 'HTTP/1.1', + }, + }, + { + id: 2, + event: 'call', + thread_id: 99, + }, + { + id: 3, + event: 'return', + parent_id: 2, + }, + { + id: 4, + event: 'return', + parent_id: 1, + http_server_response: { status_code: 500, mime_type: 'application/json' }, + elapsed: 0.52, + }, + ]; + + importHttpRequests( + db, + appmapId, + events, + buildReturnEventMap(events), + 
buildParentEventMap(events) + ); + + const row = db.prepare('SELECT * FROM http_requests').get() as any; + expect(row.method).toBe('POST'); + expect(row.path).toBe('/orders'); + expect(row.normalized_path).toBe('/orders'); + expect(row.protocol).toBe('HTTP/1.1'); + expect(row.status_code).toBe(500); + expect(row.mime_type).toBe('application/json'); + expect(row.elapsed_ms).toBeCloseTo(520); + expect(row.thread_id).toBe(99); + expect(row.parent_event_id).toBeNull(); + } finally { + db.close(); + } + }); + + it('records status_code 0 when no return event was emitted', () => { + const db = freshDb(); + try { + const appmapId = seedAppmap(db); + const events = [ + { + id: 1, + event: 'call', + http_server_request: { request_method: 'GET', path_info: '/x' }, + }, + ]; + importHttpRequests( + db, + appmapId, + events, + buildReturnEventMap(events), + buildParentEventMap(events) + ); + const row = db.prepare('SELECT status_code, elapsed_ms FROM http_requests').get() as any; + expect(row.status_code).toBe(0); + expect(row.elapsed_ms).toBeNull(); + } finally { + db.close(); + } + }); +}); diff --git a/packages/cli/tests/unit/cmds/query/db/import/importAppmap.spec.ts b/packages/cli/tests/unit/cmds/query/db/import/importAppmap.spec.ts new file mode 100644 index 0000000000..4d25c85ece --- /dev/null +++ b/packages/cli/tests/unit/cmds/query/db/import/importAppmap.spec.ts @@ -0,0 +1,196 @@ +import { mkdtempSync, rmSync, writeFileSync } from 'fs'; +import { tmpdir } from 'os'; +import { join, resolve } from 'path'; + +import { + deleteAppmap, + importAppmap, +} from '../../../../../../src/cmds/query/db/import/importAppmap'; +import { freshDb } from './helpers'; + +function writeAppmap(dir: string, name: string, body: object): string { + const p = join(dir, name); + writeFileSync(p, JSON.stringify(body)); + return p; +} + +describe('importAppmap', () => { + let tmp: string; + + beforeEach(() => { + tmp = mkdtempSync(join(tmpdir(), 'import-appmap-')); + }); + + afterEach(() => { + 
rmSync(tmp, { recursive: true, force: true }); + }); + + it('imports an end-to-end recording into all tables', () => { + const db = freshDb(); + try { + const path = writeAppmap(tmp, 'rec.appmap.json', { + metadata: { + name: 'orders_create_42', + language: { name: 'ruby' }, + frameworks: [{ name: 'rails' }], + recorder: { type: 'rspec' }, + git: { branch: 'feature/foo', commit: 'abc' }, + timestamp: 1700000000, + }, + classMap: [ + { + type: 'package', + name: 'app', + children: [ + { + type: 'class', + name: 'OrdersController', + children: [ + { + type: 'function', + name: 'create', + location: 'app/controllers/orders_controller.rb:42', + }, + ], + }, + ], + }, + ], + events: [ + { + id: 1, + event: 'call', + thread_id: 1, + http_server_request: { request_method: 'POST', path_info: '/orders' }, + }, + { + id: 2, + event: 'call', + thread_id: 1, + defined_class: 'OrdersController', + method_id: 'create', + path: 'app/controllers/orders_controller.rb', + lineno: 42, + }, + { + id: 3, + event: 'call', + thread_id: 1, + sql_query: { sql: 'INSERT INTO orders (...)' }, + }, + { id: 4, event: 'return', parent_id: 3, elapsed: 0.014 }, + { + id: 5, + event: 'return', + parent_id: 2, + exceptions: [{ class: 'IntegrityError', message: 'duplicate key' }], + }, + { + id: 6, + event: 'return', + parent_id: 1, + http_server_response: { status_code: 500 }, + elapsed: 0.52, + }, + ], + }); + + const result = importAppmap(db, path); + expect(result.eventCount).toBe(6); + expect(result.sqlCount).toBe(1); + expect(result.httpCount).toBe(1); + + const am = db.prepare('SELECT * FROM appmaps').get() as any; + expect(am.source_path).toBe(resolve(path)); + expect(am.git_branch).toBe('feature/foo'); + expect(am.elapsed_ms).toBeCloseTo(520); + + expect((db.prepare('SELECT COUNT(*) AS n FROM http_requests').get() as any).n).toBe(1); + expect((db.prepare('SELECT COUNT(*) AS n FROM sql_queries').get() as any).n).toBe(1); + expect((db.prepare('SELECT COUNT(*) AS n FROM function_calls').get() 
as any).n).toBe(1); + expect((db.prepare('SELECT COUNT(*) AS n FROM exceptions').get() as any).n).toBe(1); + expect((db.prepare('SELECT COUNT(*) AS n FROM code_objects').get() as any).n).toBe(1); + } finally { + db.close(); + } + }); + + it('is idempotent on re-import — rows are replaced, not duplicated', () => { + const db = freshDb(); + try { + const path = writeAppmap(tmp, 'rec.appmap.json', { + metadata: { timestamp: 1700000000 }, + events: [ + { + id: 1, + event: 'call', + http_server_request: { request_method: 'GET', path_info: '/x' }, + }, + { id: 2, event: 'return', parent_id: 1, http_server_response: { status_code: 200 } }, + ], + }); + importAppmap(db, path); + importAppmap(db, path); + expect((db.prepare('SELECT COUNT(*) AS n FROM appmaps').get() as any).n).toBe(1); + expect((db.prepare('SELECT COUNT(*) AS n FROM http_requests').get() as any).n).toBe(1); + } finally { + db.close(); + } + }); + + it('rolls back on a parse error, leaving no partial rows', () => { + const db = freshDb(); + try { + const path = join(tmp, 'broken.appmap.json'); + writeFileSync(path, '{not valid json'); + expect(() => importAppmap(db, path)).toThrow(); + expect((db.prepare('SELECT COUNT(*) AS n FROM appmaps').get() as any).n).toBe(0); + } finally { + db.close(); + } + }); +}); + +describe('deleteAppmap', () => { + let tmp: string; + + beforeEach(() => { + tmp = mkdtempSync(join(tmpdir(), 'delete-appmap-')); + }); + + afterEach(() => { + rmSync(tmp, { recursive: true, force: true }); + }); + + it('removes the recording and cascades to child rows', () => { + const db = freshDb(); + try { + const path = writeAppmap(tmp, 'rec.appmap.json', { + metadata: { timestamp: 1700000000 }, + events: [ + { + id: 1, + event: 'call', + http_server_request: { request_method: 'GET', path_info: '/x' }, + }, + { id: 2, event: 'return', parent_id: 1, http_server_response: { status_code: 200 } }, + ], + }); + importAppmap(db, path); + expect(deleteAppmap(db, path)).toBe(true); + 
expect((db.prepare('SELECT COUNT(*) AS n FROM appmaps').get() as any).n).toBe(0); + expect((db.prepare('SELECT COUNT(*) AS n FROM http_requests').get() as any).n).toBe(0); + } finally { + db.close(); + } + }); + + it('returns false when no matching row exists', () => { + const db = freshDb(); + try { + expect(deleteAppmap(db, '/tmp/nonexistent.appmap.json')).toBe(false); + } finally { + db.close(); + } + }); +}); diff --git a/packages/cli/tests/unit/cmds/query/db/import/parentEventMap.spec.ts b/packages/cli/tests/unit/cmds/query/db/import/parentEventMap.spec.ts new file mode 100644 index 0000000000..f2b4a536f7 --- /dev/null +++ b/packages/cli/tests/unit/cmds/query/db/import/parentEventMap.spec.ts @@ -0,0 +1,74 @@ +import { + buildParentEventMap, + AppMapEventLike, +} from '../../../../../../src/cmds/query/db/import/parentEventMap'; + +describe('buildParentEventMap', () => { + it('returns an empty map for an empty event stream', () => { + expect(buildParentEventMap([])).toEqual(new Map()); + }); + + it('assigns the call directly above as the parent on a single thread', () => { + const events: AppMapEventLike[] = [ + { id: 1, thread_id: 1, event: 'call' }, // root + { id: 2, thread_id: 1, event: 'call' }, // child of 1 + { id: 3, thread_id: 1, event: 'call' }, // child of 2 + { id: 4, thread_id: 1, event: 'return' }, // returns from 3 + { id: 5, thread_id: 1, event: 'call' }, // child of 2 again + { id: 6, thread_id: 1, event: 'return' }, // returns from 5 + { id: 7, thread_id: 1, event: 'return' }, // returns from 2 + { id: 8, thread_id: 1, event: 'return' }, // returns from 1 + ]; + const map = buildParentEventMap(events); + expect(map.get(1)).toBeUndefined(); + expect(map.get(2)).toBe(1); + expect(map.get(3)).toBe(2); + expect(map.get(5)).toBe(2); + }); + + it('keeps threads independent', () => { + const events: AppMapEventLike[] = [ + { id: 1, thread_id: 1, event: 'call' }, + { id: 2, thread_id: 2, event: 'call' }, // different thread; no parent + { id: 3, 
thread_id: 1, event: 'call' }, // child of 1 + { id: 4, thread_id: 2, event: 'call' }, // child of 2 + ]; + const map = buildParentEventMap(events); + expect(map.get(1)).toBeUndefined(); + expect(map.get(2)).toBeUndefined(); + expect(map.get(3)).toBe(1); + expect(map.get(4)).toBe(2); + }); + + it('skips events missing id or thread_id', () => { + const events: AppMapEventLike[] = [ + { id: 1, thread_id: 1, event: 'call' }, + { thread_id: 1, event: 'call' }, // missing id + { id: 3, event: 'call' }, // missing thread_id + { id: 4, thread_id: 1, event: 'call' }, // child of 1 (the malformed events were skipped) + ]; + const map = buildParentEventMap(events); + expect(map.get(4)).toBe(1); + }); + + it('tolerates extra returns past an empty stack', () => { + const events: AppMapEventLike[] = [ + { id: 1, thread_id: 1, event: 'return' }, // no matching call + { id: 2, thread_id: 1, event: 'call' }, // root after a stray return + { id: 3, thread_id: 1, event: 'call' }, // child of 2 + ]; + const map = buildParentEventMap(events); + expect(map.get(2)).toBeUndefined(); + expect(map.get(3)).toBe(2); + }); + + it('ignores events with neither call nor return', () => { + const events: AppMapEventLike[] = [ + { id: 1, thread_id: 1, event: 'call' }, + { id: 2, thread_id: 1, event: 'log' }, // synthetic; not a call/return + { id: 3, thread_id: 1, event: 'call' }, // still child of 1 + ]; + const map = buildParentEventMap(events); + expect(map.get(3)).toBe(1); + }); +}); diff --git a/packages/cli/tests/unit/cmds/query/db/import/parseLocation.spec.ts b/packages/cli/tests/unit/cmds/query/db/import/parseLocation.spec.ts new file mode 100644 index 0000000000..e2aa48242a --- /dev/null +++ b/packages/cli/tests/unit/cmds/query/db/import/parseLocation.spec.ts @@ -0,0 +1,35 @@ +import { parseLocation } from '../../../../../../src/cmds/query/db/import/parseLocation'; + +describe('parseLocation', () => { + it('parses a simple relative path with a line number', () => { + 
expect(parseLocation('app/views.py:10')).toEqual(['app/views.py', 10]); + }); + + it('parses an absolute path with a line number', () => { + expect(parseLocation('/abs/path/file.rb:511')).toEqual(['/abs/path/file.rb', 511]); + }); + + it('parses a negative line number (synthetic locations)', () => { + expect(parseLocation('File.java:-1')).toEqual(['File.java', -1]); + }); + + it('returns nulls for locations without a colon', () => { + expect(parseLocation('OpenSSL::Cipher#decrypt')).toEqual([null, null]); + }); + + it('returns nulls for empty input', () => { + expect(parseLocation('')).toEqual([null, null]); + expect(parseLocation(undefined)).toEqual([null, null]); + expect(parseLocation(null)).toEqual([null, null]); + }); + + it('returns nulls when the suffix after the last colon is not an integer', () => { + expect(parseLocation('file.rb:abc')).toEqual([null, null]); + expect(parseLocation('file.rb:')).toEqual([null, null]); + expect(parseLocation('file.rb:1a')).toEqual([null, null]); + }); + + it('uses the rightmost colon so namespaced paths are preserved', () => { + expect(parseLocation('OpenSSL::Cipher:42')).toEqual(['OpenSSL::Cipher', 42]); + }); +}); diff --git a/packages/cli/tests/unit/cmds/query/db/import/returnEventMap.spec.ts b/packages/cli/tests/unit/cmds/query/db/import/returnEventMap.spec.ts new file mode 100644 index 0000000000..c8124c1f3d --- /dev/null +++ b/packages/cli/tests/unit/cmds/query/db/import/returnEventMap.spec.ts @@ -0,0 +1,26 @@ +import { buildReturnEventMap } from '../../../../../../src/cmds/query/db/import/returnEventMap'; + +describe('buildReturnEventMap', () => { + it('maps call event id → return event via parent_id', () => { + const events = [ + { id: 1, event: 'call' }, + { id: 2, event: 'return', parent_id: 1, elapsed: 0.5 }, + { id: 3, event: 'call' }, + { id: 4, event: 'return', parent_id: 3, http_server_response: { status_code: 200 } }, + ]; + const map = buildReturnEventMap(events); + expect(map.get(1)?.elapsed).toBe(0.5); + 
expect(map.get(3)?.http_server_response).toEqual({ status_code: 200 }); + expect(map.size).toBe(2); + }); + + it('ignores returns without parent_id', () => { + const events = [{ id: 1, event: 'return' /* no parent_id */ }]; + expect(buildReturnEventMap(events).size).toBe(0); + }); + + it('ignores non-return events', () => { + const events = [{ id: 1, event: 'call', parent_id: 999 }]; + expect(buildReturnEventMap(events).size).toBe(0); + }); +}); diff --git a/packages/cli/tests/unit/cmds/query/db/import/sqlQueries.spec.ts b/packages/cli/tests/unit/cmds/query/db/import/sqlQueries.spec.ts new file mode 100644 index 0000000000..f93ac8fdf7 --- /dev/null +++ b/packages/cli/tests/unit/cmds/query/db/import/sqlQueries.spec.ts @@ -0,0 +1,84 @@ +import { importSqlQueries } from '../../../../../../src/cmds/query/db/import/sqlQueries'; +import { buildReturnEventMap } from '../../../../../../src/cmds/query/db/import/returnEventMap'; +import { buildParentEventMap } from '../../../../../../src/cmds/query/db/import/parentEventMap'; +import { freshDb } from './helpers'; + +function seedAppmap(db: any): number { + const info = db + .prepare("INSERT INTO appmaps (name, source_path) VALUES ('rec', '/tmp/rec.appmap.json')") + .run(); + return Number(info.lastInsertRowid); +} + +describe('importSqlQueries', () => { + it('inserts one row per sql_query event with caller from the event', () => { + const db = freshDb(); + try { + const appmapId = seedAppmap(db); + const events = [ + { + id: 1, + event: 'call', + thread_id: 1, + defined_class: 'OrdersController', + method_id: 'create', + sql_query: { sql: 'INSERT INTO orders (...)', database_type: 'postgres', server_version: '14.5' }, + }, + { id: 2, event: 'return', parent_id: 1, elapsed: 0.014 }, + ]; + importSqlQueries( + db, + appmapId, + events, + buildReturnEventMap(events), + buildParentEventMap(events) + ); + const row = db.prepare('SELECT * FROM sql_queries').get() as any; + expect(row.sql_text).toBe('INSERT INTO orders (...)'); + 
expect(row.database_type).toBe('postgres'); + expect(row.server_version).toBe('14.5'); + expect(row.caller_class).toBe('OrdersController'); + expect(row.caller_method).toBe('create'); + expect(row.elapsed_ms).toBeCloseTo(14); + } finally { + db.close(); + } + }); + + it('derives caller from the parent call event when the sql event lacks defined_class', () => { + const db = freshDb(); + try { + const appmapId = seedAppmap(db); + const events = [ + { + id: 1, + event: 'call', + thread_id: 1, + defined_class: 'OrdersController', + method_id: 'create', + }, + { + id: 2, + event: 'call', + thread_id: 1, + // no defined_class on the SQL event itself + sql_query: { sql: 'SELECT 1' }, + }, + { id: 3, event: 'return', parent_id: 2 }, + { id: 4, event: 'return', parent_id: 1 }, + ]; + importSqlQueries( + db, + appmapId, + events, + buildReturnEventMap(events), + buildParentEventMap(events) + ); + const row = db.prepare('SELECT caller_class, caller_method FROM sql_queries').get() as any; + expect(row.caller_class).toBe('OrdersController'); + expect(row.caller_method).toBe('create'); + } finally { + db.close(); + } + }); +}); diff --git a/packages/cli/tests/unit/cmds/query/db/openQueryDb.spec.ts b/packages/cli/tests/unit/cmds/query/db/openQueryDb.spec.ts new file mode 100644 index 0000000000..96ea05485f --- /dev/null +++ b/packages/cli/tests/unit/cmds/query/db/openQueryDb.spec.ts @@ -0,0 +1,119 @@ +import { mkdtempSync, rmSync } from 'fs'; +import { tmpdir } from 'os'; +import { join } from 'path'; + +import { openQueryDb } from '../../../../../src/cmds/query/db/openQueryDb'; +import { SCHEMA_TABLES, SCHEMA_VERSION } from '../../../../../src/cmds/query/db/schema'; + +describe('openQueryDb', () => { + let tmpDir: string; + let dbPath: string; + + beforeEach(() => { + tmpDir = mkdtempSync(join(tmpdir(), 'appmap-query-db-')); + dbPath = join(tmpDir, 'query.db'); + }); + + afterEach(() => { + rmSync(tmpDir, { recursive: true, force: true }); + }); + + it('creates all schema tables 
on a fresh DB and stamps user_version', () => { + const { db, version, rebuilt, path } = openQueryDb('/tmp/ignored', dbPath); + try { + expect(path).toBe(dbPath); + expect(version).toBe(SCHEMA_VERSION); + expect(rebuilt).toBe(false); + + const tables = db + .prepare("SELECT name FROM sqlite_master WHERE type='table' ORDER BY name") + .all() + .map((r: any) => r.name); + for (const t of SCHEMA_TABLES) expect(tables).toContain(t); + + expect(db.pragma('user_version', { simple: true })).toBe(SCHEMA_VERSION); + } finally { + db.close(); + } + }); + + it('reopens an existing DB at the same version without rebuilding', () => { + const first = openQueryDb('/tmp/ignored', dbPath); + first.db.exec("INSERT INTO appmaps (name, source_path) VALUES ('canary', '/tmp/canary')"); + first.db.close(); + + const second = openQueryDb('/tmp/ignored', dbPath); + try { + expect(second.rebuilt).toBe(false); + const row = second.db + .prepare("SELECT name FROM appmaps WHERE source_path = '/tmp/canary'") + .get() as { name: string } | undefined; + expect(row?.name).toBe('canary'); + } finally { + second.db.close(); + } + }); + + it('drops and rebuilds tables when on-disk user_version does not match', () => { + const first = openQueryDb('/tmp/ignored', dbPath); + first.db.exec("INSERT INTO appmaps (name, source_path) VALUES ('canary', '/tmp/canary')"); + first.db.pragma('user_version = 999'); + first.db.close(); + + const second = openQueryDb('/tmp/ignored', dbPath); + try { + expect(second.rebuilt).toBe(true); + expect(second.version).toBe(SCHEMA_VERSION); + const count = second.db.prepare('SELECT COUNT(*) AS n FROM appmaps').get() as { n: number }; + expect(count.n).toBe(0); + expect(second.db.pragma('user_version', { simple: true })).toBe(SCHEMA_VERSION); + } finally { + second.db.close(); + } + }); + + it('enables WAL and foreign_keys pragmas', () => { + const { db } = openQueryDb('/tmp/ignored', dbPath); + try { + expect(String(db.pragma('journal_mode', { simple: true 
})).toLowerCase()).toBe('wal'); + expect(db.pragma('foreign_keys', { simple: true })).toBe(1); + } finally { + db.close(); + } + }); + + it('cascades deletes from appmaps to dependent rows', () => { + const { db } = openQueryDb('/tmp/ignored', dbPath); + try { + const info = db + .prepare("INSERT INTO appmaps (name, source_path) VALUES (?, ?)") + .run('rec', '/tmp/rec.appmap.json'); + const appmapId = info.lastInsertRowid; + + db.prepare( + `INSERT INTO http_requests + (appmap_id, event_id, method, path, status_code) + VALUES (?, 1, 'GET', '/x', 200)` + ).run(appmapId); + + db.prepare('DELETE FROM appmaps WHERE id = ?').run(appmapId); + + const remaining = db + .prepare('SELECT COUNT(*) AS n FROM http_requests WHERE appmap_id = ?') + .get(appmapId) as { n: number }; + expect(remaining.n).toBe(0); + } finally { + db.close(); + } + }); + + it('creates the parent directory when it does not exist', () => { + const nested = join(tmpDir, 'a', 'b', 'c', 'query.db'); + const { db } = openQueryDb('/tmp/ignored', nested); + try { + expect(db.pragma('user_version', { simple: true })).toBe(SCHEMA_VERSION); + } finally { + db.close(); + } + }); +}); diff --git a/packages/cli/tests/unit/cmds/query/db/path.spec.ts b/packages/cli/tests/unit/cmds/query/db/path.spec.ts new file mode 100644 index 0000000000..6186768b59 --- /dev/null +++ b/packages/cli/tests/unit/cmds/query/db/path.spec.ts @@ -0,0 +1,28 @@ +import { existsSync } from 'fs'; +import { dirname } from 'path'; + +import { queryDbPath } from '../../../../../src/cmds/query/db/path'; + +describe('queryDbPath', () => { + it('returns the same path for equivalent directory inputs', () => { + expect(queryDbPath('/tmp/a')).toBe(queryDbPath('/tmp/a/')); + expect(queryDbPath('/tmp/a')).toBe(queryDbPath('/tmp/a/./')); + }); + + it('produces different paths for different directories', () => { + expect(queryDbPath('/tmp/a')).not.toBe(queryDbPath('/tmp/b')); + }); + + it('lands under ~/.appmap/data//query.db', () => { + const path = 
queryDbPath('/tmp/path-test-dir'); + expect(path).toMatch(/[/\\]\.appmap[/\\]data[/\\][0-9a-f]{12}[/\\]query\.db$/); + }); + + it('does not create the parent directory', () => { + const path = queryDbPath('/tmp/never-created-dir-xyz'); + // The parent may exist if a prior test created it, but queryDbPath + // itself must not create anything; assert it returns a path without I/O. + expect(typeof path).toBe('string'); + void existsSync(dirname(path)); + }); +}); diff --git a/packages/cli/tests/unit/cmds/query/lib/parseFilter.spec.ts b/packages/cli/tests/unit/cmds/query/lib/parseFilter.spec.ts new file mode 100644 index 0000000000..b6a3de55fe --- /dev/null +++ b/packages/cli/tests/unit/cmds/query/lib/parseFilter.spec.ts @@ -0,0 +1,77 @@ +import { + numberFilterSql, + parseDuration, + parseStatus, + parseTime, +} from '../../../../../src/cmds/query/lib/parseFilter'; + +describe('parseStatus', () => { + it('parses bare integer as equality', () => { + expect(parseStatus('500')).toEqual({ op: '=', value: 500 }); + }); + + it('parses each comparator', () => { + expect(parseStatus('>=500')).toEqual({ op: '>=', value: 500 }); + expect(parseStatus('<=399')).toEqual({ op: '<=', value: 399 }); + expect(parseStatus('>200')).toEqual({ op: '>', value: 200 }); + expect(parseStatus('<400')).toEqual({ op: '<', value: 400 }); + expect(parseStatus('=500')).toEqual({ op: '=', value: 500 }); + }); + + it('tolerates whitespace around the operator', () => { + expect(parseStatus('>= 500')).toEqual({ op: '>=', value: 500 }); + expect(parseStatus(' >=500 ')).toEqual({ op: '>=', value: 500 }); + }); + + it('throws on garbage input', () => { + expect(() => parseStatus('5xx')).toThrow(/invalid/); + expect(() => parseStatus('')).toThrow(/invalid/); + }); +}); + +describe('parseDuration', () => { + it('defaults to ms when no unit is given', () => { + expect(parseDuration('>500')).toEqual({ op: '>', value: 500 }); + }); + + it('converts s/m/h to ms', () => { + expect(parseDuration('>1s')).toEqual({ 
op: '>', value: 1000 }); + expect(parseDuration('>=2m')).toEqual({ op: '>=', value: 120_000 }); + expect(parseDuration('<1h')).toEqual({ op: '<', value: 3_600_000 }); + }); + + it('accepts decimals', () => { + expect(parseDuration('>1.5s')).toEqual({ op: '>', value: 1500 }); + }); +}); + +describe('parseTime', () => { + it('parses ISO timestamps', () => { + expect(parseTime('2026-04-29T14:21:08Z')).toBe('2026-04-29T14:21:08.000Z'); + }); + + it('parses ISO dates', () => { + expect(parseTime('2026-04-29')).toBe('2026-04-29T00:00:00.000Z'); + }); + + it('parses relative offsets', () => { + const now = new Date('2026-05-01T12:00:00Z'); + expect(parseTime('7d ago', now)).toBe('2026-04-24T12:00:00.000Z'); + expect(parseTime('30m ago', now)).toBe('2026-05-01T11:30:00.000Z'); + expect(parseTime('2h ago', now)).toBe('2026-05-01T10:00:00.000Z'); + expect(parseTime('45s ago', now)).toBe('2026-05-01T11:59:15.000Z'); + }); + + it('throws on garbage input', () => { + expect(() => parseTime('not a time')).toThrow(/invalid/); + }); +}); + +describe('numberFilterSql', () => { + it('builds a sql fragment + value', () => { + expect(numberFilterSql('status_code', { op: '>=', value: 500 })).toEqual({ + sql: 'status_code >= ?', + value: 500, + }); + }); +}); diff --git a/packages/cli/tests/unit/cmds/query/lib/scope.spec.ts b/packages/cli/tests/unit/cmds/query/lib/scope.spec.ts new file mode 100644 index 0000000000..9279662bc9 --- /dev/null +++ b/packages/cli/tests/unit/cmds/query/lib/scope.spec.ts @@ -0,0 +1,321 @@ +import sqlite3 from 'better-sqlite3'; + +import { openQueryDb } from '../../../../../src/cmds/query/db/openQueryDb'; +import { + appmapRefClause, + classFilterClauses, + methodFilterClauses, + parseClassRef, + parseRoute, +} from '../../../../../src/cmds/query/lib/scope'; + +function freshDb(): sqlite3.Database { + return openQueryDb('/tmp/ignored', ':memory:').db; +} + +describe('parseRoute', () => { + it('parses an HTTP method case-insensitively and uppercases it', () => 
{ + expect(parseRoute('post /orders')).toEqual({ method: 'POST', path: '/orders' }); + expect(parseRoute('Get /reports')).toEqual({ method: 'GET', path: '/reports' }); + expect(parseRoute('DELETE /orders/:id')).toEqual({ + method: 'DELETE', + path: '/orders/:id', + }); + }); + + it('treats an unrecognised prefix as part of the path', () => { + expect(parseRoute('FOO /bar')).toEqual({ path: 'FOO /bar' }); + expect(parseRoute('/orders')).toEqual({ path: '/orders' }); + }); +}); + +describe('appmapRefClause (basename matching)', () => { + it('matches Unix-style source_path with .appmap.json suffix', () => { + const db = freshDb(); + try { + db.prepare( + `INSERT INTO appmaps (name, source_path) VALUES ('rec1', '/tmp/path/rec1.appmap.json')` + ).run(); + const m = appmapRefClause('rec1', 'a'); + const row = db + .prepare(`SELECT a.name FROM appmaps a WHERE ${m.sql}`) + .get(...m.params) as { name: string } | undefined; + expect(row?.name).toBe('rec1'); + } finally { + db.close(); + } + }); + + it('matches Windows-style source_path with backslash separator', () => { + const db = freshDb(); + try { + db.prepare( + `INSERT INTO appmaps (name, source_path) VALUES ('rec1', 'C:\\Users\\me\\rec1.appmap.json')` + ).run(); + const m = appmapRefClause('rec1', 'a'); + const row = db + .prepare(`SELECT a.name FROM appmaps a WHERE ${m.sql}`) + .get(...m.params) as { name: string } | undefined; + expect(row?.name).toBe('rec1'); + } finally { + db.close(); + } + }); + + it('matches source_path without the .appmap.json suffix', () => { + const db = freshDb(); + try { + db.prepare( + `INSERT INTO appmaps (name, source_path) VALUES ('odd', '/store/abc/odd')` + ).run(); + const m = appmapRefClause('odd', 'a'); + const row = db + .prepare(`SELECT a.name FROM appmaps a WHERE ${m.sql}`) + .get(...m.params) as { name: string } | undefined; + expect(row?.name).toBe('odd'); + } finally { + db.close(); + } + }); + + it('matches by appmap.name when source_path differs', () => { + const db = 
freshDb(); + try { + db.prepare( + `INSERT INTO appmaps (name, source_path) VALUES ('Friendly Name', '/x/foo.appmap.json')` + ).run(); + const m = appmapRefClause('Friendly Name', 'a'); + const row = db + .prepare(`SELECT a.name FROM appmaps a WHERE ${m.sql}`) + .get(...m.params) as { name: string } | undefined; + expect(row?.name).toBe('Friendly Name'); + } finally { + db.close(); + } + }); +}); + +describe('parseClassRef', () => { + it('short form is just a class', () => { + expect(parseClassRef('UserRepository')).toEqual({ class: 'UserRepository' }); + }); + + it('short form with method via #', () => { + expect(parseClassRef('UserRepository#findById')).toEqual({ + class: 'UserRepository', + method: 'findById', + }); + }); + + it('Ruby/C++ "::" chain in short form is kept as one class name', () => { + expect(parseClassRef('Cls1::Cls2')).toEqual({ class: 'Cls1::Cls2' }); + expect(parseClassRef('OpenSSL::Cipher')).toEqual({ class: 'OpenSSL::Cipher' }); + }); + + it('Ruby/C++ "::" chain with method via #', () => { + expect(parseClassRef('Net::HTTP#get')).toEqual({ class: 'Net::HTTP', method: 'get' }); + }); + + it('Java/Python dot-form in short form is kept whole (no method split)', () => { + // We can't unambiguously split "org.example.Foo" without context; treat + // the whole input as the class name and let the fallback match it via + // defined_class. 
+ expect(parseClassRef('org.example.Foo')).toEqual({ class: 'org.example.Foo' }); + }); + + it('canonical fqid: package + class', () => { + expect(parseClassRef('app/services/UserRepository')).toEqual({ + package: 'app/services', + class: 'UserRepository', + }); + }); + + it('canonical fqid: package + class + instance method', () => { + expect(parseClassRef('app/services/UserRepository#findById')).toEqual({ + package: 'app/services', + class: 'UserRepository', + method: 'findById', + }); + }); + + it('canonical fqid: package + class + static method', () => { + expect(parseClassRef('core/date/Date.parse')).toEqual({ + package: 'core/date', + class: 'Date', + method: 'parse', + }); + }); + + it('canonical fqid with nested classes via ::', () => { + expect(parseClassRef('app/Outer::Inner#foo')).toEqual({ + package: 'app', + class: 'Outer::Inner', + method: 'foo', + }); + expect(parseClassRef('lib/Outer::Inner.parse')).toEqual({ + package: 'lib', + class: 'Outer::Inner', + method: 'parse', + }); + }); +}); + +function seedCodeObject( + db: sqlite3.Database, + fqid: string, + pkg: string, + classes: string[], + method: string, + isStatic = 0 +): number { + const leaf = classes.length > 0 ? 
classes[classes.length - 1] : ''; + db.prepare( + `INSERT OR IGNORE INTO code_objects (fqid, package, classes, leaf_class, method, is_static) + VALUES (?, ?, ?, ?, ?, ?)` + ).run(fqid, pkg, JSON.stringify(classes), leaf, method, isStatic); + return (db.prepare(`SELECT id FROM code_objects WHERE fqid = ?`).get(fqid) as { id: number }).id; +} + +function seedAppmap(db: sqlite3.Database): number { + return Number( + db + .prepare(`INSERT INTO appmaps (name, source_path) VALUES ('a', '/tmp/a.appmap.json')`) + .run().lastInsertRowid + ); +} + +function seedCall( + db: sqlite3.Database, + appmapId: number, + eventId: number, + defined_class: string, + method_id: string, + code_object_id: number | null +): void { + db.prepare( + `INSERT INTO function_calls (appmap_id, event_id, defined_class, method_id, code_object_id) + VALUES (?, ?, ?, ?, ?)` + ).run(appmapId, eventId, defined_class, method_id, code_object_id); +} + +describe('classFilterClauses', () => { + it('short class name matches a nested ::-chain via suffix', () => { + const db = freshDb(); + try { + const aid = seedAppmap(db); + const co1 = seedCodeObject(db, 'app/OpenSSL::Cipher#decrypt', 'app', ['OpenSSL', 'Cipher'], 'decrypt'); + const co2 = seedCodeObject(db, 'app/Cipher#decrypt', 'app', ['Cipher'], 'decrypt'); + const co3 = seedCodeObject(db, 'app/Other#m', 'app', ['Other'], 'm'); + seedCall(db, aid, 1, 'OpenSSL::Cipher', 'decrypt', co1); + seedCall(db, aid, 2, 'Cipher', 'decrypt', co2); + seedCall(db, aid, 3, 'Other', 'm', co3); + + const c = classFilterClauses('Cipher', 'fc'); + const sql = `SELECT fc.event_id FROM function_calls fc WHERE ${c.where.join(' AND ')} ORDER BY fc.event_id`; + const eids = (db.prepare(sql).all(...c.params) as { event_id: number }[]).map((r) => r.event_id); + // Both co1 (nested) and co2 (top-level) should match; co3 should not. 
+ expect(eids).toEqual([1, 2]); + } finally { + db.close(); + } + }); + + it('full canonical fqid matches strictly on all components', () => { + const db = freshDb(); + try { + const aid = seedAppmap(db); + const co1 = seedCodeObject( + db, + 'org/example/UserRepository#findById', + 'org/example', + ['UserRepository'], + 'findById' + ); + const co2 = seedCodeObject( + db, + 'org/other/UserRepository#findById', + 'org/other', + ['UserRepository'], + 'findById' + ); + seedCall(db, aid, 1, 'org.example.UserRepository', 'findById', co1); + seedCall(db, aid, 2, 'org.other.UserRepository', 'findById', co2); + + const c = classFilterClauses('org/example/UserRepository#findById', 'fc'); + const sql = `SELECT fc.event_id FROM function_calls fc WHERE ${c.where.join(' AND ')}`; + const eids = (db.prepare(sql).all(...c.params) as { event_id: number }[]).map((r) => r.event_id); + expect(eids).toEqual([1]); + } finally { + db.close(); + } + }); + + it('Ruby short-form matches via defined_class fallback when not linked to a code_object', () => { + const db = freshDb(); + try { + const aid = seedAppmap(db); + // Unlinked call (code_object_id = NULL); defined_class is Ruby-form. 
+ seedCall(db, aid, 1, 'OpenSSL::Cipher', 'decrypt', null); + seedCall(db, aid, 2, 'Some::Other::Class', 'm', null); + + const c = classFilterClauses('Cipher', 'fc'); + const sql = `SELECT fc.event_id FROM function_calls fc WHERE ${c.where.join(' AND ')}`; + const eids = (db.prepare(sql).all(...c.params) as { event_id: number }[]).map((r) => r.event_id); + expect(eids).toEqual([1]); + } finally { + db.close(); + } + }); + + it('Java dot-form input matches the full defined_class on unlinked rows', () => { + const db = freshDb(); + try { + const aid = seedAppmap(db); + seedCall(db, aid, 1, 'org.example.Foo', 'm', null); + seedCall(db, aid, 2, 'org.example.Bar', 'm', null); + + const c = classFilterClauses('org.example.Foo', 'fc'); + const sql = `SELECT fc.event_id FROM function_calls fc WHERE ${c.where.join(' AND ')}`; + const eids = (db.prepare(sql).all(...c.params) as { event_id: number }[]).map((r) => r.event_id); + expect(eids).toEqual([1]); + } finally { + db.close(); + } + }); +}); + +describe('methodFilterClauses', () => { + it('matches via normalized code_objects.method', () => { + const db = freshDb(); + try { + const aid = seedAppmap(db); + const co1 = seedCodeObject(db, 'app/X#findById', 'app', ['X'], 'findById'); + const co2 = seedCodeObject(db, 'app/Y#save', 'app', ['Y'], 'save'); + seedCall(db, aid, 1, 'X', 'findById', co1); + seedCall(db, aid, 2, 'Y', 'save', co2); + + const m = methodFilterClauses('findById', 'fc'); + const sql = `SELECT fc.event_id FROM function_calls fc WHERE ${m.where.join(' AND ')}`; + const eids = (db.prepare(sql).all(...m.params) as { event_id: number }[]).map((r) => r.event_id); + expect(eids).toEqual([1]); + } finally { + db.close(); + } + }); + + it('falls back to function_calls.method_id for unlinked rows', () => { + const db = freshDb(); + try { + const aid = seedAppmap(db); + seedCall(db, aid, 1, 'X', 'findById', null); + seedCall(db, aid, 2, 'Y', 'save', null); + + const m = methodFilterClauses('findById', 'fc'); + const 
sql = `SELECT fc.event_id FROM function_calls fc WHERE ${m.where.join(' AND ')}`; + const eids = (db.prepare(sql).all(...m.params) as { event_id: number }[]).map((r) => r.event_id); + expect(eids).toEqual([1]); + } finally { + db.close(); + } + }); +}); diff --git a/packages/cli/tests/unit/cmds/query/lib/treeRender.spec.ts b/packages/cli/tests/unit/cmds/query/lib/treeRender.spec.ts new file mode 100644 index 0000000000..7ad6ae0ad5 --- /dev/null +++ b/packages/cli/tests/unit/cmds/query/lib/treeRender.spec.ts @@ -0,0 +1,64 @@ +import { renderFlat, renderTree } from '../../../../../src/cmds/query/lib/treeRender'; +import { LogNode, TreeNode } from '../../../../../src/cmds/query/queries/tree'; + +function logNode(overrides: Partial = {}): LogNode { + return { + kind: 'log', + event_id: 5, + parent_event_id: 1, + thread_id: null, + depth: 1, + fqid: 'app/AppLogger#error', + logger: 'AppLogger', + method_id: 'error', + path: 'app/log.rb', + lineno: 12, + elapsed_ms: 0.1, + message: 'connection refused', + parameters_json: JSON.stringify([ + { name: 'message', class: 'String', value: 'connection refused' }, + ]), + return_value: null, + ...overrides, + }; +} + +describe('renderTree (log lines)', () => { + it('renders a log node with its projected message inline', () => { + const out = renderTree([logNode()] as TreeNode[]); + expect(out).toContain('LOG'); + expect(out).toContain('AppLogger.error'); + expect(out).toContain('connection refused'); + }); + + it('respects the indentation of the log node depth', () => { + const out = renderTree([logNode({ depth: 3 })] as TreeNode[]); + // 3 levels of two-space indent = 6 leading spaces. + expect(out.startsWith(' LOG')).toBe(true); + }); + + it('falls back to logger.method when no message can be projected', () => { + const out = renderTree([ + logNode({ parameters_json: null, return_value: null }), + ] as TreeNode[]); + // No trailing colon when message is empty. 
+ expect(out).toMatch(/LOG\s+AppLogger\.error\s*$/); + }); + + it('uses a structured return_value when present', () => { + const out = renderTree([ + logNode({ + parameters_json: null, + return_value: JSON.stringify({ level: 'error', message: 'from return' }), + }), + ] as TreeNode[]); + expect(out).toContain('AppLogger.error: from return'); + }); +}); + +describe('renderFlat (log lines)', () => { + it('emits a LOG row when filtering down to log nodes', () => { + const out = renderFlat([logNode()] as TreeNode[]); + expect(out).toMatch(/^LOG\s+AppLogger\.error: connection refused$/); + }); +}); diff --git a/packages/cli/tests/unit/cmds/query/queries/compare.spec.ts b/packages/cli/tests/unit/cmds/query/queries/compare.spec.ts new file mode 100644 index 0000000000..b9a1800be2 --- /dev/null +++ b/packages/cli/tests/unit/cmds/query/queries/compare.spec.ts @@ -0,0 +1,133 @@ +import sqlite3 from 'better-sqlite3'; + +import { openQueryDb } from '../../../../../src/cmds/query/db/openQueryDb'; +import { compare } from '../../../../../src/cmds/query/queries/compare'; + +function freshDb(): sqlite3.Database { + return openQueryDb('/tmp/ignored', ':memory:').db; +} + +interface SeedReq { + branch: string; + method: string; + path: string; + status: number; + elapsed_ms?: number; +} + +let nextEvent = 1; +function seed(db: sqlite3.Database, reqs: SeedReq[]): void { + const insAm = db.prepare( + `INSERT INTO appmaps (name, source_path, git_branch, timestamp) VALUES (?, ?, ?, ?)` + ); + const insReq = db.prepare( + `INSERT INTO http_requests (appmap_id, event_id, method, path, status_code, elapsed_ms) + VALUES (?, ?, ?, ?, ?, ?)` + ); + for (let i = 0; i < reqs.length; i++) { + const r = reqs[i]; + const am = insAm.run( + `rec-${i}`, + `/tmp/rec-${i}.appmap.json`, + r.branch, + '2026-04-29T12:00:00.000Z' + ); + insReq.run( + am.lastInsertRowid, + nextEvent++, + r.method, + r.path, + r.status, + r.elapsed_ms ?? 
null + ); + } +} + +describe('compare', () => { + beforeEach(() => { + nextEvent = 1; + }); + + it('reports a per-route delta = b_p95 / a_p95', () => { + const db = freshDb(); + try { + // Same route on both branches: main is fast, feature is slow. + seed(db, [ + { branch: 'main', method: 'GET', path: '/reports', status: 200, elapsed_ms: 200 }, + { branch: 'main', method: 'GET', path: '/reports', status: 200, elapsed_ms: 210 }, + { branch: 'feat', method: 'GET', path: '/reports', status: 200, elapsed_ms: 6000 }, + { branch: 'feat', method: 'GET', path: '/reports', status: 200, elapsed_ms: 6100 }, + ]); + const rows = compare(db, { branch_a: 'main', branch_b: 'feat' }).rows; + expect(rows).toHaveLength(1); + const r = rows[0]; + expect(r.method).toBe('GET'); + expect(r.route).toBe('/reports'); + expect(r.delta).not.toBeNull(); + // ~30× slowdown + expect(r.delta!).toBeGreaterThan(20); + } finally { + db.close(); + } + }); + + it('preserves routes that exist on only one side', () => { + const db = freshDb(); + try { + seed(db, [ + { branch: 'main', method: 'GET', path: '/old', status: 200, elapsed_ms: 100 }, + { branch: 'feat', method: 'GET', path: '/new', status: 200, elapsed_ms: 50 }, + ]); + const rows = compare(db, { branch_a: 'main', branch_b: 'feat' }).rows; + const old = rows.find((r) => r.route === '/old')!; + const fresh = rows.find((r) => r.route === '/new')!; + expect(old.a_p95_ms).toBe(100); + expect(old.b_p95_ms).toBeNull(); + expect(old.delta).toBeNull(); + expect(fresh.a_p95_ms).toBeNull(); + expect(fresh.b_p95_ms).toBe(50); + expect(fresh.delta).toBeNull(); + } finally { + db.close(); + } + }); + + it('--sort=delta puts the biggest absolute change first (in either direction)', () => { + const db = freshDb(); + try { + seed(db, [ + // /a: 10× slowdown + { branch: 'main', method: 'GET', path: '/a', status: 200, elapsed_ms: 100 }, + { branch: 'feat', method: 'GET', path: '/a', status: 200, elapsed_ms: 1000 }, + // /b: 5× speedup + { branch: 'main', 
method: 'GET', path: '/b', status: 200, elapsed_ms: 500 }, + { branch: 'feat', method: 'GET', path: '/b', status: 200, elapsed_ms: 100 }, + // /c: ~unchanged + { branch: 'main', method: 'GET', path: '/c', status: 200, elapsed_ms: 100 }, + { branch: 'feat', method: 'GET', path: '/c', status: 200, elapsed_ms: 105 }, + ]); + const rows = compare(db, { branch_a: 'main', branch_b: 'feat', sort: 'delta' }).rows; + // /a (10×) and /b (1/5×) have the largest log-delta; /c last. + expect(rows[rows.length - 1].route).toBe('/c'); + } finally { + db.close(); + } + }); + + it('--limit caps the result set', () => { + const db = freshDb(); + try { + seed(db, [ + { branch: 'main', method: 'GET', path: '/a', status: 200, elapsed_ms: 100 }, + { branch: 'main', method: 'GET', path: '/b', status: 200, elapsed_ms: 100 }, + { branch: 'feat', method: 'GET', path: '/a', status: 200, elapsed_ms: 100 }, + { branch: 'feat', method: 'GET', path: '/b', status: 200, elapsed_ms: 100 }, + ]); + expect( + compare(db, { branch_a: 'main', branch_b: 'feat', limit: 1 }).rows + ).toHaveLength(1); + } finally { + db.close(); + } + }); +}); diff --git a/packages/cli/tests/unit/cmds/query/queries/endpoints.spec.ts b/packages/cli/tests/unit/cmds/query/queries/endpoints.spec.ts new file mode 100644 index 0000000000..f166e69850 --- /dev/null +++ b/packages/cli/tests/unit/cmds/query/queries/endpoints.spec.ts @@ -0,0 +1,245 @@ +import sqlite3 from 'better-sqlite3'; + +import { openQueryDb } from '../../../../../src/cmds/query/db/openQueryDb'; +import { endpoints } from '../../../../../src/cmds/query/queries/endpoints'; + +function freshDb(): sqlite3.Database { + return openQueryDb('/tmp/ignored', ':memory:').db; +} + +interface SeedReq { + method: string; + path: string; + normalized_path?: string; + status: number; + elapsed_ms: number | null; + timestamp?: string; + branch?: string; +} + +let nextEvent = 1; +function seed(db: sqlite3.Database, reqs: SeedReq[]): void { + const insertAppmap = db.prepare( + 
`INSERT INTO appmaps (name, source_path, git_branch, timestamp) VALUES (?, ?, ?, ?)` + ); + const insertReq = db.prepare( + `INSERT INTO http_requests (appmap_id, event_id, method, path, normalized_path, + status_code, elapsed_ms) + VALUES (?, ?, ?, ?, ?, ?, ?)` + ); + for (let i = 0; i < reqs.length; i++) { + const r = reqs[i]; + const ts = r.timestamp ?? '2026-04-29T12:00:00.000Z'; + const am = insertAppmap.run( + `rec-${i}`, + `/tmp/rec-${i}.appmap.json`, + r.branch ?? null, + ts + ); + insertReq.run( + am.lastInsertRowid, + nextEvent++, + r.method, + r.path, + r.normalized_path ?? null, + r.status, + r.elapsed_ms + ); + } +} + +describe('endpoints', () => { + beforeEach(() => { + nextEvent = 1; + }); + + it('returns an empty array when there are no requests', () => { + const db = freshDb(); + try { + expect(endpoints(db).rows).toEqual([]); + } finally { + db.close(); + } + }); + + it('groups by (method, route) and counts', () => { + const db = freshDb(); + try { + seed(db, [ + { method: 'GET', path: '/x', status: 200, elapsed_ms: 100 }, + { method: 'GET', path: '/x', status: 200, elapsed_ms: 200 }, + { method: 'POST', path: '/x', status: 201, elapsed_ms: 150 }, + ]); + const out = endpoints(db).rows; + const get = out.find((r) => r.method === 'GET'); + const post = out.find((r) => r.method === 'POST'); + expect(get?.count).toBe(2); + expect(post?.count).toBe(1); + expect(out).toHaveLength(2); + } finally { + db.close(); + } + }); + + it('uses normalized_path when present, otherwise raw path', () => { + const db = freshDb(); + try { + seed(db, [ + { method: 'GET', path: '/orders/42', normalized_path: '/orders/:id', status: 200, elapsed_ms: 100 }, + { method: 'GET', path: '/orders/99', normalized_path: '/orders/:id', status: 200, elapsed_ms: 200 }, + { method: 'GET', path: '/raw-only', status: 200, elapsed_ms: 50 }, + ]); + const out = endpoints(db).rows; + expect(out.find((r) => r.route === '/orders/:id')?.count).toBe(2); + expect(out.find((r) => r.route === 
'/raw-only')?.count).toBe(1); + } finally { + db.close(); + } + }); + + it('computes avg, p95, and err_pct', () => { + const db = freshDb(); + try { + // 10 requests on /x: 9 with status 200 / elapsed [10,20,...,90], 1 with status 500 / elapsed 1000. + const reqs: SeedReq[] = []; + for (let i = 1; i <= 9; i++) { + reqs.push({ method: 'GET', path: '/x', status: 200, elapsed_ms: i * 10 }); + } + reqs.push({ method: 'GET', path: '/x', status: 500, elapsed_ms: 1000 }); + seed(db, reqs); + + const row = endpoints(db).rows.find((r) => r.route === '/x')!; + expect(row.count).toBe(10); + expect(row.err_pct).toBeCloseTo(10); + expect(row.avg_ms).toBeCloseTo((10 + 20 + 30 + 40 + 50 + 60 + 70 + 80 + 90 + 1000) / 10); + // sorted [10,20,...,90,1000]; p95 = ceil(0.95 * 10) - 1 = 9 → idx 9 → 1000 + expect(row.p95_ms).toBe(1000); + } finally { + db.close(); + } + }); + + it('--status filter shows only routes with at least one matching response, but counts remain over all', () => { + const db = freshDb(); + try { + seed(db, [ + { method: 'GET', path: '/quiet', status: 200, elapsed_ms: 50 }, + { method: 'GET', path: '/quiet', status: 200, elapsed_ms: 60 }, + { method: 'POST', path: '/orders', status: 201, elapsed_ms: 100 }, + { method: 'POST', path: '/orders', status: 201, elapsed_ms: 110 }, + { method: 'POST', path: '/orders', status: 500, elapsed_ms: 520 }, + ]); + const out = endpoints(db, { status: { op: '>=', value: 500 } }).rows; + // /quiet has no 5xx → excluded. + // /orders has one 5xx → included; count=3, err_pct=33%. 
+ expect(out).toHaveLength(1); + const row = out[0]; + expect(row.route).toBe('/orders'); + expect(row.count).toBe(3); + expect(row.err_pct).toBeCloseTo((1 / 3) * 100); + } finally { + db.close(); + } + }); + + it('filters by branch', () => { + const db = freshDb(); + try { + seed(db, [ + { method: 'GET', path: '/x', status: 200, elapsed_ms: 100, branch: 'main' }, + { method: 'GET', path: '/y', status: 200, elapsed_ms: 100, branch: 'feature' }, + ]); + expect(endpoints(db, { branch: 'main' }).rows).toHaveLength(1); + expect(endpoints(db, { branch: 'main' }).rows[0].route).toBe('/x'); + } finally { + db.close(); + } + }); + + it('filters by since/until', () => { + const db = freshDb(); + try { + seed(db, [ + { method: 'GET', path: '/x', status: 200, elapsed_ms: 100, timestamp: '2026-04-01T00:00:00.000Z' }, + { method: 'GET', path: '/x', status: 200, elapsed_ms: 100, timestamp: '2026-04-15T00:00:00.000Z' }, + { method: 'GET', path: '/x', status: 200, elapsed_ms: 100, timestamp: '2026-04-30T00:00:00.000Z' }, + ]); + expect(endpoints(db, { since: '2026-04-10T00:00:00.000Z' }).rows[0].count).toBe(2); + expect( + endpoints(db, { + since: '2026-04-10T00:00:00.000Z', + until: '2026-04-20T00:00:00.000Z', + }).rows[0].count + ).toBe(1); + } finally { + db.close(); + } + }); + + it('sorts by the requested key', () => { + const db = freshDb(); + try { + seed(db, [ + { method: 'GET', path: '/a', status: 200, elapsed_ms: 100 }, + { method: 'GET', path: '/b', status: 200, elapsed_ms: 50 }, + { method: 'GET', path: '/b', status: 200, elapsed_ms: 50 }, + { method: 'GET', path: '/c', status: 500, elapsed_ms: 20 }, + { method: 'GET', path: '/d', status: 200, elapsed_ms: 200 }, + ]); + const byCount = endpoints(db, { sort: 'count' }).rows.map((r) => r.route); + expect(byCount[0]).toBe('/b'); // count 2 + const byErr = endpoints(db, { sort: 'err' }).rows.map((r) => r.route); + expect(byErr[0]).toBe('/c'); // 100% err + const byAvg = endpoints(db, { sort: 'avg' }).rows.map((r) => 
r.route); + expect(byAvg[0]).toBe('/d'); // 200ms avg + const byP95 = endpoints(db, { sort: 'p95' }).rows.map((r) => r.route); + expect(byP95[0]).toBe('/d'); // 200ms p95 + } finally { + db.close(); + } + }); + + it('sorts nulls last (a route with no measured duration ranks below a real 0)', () => { + const db = freshDb(); + try { + seed(db, [ + { method: 'GET', path: '/measured', status: 200, elapsed_ms: 0 }, + { method: 'GET', path: '/unmeasured', status: 200, elapsed_ms: null }, + ]); + const byP95 = endpoints(db, { sort: 'p95' }).rows.map((r) => r.route); + expect(byP95).toEqual(['/measured', '/unmeasured']); + } finally { + db.close(); + } + }); + + it('limits the result set', () => { + const db = freshDb(); + try { + seed(db, [ + { method: 'GET', path: '/a', status: 200, elapsed_ms: 100 }, + { method: 'GET', path: '/b', status: 200, elapsed_ms: 100 }, + { method: 'GET', path: '/c', status: 200, elapsed_ms: 100 }, + ]); + expect(endpoints(db, { limit: 2 }).rows).toHaveLength(2); + } finally { + db.close(); + } + }); + + it('handles null elapsed_ms (avg and p95 derived from non-null values only)', () => { + const db = freshDb(); + try { + seed(db, [ + { method: 'GET', path: '/x', status: 200, elapsed_ms: 100 }, + { method: 'GET', path: '/x', status: 200, elapsed_ms: null }, + ]); + const row = endpoints(db).rows[0]; + expect(row.count).toBe(2); + expect(row.avg_ms).toBe(100); + expect(row.p95_ms).toBe(100); + } finally { + db.close(); + } + }); +}); diff --git a/packages/cli/tests/unit/cmds/query/queries/find.spec.ts b/packages/cli/tests/unit/cmds/query/queries/find.spec.ts new file mode 100644 index 0000000000..9966d15515 --- /dev/null +++ b/packages/cli/tests/unit/cmds/query/queries/find.spec.ts @@ -0,0 +1,1074 @@ +import sqlite3 from 'better-sqlite3'; + +import { openQueryDb } from '../../../../../src/cmds/query/db/openQueryDb'; +import { + findAppmaps, + findCalls, + findExceptions, + findLogs, + findQueries, + findRequests, +} from 
'../../../../../src/cmds/query/queries/find'; + +function freshDb(): sqlite3.Database { + return openQueryDb('/tmp/ignored', ':memory:').db; +} + +interface Recording { + name: string; + branch?: string; + commit?: string; + timestamp?: string; + requests?: { + event_id: number; + method: string; + path: string; + normalized_path?: string; + status: number; + elapsed_ms?: number; + }[]; + queries?: { + event_id: number; + parent_event_id?: number; + sql: string; + caller_class?: string; + caller_method?: string; + elapsed_ms?: number; + }[]; + calls?: { + event_id: number; + parent_event_id?: number; + defined_class: string; + method_id: string; + elapsed_ms?: number; + fqid?: string; + labels?: string[]; + parameters?: { name: string; class?: string; value: unknown }[]; + return_value?: unknown; + }[]; + exceptions?: { + event_id: number; + exception_class: string; + message?: string; + }[]; +} + +function seed(db: sqlite3.Database, recs: Recording[]): void { + const insAm = db.prepare( + `INSERT INTO appmaps (name, source_path, git_branch, git_commit, timestamp, sql_query_count, elapsed_ms) + VALUES (?, ?, ?, ?, ?, ?, ?)` + ); + const insReq = db.prepare( + `INSERT INTO http_requests (appmap_id, event_id, method, path, normalized_path, status_code, elapsed_ms) + VALUES (?, ?, ?, ?, ?, ?, ?)` + ); + const insQ = db.prepare( + `INSERT INTO sql_queries (appmap_id, event_id, parent_event_id, sql_text, + caller_class, caller_method, elapsed_ms) + VALUES (?, ?, ?, ?, ?, ?, ?)` + ); + // Test seed: derive package + class chain from defined_class so call + // sites don't have to specify them. defined_class may be Java dot-form + // ("org.example.Foo"), in which case we treat the trailing segment as + // the leaf class and the rest as a slash-form package. 
+ const insCo = db.prepare( + `INSERT OR IGNORE INTO code_objects (fqid, package, classes, leaf_class, method, is_static) + VALUES (?, ?, ?, ?, ?, ?)` + ); + const selCoId = db.prepare(`SELECT id FROM code_objects WHERE fqid = ?`); + const insLabel = db.prepare( + `INSERT OR IGNORE INTO labels (code_object_id, label) VALUES (?, ?)` + ); + const insCall = db.prepare( + `INSERT INTO function_calls (appmap_id, event_id, parent_event_id, defined_class, method_id, + code_object_id, elapsed_ms, parameters_json, return_value) + VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)` + ); + const insExc = db.prepare( + `INSERT INTO exceptions (appmap_id, event_id, exception_class, message) + VALUES (?, ?, ?, ?)` + ); + + for (const r of recs) { + const sqlCount = r.queries?.length ?? 0; + const am = insAm.run( + r.name, + `/tmp/${r.name}.appmap.json`, + r.branch ?? null, + r.commit ?? null, + r.timestamp ?? '2026-04-29T12:00:00.000Z', + sqlCount, + r.requests?.[0]?.elapsed_ms ?? null + ); + const aid = am.lastInsertRowid; + for (const req of r.requests ?? []) { + insReq.run( + aid, + req.event_id, + req.method, + req.path, + req.normalized_path ?? null, + req.status, + req.elapsed_ms ?? null + ); + } + for (const q of r.queries ?? []) { + insQ.run( + aid, + q.event_id, + q.parent_event_id ?? null, + q.sql, + q.caller_class ?? null, + q.caller_method ?? null, + q.elapsed_ms ?? null + ); + } + for (const c of r.calls ?? []) { + const fqid = c.fqid ?? `${c.defined_class}#${c.method_id}`; + const dotIdx = c.defined_class.lastIndexOf('.'); + const pkg = dotIdx >= 0 ? c.defined_class.slice(0, dotIdx).replace(/\./g, '/') : ''; + const leaf = dotIdx >= 0 ? c.defined_class.slice(dotIdx + 1) : c.defined_class; + insCo.run(fqid, pkg, JSON.stringify([leaf]), leaf, c.method_id, 0); + const coId = (selCoId.get(fqid) as { id: number }).id; + for (const label of c.labels ?? []) insLabel.run(coId, label); + const paramsJson = c.parameters ? 
JSON.stringify(c.parameters) : null; + const returnVal = + c.return_value === undefined + ? null + : typeof c.return_value === 'string' + ? c.return_value + : JSON.stringify(c.return_value); + insCall.run( + aid, + c.event_id, + c.parent_event_id ?? null, + c.defined_class, + c.method_id, + coId, + c.elapsed_ms ?? null, + paramsJson, + returnVal + ); + } + for (const e of r.exceptions ?? []) { + insExc.run(aid, e.event_id, e.exception_class, e.message ?? null); + } + } +} + +describe('findRequests', () => { + it('filters by route, method, status, duration, branch', () => { + const db = freshDb(); + try { + seed(db, [ + { + name: 'a', + branch: 'main', + requests: [ + { event_id: 1, method: 'GET', path: '/orders/42', normalized_path: '/orders/:id', status: 200, elapsed_ms: 50 }, + { event_id: 2, method: 'POST', path: '/orders', status: 500, elapsed_ms: 520 }, + ], + }, + { + name: 'b', + branch: 'feature', + requests: [{ event_id: 1, method: 'POST', path: '/orders', status: 500, elapsed_ms: 600 }], + }, + ]); + + // Method-prefixed route + const r1 = findRequests(db, { route: 'POST /orders' }).rows; + expect(r1).toHaveLength(2); + expect(r1.every((r) => r.method === 'POST' && r.route === '/orders')).toBe(true); + + // Status filter + const r2 = findRequests(db, { status: { op: '>=', value: 500 } }).rows; + expect(r2).toHaveLength(2); + + // Duration filter + const r3 = findRequests(db, { duration: { op: '>', value: 550 } }).rows; + expect(r3).toHaveLength(1); + expect(r3[0].appmap_name).toBe('b'); + + // Branch filter + const r4 = findRequests(db, { branch: 'feature' }).rows; + expect(r4).toHaveLength(1); + expect(r4[0].appmap_name).toBe('b'); + } finally { + db.close(); + } + }); + + it('--limit/--offset trims results', () => { + const db = freshDb(); + try { + seed(db, [ + { + name: 'a', + requests: [ + { event_id: 1, method: 'GET', path: '/x', status: 200 }, + { event_id: 2, method: 'GET', path: '/x', status: 200 }, + { event_id: 3, method: 'GET', path: '/x', 
status: 200 }, + ], + }, + ]); + expect(findRequests(db, { limit: 2 }).rows).toHaveLength(2); + expect(findRequests(db, { limit: 2, offset: 1 }).rows[0].event_id).toBe(2); + } finally { + db.close(); + } + }); +}); + +describe('findAppmaps', () => { + it('returns one row per recording, sample request fields populated', () => { + const db = freshDb(); + try { + seed(db, [ + { + name: 'a', + branch: 'main', + requests: [ + { event_id: 1, method: 'GET', path: '/x', status: 200, elapsed_ms: 100 }, + { event_id: 2, method: 'POST', path: '/y', status: 500, elapsed_ms: 200 }, + ], + }, + { name: 'b', branch: 'feature' }, + ]); + const rows = findAppmaps(db, {}).rows; + expect(rows).toHaveLength(2); + const a = rows.find((r) => r.appmap_name === 'a')!; + expect(a.route).toBe('/x'); // first request by event_id + expect(a.branch).toBe('main'); + expect(rows.find((r) => r.appmap_name === 'b')?.route).toBeNull(); + } finally { + db.close(); + } + }); + + it('is deterministic when route filtering — picks the lowest event_id matching request', () => { + const db = freshDb(); + try { + // Two POST /orders requests in one recording with different elapsed. + // The query must consistently pick event_id=1 (the smaller). 
+ seed(db, [ + { + name: 'a', + requests: [ + { event_id: 1, method: 'POST', path: '/orders', status: 500, elapsed_ms: 100 }, + { event_id: 2, method: 'POST', path: '/orders', status: 500, elapsed_ms: 999 }, + ], + }, + ]); + const rows = findAppmaps(db, { route: 'POST /orders' }).rows; + expect(rows).toHaveLength(1); + expect(rows[0].elapsed_ms).toBe(100); // event_id=1 wins, not 2 + } finally { + db.close(); + } + }); + + it('--duration filters on the appmap row (a.elapsed_ms)', () => { + const db = freshDb(); + try { + seed(db, [ + { + name: 'fast', + requests: [{ event_id: 1, method: 'GET', path: '/x', status: 200, elapsed_ms: 50 }], + }, + { + name: 'slow', + requests: [{ event_id: 1, method: 'GET', path: '/x', status: 200, elapsed_ms: 5000 }], + }, + ]); + const rows = findAppmaps(db, { duration: { op: '>', value: 1000 } }).rows; + expect(rows).toHaveLength(1); + expect(rows[0].appmap_name).toBe('slow'); + } finally { + db.close(); + } + }); + + it('--route narrows to recordings with a matching request and reports that request', () => { + const db = freshDb(); + try { + seed(db, [ + { + name: 'a', + requests: [ + { event_id: 1, method: 'GET', path: '/x', status: 200 }, + { event_id: 2, method: 'POST', path: '/orders', status: 500, elapsed_ms: 520 }, + ], + }, + { name: 'b', requests: [{ event_id: 1, method: 'GET', path: '/x', status: 200 }] }, + ]); + const rows = findAppmaps(db, { route: 'POST /orders', status: { op: '>=', value: 500 } }).rows; + expect(rows).toHaveLength(1); + expect(rows[0].appmap_name).toBe('a'); + expect(rows[0].route).toBe('/orders'); + expect(rows[0].status_code).toBe(500); + expect(rows[0].elapsed_ms).toBe(520); + } finally { + db.close(); + } + }); +}); + +describe('findQueries', () => { + it('--table filters via SQL text LIKE; --status scopes via owning request', () => { + const db = freshDb(); + try { + seed(db, [ + { + name: 'a', + requests: [{ event_id: 1, method: 'POST', path: '/orders', status: 500 }], + queries: [ + { 
event_id: 2, sql: 'INSERT INTO orders (...) VALUES (...)', elapsed_ms: 14 }, + { event_id: 3, sql: 'SELECT * FROM users WHERE id = ?' }, + ], + }, + { + name: 'b', + requests: [{ event_id: 1, method: 'GET', path: '/x', status: 200 }], + queries: [{ event_id: 2, sql: 'INSERT INTO orders (...) VALUES (...)' }], + }, + ]); + const rows = findQueries(db, { table: 'orders', status: { op: '>=', value: 500 } }).rows; + expect(rows).toHaveLength(1); + expect(rows[0].appmap_name).toBe('a'); + expect(rows[0].sql_text).toContain('INSERT INTO orders'); + } finally { + db.close(); + } + }); +}); + +describe('find filters: --commit, --since/--until, --duration', () => { + it('--commit on findRequests', () => { + const db = freshDb(); + try { + seed(db, [ + { + name: 'a', + commit: 'abc123', + requests: [{ event_id: 1, method: 'GET', path: '/x', status: 200 }], + }, + { + name: 'b', + commit: 'def456', + requests: [{ event_id: 1, method: 'GET', path: '/y', status: 200 }], + }, + ]); + const rows = findRequests(db, { commit: 'abc123' }).rows; + expect(rows).toHaveLength(1); + expect(rows[0].appmap_name).toBe('a'); + } finally { + db.close(); + } + }); + + it('--commit on findAppmaps', () => { + const db = freshDb(); + try { + seed(db, [ + { name: 'a', commit: 'abc' }, + { name: 'b', commit: 'def' }, + ]); + expect(findAppmaps(db, { commit: 'abc' }).rows).toHaveLength(1); + } finally { + db.close(); + } + }); + + it('--since / --until on findRequests filter via the appmap timestamp', () => { + const db = freshDb(); + try { + seed(db, [ + { + name: 'a', + timestamp: '2026-04-01T00:00:00.000Z', + requests: [{ event_id: 1, method: 'GET', path: '/x', status: 200 }], + }, + { + name: 'b', + timestamp: '2026-04-15T00:00:00.000Z', + requests: [{ event_id: 1, method: 'GET', path: '/x', status: 200 }], + }, + { + name: 'c', + timestamp: '2026-04-30T00:00:00.000Z', + requests: [{ event_id: 1, method: 'GET', path: '/x', status: 200 }], + }, + ]); + const rows = findRequests(db, { + since: 
'2026-04-10T00:00:00.000Z', + until: '2026-04-20T00:00:00.000Z', + }).rows; + expect(rows).toHaveLength(1); + expect(rows[0].appmap_name).toBe('b'); + } finally { + db.close(); + } + }); + + it('--since on findCalls scopes via the recording', () => { + const db = freshDb(); + try { + seed(db, [ + { + name: 'old', + timestamp: '2026-04-01T00:00:00.000Z', + calls: [{ event_id: 1, defined_class: 'X', method_id: 'm' }], + }, + { + name: 'new', + timestamp: '2026-04-30T00:00:00.000Z', + calls: [{ event_id: 1, defined_class: 'X', method_id: 'm' }], + }, + ]); + const rows = findCalls(db, { since: '2026-04-15T00:00:00.000Z' }).rows; + expect(rows).toHaveLength(1); + expect(rows[0].appmap_name).toBe('new'); + } finally { + db.close(); + } + }); + + it('--duration on findCalls filters per-row elapsed_ms', () => { + const db = freshDb(); + try { + seed(db, [ + { + name: 'a', + calls: [ + { event_id: 1, defined_class: 'X', method_id: 'fast', elapsed_ms: 5 }, + { event_id: 2, defined_class: 'X', method_id: 'slow', elapsed_ms: 500 }, + ], + }, + ]); + const rows = findCalls(db, { duration: { op: '>', value: 100 } }).rows; + expect(rows).toHaveLength(1); + expect(rows[0].method_id).toBe('slow'); + } finally { + db.close(); + } + }); + + it('--duration on findQueries filters per-row elapsed_ms', () => { + const db = freshDb(); + try { + seed(db, [ + { + name: 'a', + queries: [ + { event_id: 1, sql: 'SELECT 1', elapsed_ms: 1 }, + { event_id: 2, sql: 'SELECT 2', elapsed_ms: 50 }, + ], + }, + ]); + const rows = findQueries(db, { duration: { op: '>=', value: 10 } }).rows; + expect(rows).toHaveLength(1); + expect(rows[0].sql_text).toBe('SELECT 2'); + } finally { + db.close(); + } + }); + + it('findQueries --class matches via the parent function_call code_object when the linked parent has the right class', () => { + const db = freshDb(); + try { + // Seed a function_call with a code_object link, then a sql_query + // whose parent_event_id references that call. 
caller_class is set + // to a deliberately mismatching raw string so we can prove the + // code_object path (not the fallback) is matching. + seed(db, [ + { + name: 'a', + calls: [ + { + event_id: 10, + defined_class: 'org.example.UserRepository', + method_id: 'findById', + fqid: 'org/example/UserRepository#findById', + }, + ], + queries: [ + { + event_id: 11, + parent_event_id: 10, + sql: 'SELECT 1', + caller_class: 'WrongClassName', + caller_method: 'wrong', + }, + ], + }, + ]); + // Class part is read from code_objects (UserRepository), not from + // the WrongClassName caller_class string. + expect(findQueries(db, { className: 'UserRepository' }).rows).toHaveLength(1); + // Full chain match also works. + expect(findQueries(db, { className: 'org/example/UserRepository' }).rows).toHaveLength( + 1 + ); + // Misspelled — no match. + expect(findQueries(db, { className: 'OtherRepository' }).rows).toHaveLength(0); + } finally { + db.close(); + } + }); + + it('findQueries --class matches caller_class via the suffix-aware helper', () => { + const db = freshDb(); + try { + seed(db, [ + { + name: 'a', + queries: [ + { + event_id: 1, + sql: 'SELECT 1', + caller_class: 'org.example.UserRepository', + caller_method: 'findById', + }, + { + event_id: 2, + sql: 'SELECT 2', + caller_class: 'OpenSSL::Cipher', + caller_method: 'decrypt', + }, + { + event_id: 3, + sql: 'SELECT 3', + caller_class: 'Other', + caller_method: 'm', + }, + ], + }, + ]); + // Java dot-suffix + expect(findQueries(db, { className: 'UserRepository' }).rows).toHaveLength(1); + // Ruby ::-suffix + expect(findQueries(db, { className: 'Cipher' }).rows).toHaveLength(1); + // Exact match also works + expect(findQueries(db, { className: 'OpenSSL::Cipher' }).rows).toHaveLength(1); + // Top-level + expect(findQueries(db, { className: 'Other' }).rows).toHaveLength(1); + } finally { + db.close(); + } + }); +}); + +describe('findCalls', () => { + it('--class and --method filter directly; --route scopes by recording', () 
=> { + const db = freshDb(); + try { + seed(db, [ + { + name: 'a', + requests: [{ event_id: 1, method: 'POST', path: '/orders', status: 500 }], + calls: [ + { event_id: 2, defined_class: 'IdempotencyKey', method_id: 'generate', fqid: 'app/IdempotencyKey.generate' }, + { event_id: 3, defined_class: 'OrdersController', method_id: 'create' }, + ], + }, + { + name: 'b', + requests: [{ event_id: 1, method: 'GET', path: '/x', status: 200 }], + calls: [{ event_id: 2, defined_class: 'IdempotencyKey', method_id: 'generate' }], + }, + ]); + const rows = findCalls(db, { + className: 'IdempotencyKey', + route: 'POST /orders', + status: { op: '>=', value: 500 }, + }).rows; + expect(rows).toHaveLength(1); + expect(rows[0].appmap_name).toBe('a'); + expect(rows[0].fqid).toBe('app/IdempotencyKey.generate'); + } finally { + db.close(); + } + }); + + it('--label filters via the labels table', () => { + const db = freshDb(); + try { + seed(db, [ + { + name: 'a', + calls: [ + { event_id: 1, defined_class: 'Logger', method_id: 'error', labels: ['log'] }, + { event_id: 2, defined_class: 'OrdersController', method_id: 'create' }, + ], + }, + ]); + const rows = findCalls(db, { label: 'log' }).rows; + expect(rows).toHaveLength(1); + expect(rows[0].defined_class).toBe('Logger'); + } finally { + db.close(); + } + }); +}); + +describe('findLogs', () => { + it('returns only label=log calls; non-log calls are excluded', () => { + const db = freshDb(); + try { + seed(db, [ + { + name: 'a', + calls: [ + { + event_id: 1, + defined_class: 'Logger', + method_id: 'info', + labels: ['log'], + parameters: [{ name: 'message', class: 'String', value: 'starting up' }], + }, + { event_id: 2, defined_class: 'OrdersController', method_id: 'create' }, + ], + }, + ]); + const rows = findLogs(db, {}).rows; + expect(rows).toHaveLength(1); + expect(rows[0].logger).toBe('Logger'); + expect(rows[0].method_id).toBe('info'); + } finally { + db.close(); + } + }); + + it('--message matches a substring inside 
parameters_json', () => { + const db = freshDb(); + try { + seed(db, [ + { + name: 'a', + calls: [ + { + event_id: 1, + defined_class: 'Logger', + method_id: 'error', + labels: ['log'], + parameters: [{ name: 'message', class: 'String', value: 'connection refused' }], + }, + { + event_id: 2, + defined_class: 'Logger', + method_id: 'info', + labels: ['log'], + parameters: [{ name: 'message', class: 'String', value: 'started worker' }], + }, + ], + }, + ]); + const rows = findLogs(db, { message: 'refused' }).rows; + expect(rows).toHaveLength(1); + expect(rows[0].event_id).toBe(1); + } finally { + db.close(); + } + }); + + it('--message also matches against return_value (structured-return contract)', () => { + const db = freshDb(); + try { + seed(db, [ + { + name: 'a', + calls: [ + { + event_id: 1, + defined_class: 'Logger', + method_id: 'info', + labels: ['log'], + // No params; the message lives in a structured return_value. + return_value: { level: 'info', message: 'connection refused at host:5432' }, + }, + { + event_id: 2, + defined_class: 'Logger', + method_id: 'info', + labels: ['log'], + return_value: { level: 'info', message: 'all systems nominal' }, + }, + ], + }, + ]); + const rows = findLogs(db, { message: 'refused' }).rows; + expect(rows).toHaveLength(1); + expect(rows[0].event_id).toBe(1); + } finally { + db.close(); + } + }); + + it('--logger filters by the logging class (uses classFilterClauses)', () => { + const db = freshDb(); + try { + seed(db, [ + { + name: 'a', + calls: [ + { + event_id: 1, + defined_class: 'app.AppLogger', + method_id: 'info', + labels: ['log'], + parameters: [{ name: 'msg', class: 'String', value: 'hello' }], + }, + { + event_id: 2, + defined_class: 'lib.AuditLogger', + method_id: 'info', + labels: ['log'], + parameters: [{ name: 'msg', class: 'String', value: 'audited' }], + }, + ], + }, + ]); + // Suffix-aware short-form match: "AppLogger" hits "app.AppLogger". 
+ const rows = findLogs(db, { logger: 'AppLogger' }).rows; + expect(rows).toHaveLength(1); + expect(rows[0].event_id).toBe(1); + } finally { + db.close(); + } + }); + + it('combines --message with appmap-scope filters (--branch)', () => { + const db = freshDb(); + try { + seed(db, [ + { + name: 'a', + branch: 'main', + calls: [ + { + event_id: 1, + defined_class: 'Logger', + method_id: 'info', + labels: ['log'], + parameters: [{ name: 'message', class: 'String', value: 'connection refused' }], + }, + ], + }, + { + name: 'b', + branch: 'feature', + calls: [ + { + event_id: 1, + defined_class: 'Logger', + method_id: 'info', + labels: ['log'], + parameters: [{ name: 'message', class: 'String', value: 'connection refused' }], + }, + ], + }, + ]); + const rows = findLogs(db, { message: 'refused', branch: 'feature' }).rows; + expect(rows).toHaveLength(1); + expect(rows[0].appmap_name).toBe('b'); + } finally { + db.close(); + } + }); + + it('false positives are accepted: --message matches a parameter name', () => { + // Documents the design choice — broad LIKE over the JSON blob means + // a search for "message" matches the parameter name, not just the + // value. Display-time projection can tighten this if needed. 
+ const db = freshDb(); + try { + seed(db, [ + { + name: 'a', + calls: [ + { + event_id: 1, + defined_class: 'Logger', + method_id: 'info', + labels: ['log'], + parameters: [{ name: 'message', class: 'String', value: 'hi' }], + }, + ], + }, + ]); + const rows = findLogs(db, { message: 'message' }).rows; + expect(rows).toHaveLength(1); + } finally { + db.close(); + } + }); +}); + +describe('findCalls --class / --method (fqid-aware)', () => { + it('matches the canonical fqid prefix', () => { + const db = freshDb(); + try { + seed(db, [ + { + name: 'a', + calls: [ + { + event_id: 1, + defined_class: 'org.example.UserRepository', + method_id: 'findById', + fqid: 'org/example/UserRepository#findById', + }, + ], + }, + ]); + // Canonical V3 fqid prefix (slash form, sans method) + const rows = findCalls(db, { className: 'org/example/UserRepository' }).rows; + expect(rows).toHaveLength(1); + } finally { + db.close(); + } + }); + + it('matches a short class name as the trailing fqid segment', () => { + const db = freshDb(); + try { + seed(db, [ + { + name: 'a', + calls: [ + { + event_id: 1, + defined_class: 'org.example.UserRepository', + method_id: 'findById', + fqid: 'org/example/UserRepository#findById', + }, + { + event_id: 2, + defined_class: 'org.other.OrdersController', + method_id: 'create', + fqid: 'org/other/OrdersController#create', + }, + ], + }, + ]); + const rows = findCalls(db, { className: 'UserRepository' }).rows; + expect(rows).toHaveLength(1); + expect(rows[0].method_id).toBe('findById'); + } finally { + db.close(); + } + }); + + it('matches the trailing dot-segment of a Java-style defined_class even without code_object', () => { + const db = freshDb(); + try { + // Insert a function_call that has NO code_object (code_object_id NULL) + // but has a Java dot-form defined_class. 
+ const am = db + .prepare(`INSERT INTO appmaps (name, source_path) VALUES ('a', '/tmp/a.appmap.json')`) + .run(); + db.prepare( + `INSERT INTO function_calls (appmap_id, event_id, defined_class, method_id) + VALUES (?, 1, 'org.example.UserRepository', 'findById')` + ).run(am.lastInsertRowid); + + const rows = findCalls(db, { className: 'UserRepository' }).rows; + expect(rows).toHaveLength(1); + } finally { + db.close(); + } + }); + + it('--method matches via fqid suffix', () => { + const db = freshDb(); + try { + seed(db, [ + { + name: 'a', + calls: [ + { + event_id: 1, + defined_class: 'org.example.UserRepository', + method_id: 'findById', + fqid: 'org/example/UserRepository#findById', + }, + { + event_id: 2, + defined_class: 'org.example.OrderRepository', + method_id: 'findById', + fqid: 'org/example/OrderRepository#findById', + }, + { + event_id: 3, + defined_class: 'org.example.UserRepository', + method_id: 'save', + fqid: 'org/example/UserRepository#save', + }, + ], + }, + ]); + const rows = findCalls(db, { method: 'findById' }).rows; + expect(rows).toHaveLength(2); + expect(rows.every((r) => r.method_id === 'findById')).toBe(true); + } finally { + db.close(); + } + }); +}); + +describe('findExceptions', () => { + it('--exception filters by class; --route scopes by recording', () => { + const db = freshDb(); + try { + seed(db, [ + { + name: 'a', + requests: [{ event_id: 1, method: 'POST', path: '/orders', status: 500 }], + exceptions: [{ event_id: 2, exception_class: 'IntegrityError', message: 'duplicate key' }], + }, + { + name: 'b', + requests: [{ event_id: 1, method: 'GET', path: '/x', status: 404 }], + exceptions: [{ event_id: 2, exception_class: 'RecordNotFound' }], + }, + ]); + expect(findExceptions(db, { exception: 'IntegrityError' }).rows).toHaveLength(1); + expect(findExceptions(db, { route: 'POST /orders' }).rows).toHaveLength(1); + expect(findExceptions(db, { route: 'POST /orders' }).rows[0].appmap_name).toBe('a'); + } finally { + db.close(); + } + 
}); + + it('--with-logs attaches the last N preceding logs in chronological order', () => { + const db = freshDb(); + try { + seed(db, [ + { + name: 'a', + calls: [ + { + event_id: 1, + defined_class: 'Logger', + method_id: 'info', + labels: ['log'], + parameters: [{ name: 'message', class: 'String', value: 'starting up' }], + }, + { + event_id: 2, + defined_class: 'Logger', + method_id: 'warn', + labels: ['log'], + parameters: [{ name: 'message', class: 'String', value: 'connection slow' }], + }, + { + event_id: 3, + defined_class: 'Logger', + method_id: 'error', + labels: ['log'], + parameters: [{ name: 'message', class: 'String', value: 'connection refused' }], + }, + ], + // Exception lands at event 4, after the three logs. + exceptions: [{ event_id: 4, exception_class: 'IOError', message: 'broken pipe' }], + }, + ]); + const rows = findExceptions(db, { withLogs: 2 }).rows; + expect(rows).toHaveLength(1); + expect(rows[0].recent_logs).toBeDefined(); + // Last 2 in chronological order: the warn at event 2, then error at event 3. + const logs = rows[0].recent_logs!; + expect(logs).toHaveLength(2); + expect(logs[0].event_id).toBe(2); + expect(logs[1].event_id).toBe(3); + } finally { + db.close(); + } + }); + + it('--with-logs is omitted when not requested (recent_logs undefined)', () => { + const db = freshDb(); + try { + seed(db, [ + { + name: 'a', + calls: [ + { + event_id: 1, + defined_class: 'Logger', + method_id: 'info', + labels: ['log'], + parameters: [{ name: 'message', class: 'String', value: 'hi' }], + }, + ], + exceptions: [{ event_id: 2, exception_class: 'IOError' }], + }, + ]); + const rows = findExceptions(db, {}).rows; + expect(rows[0].recent_logs).toBeUndefined(); + } finally { + db.close(); + } + }); + + it('--with-logs with no preceding logs returns an empty array', () => { + const db = freshDb(); + try { + seed(db, [ + { + name: 'a', + // Exception at event 1; no logs at all. 
+ exceptions: [{ event_id: 1, exception_class: 'IOError' }], + }, + ]); + const rows = findExceptions(db, { withLogs: 5 }).rows; + expect(rows[0].recent_logs).toEqual([]); + } finally { + db.close(); + } + }); + + it('--with-logs only includes logs from the same recording', () => { + const db = freshDb(); + try { + seed(db, [ + { + name: 'a', + calls: [ + { + event_id: 1, + defined_class: 'Logger', + method_id: 'info', + labels: ['log'], + parameters: [{ name: 'message', class: 'String', value: 'recording-a log' }], + }, + ], + exceptions: [{ event_id: 2, exception_class: 'IOError' }], + }, + { + name: 'b', + calls: [ + { + event_id: 1, + defined_class: 'Logger', + method_id: 'info', + labels: ['log'], + parameters: [{ name: 'message', class: 'String', value: 'recording-b log' }], + }, + ], + exceptions: [{ event_id: 2, exception_class: 'IOError' }], + }, + ]); + const rows = findExceptions(db, { withLogs: 5 }).rows; + expect(rows).toHaveLength(2); + // Each exception's recent_logs is scoped to its own recording. 
+ for (const row of rows) { + expect(row.recent_logs).toHaveLength(1); + expect(row.recent_logs![0].appmap_name).toBe(row.appmap_name); + } + } finally { + db.close(); + } + }); +}); diff --git a/packages/cli/tests/unit/cmds/query/queries/hotspots.spec.ts b/packages/cli/tests/unit/cmds/query/queries/hotspots.spec.ts new file mode 100644 index 0000000000..ad91e0b7bd --- /dev/null +++ b/packages/cli/tests/unit/cmds/query/queries/hotspots.spec.ts @@ -0,0 +1,306 @@ +import sqlite3 from 'better-sqlite3'; + +import { openQueryDb } from '../../../../../src/cmds/query/db/openQueryDb'; +import { + functionHotspots, + sqlHotspots, +} from '../../../../../src/cmds/query/queries/hotspots'; + +function freshDb(): sqlite3.Database { + return openQueryDb('/tmp/ignored', ':memory:').db; +} + +interface CallSeed { + event_id: number; + parent_event_id?: number; + defined_class: string; + method_id: string; + fqid?: string; + elapsed_ms: number; +} + +function seedRecording( + db: sqlite3.Database, + opts: { + name: string; + branch?: string; + request?: { event_id: number; method: string; path: string; status: number }; + calls?: CallSeed[]; + queries?: { event_id: number; parent_event_id?: number; sql: string; elapsed_ms: number }[]; + } +): number { + const am = db + .prepare( + `INSERT INTO appmaps (name, source_path, git_branch) VALUES (?, ?, ?)` + ) + .run(opts.name, `/tmp/${opts.name}.appmap.json`, opts.branch ?? null); + const aid = Number(am.lastInsertRowid); + + if (opts.request) { + db.prepare( + `INSERT INTO http_requests (appmap_id, event_id, method, path, status_code) + VALUES (?, ?, ?, ?, ?)` + ).run(aid, opts.request.event_id, opts.request.method, opts.request.path, opts.request.status); + } + + for (const c of opts.calls ?? []) { + const fqid = c.fqid ?? `${c.defined_class}#${c.method_id}`; + const dotIdx = c.defined_class.lastIndexOf('.'); + const pkg = dotIdx >= 0 ? c.defined_class.slice(0, dotIdx).replace(/\./g, '/') : ''; + const leaf = dotIdx >= 0 ? 
c.defined_class.slice(dotIdx + 1) : c.defined_class; + db.prepare( + `INSERT OR IGNORE INTO code_objects (fqid, package, classes, leaf_class, method, is_static) + VALUES (?, ?, ?, ?, ?, ?)` + ).run(fqid, pkg, JSON.stringify([leaf]), leaf, c.method_id, 0); + const coId = (db + .prepare(`SELECT id FROM code_objects WHERE fqid = ?`) + .get(fqid) as { id: number }).id; + db.prepare( + `INSERT INTO function_calls (appmap_id, event_id, parent_event_id, + code_object_id, defined_class, method_id, elapsed_ms) + VALUES (?, ?, ?, ?, ?, ?, ?)` + ).run(aid, c.event_id, c.parent_event_id ?? null, coId, c.defined_class, c.method_id, c.elapsed_ms); + } + + for (const q of opts.queries ?? []) { + db.prepare( + `INSERT INTO sql_queries (appmap_id, event_id, parent_event_id, sql_text, elapsed_ms) + VALUES (?, ?, ?, ?, ?)` + ).run(aid, q.event_id, q.parent_event_id ?? null, q.sql, q.elapsed_ms); + } + + return aid; +} + +describe('functionHotspots', () => { + it('groups by code_object_id and ranks by total_ms desc', () => { + const db = freshDb(); + try { + seedRecording(db, { + name: 'a', + calls: [ + { event_id: 1, defined_class: 'X', method_id: 'fast', fqid: 'app/X#fast', elapsed_ms: 5 }, + { event_id: 2, defined_class: 'X', method_id: 'fast', fqid: 'app/X#fast', elapsed_ms: 5 }, + { event_id: 3, defined_class: 'Y', method_id: 'slow', fqid: 'app/Y#slow', elapsed_ms: 100 }, + ], + }); + const rows = functionHotspots(db, {}).rows; + expect(rows[0].fqid).toBe('app/Y#slow'); + expect(rows[0].calls).toBe(1); + expect(rows[0].total_ms).toBe(100); + expect(rows[1].fqid).toBe('app/X#fast'); + expect(rows[1].calls).toBe(2); + expect(rows[1].total_ms).toBe(10); + } finally { + db.close(); + } + }); + + it('computes self_ms as elapsed minus the sum of immediate children', () => { + const db = freshDb(); + try { + // outer call (10) calls inner1 (3) and inner2 (4) and an SQL (1). + // self_ms(outer) should be 10 - (3 + 4 + 1) = 2. 
+ seedRecording(db, { + name: 'a', + calls: [ + { event_id: 1, defined_class: 'X', method_id: 'outer', fqid: 'app/X#outer', elapsed_ms: 10 }, + { event_id: 2, parent_event_id: 1, defined_class: 'X', method_id: 'inner1', fqid: 'app/X#inner1', elapsed_ms: 3 }, + { event_id: 3, parent_event_id: 1, defined_class: 'X', method_id: 'inner2', fqid: 'app/X#inner2', elapsed_ms: 4 }, + ], + queries: [{ event_id: 4, parent_event_id: 1, sql: 'SELECT 1', elapsed_ms: 1 }], + }); + const outer = functionHotspots(db, {}).rows.find((r) => r.fqid === 'app/X#outer')!; + expect(outer.total_ms).toBe(10); + expect(outer.self_ms).toBe(2); + const inner1 = functionHotspots(db, {}).rows.find((r) => r.fqid === 'app/X#inner1')!; + expect(inner1.self_ms).toBe(3); // leaf — self equals total + } finally { + db.close(); + } + }); + + it('aggregates across recordings', () => { + const db = freshDb(); + try { + seedRecording(db, { + name: 'a', + calls: [{ event_id: 1, defined_class: 'X', method_id: 'm', fqid: 'app/X#m', elapsed_ms: 10 }], + }); + seedRecording(db, { + name: 'b', + calls: [{ event_id: 1, defined_class: 'X', method_id: 'm', fqid: 'app/X#m', elapsed_ms: 20 }], + }); + const rows = functionHotspots(db, {}).rows; + expect(rows).toHaveLength(1); + expect(rows[0].calls).toBe(2); + expect(rows[0].total_ms).toBe(30); + } finally { + db.close(); + } + }); + + it('--route scopes to recordings with a matching server request', () => { + const db = freshDb(); + try { + seedRecording(db, { + name: 'reports', + request: { event_id: 0, method: 'GET', path: '/reports', status: 200 }, + calls: [{ event_id: 1, defined_class: 'R', method_id: 'calc', fqid: 'app/R#calc', elapsed_ms: 100 }], + }); + seedRecording(db, { + name: 'orders', + request: { event_id: 0, method: 'POST', path: '/orders', status: 200 }, + calls: [{ event_id: 1, defined_class: 'O', method_id: 'create', fqid: 'app/O#create', elapsed_ms: 50 }], + }); + const rows = functionHotspots(db, { route: 'GET /reports' }).rows; + 
expect(rows).toHaveLength(1); + expect(rows[0].fqid).toBe('app/R#calc'); + } finally { + db.close(); + } + }); + + it('--class filters by defined_class', () => { + const db = freshDb(); + try { + seedRecording(db, { + name: 'a', + calls: [ + { event_id: 1, defined_class: 'OrdersController', method_id: 'create', elapsed_ms: 100 }, + { event_id: 2, defined_class: 'OrdersController', method_id: 'index', elapsed_ms: 50 }, + { event_id: 3, defined_class: 'UsersController', method_id: 'show', elapsed_ms: 200 }, + ], + }); + const rows = functionHotspots(db, { className: 'OrdersController' }).rows; + expect(rows.map((r) => r.method_id).sort()).toEqual(['create', 'index']); + } finally { + db.close(); + } + }); + + it('--class also matches via the canonical fqid (not just defined_class)', () => { + const db = freshDb(); + try { + seedRecording(db, { + name: 'a', + calls: [ + { + event_id: 1, + defined_class: 'org.example.UserRepository', + method_id: 'findById', + fqid: 'org/example/UserRepository#findById', + elapsed_ms: 100, + }, + { + event_id: 2, + defined_class: 'org.example.OrderRepository', + method_id: 'findById', + fqid: 'org/example/OrderRepository#findById', + elapsed_ms: 50, + }, + ], + }); + const rows = functionHotspots(db, { className: 'UserRepository' }).rows; + expect(rows).toHaveLength(1); + expect(rows[0].fqid).toBe('org/example/UserRepository#findById'); + } finally { + db.close(); + } + }); + + it('--limit truncates the result set', () => { + const db = freshDb(); + try { + seedRecording(db, { + name: 'a', + calls: [ + { event_id: 1, defined_class: 'X', method_id: 'a', elapsed_ms: 100 }, + { event_id: 2, defined_class: 'X', method_id: 'b', elapsed_ms: 50 }, + { event_id: 3, defined_class: 'X', method_id: 'c', elapsed_ms: 25 }, + ], + }); + expect(functionHotspots(db, { limit: 2 }).rows).toHaveLength(2); + } finally { + db.close(); + } + }); +}); + +describe('sqlHotspots', () => { + it('groups by sql_text, ranks by total_ms desc', () => { + const db = 
freshDb(); + try { + seedRecording(db, { + name: 'a', + queries: [ + { event_id: 1, sql: 'SELECT * FROM users WHERE id = ?', elapsed_ms: 2 }, + { event_id: 2, sql: 'SELECT * FROM users WHERE id = ?', elapsed_ms: 2 }, + { event_id: 3, sql: 'SELECT * FROM tenants WHERE slug = ?', elapsed_ms: 80 }, + ], + }); + const rows = sqlHotspots(db, {}).rows; + expect(rows[0].sql_text).toBe('SELECT * FROM tenants WHERE slug = ?'); + expect(rows[0].count).toBe(1); + expect(rows[0].avg_ms).toBeCloseTo(80); + expect(rows[1].count).toBe(2); + expect(rows[1].avg_ms).toBeCloseTo(2); + expect(rows[1].total_ms).toBe(4); + } finally { + db.close(); + } + }); + + it('--since / --until scope to recordings within the time range', () => { + const db = freshDb(); + try { + const oldId = (db + .prepare( + `INSERT INTO appmaps (name, source_path, timestamp) VALUES ('old', '/o', '2026-04-01T00:00:00.000Z')` + ) + .run().lastInsertRowid as number); + const newId = (db + .prepare( + `INSERT INTO appmaps (name, source_path, timestamp) VALUES ('new', '/n', '2026-04-30T00:00:00.000Z')` + ) + .run().lastInsertRowid as number); + db.prepare( + `INSERT INTO sql_queries (appmap_id, event_id, sql_text, elapsed_ms) VALUES (?, 1, 'SELECT a', 1)` + ).run(oldId); + db.prepare( + `INSERT INTO sql_queries (appmap_id, event_id, sql_text, elapsed_ms) VALUES (?, 1, 'SELECT b', 2)` + ).run(newId); + + const since = sqlHotspots(db, { since: '2026-04-15T00:00:00.000Z' }).rows; + expect(since).toHaveLength(1); + expect(since[0].sql_text).toBe('SELECT b'); + + const until = sqlHotspots(db, { until: '2026-04-15T00:00:00.000Z' }).rows; + expect(until).toHaveLength(1); + expect(until[0].sql_text).toBe('SELECT a'); + } finally { + db.close(); + } + }); + + it('--branch filter applies', () => { + const db = freshDb(); + try { + seedRecording(db, { + name: 'a', + branch: 'main', + queries: [{ event_id: 1, sql: 'SELECT 1', elapsed_ms: 1 }], + }); + seedRecording(db, { + name: 'b', + branch: 'feature', + queries: [{ event_id: 
1, sql: 'SELECT 2', elapsed_ms: 2 }], + }); + const main = sqlHotspots(db, { branch: 'main' }).rows; + expect(main).toHaveLength(1); + expect(main[0].sql_text).toBe('SELECT 1'); + } finally { + db.close(); + } + }); +}); diff --git a/packages/cli/tests/unit/cmds/query/queries/mcp.spec.ts b/packages/cli/tests/unit/cmds/query/queries/mcp.spec.ts new file mode 100644 index 0000000000..b0821c7419 --- /dev/null +++ b/packages/cli/tests/unit/cmds/query/queries/mcp.spec.ts @@ -0,0 +1,525 @@ +import sqlite3 from 'better-sqlite3'; + +import { openQueryDb } from '../../../../../src/cmds/query/db/openQueryDb'; +import { + buildMcpHandler, + JsonRpcRequest, + listResources, + listResourceTemplates, + listTools, +} from '../../../../../src/cmds/query/queries/mcp'; + +function freshDb(): sqlite3.Database { + return openQueryDb('/tmp/ignored', ':memory:').db; +} + +function seedMinimal(db: sqlite3.Database): void { + // One recording with a request, a SQL query, an exception, and a labelled + // function call — enough to exercise most tools. 
+ const am = db + .prepare( + `INSERT INTO appmaps (name, source_path, git_branch, sql_query_count, elapsed_ms, timestamp) + VALUES ('rec', '/tmp/rec.appmap.json', 'main', 1, 100, '2026-04-29T12:00:00.000Z')` + ) + .run(); + const id = am.lastInsertRowid; + db.prepare( + `INSERT INTO http_requests (appmap_id, event_id, parent_event_id, method, path, status_code, elapsed_ms) + VALUES (?, 1, NULL, 'POST', '/orders', 500, 100)` + ).run(id); + db.prepare( + `INSERT OR IGNORE INTO code_objects (fqid, package, classes, leaf_class, method, is_static) + VALUES ('app/Logger#error', 'app', '["Logger"]', 'Logger', 'error', 0)` + ).run(); + const co = (db + .prepare(`SELECT id FROM code_objects WHERE fqid = 'app/Logger#error'`) + .get() as { id: number }).id; + db.prepare(`INSERT OR IGNORE INTO labels (code_object_id, label) VALUES (?, 'log')`).run(co); + db.prepare( + `INSERT INTO function_calls (appmap_id, event_id, parent_event_id, code_object_id, + defined_class, method_id, elapsed_ms, parameters_json, return_value) + VALUES (?, 2, 1, ?, 'Logger', 'error', 0.1, + '[{"name":"message","class":"String","value":"connection refused"}]', NULL)` + ).run(id, co); + db.prepare( + `INSERT INTO sql_queries (appmap_id, event_id, parent_event_id, sql_text, elapsed_ms) + VALUES (?, 3, 2, 'INSERT INTO orders (id) VALUES (?)', 14)` + ).run(id); + // Exception's call entry is event_id=2 (the Logger.error call), and the + // throw materialised at the return event id=4. with_logs uses event_id=2 + // as the call boundary and event_id=4 as the throw boundary. 
+ db.prepare( + `INSERT INTO exceptions (appmap_id, event_id, return_event_id, parent_event_id, + exception_class, message) + VALUES (?, 2, 4, 1, 'IntegrityError', 'duplicate key')` + ).run(id); +} + +function call(handler: ReturnType, msg: JsonRpcRequest) { + const r = handler(msg); + return r; +} + +describe('MCP handler', () => { + it('initialize returns server info and capabilities', () => { + const db = freshDb(); + try { + const r = call(buildMcpHandler(db), { jsonrpc: '2.0', id: 1, method: 'initialize' }); + expect(r).not.toBeNull(); + expect((r!.result as any).serverInfo.name).toBe('appmap-query'); + expect((r!.result as any).protocolVersion).toBeDefined(); + expect((r!.result as any).capabilities.tools).toBeDefined(); + expect((r!.result as any).capabilities.resources).toBeDefined(); + } finally { + db.close(); + } + }); + + it('notifications/initialized returns null (notification, no response)', () => { + const db = freshDb(); + try { + const r = call(buildMcpHandler(db), { + jsonrpc: '2.0', + method: 'notifications/initialized', + }); + expect(r).toBeNull(); + } finally { + db.close(); + } + }); + + it('tools/list returns the V3 tool surface', () => { + const db = freshDb(); + try { + const r = call(buildMcpHandler(db), { jsonrpc: '2.0', id: 2, method: 'tools/list' }); + const names = ((r!.result as any).tools as { name: string }[]).map((t) => t.name); + expect(names).toEqual( + expect.arrayContaining([ + 'list_endpoints', + 'function_hotspots', + 'sql_hotspots', + 'list_labels', + 'find_recordings', + 'find_requests', + 'find_queries', + 'find_calls', + 'find_logs', + 'find_exceptions', + 'get_call_tree', + 'find_related', + 'compare_branches', + ]) + ); + } finally { + db.close(); + } + }); + + it('resources/list returns the appmap://endpoints resource', () => { + const db = freshDb(); + try { + const r = call(buildMcpHandler(db), { jsonrpc: '2.0', id: 3, method: 'resources/list' }); + const uris = ((r!.result as any).resources as { uri: string 
}[]).map((x) => x.uri); + expect(uris).toContain('appmap://endpoints'); + } finally { + db.close(); + } + }); + + it('unknown method → -32601 method-not-found', () => { + const db = freshDb(); + try { + const r = call(buildMcpHandler(db), { jsonrpc: '2.0', id: 4, method: 'no/such/method' }); + expect(r!.error?.code).toBe(-32601); + } finally { + db.close(); + } + }); + + it('tools/call to an unknown tool → -32601', () => { + const db = freshDb(); + try { + const r = call(buildMcpHandler(db), { + jsonrpc: '2.0', + id: 5, + method: 'tools/call', + params: { name: 'no_such_tool', arguments: {} }, + }); + expect(r!.error?.code).toBe(-32601); + } finally { + db.close(); + } + }); + + it('tools/call wraps the result as a content block of type=text', () => { + const db = freshDb(); + try { + seedMinimal(db); + const r = call(buildMcpHandler(db), { + jsonrpc: '2.0', + id: 6, + method: 'tools/call', + params: { name: 'find_exceptions', arguments: { limit: 10 } }, + }); + const content = (r!.result as any).content; + expect(Array.isArray(content)).toBe(true); + expect(content[0].type).toBe('text'); + const parsed = JSON.parse(content[0].text); + expect(parsed.rows).toHaveLength(1); + expect(parsed.total).toBe(1); + expect(parsed.rows[0].exception_class).toBe('IntegrityError'); + } finally { + db.close(); + } + }); + + it('get_call_tree resolves appmap (numeric id or name) and applies focus_type', () => { + const db = freshDb(); + try { + seedMinimal(db); + const handler = buildMcpHandler(db); + + // Numeric id. + const byId = call(handler, { + jsonrpc: '2.0', + id: 7, + method: 'tools/call', + params: { + name: 'get_call_tree', + arguments: { appmap: 1, focus_type: 'sql_query', focus_value: 'INSERT INTO orders' }, + }, + }); + const idRows = JSON.parse((byId!.result as any).content[0].text); + expect(Array.isArray(idRows)).toBe(true); + expect(idRows.some((n: any) => n.kind === 'sql')).toBe(true); + + // Name-based ref. 
+ const byName = call(handler, { + jsonrpc: '2.0', + id: 8, + method: 'tools/call', + params: { + name: 'get_call_tree', + arguments: { appmap: 'rec' }, + }, + }); + const nameRows = JSON.parse((byName!.result as any).content[0].text); + expect(Array.isArray(nameRows)).toBe(true); + } finally { + db.close(); + } + }); + + it('list_labels returns labels with counts and a sample fqid', () => { + const db = freshDb(); + try { + seedMinimal(db); + const r = call(buildMcpHandler(db), { + jsonrpc: '2.0', + id: 90, + method: 'tools/call', + params: { name: 'list_labels', arguments: {} }, + }); + const rows = JSON.parse((r!.result as any).content[0].text); + expect(rows).toEqual([{ label: 'log', count: 1, sample_fqid: 'app/Logger#error' }]); + } finally { + db.close(); + } + }); + + it('find_logs returns label=log calls and filters by --message', () => { + const db = freshDb(); + try { + seedMinimal(db); + // Add a parameters_json + return_value to the seeded log call so + // --message has something to LIKE against. + db.prepare( + `UPDATE function_calls + SET parameters_json = ?, return_value = ? + WHERE method_id = 'error'` + ).run( + JSON.stringify([{ name: 'message', class: 'String', value: 'connection refused' }]), + null + ); + const handler = buildMcpHandler(db); + + // No filter: returns the log row. + const all = call(handler, { + jsonrpc: '2.0', + id: 100, + method: 'tools/call', + params: { name: 'find_logs', arguments: {} }, + }); + const allRows = JSON.parse((all!.result as any).content[0].text); + expect(allRows.rows).toHaveLength(1); + expect(allRows.rows[0].logger).toBe('Logger'); + expect(allRows.rows[0].method_id).toBe('error'); + expect(allRows.rows[0].parameters_json).toContain('connection refused'); + + // Substring filter against parameters_json. 
+ const matched = call(handler, { + jsonrpc: '2.0', + id: 101, + method: 'tools/call', + params: { name: 'find_logs', arguments: { message: 'refused' } }, + }); + expect(JSON.parse((matched!.result as any).content[0].text).rows).toHaveLength(1); + + // Substring that doesn't appear: zero rows. + const empty = call(handler, { + jsonrpc: '2.0', + id: 102, + method: 'tools/call', + params: { name: 'find_logs', arguments: { message: 'this never appears' } }, + }); + expect(JSON.parse((empty!.result as any).content[0].text).rows).toHaveLength(0); + } finally { + db.close(); + } + }); + + it('find_exceptions with_logs attaches recent_logs', () => { + const db = freshDb(); + try { + seedMinimal(db); + // Give the seeded log call a message so recent_logs has content. + db.prepare( + `UPDATE function_calls + SET parameters_json = ? + WHERE method_id = 'error'` + ).run( + JSON.stringify([{ name: 'message', class: 'String', value: 'connection refused' }]) + ); + const handler = buildMcpHandler(db); + + // No with_logs: recent_logs is absent. + const noLogs = call(handler, { + jsonrpc: '2.0', + id: 200, + method: 'tools/call', + params: { name: 'find_exceptions', arguments: {} }, + }); + const noLogsRows = JSON.parse((noLogs!.result as any).content[0].text); + expect(noLogsRows.rows[0].recent_logs).toBeUndefined(); + // appmap_id is now exposed. + expect(typeof noLogsRows.rows[0].appmap_id).toBe('number'); + + // with_logs=5: recent_logs is present and non-empty (the seed has + // a log call at event 2, exception at event 2 — the log shares the + // event_id with the exception so it doesn't qualify; verify the + // shape regardless). 
+ const withLogsRes = call(handler, { + jsonrpc: '2.0', + id: 201, + method: 'tools/call', + params: { name: 'find_exceptions', arguments: { with_logs: 5 } }, + }); + const withLogsRows = JSON.parse((withLogsRes!.result as any).content[0].text); + expect(Array.isArray(withLogsRows.rows[0].recent_logs)).toBe(true); + } finally { + db.close(); + } + }); + + it('find_logs row carries a derived message field', () => { + const db = freshDb(); + try { + seedMinimal(db); + const r = call(buildMcpHandler(db), { + jsonrpc: '2.0', + id: 91, + method: 'tools/call', + params: { name: 'find_logs', arguments: {} }, + }); + const page = JSON.parse((r!.result as any).content[0].text); + expect(page.rows).toHaveLength(1); + expect(page.rows[0].message).toBe('connection refused'); + expect(page.rows[0].logger).toBe('Logger'); + expect(page.rows[0].parameters_json).toContain('connection refused'); + } finally { + db.close(); + } + }); + + it('find_exceptions with_logs uses return_event_id for ordering (regression)', () => { + const db = freshDb(); + try { + seedMinimal(db); + const r = call(buildMcpHandler(db), { + jsonrpc: '2.0', + id: 92, + method: 'tools/call', + params: { name: 'find_exceptions', arguments: { with_logs: 5 } }, + }); + const page = JSON.parse((r!.result as any).content[0].text); + expect(page.rows).toHaveLength(1); + expect(page.rows[0].return_event_id).toBe(4); + // Pre-fix the with_logs SQL filtered by `event_id < exception.event_id` + // (=2), which excluded the Logger.error log call at event_id=2 entirely. + // With return_event_id (=4) as the upper bound, the log call (event 2) + // is included — that's the regression we're guarding against. 
+ expect(page.rows[0].recent_logs).toHaveLength(1); + expect(page.rows[0].recent_logs[0].event_id).toBe(2); + expect(page.rows[0].recent_logs[0].message).toBe('connection refused'); + } finally { + db.close(); + } + }); + + it('find_calls --label filters by the AppMap label', () => { + const db = freshDb(); + try { + seedMinimal(db); + const r = call(buildMcpHandler(db), { + jsonrpc: '2.0', + id: 9, + method: 'tools/call', + params: { name: 'find_calls', arguments: { label: 'log' } }, + }); + const page = JSON.parse((r!.result as any).content[0].text); + expect(page.rows).toHaveLength(1); + expect(page.rows[0].method_id).toBe('error'); + } finally { + db.close(); + } + }); + + it('list_endpoints / function_hotspots / sql_hotspots produce expected rows', () => { + const db = freshDb(); + try { + seedMinimal(db); + const handler = buildMcpHandler(db); + + const ep = call(handler, { + jsonrpc: '2.0', + id: 11, + method: 'tools/call', + params: { name: 'list_endpoints', arguments: {} }, + }); + const epPage = JSON.parse((ep!.result as any).content[0].text); + expect(epPage.rows[0].route).toBe('/orders'); + + const fh = call(handler, { + jsonrpc: '2.0', + id: 12, + method: 'tools/call', + params: { name: 'function_hotspots', arguments: { limit: 5 } }, + }); + expect(JSON.parse((fh!.result as any).content[0].text).rows.length).toBeGreaterThan(0); + + const sh = call(handler, { + jsonrpc: '2.0', + id: 13, + method: 'tools/call', + params: { name: 'sql_hotspots', arguments: { limit: 5 } }, + }); + expect(JSON.parse((sh!.result as any).content[0].text).rows.length).toBeGreaterThan(0); + } finally { + db.close(); + } + }); + + it('resources/read returns the endpoints summary as JSON', () => { + const db = freshDb(); + try { + seedMinimal(db); + const r = call(buildMcpHandler(db), { + jsonrpc: '2.0', + id: 10, + method: 'resources/read', + params: { uri: 'appmap://endpoints' }, + }); + const contents = (r!.result as any).contents; + 
expect(contents[0].uri).toBe('appmap://endpoints'); + const parsed = JSON.parse(contents[0].text); + expect(parsed.rows[0].route).toBe('/orders'); + } finally { + db.close(); + } + }); + + it('listTools / listResources / listResourceTemplates are stable for documentation use', () => { + expect(listTools().length).toBeGreaterThan(0); + expect(listResources().length).toBeGreaterThan(0); + expect(listResourceTemplates().length).toBeGreaterThan(0); + }); + + it('resources/templates/list advertises the per-recording logs template', () => { + const db = freshDb(); + try { + const r = call(buildMcpHandler(db), { + jsonrpc: '2.0', + id: 300, + method: 'resources/templates/list', + }); + const templates = (r!.result as any).resourceTemplates as { uriTemplate: string }[]; + expect(templates.some((t) => t.uriTemplate === 'appmap://recording/{ref}/logs')).toBe( + true + ); + } finally { + db.close(); + } + }); + + it('resources/read on appmap://recording//logs returns the recording\'s log rows', () => { + const db = freshDb(); + try { + seedMinimal(db); + // Give the seeded log call a captured message. + db.prepare( + `UPDATE function_calls + SET parameters_json = ? 
+ WHERE method_id = 'error'` + ).run( + JSON.stringify([{ name: 'message', class: 'String', value: 'connection refused' }]) + ); + + const r = call(buildMcpHandler(db), { + jsonrpc: '2.0', + id: 301, + method: 'resources/read', + params: { uri: 'appmap://recording/rec/logs' }, + }); + const contents = (r!.result as any).contents; + expect(contents[0].uri).toBe('appmap://recording/rec/logs'); + const page = JSON.parse(contents[0].text); + expect(page.rows).toHaveLength(1); + expect(page.rows[0].logger).toBe('Logger'); + expect(page.rows[0].method_id).toBe('error'); + } finally { + db.close(); + } + }); + + it('resources/read on a recording-logs URI with an unknown ref returns an error', () => { + const db = freshDb(); + try { + const r = call(buildMcpHandler(db), { + jsonrpc: '2.0', + id: 302, + method: 'resources/read', + params: { uri: 'appmap://recording/no-such-recording/logs' }, + }); + expect(r!.error).toBeDefined(); + expect(r!.error!.message).toMatch(/appmap not found/); + } finally { + db.close(); + } + }); + + it('resources/read on a URI that matches no resource or template returns an error', () => { + const db = freshDb(); + try { + const r = call(buildMcpHandler(db), { + jsonrpc: '2.0', + id: 303, + method: 'resources/read', + params: { uri: 'appmap://nope' }, + }); + expect(r!.error).toBeDefined(); + expect(r!.error!.message).toMatch(/unknown resource/); + } finally { + db.close(); + } + }); +}); diff --git a/packages/cli/tests/unit/cmds/query/queries/related.spec.ts b/packages/cli/tests/unit/cmds/query/queries/related.spec.ts new file mode 100644 index 0000000000..19e330de1b --- /dev/null +++ b/packages/cli/tests/unit/cmds/query/queries/related.spec.ts @@ -0,0 +1,205 @@ +import sqlite3 from 'better-sqlite3'; + +import { openQueryDb } from '../../../../../src/cmds/query/db/openQueryDb'; +import { + extractTables, + related, +} from '../../../../../src/cmds/query/queries/related'; + +function freshDb(): sqlite3.Database { + return 
openQueryDb('/tmp/ignored', ':memory:').db; +} + +interface SeedRecording { + name: string; + branch?: string; + request?: { method: string; path: string; status: number; elapsed_ms?: number }; + sqls?: string[]; + classes?: string[]; // leaf names; we register code_objects for each +} + +function seed(db: sqlite3.Database, recs: SeedRecording[]): void { + const insAm = db.prepare( + `INSERT INTO appmaps (name, source_path, git_branch, elapsed_ms) VALUES (?, ?, ?, ?)` + ); + const insReq = db.prepare( + `INSERT INTO http_requests (appmap_id, event_id, method, path, status_code, elapsed_ms) + VALUES (?, 1, ?, ?, ?, ?)` + ); + const insQ = db.prepare( + `INSERT INTO sql_queries (appmap_id, event_id, sql_text) VALUES (?, ?, ?)` + ); + const insCo = db.prepare( + `INSERT OR IGNORE INTO code_objects (fqid, package, classes, leaf_class, method, is_static) + VALUES (?, ?, ?, ?, ?, ?)` + ); + const selCo = db.prepare(`SELECT id FROM code_objects WHERE fqid = ?`); + const insCall = db.prepare( + `INSERT INTO function_calls (appmap_id, event_id, defined_class, method_id, code_object_id) + VALUES (?, ?, ?, ?, ?)` + ); + + let nextEvent = 100; + for (const r of recs) { + const am = insAm.run( + r.name, + `/tmp/${r.name}.appmap.json`, + r.branch ?? null, + r.request?.elapsed_ms ?? null + ); + const aid = Number(am.lastInsertRowid); + if (r.request) { + insReq.run( + aid, + r.request.method, + r.request.path, + r.request.status, + r.request.elapsed_ms ?? null + ); + } + for (const sql of r.sqls ?? []) { + insQ.run(aid, nextEvent++, sql); + } + for (const cls of r.classes ?? 
[]) { + const fqid = `app/${cls}#m`; + insCo.run(fqid, 'app', JSON.stringify([cls]), cls, 'm', 0); + const co = selCo.get(fqid) as { id: number }; + insCall.run(aid, nextEvent++, cls, 'm', co.id); + } + } +} + +describe('extractTables', () => { + it('extracts FROM/JOIN/INTO/UPDATE table names case-insensitively', () => { + const sql = + "SELECT * FROM users u JOIN orders o ON u.id = o.user_id WHERE u.id = 1; INSERT INTO logs VALUES (1); UPDATE Sessions SET x=1"; + expect([...extractTables(sql)].sort()).toEqual(['logs', 'orders', 'sessions', 'users']); + }); + + it('strips a single leading schema qualifier', () => { + expect(extractTables('SELECT * FROM public.orders')).toEqual(new Set(['orders'])); + }); +}); + +describe('related', () => { + it('scores by route + tables + classes; excludes the source', () => { + const db = freshDb(); + try { + seed(db, [ + { + name: 'source', + request: { method: 'POST', path: '/orders', status: 500 }, + sqls: ['INSERT INTO orders VALUES (1)', 'SELECT * FROM users WHERE id = 1'], + classes: ['OrdersController', 'IdempotencyKey'], + }, + { + name: 'best', + request: { method: 'POST', path: '/orders', status: 201, elapsed_ms: 140 }, + sqls: ['INSERT INTO orders VALUES (1)', 'SELECT * FROM users WHERE id = 1'], + classes: ['OrdersController', 'IdempotencyKey'], + }, + { + name: 'partial', + request: { method: 'POST', path: '/orders', status: 201 }, + sqls: ['INSERT INTO orders VALUES (1)'], + classes: ['OrdersController'], + }, + { + name: 'unrelated', + request: { method: 'GET', path: '/healthz', status: 200 }, + sqls: [], + classes: ['HealthController'], + }, + ]); + const rows = related(db, 'source').rows; + expect(rows.find((r) => r.appmap_name === 'source')).toBeUndefined(); + expect(rows.find((r) => r.appmap_name === 'unrelated')).toBeUndefined(); + const best = rows.find((r) => r.appmap_name === 'best')!; + const partial = rows.find((r) => r.appmap_name === 'partial')!; + // best: route(5) + 2 tables*3 + 2 classes*2 = 15 + 
expect(best.score).toBe(15); + expect(best.shared).toContain('route'); + expect(best.shared).toContain('orders'); + expect(best.shared).toContain('users'); + expect(best.shared).toContain('OrdersController'); + // partial: route(5) + 1 table*3 + 1 class*2 = 10 + expect(partial.score).toBe(10); + // best ranks first + expect(rows[0].appmap_name).toBe('best'); + } finally { + db.close(); + } + }); + + it('--branch scopes the candidate pool', () => { + const db = freshDb(); + try { + seed(db, [ + { + name: 'src', + branch: 'main', + request: { method: 'GET', path: '/x', status: 200 }, + classes: ['Foo'], + }, + { + name: 'main_match', + branch: 'main', + request: { method: 'GET', path: '/x', status: 200 }, + classes: ['Foo'], + }, + { + name: 'feature_match', + branch: 'feature', + request: { method: 'GET', path: '/x', status: 200 }, + classes: ['Foo'], + }, + ]); + const rows = related(db, 'src', { branch: 'main' }).rows; + expect(rows.map((r) => r.appmap_name)).toEqual(['main_match']); + } finally { + db.close(); + } + }); + + it('--status filters candidates to recordings with a matching response', () => { + const db = freshDb(); + try { + seed(db, [ + { + name: 'src', + request: { method: 'POST', path: '/orders', status: 500 }, + classes: ['Foo'], + }, + { + name: 'succeeded', + request: { method: 'POST', path: '/orders', status: 201 }, + classes: ['Foo'], + }, + { + name: 'also_failed', + request: { method: 'POST', path: '/orders', status: 500 }, + classes: ['Foo'], + }, + ]); + const rows = related(db, 'src', { status: { op: '<', value: 400 } }).rows; + expect(rows.map((r) => r.appmap_name)).toEqual(['succeeded']); + } finally { + db.close(); + } + }); + + it('--limit caps the result set', () => { + const db = freshDb(); + try { + seed(db, [ + { name: 'src', request: { method: 'GET', path: '/x', status: 200 }, classes: ['A', 'B'] }, + { name: 'a', request: { method: 'GET', path: '/x', status: 200 }, classes: ['A', 'B'] }, + { name: 'b', request: { method: 'GET', 
path: '/x', status: 200 }, classes: ['A'] }, + { name: 'c', request: { method: 'GET', path: '/x', status: 200 }, classes: ['B'] }, + ]); + expect(related(db, 'src', { limit: 2 }).rows).toHaveLength(2); + } finally { + db.close(); + } + }); +}); diff --git a/packages/cli/tests/unit/cmds/query/queries/tree.spec.ts b/packages/cli/tests/unit/cmds/query/queries/tree.spec.ts new file mode 100644 index 0000000000..304e30f485 --- /dev/null +++ b/packages/cli/tests/unit/cmds/query/queries/tree.spec.ts @@ -0,0 +1,495 @@ +import sqlite3 from 'better-sqlite3'; + +import { openQueryDb } from '../../../../../src/cmds/query/db/openQueryDb'; +import { + resolveAppmap, + tree, + treeSummary, +} from '../../../../../src/cmds/query/queries/tree'; + +function freshDb(): sqlite3.Database { + return openQueryDb('/tmp/ignored', ':memory:').db; +} + +// Seed a recording shaped like the V3 worked-session example: an HTTP request +// at depth 0, a controller call beneath it, an SQL query beneath the +// controller, and an exception on the same call. +function seed( + db: sqlite3.Database, + opts: { + name?: string; + branch?: string; + addLabel?: boolean; + addOutbound?: boolean; + } = {} +): number { + const am = db + .prepare( + `INSERT INTO appmaps (name, source_path, git_branch, sql_query_count) + VALUES (?, ?, ?, ?)` + ) + .run( + opts.name ?? 'orders_create_42', + `/tmp/${opts.name ?? 'orders_create_42'}.appmap.json`, + opts.branch ?? 
'main', + 1 + ); + const id = am.lastInsertRowid; + + // event_id 1: HTTP server request, parent_event_id null (root) + db.prepare( + `INSERT INTO http_requests (appmap_id, event_id, parent_event_id, thread_id, + method, path, status_code, elapsed_ms) + VALUES (?, 1, NULL, 1, 'POST', '/orders', 500, 520.0)` + ).run(id); + + // event_id 2: controller call, parent = 1 + let coId = 1; + db.prepare( + `INSERT OR IGNORE INTO code_objects (fqid, package, classes, leaf_class, method, is_static) + VALUES ('app/OrdersController#create', 'app', '["OrdersController"]', 'OrdersController', 'create', 0)` + ).run(); + coId = (db + .prepare(`SELECT id FROM code_objects WHERE fqid = 'app/OrdersController#create'`) + .get() as { id: number }).id; + + if (opts.addLabel) { + db.prepare(`INSERT OR IGNORE INTO labels (code_object_id, label) VALUES (?, 'log')`).run(coId); + } + + db.prepare( + `INSERT INTO function_calls (appmap_id, event_id, parent_event_id, thread_id, + code_object_id, defined_class, method_id, elapsed_ms) + VALUES (?, 2, 1, 1, ?, 'OrdersController', 'create', 519.0)` + ).run(id, coId); + + // event_id 3: sql_query, parent = 2 + db.prepare( + `INSERT INTO sql_queries (appmap_id, event_id, parent_event_id, thread_id, + sql_text, database_type, elapsed_ms) + VALUES (?, 3, 2, 1, 'INSERT INTO orders (...)', 'postgres', 14.0)` + ).run(id); + + // event_id 4: exception, owned by call 2 (carried by its return event) + db.prepare( + `INSERT INTO exceptions (appmap_id, event_id, parent_event_id, thread_id, + exception_class, message, path, lineno) + VALUES (?, 2, 1, 1, 'IntegrityError', 'duplicate key', + 'app/models/order.rb', 42)` + ).run(id); + + if (opts.addOutbound) { + db.prepare( + `INSERT INTO http_client_requests (appmap_id, event_id, parent_event_id, thread_id, + method, url, status_code, elapsed_ms) + VALUES (?, 5, 2, 1, 'GET', 'https://api.example/v1', 200, 40.0)` + ).run(id); + } + + return Number(id); +} + +describe('resolveAppmap', () => { + it('resolves by 
exact name match', () => { + const db = freshDb(); + try { + seed(db); + expect(resolveAppmap(db, 'orders_create_42').name).toBe('orders_create_42'); + } finally { + db.close(); + } + }); + + it('resolves by source-path basename', () => { + const db = freshDb(); + try { + // Insert an appmap whose name doesn't match the basename. + db.prepare( + `INSERT INTO appmaps (name, source_path) VALUES ('Friendly Name', '/x/foo.appmap.json')` + ).run(); + expect(resolveAppmap(db, 'foo').source_path).toBe('/x/foo.appmap.json'); + } finally { + db.close(); + } + }); + + it('throws on miss', () => { + const db = freshDb(); + try { + expect(() => resolveAppmap(db, 'nope')).toThrow(/not found/); + } finally { + db.close(); + } + }); + + it('throws on ambiguous match', () => { + const db = freshDb(); + try { + seed(db, { name: 'a' }); + db.prepare( + `INSERT INTO appmaps (name, source_path) VALUES ('a', '/y/a.appmap.json')` + ).run(); + expect(() => resolveAppmap(db, 'a')).toThrow(/ambiguous/); + } finally { + db.close(); + } + }); +}); + +describe('tree', () => { + it('returns nodes in event_id order with computed depths', () => { + const db = freshDb(); + try { + seed(db); + const nodes = tree(db, 'orders_create_42'); + + expect(nodes.map((n) => n.event_id)).toEqual([1, 2, 2, 3]); + // event 1: HTTP server (root). + expect(nodes[0].kind).toBe('http_server'); + expect(nodes[0].depth).toBe(0); + // event 2: function call (under request). + const fn = nodes.find((n) => n.kind === 'function')!; + expect(fn.depth).toBe(1); + // event 2 also has an exception attached (same event_id, separate row). + const exc = nodes.find((n) => n.kind === 'exception')!; + expect(exc.depth).toBe(1); + // event 3: SQL under the function call. 
+ const sql = nodes.find((n) => n.kind === 'sql')!; + expect(sql.depth).toBe(2); + } finally { + db.close(); + } + }); + + it('exception nodes carry path and lineno', () => { + const db = freshDb(); + try { + seed(db); + const exc = tree(db, 'orders_create_42').find((n) => n.kind === 'exception'); + expect(exc).toBeDefined(); + if (exc?.kind === 'exception') { + expect(exc.path).toBe('app/models/order.rb'); + expect(exc.lineno).toBe(42); + } + } finally { + db.close(); + } + }); + + it('joins fqid into function nodes', () => { + const db = freshDb(); + try { + seed(db); + const nodes = tree(db, 'orders_create_42'); + const fn = nodes.find((n) => n.kind === 'function')!; + // @ts-expect-error narrowing not visible here without further check + expect(fn.fqid).toBe('app/OrdersController#create'); + } finally { + db.close(); + } + }); +}); + +describe('tree focus', () => { + // Build a richer recording: HTTP root → controller → 3 sibling calls + // (one of which calls a deeper helper) → SQL + EXC under controller. 
+ function seedRich(db: sqlite3.Database): void { + const am = db + .prepare( + `INSERT INTO appmaps (name, source_path, sql_query_count) VALUES ('rich', '/tmp/rich.appmap.json', 1)` + ) + .run(); + const id = am.lastInsertRowid; + db.prepare( + `INSERT INTO http_requests (appmap_id, event_id, parent_event_id, method, path, status_code, elapsed_ms) + VALUES (?, 1, NULL, 'POST', '/orders', 500, 520.0)` + ).run(id); + db.prepare( + `INSERT INTO function_calls (appmap_id, event_id, parent_event_id, defined_class, method_id, elapsed_ms) + VALUES (?, 2, 1, 'OrdersController', 'create', 519.0)` + ).run(id); + db.prepare( + `INSERT OR IGNORE INTO code_objects (fqid, package, classes, leaf_class, method, is_static) + VALUES ('app/IdempotencyKey.generate', 'app', '["IdempotencyKey"]', 'IdempotencyKey', 'generate', 1)` + ).run(); + const co = (db + .prepare(`SELECT id FROM code_objects WHERE fqid = 'app/IdempotencyKey.generate'`) + .get() as { id: number }).id; + db.prepare( + `INSERT INTO function_calls (appmap_id, event_id, parent_event_id, code_object_id, defined_class, method_id, elapsed_ms) + VALUES (?, 3, 2, ?, 'IdempotencyKey', 'generate', 0.2)` + ).run(id, co); + db.prepare( + `INSERT INTO function_calls (appmap_id, event_id, parent_event_id, defined_class, method_id, elapsed_ms) + VALUES (?, 4, 2, 'Order', 'new', 0.4)` + ).run(id); + db.prepare( + `INSERT INTO sql_queries (appmap_id, event_id, parent_event_id, sql_text, elapsed_ms) + VALUES (?, 5, 2, 'INSERT INTO orders (id, name) VALUES (?, ?)', 14.0)` + ).run(id); + db.prepare( + `INSERT INTO exceptions (appmap_id, event_id, parent_event_id, exception_class, message) + VALUES (?, 2, 1, 'IntegrityError', 'duplicate key')` + ).run(id); + // Add an outbound HTTP call as a separate child of controller + db.prepare( + `INSERT INTO http_client_requests (appmap_id, event_id, parent_event_id, method, url, status_code, elapsed_ms) + VALUES (?, 6, 2, 'GET', 'https://api.example/v1', 200, 40.0)` + ).run(id); + // A deeper 
descendant under IdempotencyKey.generate (event_id 3) + db.prepare( + `INSERT INTO function_calls (appmap_id, event_id, parent_event_id, defined_class, method_id, elapsed_ms) + VALUES (?, 7, 3, 'Digest', 'sha256', 0.05)` + ).run(id); + } + + it('--focus-sql narrows to the matching SQL plus its ancestors and their children', () => { + const db = freshDb(); + try { + seedRich(db); + const nodes = tree(db, 'rich', { focusSql: 'INSERT INTO orders' }); + const ids = new Set(nodes.map((n) => n.event_id)); + // Includes: HTTP (1), controller (2), SQL (5), and the controller's + // direct children (3, 4, 5, 6, 7? No — children of controller are + // 3, 4, 5, 6 only; 7 is a descendant of 3, not a sibling of focus). + expect(ids.has(1)).toBe(true); + expect(ids.has(2)).toBe(true); + expect(ids.has(5)).toBe(true); + expect(ids.has(3)).toBe(true); // sibling + expect(ids.has(4)).toBe(true); // sibling + expect(ids.has(6)).toBe(true); // sibling + // 7 is a child of 3, not of an ancestor of the focus. + expect(ids.has(7)).toBe(false); + } finally { + db.close(); + } + }); + + it('--focus-fn matches by canonical fqid', () => { + const db = freshDb(); + try { + seedRich(db); + const nodes = tree(db, 'rich', { focusFn: 'app/IdempotencyKey.generate' }); + const ids = new Set(nodes.map((n) => n.event_id)); + // Focus event_id 3; ancestors 2, 1; children of ancestors include + // siblings 4, 5, 6 (children of 2). Descendants of 3: 7. + expect(ids.has(1)).toBe(true); + expect(ids.has(2)).toBe(true); + expect(ids.has(3)).toBe(true); + expect(ids.has(4)).toBe(true); + expect(ids.has(5)).toBe(true); + expect(ids.has(6)).toBe(true); + expect(ids.has(7)).toBe(true); // descendant + } finally { + db.close(); + } + }); + + it('--focus-route matches a server request', () => { + const db = freshDb(); + try { + seedRich(db); + const nodes = tree(db, 'rich', { focusRoute: '/orders' }); + // The HTTP request matches; ancestors of HTTP = none; descendants + // of the focus drill down. 
With descendants=3 we get the full tree + // up to depth 3 from the request. + expect(nodes.find((n) => n.kind === 'http_server')).toBeDefined(); + expect(nodes.find((n) => n.kind === 'function' && n.method_id === 'create')).toBeDefined(); + } finally { + db.close(); + } + }); + + it('--focus-url matches an outbound call', () => { + const db = freshDb(); + try { + seedRich(db); + const nodes = tree(db, 'rich', { focusUrl: 'api.example' }); + const ids = new Set(nodes.map((n) => n.event_id)); + expect(ids.has(6)).toBe(true); + } finally { + db.close(); + } + }); + + it('--ancestors=1 trims the path to root', () => { + const db = freshDb(); + try { + seedRich(db); + const nodes = tree(db, 'rich', { focusFn: 'app/IdempotencyKey.generate', ancestors: 1 }); + const ids = new Set(nodes.map((n) => n.event_id)); + // Only controller (1 ancestor) — not HTTP. + expect(ids.has(2)).toBe(true); + expect(ids.has(1)).toBe(false); + } finally { + db.close(); + } + }); + + it('--descendants=0 drops the subtree below focus', () => { + const db = freshDb(); + try { + seedRich(db); + const nodes = tree(db, 'rich', { + focusFn: 'app/IdempotencyKey.generate', + descendants: 0, + }); + const ids = new Set(nodes.map((n) => n.event_id)); + // 7 (descendant of focus) excluded. + expect(ids.has(3)).toBe(true); + expect(ids.has(7)).toBe(false); + } finally { + db.close(); + } + }); + + it('--min-elapsed-ms prunes fast subtrees', () => { + const db = freshDb(); + try { + seedRich(db); + const nodes = tree(db, 'rich', { minElapsedMs: 10 }); + const ids = new Set(nodes.map((n) => n.event_id)); + // SQL (14ms) and outbound HTTP (40ms) survive; IdempotencyKey + // (0.2ms with no fast descendant) and Order.new (0.4ms) are pruned. 
+ expect(ids.has(5)).toBe(true); + expect(ids.has(6)).toBe(true); + expect(ids.has(3)).toBe(false); + expect(ids.has(4)).toBe(false); + } finally { + db.close(); + } + }); + + it('focus with no matches returns no events', () => { + const db = freshDb(); + try { + seedRich(db); + const nodes = tree(db, 'rich', { focusFn: 'app/Nope.nothing' }); + expect(nodes).toEqual([]); + } finally { + db.close(); + } + }); + + it('depths are recomputed relative to the highest included ancestor', () => { + const db = freshDb(); + try { + seedRich(db); + // Without focus, HTTP is depth 0, controller 1, IdempotencyKey 2. + const focused = tree(db, 'rich', { focusFn: 'app/IdempotencyKey.generate', ancestors: 1 }); + // Highest included is controller (event_id=2). It should now be + // depth 0; its child IdempotencyKey (focus) should be depth 1. + const controller = focused.find((n) => n.kind === 'function' && n.method_id === 'create'); + const idem = focused.find((n) => n.kind === 'function' && n.method_id === 'generate'); + expect(controller?.depth).toBe(0); + expect(idem?.depth).toBe(1); + } finally { + db.close(); + } + }); +}); + +describe('tree --filter', () => { + it('returns only http events when filter=http', () => { + const db = freshDb(); + try { + seed(db, { addOutbound: true }); + const nodes = tree(db, 'orders_create_42').filter( + (n) => n.kind === 'http_server' || n.kind === 'http_client' + ); + expect(nodes.map((n) => n.kind).sort()).toEqual(['http_client', 'http_server']); + } finally { + db.close(); + } + }); + + it('returns only sql events when filter=sql', () => { + const db = freshDb(); + try { + seed(db); + const nodes = tree(db, 'orders_create_42').filter((n) => n.kind === 'sql'); + expect(nodes).toHaveLength(1); + expect(nodes[0].kind).toBe('sql'); + } finally { + db.close(); + } + }); +}); + +describe('log nodes in tree', () => { + function seedWithLogger(db: sqlite3.Database): number { + const am = db + .prepare( + `INSERT INTO appmaps (name, source_path) 
VALUES ('with_logger', '/tmp/with_logger.appmap.json')` + ) + .run(); + const id = am.lastInsertRowid; + db.prepare( + `INSERT INTO http_requests (appmap_id, event_id, parent_event_id, method, path, status_code, elapsed_ms) + VALUES (?, 1, NULL, 'POST', '/orders', 500, 100)` + ).run(id); + db.prepare( + `INSERT INTO function_calls (appmap_id, event_id, parent_event_id, defined_class, method_id, elapsed_ms) + VALUES (?, 2, 1, 'OrdersController', 'create', 90)` + ).run(id); + db.prepare( + `INSERT OR IGNORE INTO code_objects (fqid, package, classes, leaf_class, method, is_static) + VALUES ('app/AppLogger#error', 'app', '["AppLogger"]', 'AppLogger', 'error', 0)` + ).run(); + const logCo = (db + .prepare(`SELECT id FROM code_objects WHERE fqid = 'app/AppLogger#error'`) + .get() as { id: number }).id; + db.prepare(`INSERT OR IGNORE INTO labels (code_object_id, label) VALUES (?, 'log')`).run(logCo); + db.prepare( + `INSERT INTO function_calls (appmap_id, event_id, parent_event_id, code_object_id, + defined_class, method_id, elapsed_ms, parameters_json) + VALUES (?, 3, 2, ?, 'AppLogger', 'error', 0.05, ?)` + ).run( + id, + logCo, + JSON.stringify([{ name: 'message', class: 'String', value: 'connection refused' }]) + ); + return Number(id); + } + + it('promotes function calls with the `log` label to a log node kind', () => { + const db = freshDb(); + try { + seedWithLogger(db); + const nodes = tree(db, 'with_logger'); + const log = nodes.find((n) => n.kind === 'log'); + expect(log).toBeDefined(); + if (log?.kind !== 'log') throw new Error('expected log'); + expect(log.logger).toBe('AppLogger'); + expect(log.method_id).toBe('error'); + expect(log.event_id).toBe(3); + // The same function_call should NOT also appear as a function node. + expect(nodes.filter((n) => n.event_id === 3 && n.kind === 'function')).toEqual([]); + // Other function_calls remain function nodes. 
+ expect(nodes.find((n) => n.kind === 'function' && n.method_id === 'create')).toBeDefined(); + } finally { + db.close(); + } + }); +}); + +describe('treeSummary', () => { + it('counts SQL, surfaces entry/exception, and tallies labels', () => { + const db = freshDb(); + try { + seed(db, { addLabel: true, addOutbound: true }); + const s = treeSummary(db, 'orders_create_42'); + expect(s.entry?.method).toBe('POST'); + expect(s.entry?.route).toBe('/orders'); + expect(s.entry?.status_code).toBe(500); + expect(s.sql.count).toBe(1); + expect(s.sql.total_ms).toBeCloseTo(14); + expect(s.http_client.count).toBe(1); + expect(s.http_client.total_ms).toBeCloseTo(40); + expect(s.exceptions[0].exception_class).toBe('IntegrityError'); + expect(s.labels).toEqual([{ label: 'log', count: 1 }]); + } finally { + db.close(); + } + }); +}); diff --git a/packages/cli/tests/unit/cmds/query/verbs/find.spec.ts b/packages/cli/tests/unit/cmds/query/verbs/find.spec.ts new file mode 100644 index 0000000000..998f8cfd31 --- /dev/null +++ b/packages/cli/tests/unit/cmds/query/verbs/find.spec.ts @@ -0,0 +1,200 @@ +import { + buildFindFilter, + projectLogMessage, + validateFlags, +} from '../../../../../src/cmds/query/verbs/find'; + +describe('find verb flag validation', () => { + it('accepts the universal flags on every type', () => { + const universal = { + branch: 'main', + commit: 'abc', + since: '2026-01-01', + until: '2026-12-31', + appmap: 'demo', + // output flags are also always allowed + limit: 5, + offset: 0, + json: true, + }; + for (const type of ['appmaps', 'requests', 'queries', 'calls', 'exceptions', 'logs'] as const) { + expect(() => validateFlags(type, universal)).not.toThrow(); + } + }); + + it('rejects --class on find appmaps', () => { + expect(() => validateFlags('appmaps', { class: 'Foo' })).toThrow(/--class/); + expect(() => validateFlags('appmaps', { class: 'Foo' })).toThrow(/find appmaps/); + }); + + it('accepts --route and --status on find appmaps', () => { + expect(() => 
validateFlags('appmaps', { route: '/x', status: '500' })).not.toThrow(); + }); + + it('rejects --table on find calls and find exceptions', () => { + expect(() => validateFlags('calls', { table: 'orders' })).toThrow(/--table/); + expect(() => validateFlags('exceptions', { table: 'orders' })).toThrow(/--table/); + }); + + it('rejects --exception except on find exceptions', () => { + expect(() => validateFlags('exceptions', { exception: 'IntegrityError' })).not.toThrow(); + expect(() => validateFlags('calls', { exception: 'IntegrityError' })).toThrow(/--exception/); + expect(() => validateFlags('queries', { exception: 'IntegrityError' })).toThrow(/--exception/); + }); + + it('rejects --label everywhere except find calls', () => { + expect(() => validateFlags('calls', { label: 'log' })).not.toThrow(); + expect(() => validateFlags('appmaps', { label: 'log' })).toThrow(/--label/); + expect(() => validateFlags('requests', { label: 'log' })).toThrow(/--label/); + expect(() => validateFlags('queries', { label: 'log' })).toThrow(/--label/); + expect(() => validateFlags('exceptions', { label: 'log' })).toThrow(/--label/); + }); + + it('rejects --duration on exceptions; accepts elsewhere', () => { + expect(() => validateFlags('exceptions', { duration: '>1s' })).toThrow(/--duration/); + // appmaps (recording-level), calls, queries, requests all accept duration + expect(() => validateFlags('appmaps', { duration: '>1s' })).not.toThrow(); + expect(() => validateFlags('calls', { duration: '>1s' })).not.toThrow(); + expect(() => validateFlags('queries', { duration: '>1s' })).not.toThrow(); + expect(() => validateFlags('requests', { duration: '>1s' })).not.toThrow(); + }); + + it('hint message guides --method users on find requests', () => { + expect(() => validateFlags('requests', { method: 'findById' })).toThrow( + /--route/ + ); + }); + + it('lists multiple incompatible flags in a single error', () => { + expect(() => + validateFlags('appmaps', { class: 'Foo', table: 'orders' }) + 
).toThrow(/--class.*--table|--table.*--class/); + }); + + it('ignores undefined / null flag values', () => { + expect(() => + validateFlags('appmaps', { class: undefined, table: null }) + ).not.toThrow(); + }); + + it('logs accepts --logger and --message; rejects --class with a hint', () => { + expect(() => validateFlags('logs', { logger: 'AppLogger' })).not.toThrow(); + expect(() => validateFlags('logs', { message: 'connection refused' })).not.toThrow(); + expect(() => validateFlags('logs', { class: 'AppLogger' })).toThrow(/--logger/); + expect(() => validateFlags('logs', { label: 'log' })).toThrow(/implied/); + }); + + it('logs rejects row-level filters that don\'t apply', () => { + expect(() => validateFlags('logs', { route: '/x' })).toThrow(/--route/); + expect(() => validateFlags('logs', { status: '500' })).toThrow(/--status/); + expect(() => validateFlags('logs', { duration: '>1s' })).toThrow(/--duration/); + expect(() => validateFlags('logs', { table: 'users' })).toThrow(/--table/); + expect(() => validateFlags('logs', { exception: 'X' })).toThrow(/--exception/); + }); + + it('--logger and --message are rejected on non-logs types', () => { + for (const type of ['appmaps', 'requests', 'queries', 'calls', 'exceptions'] as const) { + expect(() => validateFlags(type, { logger: 'X' })).toThrow(/--logger/); + expect(() => validateFlags(type, { message: 'x' })).toThrow(/--message/); + } + }); + + it('--with-logs is accepted only on find exceptions', () => { + expect(() => validateFlags('exceptions', { 'with-logs': 5 })).not.toThrow(); + for (const type of ['appmaps', 'requests', 'queries', 'calls', 'logs'] as const) { + expect(() => validateFlags(type, { 'with-logs': 5 })).toThrow(/--with-logs/); + } + }); +}); + +describe('buildFindFilter', () => { + it('splits Class#method off of --class so the method composes via filter.method', () => { + const { filter } = buildFindFilter({ + type: 'queries', + class: 'org.example.UserRepo#findById', + }); + 
expect(filter.className).toBe('org.example.UserRepo#findById'); + expect(filter.method).toBe('findById'); + }); + + it('explicit --method wins over a method embedded in --class', () => { + const { filter } = buildFindFilter({ + type: 'calls', + class: 'X#fromClass', + method: 'fromMethod', + }); + expect(filter.method).toBe('fromMethod'); + }); + + it('--class without # leaves filter.method undefined', () => { + const { filter } = buildFindFilter({ + type: 'calls', + class: 'OpenSSL::Cipher', + }); + expect(filter.className).toBe('OpenSSL::Cipher'); + expect(filter.method).toBeUndefined(); + }); + + it('returns the parsed type', () => { + expect(buildFindFilter({ type: 'appmaps' }).type).toBe('appmaps'); + }); + + it('plumbs --with-logs into filter.withLogs from either kebab or camel keys', () => { + expect(buildFindFilter({ type: 'exceptions', withLogs: 5 }).filter.withLogs).toBe(5); + expect(buildFindFilter({ type: 'exceptions', 'with-logs': 7 }).filter.withLogs).toBe(7); + expect(buildFindFilter({ type: 'exceptions' }).filter.withLogs).toBeUndefined(); + }); + + it('plumbs --logger and --message into the filter for logs', () => { + const { type, filter } = buildFindFilter({ + type: 'logs', + logger: 'AppLogger', + message: 'connection refused', + }); + expect(type).toBe('logs'); + expect(filter.logger).toBe('AppLogger'); + expect(filter.message).toBe('connection refused'); + }); +}); + +describe('projectLogMessage', () => { + it('prefers a structured-return message field when return_value is JSON', () => { + const r = JSON.stringify({ level: 'info', message: 'hello world' }); + expect(projectLogMessage(null, r)).toBe('hello world'); + }); + + it('uses a parameter named message when return_value lacks a structured message', () => { + const params = JSON.stringify([ + { name: 'tag', class: 'String', value: 'auth' }, + { name: 'message', class: 'String', value: 'login ok' }, + ]); + expect(projectLogMessage(params, null)).toBe('login ok'); + }); + + it('accepts msg 
as an alias for message', () => { + const params = JSON.stringify([{ name: 'msg', class: 'String', value: 'queued' }]); + expect(projectLogMessage(params, null)).toBe('queued'); + }); + + it('falls back to the first string-typed parameter value', () => { + const params = JSON.stringify([ + { name: 'count', class: 'Integer', value: 5 }, + { name: 'note', class: 'String', value: 'first text' }, + ]); + expect(projectLogMessage(params, null)).toBe('first text'); + }); + + it('returns a non-empty repr even when nothing matches', () => { + const params = JSON.stringify([{ name: 'count', class: 'Integer', value: 5 }]); + expect(projectLogMessage(params, null)).toBe('[5]'); + }); + + it('returns an empty string when both inputs are null', () => { + expect(projectLogMessage(null, null)).toBe(''); + }); + + it('return_value that is not JSON is treated as opaque (not the message)', () => { + // Falls through to parameters_json; if that's null, returns ''. + expect(projectLogMessage(null, 'true')).toBe(''); + }); +}); diff --git a/packages/cli/tests/unit/cmds/query/verbs/hotspots.spec.ts b/packages/cli/tests/unit/cmds/query/verbs/hotspots.spec.ts new file mode 100644 index 0000000000..7c119dd997 --- /dev/null +++ b/packages/cli/tests/unit/cmds/query/verbs/hotspots.spec.ts @@ -0,0 +1,29 @@ +import { validateFlags } from '../../../../../src/cmds/query/verbs/hotspots'; + +describe('hotspots verb flag validation', () => { + it('function-mode accepts --class', () => { + expect(() => validateFlags('function', { class: 'UserRepository' })).not.toThrow(); + }); + + it('sql-mode rejects --class', () => { + expect(() => validateFlags('sql', { class: 'UserRepository' })).toThrow( + /--type=sql:.*--class.*not supported/ + ); + }); + + it('sql-mode accepts --route, --branch, --since, --until, --limit', () => { + expect(() => + validateFlags('sql', { + route: '/x', + branch: 'main', + since: '2026-01-01', + until: '2026-12-31', + limit: 5, + }) + ).not.toThrow(); + }); + + it('ignores 
undefined / null flag values', () => { + expect(() => validateFlags('sql', { class: undefined })).not.toThrow(); + }); +}); diff --git a/packages/cli/tests/unit/cmds/query/verbs/tree.spec.ts b/packages/cli/tests/unit/cmds/query/verbs/tree.spec.ts new file mode 100644 index 0000000000..33a730a23d --- /dev/null +++ b/packages/cli/tests/unit/cmds/query/verbs/tree.spec.ts @@ -0,0 +1,63 @@ +import { applyFilter } from '../../../../../src/cmds/query/verbs/tree'; +import { + ExceptionNode, + FunctionNode, + HttpClientNode, + HttpServerNode, + LogNode, + SqlNode, + TreeNode, +} from '../../../../../src/cmds/query/queries/tree'; + +const baseFields = { + parent_event_id: null, + thread_id: null, + depth: 0, +}; + +const http: HttpServerNode = { + kind: 'http_server', event_id: 1, ...baseFields, + method: 'GET', route: '/x', status_code: 200, elapsed_ms: 1, +}; +const httpOut: HttpClientNode = { + kind: 'http_client', event_id: 2, ...baseFields, + method: 'GET', url: 'https://x', status_code: 200, elapsed_ms: 1, +}; +const sql: SqlNode = { + kind: 'sql', event_id: 3, ...baseFields, + sql_text: 'SELECT 1', database_type: null, elapsed_ms: 1, +}; +const fn: FunctionNode = { + kind: 'function', event_id: 4, ...baseFields, + fqid: 'app/X#m', defined_class: 'X', method_id: 'm', + path: null, lineno: null, is_static: false, + elapsed_ms: 1, parameters_json: null, return_value: null, +}; +const exc: ExceptionNode = { + kind: 'exception', event_id: 5, ...baseFields, + exception_class: 'IOError', message: null, path: null, lineno: null, +}; +const lg: LogNode = { + kind: 'log', event_id: 6, ...baseFields, + fqid: 'app/Logger#info', logger: 'Logger', method_id: 'info', + path: null, lineno: null, elapsed_ms: 0.1, + message: 'hi', + parameters_json: '[{"name":"message","value":"hi"}]', return_value: null, +}; + +const all: TreeNode[] = [http, httpOut, sql, fn, exc, lg]; + +describe('tree --filter', () => { + it('all returns every node', () => { + expect(applyFilter(all, 
'all')).toHaveLength(6); + }); + it('http includes server and client requests', () => { + expect(applyFilter(all, 'http')).toEqual([http, httpOut]); + }); + it('sql returns only sql nodes', () => { + expect(applyFilter(all, 'sql')).toEqual([sql]); + }); + it('logs returns only log nodes', () => { + expect(applyFilter(all, 'logs')).toEqual([lg]); + }); +}); diff --git a/scripts/verify.mjs b/scripts/verify.mjs new file mode 100755 index 0000000000..d96bd61912 --- /dev/null +++ b/scripts/verify.mjs @@ -0,0 +1,162 @@ +#!/usr/bin/env node +// Run lint + typecheck on packages with modified files. +// +// Usage: +// node scripts/verify.mjs # check working-tree changes (staged + unstaged + untracked) +// node scripts/verify.mjs --staged # check only staged changes (intended for pre-commit hook use) +// +// Per affected package: +// 1. eslint on the package's changed *.ts / *.tsx / *.js / *.mjs files +// 2. tsc --noEmit on the whole package, if it has tsconfig.json + a typecheck script +// +// Exits non-zero if any check fails. Skips files outside packages/. + +import { spawnSync } from 'node:child_process'; +import { existsSync, readFileSync } from 'node:fs'; +import path from 'node:path'; +import { fileURLToPath } from 'node:url'; + +const ROOT = path.resolve(path.dirname(fileURLToPath(import.meta.url)), '..'); +const STAGED = process.argv.includes('--staged'); + +const LINT_EXTS = new Set(['.ts', '.tsx', '.js', '.mjs', '.cjs']); + +function git(...args) { + const r = spawnSync('git', args, { cwd: ROOT, encoding: 'utf8' }); + if (r.status !== 0) { + console.error(`git ${args.join(' ')} failed:\n${r.stderr}`); + process.exit(1); + } + return r.stdout; +} + +function modifiedFiles() { + if (STAGED) { + // Staged adds/copies/modifies/renames. + return git('diff', '--cached', '--name-only', '--diff-filter=ACMR') + .split('\n') + .filter(Boolean); + } + // Working tree: tracked changes + untracked (excluding ignored). 
+ // --porcelain output is `XY filename`; strip the 3-char status prefix. + // Renames appear as `R old -> new`; --no-renames keeps both columns simple. + return git('status', '--porcelain', '--no-renames') + .split('\n') + .filter(Boolean) + .map((line) => line.slice(3)); +} + +function groupByPackage(files) { + const byPkg = new Map(); + const skipped = []; + for (const file of files) { + const m = /^packages\/([^/]+)\//.exec(file); + if (!m) { + skipped.push(file); + continue; + } + const pkg = m[1]; + if (!byPkg.has(pkg)) byPkg.set(pkg, []); + byPkg.get(pkg).push(file); + } + return { byPkg, skipped }; +} + +function hasScript(pkgDir, name) { + const pj = path.join(pkgDir, 'package.json'); + if (!existsSync(pj)) return false; + try { + const json = JSON.parse(readFileSync(pj, 'utf8')); + return Boolean(json.scripts && json.scripts[name]); + } catch { + return false; + } +} + +function resolveBin(pkgDir, name) { + const pkgBin = path.join(pkgDir, 'node_modules', '.bin', name); + if (existsSync(pkgBin)) return pkgBin; + const rootBin = path.join(ROOT, 'node_modules', '.bin', name); + if (existsSync(rootBin)) return rootBin; + return null; +} + +function run(label, command, args, opts) { + console.log(`\n→ ${label}: ${command} ${args.join(' ')} (cwd: ${opts.cwd})`); + const r = spawnSync(command, args, { stdio: 'inherit', ...opts }); + return r.status === 0; +} + +function verifyPackage(pkg, files) { + const pkgDir = path.join(ROOT, 'packages', pkg); + const pkgJsonPath = path.join(pkgDir, 'package.json'); + if (!existsSync(pkgJsonPath)) { + console.warn(`Skipping ${pkg}: no packages/${pkg}/package.json`); + return true; + } + + let ok = true; + + // Lint changed lintable files. 
+ const lintable = files + .filter((f) => LINT_EXTS.has(path.extname(f))) + .filter((f) => existsSync(path.join(ROOT, f))) // skip deleted files + .map((f) => path.relative(pkgDir, path.join(ROOT, f))); + + if (lintable.length > 0 && hasScript(pkgDir, 'lint')) { + // Prefer the package-local eslint binary — root and packages can carry + // different major versions (e.g. ESLint 7 at root vs 8 in cli), and + // @typescript-eslint plugins are pinned to one major. + const eslintBin = resolveBin(pkgDir, 'eslint'); + if (!eslintBin) { + console.warn(`Skipping lint for ${pkg}: eslint not found in node_modules/.bin`); + } else { + ok = + run( + `${pkg}: eslint (${lintable.length} file${lintable.length === 1 ? '' : 's'})`, + eslintBin, + // --quiet hides warnings; CI rules that only warn are not blockers. + ['--quiet', ...lintable], + { cwd: pkgDir } + ) && ok; + } + } + + // Typecheck the whole package. + if (hasScript(pkgDir, 'typecheck') && existsSync(path.join(pkgDir, 'tsconfig.json'))) { + const tscBin = resolveBin(pkgDir, 'tsc'); + if (!tscBin) { + console.warn(`Skipping typecheck for ${pkg}: tsc not found in node_modules/.bin`); + } else { + ok = run(`${pkg}: tsc --noEmit`, tscBin, ['--noEmit'], { cwd: pkgDir }) && ok; + } + } + + return ok; +} + +const files = modifiedFiles(); +if (files.length === 0) { + console.log('No modified files. Nothing to verify.'); + process.exit(0); +} + +const { byPkg, skipped } = groupByPackage(files); +console.log(`Verifying ${byPkg.size} package(s) with modified files:`); +for (const [pkg, pkgFiles] of byPkg) { + console.log(` packages/${pkg} (${pkgFiles.length} file${pkgFiles.length === 1 ? '' : 's'})`); +} +if (skipped.length > 0) { + console.log(` skipped ${skipped.length} non-package file(s)`); +} + +let allOk = true; +for (const [pkg, pkgFiles] of byPkg) { + allOk = verifyPackage(pkg, pkgFiles) && allOk; +} + +if (!allOk) { + console.error('\nverify: FAILED'); + process.exit(1); +} +console.log('\nverify: OK');