diff --git a/.env.example b/.env.example new file mode 100644 index 0000000..b6653c6 --- /dev/null +++ b/.env.example @@ -0,0 +1,167 @@ +# ============================================================================= +# agentmemory configuration +# ============================================================================= +# +# Copy this file to `~/.agentmemory/.env` (or to your project root if you +# prefer scoped config) and uncomment the lines you want to override. +# +# Every line is OFF by default — `agentmemory` runs out of the box with no +# LLM key, no embedding key, and no API auth. Set keys here only when you +# want to enable the corresponding feature. +# +# Run `npx @agentmemory/agentmemory init` to copy this file into place +# automatically. Run `npx @agentmemory/agentmemory doctor` to verify that +# the daemon reads the env you expect. +# +# Defaults shown in comments. Listed in priority order — the first key +# present wins on the LLM detection path (see src/config.ts::detectProvider). + +# ----------------------------------------------------------------------------- +# 1. LLM provider — pick ONE +# ----------------------------------------------------------------------------- +# +# Without a provider key, agentmemory runs in noop mode: observations are +# indexed via zero-LLM synthetic compression, hybrid search still works, +# but LLM-backed summarisation / reflection / consolidation are disabled. +# The detection order is OPENAI_API_KEY → MINIMAX_API_KEY → ANTHROPIC_API_KEY +# → GEMINI_API_KEY → OPENROUTER_API_KEY → noop. + +# OPENAI_API_KEY=sk-... # Used for OpenAI-compatible embeddings today. PR #307 will extend this to chat completions (DeepSeek, SiliconFlow, vLLM, LM Studio, Ollama via `/v1`). +# OPENAI_BASE_URL=https://api.openai.com # Override for OpenAI-compatible providers + +# ANTHROPIC_API_KEY=sk-ant-... 
+# ANTHROPIC_MODEL=claude-sonnet-4-20250514 # Default Anthropic model +# ANTHROPIC_BASE_URL=https://api.anthropic.com # Override for Anthropic-compatible proxies / Azure AI Foundry + +# GEMINI_API_KEY=... # Either env name works; GEMINI_API_KEY takes precedence +# GOOGLE_API_KEY=... # Alias for GEMINI_API_KEY when set alone (emits a one-time stderr hint) +# GEMINI_MODEL=gemini-2.5-flash # Default Gemini model (auto-detected GA model) + +# OPENROUTER_API_KEY=sk-or-... +# OPENROUTER_MODEL=anthropic/claude-sonnet-4-20250514 + +# MINIMAX_API_KEY=... +# MINIMAX_MODEL=MiniMax-M2.7 + +# MAX_TOKENS=4096 # Cap LLM completion tokens for compression / summarise calls + +# Opt-in Claude-subscription fallback (spawns @anthropic-ai/claude-agent-sdk +# child sessions). Off by default — the agent-sdk fallback can trigger +# Stop-hook recursion (#149 follow-up) when invoked from inside Claude Code. +# AGENTMEMORY_ALLOW_AGENT_SDK=true + +# FALLBACK_PROVIDERS=anthropic,gemini # Comma-separated chain tried after the primary provider returns an error (e.g. rate limit) + +# ----------------------------------------------------------------------------- +# 2. Embedding provider — auto-detected, override via EMBEDDING_PROVIDER +# ----------------------------------------------------------------------------- +# +# Without an embedding key, agentmemory runs in BM25-only mode for hybrid +# search. Detection order: EMBEDDING_PROVIDER override → GEMINI_API_KEY → +# OPENAI_API_KEY → VOYAGE_API_KEY → COHERE_API_KEY → OPENROUTER_API_KEY → +# local (Xenova/all-MiniLM-L6-v2, 384-dim). + +# EMBEDDING_PROVIDER=local # local | openai | voyage | cohere | gemini | openrouter + +# VOYAGE_API_KEY=pa-... # Optimised for code embeddings + +# COHERE_API_KEY=... # General-purpose embeddings + +# Reuses OPENAI_API_KEY / OPENAI_BASE_URL above when EMBEDDING_PROVIDER=openai. 
+# OPENAI_EMBEDDING_MODEL=text-embedding-3-small # Embedding model when EMBEDDING_PROVIDER=openai +# OPENAI_EMBEDDING_DIMENSIONS=1536 # Required when the model is not in the known-models table + +# OPENROUTER_EMBEDDING_MODEL=openai/text-embedding-3-small # When EMBEDDING_PROVIDER=openrouter + +# ----------------------------------------------------------------------------- +# 3. Auth & security +# ----------------------------------------------------------------------------- +# +# Bearer-token auth for the REST API + viewer + all integration plugins. +# Without a secret, REST endpoints are open on loopback. Set this when +# you expose the daemon beyond loopback or run behind a reverse proxy. + +# AGENTMEMORY_SECRET=your-secret-here + +# ----------------------------------------------------------------------------- +# 4. Search tuning +# ----------------------------------------------------------------------------- + +# BM25_WEIGHT=0.4 # Hybrid search weight for BM25 leg +# VECTOR_WEIGHT=0.6 # Hybrid search weight for vector leg +# AGENTMEMORY_GRAPH_WEIGHT=0.2 # Graph traversal bonus on smart-search ranking +# TOKEN_BUDGET=2000 # Max tokens injected via mem::context per session +# MAX_OBS_PER_SESSION=500 # Per-session observation cap before consolidation kicks in + +# ----------------------------------------------------------------------------- +# 5. Behaviour flags +# ----------------------------------------------------------------------------- + +# AGENTMEMORY_AUTO_COMPRESS=true # Run LLM compression on every observation batch (requires a provider key). Default off — synthetic compression handles most cases. +# AGENTMEMORY_INJECT_CONTEXT=true # Inject recalled memories back into agent prompts (#143). Default off — hooks capture observations but do not modify conversation. +# CONSOLIDATION_ENABLED=true # Run the 4-tier consolidation pipeline (memories → semantic → procedural). Default off — opt in once you've measured the LLM cost. 
+# CONSOLIDATION_DECAY_DAYS=30 # Age (days) after which non-reinforced memories decay during consolidation +# GRAPH_EXTRACTION_ENABLED=true # Extract concept-graph edges on remember; powers the graph-traversal recall path +# GRAPH_EXTRACTION_BATCH_SIZE=8 # Memories per graph-extraction batch +# AGENTMEMORY_REFLECT=true # Periodically auto-synthesize lessons from memories +# AGENTMEMORY_DROP_STALE_INDEX=true # Drop on-disk BM25 / vector index on startup if dim guard fires (#248). Recovery toggle for stuck-state debugging. +# AGENTMEMORY_IMAGE_EMBEDDINGS=true # Enable image embeddings when an image provider is present (experimental). + +# ----------------------------------------------------------------------------- +# 6. CLI / runtime knobs +# ----------------------------------------------------------------------------- + +# AGENTMEMORY_TOOLS=all # core (7 tools, default) | all (51 tools) — surface exposed to MCP clients +# AGENTMEMORY_SLOTS=memory # Comma-separated plugin slot names the CLI should claim +# AGENTMEMORY_DEBUG=1 # Trace MCP shim probe + standalone fallback decisions to stderr +# AGENTMEMORY_FORCE_PROXY=1 # Skip the MCP shim livez probe and trust AGENTMEMORY_URL (for sandboxed MCP clients that can't reach localhost) +# AGENTMEMORY_PROBE_TIMEOUT_MS=2000 # MCP shim livez probe timeout +# AGENTMEMORY_URL=http://localhost:3111 # REST base URL — honored by status, doctor, MCP shim +# AGENTMEMORY_VIEWER_URL=http://localhost:3113 # Override the viewer URL printed by `agentmemory status` +# AGENTMEMORY_EXPORT_ROOT=~/agentmemory-backup # Default destination for `agentmemory export` + +# STANDALONE_MCP=1 # MCP shim only — bypass the worker and run @agentmemory/mcp in-process +# STANDALONE_PERSIST_PATH=~/.agentmemory/local.db # Path used by the standalone MCP shim's local fallback store + +# Snapshot exporter — periodic snapshots of state_store + stream_store. 
+# SNAPSHOT_ENABLED=true +# SNAPSHOT_DIR=~/.agentmemory/snapshots +# SNAPSHOT_INTERVAL=3600 # Seconds between snapshots + +# Team sharing — when set, memories are scoped to (TEAM_ID, USER_ID) tuples. +# TEAM_MODE=shared +# TEAM_ID=acme +# USER_ID=rohit + +# ----------------------------------------------------------------------------- +# 7. Ports +# ----------------------------------------------------------------------------- + +# III_REST_PORT=3111 # REST API port (also affects viewer at +2) +# III_STREAMS_PORT=3112 # Streams API port +# III_ENGINE_URL=ws://localhost:49134 # iii-engine WebSocket URL (used by the worker) + +# ----------------------------------------------------------------------------- +# 8. iii engine pin +# ----------------------------------------------------------------------------- +# +# agentmemory currently pins iii-engine to v0.11.2 — v0.11.6 introduces a +# new sandbox-everything-via-`iii worker add` model that agentmemory +# hasn't been refactored for yet. Override with AGENTMEMORY_III_VERSION +# only after migrating to the sandbox model manually. + +# AGENTMEMORY_III_VERSION=0.11.2 + +# ----------------------------------------------------------------------------- +# 9. Claude Code bridge (opt-in) +# ----------------------------------------------------------------------------- + +# CLAUDE_MEMORY_BRIDGE=true # Mirror compressed memories into Claude Code's CLAUDE.md +# CLAUDE_PROJECT_PATH=/path/to/your/project # Required when CLAUDE_MEMORY_BRIDGE=true +# CLAUDE_MEMORY_LINE_BUDGET=200 # Lines of memory CLAUDE.md should hold + +# ----------------------------------------------------------------------------- +# 10. 
Obsidian export (opt-in) +# ----------------------------------------------------------------------------- + +# OBSIDIAN_AUTO_EXPORT=true # Auto-export memories to an Obsidian vault on every consolidation diff --git a/CHANGELOG.md b/CHANGELOG.md index ec08905..5a3d682 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,13 +6,43 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/), ## [Unreleased] +## [0.9.13] — 2026-05-15 + +Six PRs landed since v0.9.12 — `.env.example` discovery shipped (#372), CJK BM25 tokenizer landed (#344 / PR #362), `benchmark/load-100k.ts` load harness landed (#346 / PR #363), one-click deploy templates for fly.io / Railway / Render / Coolify added (#343 / PR #361), Gemini provider defaults moved to current GA models (#246 + #368 / PR #370), and the in-tree Python ecosystem story switched from a duplicate REST client to a one-page `iii-sdk` example (#342 / PR #364). Plus 14 Dependabot security advisories closed via Next.js + PostCSS bumps. + ### Added -- **`benchmark/load-100k.ts` load harness** ([#346](https://github.com/rohitg00/agentmemory/issues/346)). Hand-rolled, dependency-free harness that seeds N synthetic memories against a local daemon at `http://localhost:3111` and records p50 / p90 / p99 latency + throughput for `POST /agentmemory/remember`, `POST /agentmemory/smart-search`, and `GET /agentmemory/memories?latest=true` across the matrix N ∈ {1k, 10k, 100k} × concurrency C ∈ {1, 10, 100}. Content drawn from a seedable `mulberry32` PRNG so re-running against the same build produces the same seed corpus. Results land in `benchmark/results/load-100k-.json` (schema-versioned). Wired as `npm run bench:load`. See `benchmark/README.md` for the matrix and env knobs. 
+- **`.env.example` at repo root + bundled in the npm tarball** ([#372](https://github.com/rohitg00/agentmemory/issues/372), closes [#47](https://github.com/rohitg00/agentmemory/issues/47), [#293](https://github.com/rohitg00/agentmemory/issues/293), partial [#233](https://github.com/rohitg00/agentmemory/issues/233)). Every env var actually read by `src/` is now documented in one place, grouped by surface (LLM provider, embedding provider, auth, search tuning, behaviour flags, CLI runtime, ports, iii engine pin, Claude Code bridge, Obsidian export). Every line is commented out by default so the file ships as a config template, not a config. The npm package now lists `.env.example` in its `files` field so `npm i -g @agentmemory/agentmemory` carries it. + +- **`agentmemory init` command**. Copies the bundled `.env.example` to `~/.agentmemory/.env` if that file doesn't already exist; refuses to overwrite an existing config and prints a diff command pointing at the latest template. Wired into the CLI help block alongside `status` / `doctor` / `demo` / `upgrade` / `mcp` / `import-jsonl`. + +- **CI sync-checker for `.env.example`** (`scripts/check-env-example.mjs`). Walks every `.ts` / `.mts` / `.mjs` / `.js` file under `src/`, extracts `process.env["KEY"]` / `env["KEY"]` / `getMergedEnv()["KEY"]` / `getEnvVar("KEY")` references, and fails CI when `src/` reads an env var the template doesn't document (or vice versa). Plugged into `.github/workflows/ci.yml` after `npm test`. Initial bootstrap: 60 keys in sync. + +- **CJK tokenizer for BM25 search** ([#344](https://github.com/rohitg00/agentmemory/issues/344), PR [#362](https://github.com/rohitg00/agentmemory/pull/362)). New `src/state/cjk-segmenter.ts` detects CJK input by Unicode block and routes to `@node-rs/jieba` (Chinese, native, no model download), `tiny-segmenter` (Japanese, pure JS, ~25 KB), or rule-based syllable-block split (Korean). 
Both segmenters declared in `optionalDependencies` so the base install stays lean; soft-fail with a one-time stderr hint when the dep is missing. Order-preserving single-pass tokenization across mixed CJK + non-CJK runs (regression test for `"abc 메모리 def 项目 ghi"` returns `["abc","메모리","def","项目","ghi"]`). + +- **`benchmark/load-100k.ts` load harness** ([#346](https://github.com/rohitg00/agentmemory/issues/346), PR [#363](https://github.com/rohitg00/agentmemory/pull/363)). Hand-rolled, dependency-free harness that seeds N synthetic memories against a local daemon at `http://localhost:3111` and records p50 / p90 / p99 latency + throughput for `POST /agentmemory/remember`, `POST /agentmemory/smart-search`, and `GET /agentmemory/memories?latest=true` across the matrix N ∈ {1k, 10k, 100k} × concurrency C ∈ {1, 10, 100}. Content drawn from a seedable `mulberry32` PRNG so re-running against the same build produces the same seed corpus. Results land in `benchmark/results/load-100k-<version>.json`. Wired as `npm run bench:load`. + +- **One-click deploy templates** for fly.io, Railway, Render, and Coolify ([#343](https://github.com/rohitg00/agentmemory/issues/343), PR [#361](https://github.com/rohitg00/agentmemory/pull/361)). Each template under `deploy/<platform>/` ships a multi-stage Dockerfile that `COPY --from=iiidev/iii:0.11.2`s the engine binary into a `node:22-slim` runtime, npm-installs `@agentmemory/agentmemory` under `/opt/agentmemory` with `iii-sdk` pinned via `package.json` overrides (avoids the caret-resolves-to-0.11.6 drift), and runs an entrypoint that rewrites the bundled `iii-config.yaml` to bind `0.0.0.0` + use absolute `/data` paths, chowns the platform-mounted volume to `node:node` via `gosu`, generates a first-boot HMAC secret, and exec's the agentmemory CLI as the unprivileged `node` user under `tini` (with `TINI_SUBREAPER=1`). Verified end-to-end on fly.io (machine in `iad`, 1 GB volume, healthcheck passing). 
+ +- **`examples/python/`** quickstart + observation/recall flow showing `iii-sdk` (Python) calling `mem::remember` / `mem::smart-search` / `mem::context` directly over `ws://localhost:49134` ([#342](https://github.com/rohitg00/agentmemory/issues/342), PR [#364](https://github.com/rohitg00/agentmemory/pull/364)). Replaces a duplicate-transport Python REST client (initial PR #360, closed) with a single-SDK story — the same `iii-sdk` install (`pip install iii-sdk` / `cargo add iii-sdk` / `npm install iii-sdk`) talks to every agentmemory function from any language. + +### Changed + +- **Gemini provider defaults bumped to current GA models** (PR [#370](https://github.com/rohitg00/agentmemory/pull/370), closes [#368](https://github.com/rohitg00/agentmemory/pull/368), [#246](https://github.com/rohitg00/agentmemory/pull/246)). LLM default `gemini-2.0-flash` → `gemini-2.5-flash` (the moving `gemini-flash-latest` alias was rejected — release behaviour should be deterministic). Embedding default `text-embedding-004` → `gemini-embedding-001` (the previous default is deprecated and shuts down 2026-01-14 per `ai.google.dev/gemini-api/docs/deprecations`). Three implementation details ride along: (1) URL path `:batchEmbedContent` → `:batchEmbedContents`, (2) every request now sends `outputDimensionality: 768` so the returned vectors match `GeminiEmbeddingProvider.dimensions = 768` and the index-restore dim guard from #248 — no reindex needed, (3) returned vectors are L2-normalized before the result-array push because `gemini-embedding-001` does **not** normalize by default unlike `text-embedding-004` and without this the downstream cosine-similarity math silently collapses recall. `l2Normalize` warns once on a zero-norm embedding so operators can correlate index quality dips with upstream regressions. + +### Security + +- **14 open Dependabot advisories closed via Next.js + PostCSS bumps** (PR [#348](https://github.com/rohitg00/agentmemory/pull/348)). 
Closed: 13 Next.js advisories (middleware/proxy bypass + SSRF on WebSocket upgrades + DoS via connection exhaustion + CSP-nonce XSS + image-opt DoS + RSC cache poisoning + beforeInteractive XSS + segment-prefetch routes) by bumping the website's Next.js to `^16.2.6`. Plus the PostCSS XSS-via-unescaped-`` advisory closed by pinning to `^8.5.10` via `overrides` in `website/package.json`. Verified `npm audit --omit=dev` returns 0 and `npm run build` clean on Next 16.2.6. Dependabot now runs weekly against six update streams (npm × 5 paths + github-actions) per the new `.github/dependabot.yml`. + +### Contributors + +External contributors landed this release: -### Performance +- [@fatinghenji](https://github.com/fatinghenji) — pre-cleanup work on the OpenAI-compatible LLM provider (PR #240 / PR #307); the universal-adapter shape will land in the next minor once branch maintenance catches up. +- [@AmmarSaleh50](https://github.com/AmmarSaleh50) — Gemini embedding migration with L2-norm + 768-dim plumbing (PR #246, folded into #370). +- [@yut304](https://github.com/yut304) — Gemini LLM default deprecation fix (PR #368, folded into #370). -- This is the placeholder for per-release p50 / p90 / p99 numbers from `benchmark/load-100k.ts`. Each release should land a `benchmark/results/load-100k-.json` and reference the headline p99 here. Format suggestion: one bullet per (N, C) cell that materially regressed or improved versus the previous release. p99 is the capacity-planning number; p50 + throughput are context. See [`benchmark/README.md`](benchmark/README.md) for how to reproduce. +Thanks also to the issue reporters whose precise repros drove the search-quality + viewer + config-template work this cycle. 
## [0.9.12] — 2026-05-13 diff --git a/package.json b/package.json index 35d413b..c305eed 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@agentmemory/agentmemory", - "version": "0.9.12", + "version": "0.9.13", "description": "Persistent memory for AI coding agents, powered by iii-engine's three primitives", "type": "module", "main": "dist/index.mjs", @@ -17,7 +17,7 @@ "agentmemory": "dist/cli.mjs" }, "scripts": { - "build": "tsdown && (cp iii-config.yaml dist/ 2>/dev/null || true) && (cp iii-config.docker.yaml dist/ 2>/dev/null || true) && (cp docker-compose.yml dist/ 2>/dev/null || true) && mkdir -p dist/viewer && cp src/viewer/index.html dist/viewer/", + "build": "tsdown && (cp iii-config.yaml dist/ 2>/dev/null || true) && (cp iii-config.docker.yaml dist/ 2>/dev/null || true) && (cp docker-compose.yml dist/ 2>/dev/null || true) && (cp .env.example dist/ 2>/dev/null || true) && mkdir -p dist/viewer && cp src/viewer/index.html dist/viewer/", "dev": "tsx src/index.ts", "start": "node dist/cli.mjs", "migrate": "node dist/functions/migrate.js", @@ -44,6 +44,7 @@ "iii-config.yaml", "iii-config.docker.yaml", "docker-compose.yml", + ".env.example", "LICENSE", "README.md", "AGENTS.md" diff --git a/packages/mcp/package.json b/packages/mcp/package.json index 5dd2e22..569f5b8 100644 --- a/packages/mcp/package.json +++ b/packages/mcp/package.json @@ -1,6 +1,6 @@ { "name": "@agentmemory/mcp", - "version": "0.9.12", + "version": "0.9.13", "description": "Standalone MCP server for agentmemory — thin shim that re-exposes @agentmemory/agentmemory's MCP entrypoint", "type": "module", "bin": { diff --git a/plugin/.claude-plugin/plugin.json b/plugin/.claude-plugin/plugin.json index 78d7f73..6fff31d 100644 --- a/plugin/.claude-plugin/plugin.json +++ b/plugin/.claude-plugin/plugin.json @@ -1,6 +1,6 @@ { "name": "agentmemory", - "version": "0.9.12", + "version": "0.9.13", "description": "Persistent memory for AI coding agents -- captures tool usage, compresses 
via LLM, injects context into future sessions. 12 hooks, 51 MCP tools, 4 skills, real-time viewer.", "author": { "name": "Rohit Ghumare", diff --git a/plugin/.codex-plugin/plugin.json b/plugin/.codex-plugin/plugin.json index cf586da..c6e5778 100644 --- a/plugin/.codex-plugin/plugin.json +++ b/plugin/.codex-plugin/plugin.json @@ -1,6 +1,6 @@ { "name": "agentmemory", - "version": "0.9.12", + "version": "0.9.13", "description": "Persistent memory for AI coding agents -- captures tool usage, compresses via LLM, injects context into future sessions. 6 hooks, 51 MCP tools, 4 skills, real-time viewer.", "author": { "name": "Rohit Ghumare", diff --git a/scripts/check-env-example.mjs b/scripts/check-env-example.mjs new file mode 100644 index 0000000..ae5ea85 --- /dev/null +++ b/scripts/check-env-example.mjs @@ -0,0 +1,101 @@ +#!/usr/bin/env node +// +// Sync-check: every env var read by `src/` MUST be documented in +// `.env.example`. Runs in CI as a soft guard rail — keeps `.env.example` +// from drifting behind real config-surface additions. +// +// Usage: +// node scripts/check-env-example.mjs +// +// Returns 0 when in sync, 1 with a diff when out of sync. + +import { readFileSync, readdirSync, statSync } from "node:fs"; +import { join } from "node:path"; + +const ROOT = new URL("..", import.meta.url).pathname; +const SRC = join(ROOT, "src"); +const ENV_FILE = join(ROOT, ".env.example"); + +// Env vars read by the runtime but NOT user-facing config — these are +// either process-injected (HOME, PATH, USERPROFILE), set by the build / +// wrapper (NODE_*, npm_*), or set by tests (VITEST, *_TEST_*). Skipping +// them keeps `.env.example` a documented config surface rather than an +// inventory of every getenv anywhere in the codebase. +const RUNTIME_ONLY = new Set([ + "HOME", + "PATH", + "USERPROFILE", + "NODE_ENV", + "AGENTMEMORY_SDK_CHILD", +]); + +// Walk src/ for .ts / .mts / .mjs / .js files (excluding `.d.ts` declarations +// and dotfile dirs / node_modules). 
test/ lives outside src/ so it never enters. +function walk(dir) { + const out = []; + for (const entry of readdirSync(dir)) { + const full = join(dir, entry); + const s = statSync(full); + if (s.isDirectory()) { + if (entry === "node_modules" || entry.startsWith(".")) continue; + out.push(...walk(full)); + } else if (/\.(ts|mts|mjs|js)$/.test(entry) && !entry.endsWith(".d.ts")) { + out.push(full); + } + } + return out; +} + +// Multiple patterns: +// process.env["KEY"] — direct access +// env["KEY"] — local alias inside detectProvider, etc. +// getEnvVar("KEY") — helper from src/config.ts +// env: ProcessEnv → env.KEY — caught as `env["KEY"]` only; if you add +// a dotted-access path, extend the regex. +const PATTERNS = [ + // Direct map index: process.env["KEY"], env["KEY"], getMergedEnv()["KEY"]. + // The trailing `]\s*` form covers `…)["KEY"]` and `…env["KEY"]`. + /\[\s*"([A-Z][A-Z0-9_]+)"\s*\]/g, + /getEnvVar\(\s*"([A-Z][A-Z0-9_]+)"\s*\)/g, +]; +const used = new Set(); +for (const file of walk(SRC)) { + const text = readFileSync(file, "utf8"); + for (const pat of PATTERNS) { + pat.lastIndex = 0; + let m; + while ((m = pat.exec(text)) !== null) { + const name = m[1]; + if (!RUNTIME_ONLY.has(name)) used.add(name); + } + } +} + +const envText = readFileSync(ENV_FILE, "utf8"); +const documented = new Set(); +for (const line of envText.split("\n")) { + const m = line.match(/^#?\s*([A-Z][A-Z0-9_]+)=/); + if (m) documented.add(m[1]); +} + +const missing = [...used].filter((k) => !documented.has(k)).sort(); +const orphan = [...documented].filter((k) => !used.has(k)).sort(); + +if (missing.length === 0 && orphan.length === 0) { + console.log(`env-example: in sync (${used.size} keys documented)`); + process.exit(0); +} + +if (missing.length > 0) { + console.error( + `env-example: MISSING from .env.example — add documentation for these keys:`, + ); + for (const k of missing) console.error(` - ${k}`); +} +if (orphan.length > 0) { + console.error( + `env-example: ORPHAN in 
.env.example — no longer read by src/, remove or move to runtime-only allowlist:`, + ); + for (const k of orphan) console.error(` - ${k}`); +} +process.exit(1); diff --git a/src/cli.ts b/src/cli.ts index ced7a14..31c5cb0 100644 --- a/src/cli.ts +++ b/src/cli.ts @@ -73,6 +73,7 @@ Usage: agentmemory [command] [options] Commands: (default) Start agentmemory worker + init Copy bundled .env.example to ~/.agentmemory/.env if absent status Show connection status, memory count, flags, and health doctor Run diagnostic checks (server, flags, graph, providers) demo Seed sample sessions and show recall in action @@ -895,6 +896,71 @@ async function runDemoSearch(base: string, query: string): Promise }; } +// Prefer the packaged `.env.example` (next to `dist/cli.mjs`); fall back to +// the repo root when running from a source checkout. +function findEnvExample(): string | null { + const candidates = [ + join(__dirname, "..", ".env.example"), + join(__dirname, ".env.example"), + join(process.cwd(), ".env.example"), + ]; + for (const c of candidates) { + if (existsSync(c)) return c; + } + return null; +} + +async function runInit() { + p.intro("agentmemory init"); + const target = join(homedir(), ".agentmemory", ".env"); + const template = findEnvExample(); + if (!template) { + p.log.error( + "Could not locate .env.example in the package. Re-install with: npm i -g @agentmemory/agentmemory", + ); + process.exit(1); + } + const dir = dirname(target); + const { mkdir, copyFile } = await import("node:fs/promises"); + const { constants: fsConstants } = await import("node:fs"); + try { + await mkdir(dir, { recursive: true }); + // COPYFILE_EXCL collapses the exists-check + copy into one syscall — + // an existsSync(target) + copyFile() pair races with a parallel init + // (or any other process touching ~/.agentmemory/.env between the two + // calls) and would silently overwrite a config the operator just + // wrote. 
EEXIST out of copyFile is the only "already configured" + // signal we trust. + await copyFile(template, target, fsConstants.COPYFILE_EXCL); + } catch (err) { + if ((err as NodeJS.ErrnoException)?.code === "EEXIST") { + p.log.warn(`${target} already exists — leaving it untouched.`); + p.log.info( + `Compare against the latest template: diff ${target} ${template}`, + ); + p.outro("Nothing changed."); + return; + } + p.log.error( + `Failed to copy template: ${err instanceof Error ? err.message : String(err)}`, + ); + process.exit(1); + } + p.log.success(`Wrote ${target}`); + p.note( + [ + "All keys are commented out by default. Uncomment the ones you want.", + "", + "Common next steps:", + " 1. Pick an LLM provider key (ANTHROPIC_API_KEY / OPENAI_API_KEY / GEMINI_API_KEY / etc.)", + " 2. Run `npx @agentmemory/agentmemory doctor` to verify the daemon sees them", + " 3. Run `npx @agentmemory/agentmemory` to start the worker", + ].join("\n"), + "Next steps", + ); + p.outro(`Edit ${target} and you're set.`); +} + async function runDemo() { const port = getRestPort(); const base = `http://localhost:${port}`; @@ -1297,6 +1363,7 @@ async function runImportJsonl(): Promise { } const commands: Record Promise> = { + init: runInit, status: runStatus, doctor: runDoctor, demo: runDemo, diff --git a/src/functions/export-import.ts b/src/functions/export-import.ts index 694e6ba..4f0bca8 100644 --- a/src/functions/export-import.ts +++ b/src/functions/export-import.ts @@ -176,7 +176,7 @@ export function registerExportImportFunction(sdk: ISdk, kv: StateKV): void { const strategy = data.strategy || "merge"; const importData = data.exportData; - const supportedVersions = new Set(["0.3.0", "0.4.0", "0.5.0", "0.6.0", "0.6.1", "0.7.0", "0.7.2", "0.7.3", "0.7.4", "0.7.5", "0.7.6", "0.7.7", "0.7.9", "0.8.0", "0.8.1", "0.8.2", "0.8.3", "0.8.4", "0.8.5", "0.8.6", "0.8.7", "0.8.8", "0.8.9", "0.8.10", "0.8.11", "0.8.12", "0.8.13", "0.9.0", "0.9.1", "0.9.2", "0.9.3", "0.9.4", "0.9.5", "0.9.6", 
"0.9.7", "0.9.8", "0.9.9", "0.9.10", "0.9.11", "0.9.12"]); + const supportedVersions = new Set(["0.3.0", "0.4.0", "0.5.0", "0.6.0", "0.6.1", "0.7.0", "0.7.2", "0.7.3", "0.7.4", "0.7.5", "0.7.6", "0.7.7", "0.7.9", "0.8.0", "0.8.1", "0.8.2", "0.8.3", "0.8.4", "0.8.5", "0.8.6", "0.8.7", "0.8.8", "0.8.9", "0.8.10", "0.8.11", "0.8.12", "0.8.13", "0.9.0", "0.9.1", "0.9.2", "0.9.3", "0.9.4", "0.9.5", "0.9.6", "0.9.7", "0.9.8", "0.9.9", "0.9.10", "0.9.11", "0.9.12", "0.9.13"]); if (!supportedVersions.has(importData.version)) { return { success: false, diff --git a/src/version.ts b/src/version.ts index a5d36db..14b4f5e 100644 --- a/src/version.ts +++ b/src/version.ts @@ -1 +1 @@ -export const VERSION = "0.9.12"; +export const VERSION = "0.9.13"; diff --git a/test/export-import.test.ts b/test/export-import.test.ts index 7b46716..a81ed7f 100644 --- a/test/export-import.test.ts +++ b/test/export-import.test.ts @@ -119,7 +119,7 @@ describe("Export/Import Functions", () => { it("export produces valid ExportData structure", async () => { const result = (await sdk.trigger("mem::export", {})) as ExportData; - expect(result.version).toBe("0.9.12"); + expect(result.version).toBe("0.9.13"); expect(result.exportedAt).toBeDefined(); expect(result.sessions.length).toBe(1); expect(result.sessions[0].id).toBe("ses_1");