diff --git a/CHANGELOG.md b/CHANGELOG.md index 0db4f14..c75fac5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,31 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/), and this project adheres to [Semantic Versioning](https://semver.org/). +## [Unreleased] + +### Added + +- `edit` command (alias `e`) — open an entry's value in `$EDITOR` / `$VISUAL` with `--decrypt` support +- `--json` / `-j` flag on `get` and `find` for machine-readable JSON output +- Stdin piping for `set` — read value from stdin when piped (`echo "val" | ccli set key`) +- `confirm` as a standalone type for `data export`, `data import`, and `data reset` +- Advisory file locking (`fileLock.ts`) — all writes are lock-protected with stale-lock detection +- Auto-backup before destructive operations (`data reset`, non-merge `data import`) in `~/.codexcli/.backups/` +- MCP `codex_set`: `encrypt` and `password` parameters for encrypted storage +- MCP `codex_get`: `decrypt` and `password` parameters for encrypted retrieval +- MCP `codex_run`: `force` parameter to skip confirm check on protected entries +- MCP `codex_export`, `codex_import`, `codex_reset`: support for `confirm` data type +- Windows clipboard support via `clip` command + +### Fixed + +- `showExamples()` referenced non-existent flags `-k`, `-v`, `-e` — now uses valid flags +- `showHelp()` config signature and subcommands were incorrect — now shows `` with correct list +- `displayAliases` empty-state message referenced deleted command — now shows `set -a ` +- `data export all -o ` overwrote the same file three times — filenames now suffixed with type +- MCP `codex_run` ignored `confirm` metadata — now checks confirm before executing +- Data files used default permissions (0644) — now use 0600; directories use 0700 + ## [0.1.0] - 2026-02-20 ### Added diff --git a/ISSUES.md b/ISSUES.md new file mode 100644 index 0000000..8a1b370 --- /dev/null +++ 
b/ISSUES.md @@ -0,0 +1,142 @@ +# CodexCLI — Known Issues & Missing Features + +Comprehensive audit of bugs, inconsistencies, and missing features. + +--- + +## P0 — Bugs (FIXED) + +### 1. ~~`showExamples()` references non-existent flags `-k`, `-v`, `-e`~~ FIXED + +**File:** `src/formatting.ts` + +Examples now use valid flags: `get -a` (aliases only), `find prod -e` (entries only), `find ip -a` (aliases only), `find server -t` (tree). + +### 2. ~~`showHelp()` config signature and subcommands are wrong~~ FIXED + +**File:** `src/formatting.ts` + +Config line now shows `<subcommand>` and SUBCOMMANDS section includes `set, get, info, examples, completions`. + +### 3. ~~`displayAliases` empty-state message references deleted command~~ FIXED + +**File:** `src/commands/helpers.ts` + +Message now shows the correct command: `set <key> <value> -a <alias>`. + +### 4. ~~`data export all -o <file>` overwrites same file three times~~ FIXED + +**File:** `src/commands/data-management.ts` + +When `type === 'all'` and `-o` is specified, filenames are suffixed with the type (e.g., `backup-entries.json`, `backup-aliases.json`, `backup-confirm.json`). + +--- + +## P1 — Security & Platform Gaps (FIXED) + +### 5. ~~MCP `codex_run` ignores `confirm` metadata~~ FIXED + +**File:** `src/mcp-server.ts` + +`codex_run` now imports `hasConfirm` and checks confirm metadata before executing. If an entry has confirm set and `force` is not `true` (and not a dry run), execution is refused with an error message. Added `force` parameter to the tool schema. + +### 6. ~~Windows clipboard is unsupported~~ FIXED + +**File:** `src/utils/clipboard.ts` + +Added `win32` platform support using `clip` command. + +### 7. 
~~Data files use default permissions (0644)~~ FIXED + +**File:** `src/utils/atomicWrite.ts`, `src/utils/paths.ts`, `src/commands/data-management.ts` + +- `atomicWriteFileSync` now writes files with mode `0o600` (owner read/write only) +- `ensureDataDirectoryExists` now creates directories with mode `0o700` +- Export files in `data-management.ts` also use mode `0o600` + +--- + +## P2 — Missing Core Features (FIXED) + +### 8. ~~No stdin piping for `set`~~ FIXED + +`set` now reads from stdin when piped (non-TTY): `echo "value" | ccli set key`. + +### 9. ~~No `edit` command (`$EDITOR` support)~~ FIXED + +Added `edit` (alias `e`) command: `ccli edit <key>` opens the value in `$EDITOR`/`$VISUAL`. Supports `--decrypt` for encrypted entries. + +### 10. ~~MCP has no encryption support (set/get)~~ FIXED + +`codex_set` now accepts `encrypt` and `password` parameters. `codex_get` now accepts `decrypt` and `password` parameters. + +### 11. ~~`confirm` is not a standalone export/import type~~ FIXED + +`confirm` is now a valid standalone type for `data export`, `data import`, and `data reset`. Also added to MCP `codex_export`, `codex_import`, and `codex_reset`. + +### 12. ~~No file locking for concurrent access~~ FIXED + +Added advisory file locking (`src/utils/fileLock.ts`) using `.lock` files with atomic `O_CREAT|O_EXCL`. Integrated into `saveJsonSorted` — all writes are now lock-protected. Stale locks (>10s) are automatically broken. + +### 13. ~~No auto-backup before destructive operations~~ FIXED + +Added `src/utils/autoBackup.ts`. Automatic backups are created in `~/.codexcli/.backups/` before `data reset` and non-merge `data import`. + +### 14. ~~No `--json` output format~~ FIXED + +Added `--json` / `-j` flag to `get` and `find` commands for machine-readable JSON output. + +--- + +## P3 — Nice-to-Have Features + +### 15. Fish/PowerShell shell completion + +Only Bash and Zsh are supported. Fish and PowerShell users get no completions or wrapper. + +### 16. 
No `copy`/`cp` command + +Cannot duplicate an entry to a new key without get + set. + +### 17. No import preview/diff + +`data import --merge` silently overwrites conflicting keys with no way to preview what will change. + +### 18. No advanced search (regex, boolean operators) + +`find` only does case-insensitive substring matching. No regex, field-specific search, or boolean operators. + +### 19. No backup rotation / automatic backup management + +No built-in way to maintain a set of N recent backups. + +### 20. No command output capture + +`run` inherits stdio — no way to capture command output for chaining. + +### 21. No change log / audit trail + +No record of what was added, changed, or deleted over time. + +### 22. No fuzzy finder integration + +No `fzf` or similar interactive selection for keys. + +### 23. No conditional interpolation + +No `${ref:-default}` or `${ref:?error}` syntax for fallback values. + +### 24. No batch operations + +Cannot set multiple entries in one command. + +--- + +## Summary + +| Priority | Count | Description | +|----------|-------|-------------| +| **P0** | 4 | ~~Bugs showing incorrect info or causing data loss~~ ALL FIXED | +| **P1** | 3 | ~~Security and platform gaps~~ ALL FIXED | +| **P2** | 7 | ~~Missing core features~~ ALL FIXED | +| **P3** | 10 | Nice-to-have features | diff --git a/README.md b/README.md index f296d40..6aaa8bf 100644 --- a/README.md +++ b/README.md @@ -14,6 +14,7 @@ A command-line information store for quick reference of frequently used data. 
- [Searching](#searching) - [Aliases](#aliases) - [Renaming](#renaming) + - [Editing Data](#editing-data) - [Removing Data](#removing-data) - [Interpolation](#interpolation) - [Encryption](#encryption) @@ -41,7 +42,12 @@ CodexCLI is a command-line tool designed to help you store, organize, and retrie - **Encryption**: Password-protect sensitive values - **Search**: Find entries by searching keys or values - **Tree Visualization**: Display nested data in a tree-like structure -- **Clipboard Integration**: Copy values directly to clipboard +- **Clipboard Integration**: Copy values directly to clipboard (macOS, Linux, Windows) +- **Inline Editing**: Open entries in `$EDITOR` / `$VISUAL` for quick edits +- **JSON Output**: Machine-readable `--json` flag on `get` and `find` for scripting +- **Stdin Piping**: Pipe values into `set` from other commands +- **Auto-Backup**: Automatic timestamped backups before destructive operations +- **File Locking**: Advisory locking prevents data corruption from concurrent access - **Shell Tab-Completion**: Full tab-completion for Bash and Zsh (commands, flags, keys, aliases) - **MCP Server**: Expose CodexCLI as a tool for AI agents (Claude Code, Claude Desktop) via the Model Context Protocol @@ -84,6 +90,8 @@ ccli ### Install from Source +> **Note:** Installing from source registers the development binary `cclid` (not `ccli`). All examples in this README use `ccli`, but substitute `cclid` if you installed from source. The production `ccli` binary is available via Homebrew or the GitHub Releases download above. + Ensure npm's global binaries are in your PATH by adding the following to your shell profile (`.bashrc`, `.zshrc`, or equivalent): ```bash @@ -98,7 +106,7 @@ npm run build npm install -g . 
``` -If `ccli` is not found after installing, verify that npm's global bin directory is in your PATH: +If `cclid` is not found after installing, verify that npm's global bin directory is in your PATH: ```bash echo $PATH | grep -o "$(npm config get prefix)/bin" @@ -135,6 +143,12 @@ ccli set commands.deploy "./deploy.sh" --confirm # Remove the confirmation requirement from an entry ccli set commands.deploy --no-confirm + +# Pipe a value from stdin +echo "my value" | ccli set mykey + +# Pipe from another command +curl -s https://api.example.com/token | ccli set api.token ``` After setting an entry, you'll be asked interactively whether it should require confirmation to run. Use `--confirm` or `--no-confirm` to skip the prompt. @@ -166,6 +180,9 @@ ccli get api.key -d # Copy value to clipboard ccli get server.ip -c +# Output as JSON (for scripting) +ccli get server --json + # Show aliases only ccli get -a ``` @@ -218,6 +235,9 @@ ccli find ip -a # Show results as a tree ccli find server -t + +# Output as JSON (for scripting) +ccli find prod --json ``` ### Aliases @@ -260,6 +280,18 @@ ccli rename -a oldalias newalias ccli rename server.old server.new --set-alias sn ``` +### Editing Data + +Open a stored value in your `$EDITOR` (or `$VISUAL`) for inline editing: + +```bash +# Edit an entry in your default editor +ccli edit server.production.ip + +# Edit an encrypted entry (decrypts before editing, re-encrypts on save) +ccli edit api.key --decrypt +``` + ### Removing Data Removing an entry prompts for confirmation. Use `-f` to skip. 
@@ -359,8 +391,14 @@ ccli data export entries # Export to a specific file ccli data export aliases -o my-aliases.json -# Export everything -ccli data export all -o backup.json +# Export with pretty-printed JSON +ccli data export entries --pretty + +# Export confirm metadata +ccli data export confirm + +# Export everything (entries, aliases, confirm metadata) +ccli data export all # Import data from a file (replaces existing) ccli data import entries backup.json @@ -375,6 +413,8 @@ ccli data reset entries ccli data reset all -f ``` +> **Auto-backup:** Before destructive operations (`data reset`, non-merge `data import`), CodexCLI automatically creates a timestamped backup in `~/.codexcli/.backups/`. + ### Shell Wrapper By default, `ccli run` executes commands in a child process. This means shell builtins like `cd`, `export`, and `alias` have no effect on your current shell. @@ -432,7 +472,7 @@ eval "$(ccli config completions bash)" | `ccli set ` | Flags + namespace prefixes (one level at a time) | | `ccli config ` | Subcommands (`set`, `get`, `info`, `examples`, `completions`) | | `ccli config set ` | Config keys (`colors`, `theme`) | -| `ccli data export ` | `entries`, `aliases`, `all` | +| `ccli data export ` | `entries`, `aliases`, `confirm`, `all` | ### Scripting Tips @@ -464,12 +504,13 @@ ccli --debug get server.production | `get` | `g` | `[key]` | Retrieve entries or specific data | | `run` | `r` | `` | Execute stored command(s) (`:` compose, `&&` chain) | | `find` | `f` | `` | Find entries by key or value | +| `edit` | `e` | `` | Open an entry's value in `$EDITOR` | | `remove` | `rm` | `` | Remove an entry and its alias | | `rename` | `rn` | ` ` | Rename an entry key or alias | -| `config` | | `[setting] [value]` | View or change configuration settings | +| `config` | | `` | View or change configuration settings | | `data` | | `` | Manage stored data (export, import, reset) | -**Config subcommands:** `info`, `examples`, `completions ` +**Config subcommands:** 
`set `, `get [key]`, `info`, `examples`, `completions ` **Data subcommands:** `export `, `import `, `reset ` @@ -485,10 +526,10 @@ CodexCLI includes a built-in [Model Context Protocol](https://modelcontextprotoc claude mcp add codexcli -- node /absolute/path/to/dist/mcp-server.js ``` -If you installed CodexCLI globally, you can also use: +If you installed from source via `npm install -g .`, you can also use: ```bash -claude mcp add codexcli -- ccli-mcp +claude mcp add codexcli -- cclid-mcp ``` #### Claude Desktop @@ -510,14 +551,14 @@ Add the following to your Claude Desktop MCP config file: | Tool | Description | |---|---| -| `codex_set` | Set an entry in the data store (key + value, optional alias) | -| `codex_get` | Retrieve entries (specific key, subtree, or all; flat or tree format) | +| `codex_set` | Set an entry (key + value, optional alias, optional encrypt + password) | +| `codex_get` | Retrieve entries (specific key, subtree, or all; optional decrypt + password) | | `codex_remove` | Remove an entry or alias by key | | `codex_search` | Search entries by key or value (case-insensitive) | | `codex_alias_set` | Create or update an alias for a dot-notation path | | `codex_alias_remove` | Remove an alias | | `codex_alias_list` | List all defined aliases | -| `codex_run` | Execute a stored command (with optional dry-run mode) | +| `codex_run` | Execute a stored command (dry-run, force to skip confirm check) | | `codex_config_get` | Get one or all configuration settings | | `codex_config_set` | Set a configuration setting (colors, theme) | | `codex_export` | Export data and/or aliases as JSON text | diff --git a/docs/P3-FEATURE-REQUESTS.md b/docs/P3-FEATURE-REQUESTS.md new file mode 100644 index 0000000..993e8e9 --- /dev/null +++ b/docs/P3-FEATURE-REQUESTS.md @@ -0,0 +1,433 @@ +# P3 Feature Requests — GitHub Issue Templates + +Each section below is a self-contained GitHub issue. Copy the **Title** into the +issue title and the **Body** into the issue description. 
Apply labels: +`enhancement`, `P3`, `good first issue` (where noted). + +--- + +## Issue #15 — Fish & PowerShell Shell Completions + +**Title:** Add Fish and PowerShell shell completions + +**Labels:** `enhancement`, `P3`, `good first issue` + +**Body:** + +### Problem + +`ccli config completions` only generates scripts for **Bash** and **Zsh**. +Users on Fish or PowerShell have no tab-completion support and no shell wrapper +function. + +### Desired Behavior + +- `ccli config completions fish` outputs a Fish completion script. +- `ccli config completions powershell` outputs a PowerShell completion script. +- Both scripts should cover all subcommands, flags, and dynamic key completion + (if feasible). + +### Relevant Files + +- `src/commands/config.ts` — completions subcommand handler +- `src/formatting.ts` — help text references + +### Acceptance Criteria + +- [ ] Fish completions script generated and working +- [ ] PowerShell completions script generated and working +- [ ] Help text updated to list Fish and PowerShell as supported shells +- [ ] Tests added for both output paths + +--- + +## Issue #16 — `copy` / `cp` Command + +**Title:** Add `copy` (cp) command to duplicate entries + +**Labels:** `enhancement`, `P3`, `good first issue` + +**Body:** + +### Problem + +There is no way to duplicate an entry to a new key without manually running +`ccli get ` then `ccli set `. This is tedious and +error-prone, especially for entries with aliases, confirm flags, or encrypted +values. 
+ +### Desired Behavior + +``` +ccli copy <source> <dest> [options] +ccli cp <source> <dest> [options] +``` + +Options: +- `--with-aliases` — also copy aliases from the source entry +- `--with-confirm` — also copy the confirm flag +- `--overwrite` — allow overwriting an existing destination key + +### Relevant Files + +- `src/commands/` — new command file needed +- `src/main.ts` — command registration + +### Acceptance Criteria + +- [ ] `copy`/`cp` command implemented +- [ ] Copies value (and optionally aliases/confirm) to new key +- [ ] Refuses to overwrite existing key unless `--overwrite` is passed +- [ ] MCP `codex_copy` tool added +- [ ] Tests covering happy path, overwrite protection, and options + +--- + +## Issue #17 — Import Preview / Diff + +**Title:** Add preview/diff mode for `data import --merge` + +**Labels:** `enhancement`, `P3` + +**Body:** + +### Problem + +`data import --merge` silently overwrites conflicting keys. Users have no way +to see what will change before committing the merge. + +### Desired Behavior + +``` +ccli data import entries backup.json --merge --dry-run +``` + +Output should show: +- **Added** keys (exist in file but not locally) +- **Modified** keys (exist in both, values differ) with old → new diff +- **Unchanged** keys (exist in both, values match) + +When `--dry-run` is not passed, the import proceeds as normal. + +### Relevant Files + +- `src/commands/data-management.ts` — import logic + +### Acceptance Criteria + +- [ ] `--dry-run` (or `--preview`) flag added to `data import` +- [ ] Output clearly shows added / modified / unchanged keys +- [ ] Modified keys show before and after values +- [ ] No data is written when `--dry-run` is active +- [ ] Tests for each category (added, modified, unchanged) + +--- + +## Issue #18 — Advanced Search (Regex, Boolean Operators) + +**Title:** Add regex and advanced search operators to `find` + +**Labels:** `enhancement`, `P3` + +**Body:** + +### Problem + +`find` only supports case-insensitive substring matching. 
Power users need +regex patterns, field-specific search, and boolean operators. + +### Desired Behavior + +``` +ccli find '/^prod-.*db$/' --regex # regex pattern +ccli find prod --keys-only # search keys only +ccli find password --values-only # search values only +ccli find 'prod AND db' # boolean AND +ccli find 'staging OR dev' # boolean OR +``` + +### Relevant Files + +- `src/commands/helpers.ts` — `findEntries()` function +- `src/formatting.ts` — help text + +### Acceptance Criteria + +- [ ] `--regex` / `-r` flag for regex pattern matching +- [ ] `--keys-only` and `--values-only` flags for field-specific search +- [ ] Basic boolean operators (AND, OR) supported +- [ ] Existing substring behavior unchanged (backward compatible) +- [ ] Tests for regex, field-specific, and boolean queries + +--- + +## Issue #19 — Backup Rotation / Automatic Backup Management + +**Title:** Add backup rotation to limit stored backups + +**Labels:** `enhancement`, `P3` + +**Body:** + +### Problem + +Auto-backups (added in P2 #13) accumulate indefinitely in +`~/.codexcli/.backups/`. There is no built-in way to keep only the N most +recent backups or delete old ones. + +### Desired Behavior + +``` +ccli config set backup-retention 10 # keep last 10 backups +ccli data backups # list all backups with timestamps +ccli data backups --prune # delete backups beyond retention limit +``` + +- Auto-backup should respect the retention setting and prune old backups after + creating a new one. +- Default retention: 10 backups. 
+ +### Relevant Files + +- `src/utils/autoBackup.ts` — backup creation +- `src/commands/data-management.ts` — data subcommands +- `src/commands/config.ts` — config settings + +### Acceptance Criteria + +- [ ] Configurable retention count (default 10) +- [ ] `data backups` command lists existing backups +- [ ] `data backups --prune` manually prunes old backups +- [ ] Auto-backup automatically prunes after creating a new backup +- [ ] Tests for retention logic and pruning + +--- + +## Issue #20 — Command Output Capture + +**Title:** Add output capture mode to `run` command + +**Labels:** `enhancement`, `P3` + +**Body:** + +### Problem + +`run` inherits stdio, so command output goes directly to the terminal. There is +no way to capture the output for piping, storing, or chaining with other +commands. + +### Desired Behavior + +``` +ccli run deploy-script --capture # print output after execution +ccli run deploy-script --capture --quiet # suppress live output, print at end +ccli run health-check --capture --set-result health-status + # capture output and store as new entry +``` + +The `--capture` flag buffers stdout/stderr and makes it available for: +- Printing after the command exits +- Storing as a new codexCLI entry via `--set-result ` +- Piping to other commands via stdout + +### Relevant Files + +- `src/commands/run.ts` — run command implementation + +### Acceptance Criteria + +- [ ] `--capture` flag implemented +- [ ] `--quiet` suppresses live output when combined with `--capture` +- [ ] `--set-result ` stores captured output as a new entry +- [ ] Exit code still propagated correctly +- [ ] Tests for capture, quiet, and set-result modes + +--- + +## Issue #21 — Change Log / Audit Trail + +**Title:** Add change log / audit trail for entry modifications + +**Labels:** `enhancement`, `P3` + +**Body:** + +### Problem + +There is no record of what was added, changed, or deleted. Users cannot answer +"when did this value change?" or "who deleted that key?". 
+ +### Desired Behavior + +``` +ccli log # show recent changes +ccli log --key prod-db # show history for a specific key +ccli log --limit 20 # show last 20 changes +ccli log --since 2025-01-01 # show changes since a date +``` + +Each log entry should record: +- Timestamp +- Operation (set, delete, import, reset, copy, edit) +- Key affected +- Old value (truncated/redacted for encrypted entries) +- New value (truncated/redacted for encrypted entries) + +### Relevant Files + +- New file: `src/utils/auditLog.ts` +- `src/commands/` — integration into set, delete, import, reset, edit, copy + +### Acceptance Criteria + +- [ ] Audit log written to `~/.codexcli/.audit.log` (or similar) +- [ ] All mutating operations log their changes +- [ ] `log` command with filtering by key, count, and date +- [ ] Encrypted values are redacted in the log +- [ ] Log file uses restrictive permissions (0600) +- [ ] Tests for logging and query filters + +--- + +## Issue #22 — Fuzzy Finder Integration + +**Title:** Add fuzzy finder (fzf) integration for interactive key selection + +**Labels:** `enhancement`, `P3` + +**Body:** + +### Problem + +Users with many entries have no interactive way to browse and select keys. +They must know the exact key name or use `find` first. + +### Desired Behavior + +``` +ccli get --interactive # launch fzf to pick a key, then show value +ccli get -i # short form +ccli run -i # pick a runnable entry interactively +ccli edit -i # pick an entry to edit interactively +``` + +- If `fzf` is installed, pipe keys into it for fuzzy selection. +- If `fzf` is not installed, fall back to a simple numbered list prompt. +- Preview pane shows the value of the highlighted key. 
+ +### Relevant Files + +- New file: `src/utils/fuzzySelect.ts` +- `src/commands/` — integration into get, run, edit, delete + +### Acceptance Criteria + +- [ ] `--interactive` / `-i` flag on get, run, edit, delete +- [ ] fzf integration with preview pane +- [ ] Graceful fallback when fzf is not installed +- [ ] Tests (mocked fzf process) + +--- + +## Issue #23 — Conditional Interpolation / Fallback Syntax + +**Title:** Add conditional interpolation with fallback values + +**Labels:** `enhancement`, `P3` + +**Body:** + +### Problem + +Cross-references (`${key}`) fail with an error when the referenced key doesn't +exist. There is no way to specify a default/fallback value or produce a custom +error message. + +### Desired Behavior + +``` +ccli set conn '${db-host:-localhost}:${db-port:-5432}' +ccli set deploy '${deploy-target:?ERROR: deploy-target must be set}' +``` + +Syntax (follows Bash parameter expansion conventions): +- `${key:-default}` — use `default` if `key` is unset or empty +- `${key:+alternate}` — use `alternate` if `key` IS set +- `${key:?error message}` — abort with error message if `key` is unset + +### Relevant Files + +- `src/utils/interpolation.ts` (or wherever `${}` resolution lives) +- `src/commands/helpers.ts` + +### Acceptance Criteria + +- [ ] `${key:-default}` returns default when key is missing +- [ ] `${key:+alternate}` returns alternate when key exists +- [ ] `${key:?message}` throws with message when key is missing +- [ ] Existing `${key}` behavior unchanged +- [ ] Nested references resolved correctly +- [ ] Tests for all three operators plus edge cases + +--- + +## Issue #24 — Batch Operations + +**Title:** Add batch set/delete operations + +**Labels:** `enhancement`, `P3` + +**Body:** + +### Problem + +Setting or deleting multiple entries requires separate commands for each key. +This is slow and produces multiple auto-backups. 
+ +### Desired Behavior + +``` +ccli set --batch key1=val1 key2=val2 key3=val3 +ccli set --from-file pairs.txt # file with key=value per line +ccli set --from-json '{"k1":"v1","k2":"v2"}' +ccli delete --batch key1 key2 key3 +``` + +- Batch operations should be atomic (all-or-nothing). +- A single auto-backup is created before the batch, not one per entry. +- A single file-lock is held for the entire batch. + +### Relevant Files + +- `src/commands/` — set and delete command handlers +- `src/utils/fileLock.ts` — locking +- `src/utils/autoBackup.ts` — backup integration + +### Acceptance Criteria + +- [ ] `set --batch key=value ...` sets multiple entries atomically +- [ ] `set --from-file` reads key=value pairs from a file +- [ ] `set --from-json` reads from a JSON object +- [ ] `delete --batch key ...` deletes multiple entries atomically +- [ ] Only one auto-backup per batch operation +- [ ] Only one file-lock acquisition per batch +- [ ] MCP `codex_batch_set` and `codex_batch_delete` tools added +- [ ] Tests for all input modes and atomicity + +--- + +## Quick Reference + +| Issue | Title | Labels | +|-------|-------|--------| +| 15 | Fish & PowerShell Shell Completions | `enhancement`, `P3`, `good first issue` | +| 16 | `copy`/`cp` Command | `enhancement`, `P3`, `good first issue` | +| 17 | Import Preview / Diff | `enhancement`, `P3` | +| 18 | Advanced Search (Regex, Boolean) | `enhancement`, `P3` | +| 19 | Backup Rotation | `enhancement`, `P3` | +| 20 | Command Output Capture | `enhancement`, `P3` | +| 21 | Change Log / Audit Trail | `enhancement`, `P3` | +| 22 | Fuzzy Finder Integration | `enhancement`, `P3` | +| 23 | Conditional Interpolation | `enhancement`, `P3` | +| 24 | Batch Operations | `enhancement`, `P3` | diff --git a/package-lock.json b/package-lock.json index ae04a5b..e3b2842 100644 --- a/package-lock.json +++ b/package-lock.json @@ -15,8 +15,8 @@ "zod": "^4.3.6" }, "bin": { - "ccli-mcp": "dist/mcp-server.js", - "cclid": "dist/index.js" + "cclid": 
"dist/index.js", + "cclid-mcp": "dist/mcp-server.js" }, "devDependencies": { "@types/node": "^22.13.8", @@ -532,7 +532,6 @@ "integrity": "sha512-uVSdg/V4dfQmTjJzR0szNczjOH/J+FyUMMjYtr07xFRXR7EDf9i1qdxrD0VusZH9knj1/ecxzCQQxyic5NzAiA==", "dev": true, "license": "Apache-2.0", - "peer": true, "dependencies": { "@eslint/object-schema": "^3.0.1", "debug": "^4.3.1", @@ -548,7 +547,6 @@ "integrity": "sha512-a5MxrdDXEvqnIq+LisyCX6tQMPF/dSJpCfBgBauY+pNZ28yCtSsTvyTYrMhaI+LK26bVyCJfJkT0u8KIj2i1dQ==", "dev": true, "license": "Apache-2.0", - "peer": true, "dependencies": { "@eslint/core": "^1.1.0" }, @@ -562,7 +560,6 @@ "integrity": "sha512-/nr9K9wkr3P1EzFTdFdMoLuo1PmIxjmwvPozwoSodjNBdefGujXQUF93u1DDZpEaTuDvMsIQddsd35BwtrW9Xw==", "dev": true, "license": "Apache-2.0", - "peer": true, "dependencies": { "@types/json-schema": "^7.0.15" }, @@ -576,7 +573,6 @@ "integrity": "sha512-P9cq2dpr+LU8j3qbLygLcSZrl2/ds/pUpfnHNNuk5HW7mnngHs+6WSq5C9mO3rqRX8A1poxqLTC9cu0KOyJlBg==", "dev": true, "license": "Apache-2.0", - "peer": true, "engines": { "node": "^20.19.0 || ^22.13.0 || >=24" } @@ -587,7 +583,6 @@ "integrity": "sha512-bIZEUzOI1jkhviX2cp5vNyXQc6olzb2ohewQubuYlMXZ2Q/XjBO0x0XhGPvc9fjSIiUN0vw+0hq53BJ4eQSJKQ==", "dev": true, "license": "Apache-2.0", - "peer": true, "dependencies": { "@eslint/core": "^1.1.0", "levn": "^0.4.1" @@ -614,7 +609,6 @@ "integrity": "sha512-5DyQ4+1JEUzejeK1JGICcideyfUbGixgS9jNgex5nqkW+cY7WZhxBigmieN5Qnw9ZosSNVC9KQKyb+GUaGyKUA==", "dev": true, "license": "Apache-2.0", - "peer": true, "engines": { "node": ">=18.18.0" } @@ -625,7 +619,6 @@ "integrity": "sha512-/zUx+yOsIrG4Y43Eh2peDeKCxlRt/gET6aHfaKpuq267qXdYDFViVHfMaLyygZOnl0kGWxFIgsBy8QFuTLUXEQ==", "dev": true, "license": "Apache-2.0", - "peer": true, "dependencies": { "@humanfs/core": "^0.19.1", "@humanwhocodes/retry": "^0.4.0" @@ -640,7 +633,6 @@ "integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==", "dev": true, "license": "Apache-2.0", - "peer": true, 
"engines": { "node": ">=12.22" }, @@ -655,7 +647,6 @@ "integrity": "sha512-bV0Tgo9K4hfPCek+aMAn81RppFKv2ySDQeMoSZuvTASywNTnVJCArCZE2FWqpvIatKu7VMRLWlR1EazvVhDyhQ==", "dev": true, "license": "Apache-2.0", - "peer": true, "engines": { "node": ">=18.18" }, @@ -1140,8 +1131,7 @@ "resolved": "https://registry.npmjs.org/@types/esrecurse/-/esrecurse-4.3.1.tgz", "integrity": "sha512-xJBAbDifo5hpffDBuHl0Y8ywswbiAp/Wi7Y/GtAgSlZyIABppyurxVueOPE8LUQOxdlgi6Zqce7uoEpqNTeiUw==", "dev": true, - "license": "MIT", - "peer": true + "license": "MIT" }, "node_modules/@types/estree": { "version": "1.0.8", @@ -1155,8 +1145,7 @@ "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.15.tgz", "integrity": "sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==", "dev": true, - "license": "MIT", - "peer": true + "license": "MIT" }, "node_modules/@types/node": { "version": "22.19.11", @@ -1164,6 +1153,7 @@ "integrity": "sha512-BH7YwL6rA93ReqeQS1c4bsPpcfOmJasG+Fkr6Y59q83f9M1WcBRHR2vM+P9eOisYRcN3ujQoiZY8uk5W+1WL8w==", "dev": true, "license": "MIT", + "peer": true, "dependencies": { "undici-types": "~6.21.0" } @@ -1213,6 +1203,7 @@ "integrity": "sha512-IgSWvLobTDOjnaxAfDTIHaECbkNlAlKv2j5SjpB2v7QHKv1FIfjwMy8FsDbVfDX/KjmCmYICcw7uGaXLhtsLNg==", "dev": true, "license": "MIT", + "peer": true, "dependencies": { "@typescript-eslint/scope-manager": "8.56.0", "@typescript-eslint/types": "8.56.0", @@ -1528,6 +1519,7 @@ "integrity": "sha512-UVJyE9MttOsBQIDKw1skb9nAwQuR5wuGD3+82K6JgJlm/Y+KI92oNsMNGZCYdDsVtRHSak0pcV5Dno5+4jh9sw==", "dev": true, "license": "MIT", + "peer": true, "bin": { "acorn": "bin/acorn" }, @@ -1541,7 +1533,6 @@ "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", "dev": true, "license": "MIT", - "peer": true, "peerDependencies": { "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" } @@ -1862,8 +1853,7 @@ "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", 
"integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", "dev": true, - "license": "MIT", - "peer": true + "license": "MIT" }, "node_modules/depd": { "version": "2.0.0", @@ -2004,7 +1994,6 @@ "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", "dev": true, "license": "MIT", - "peer": true, "engines": { "node": ">=10" }, @@ -2075,7 +2064,6 @@ "integrity": "sha512-CkWE42hOJsNj9FJRaoMX9waUFYhqY4jmyLFdAdzZr6VaCg3ynLYx4WnOdkaIifGfH4gsUcBTn4OZbHXkpLD0FQ==", "dev": true, "license": "BSD-2-Clause", - "peer": true, "dependencies": { "@types/esrecurse": "^4.3.1", "@types/estree": "^1.0.8", @@ -2108,7 +2096,6 @@ "integrity": "sha512-IWrosm/yrn43eiKqkfkHis7QioDleaXQHdDVPKg0FSwwd/DuvyX79TZnFOnYpB7dcsFAMmtFztZuXPDvSePkFw==", "dev": true, "license": "MIT", - "peer": true, "dependencies": { "fast-deep-equal": "^3.1.1", "fast-json-stable-stringify": "^2.0.0", @@ -2125,8 +2112,7 @@ "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", "dev": true, - "license": "MIT", - "peer": true + "license": "MIT" }, "node_modules/espree": { "version": "11.1.0", @@ -2134,7 +2120,6 @@ "integrity": "sha512-WFWYhO1fV4iYkqOOvq8FbqIhr2pYfoDY0kCotMkDeNtGpiGGkZ1iov2u8ydjtgM8yF8rzK7oaTbw2NAzbAbehw==", "dev": true, "license": "BSD-2-Clause", - "peer": true, "dependencies": { "acorn": "^8.15.0", "acorn-jsx": "^5.3.2", @@ -2153,7 +2138,6 @@ "integrity": "sha512-Ap6G0WQwcU/LHsvLwON1fAQX9Zp0A2Y6Y/cJBl9r/JbW90Zyg4/zbG6zzKa2OTALELarYHmKu0GhpM5EO+7T0g==", "dev": true, "license": "BSD-3-Clause", - "peer": true, "dependencies": { "estraverse": "^5.1.0" }, @@ -2167,7 +2151,6 @@ "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", "dev": true, "license": "BSD-2-Clause", - "peer": true, 
"dependencies": { "estraverse": "^5.2.0" }, @@ -2181,7 +2164,6 @@ "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", "dev": true, "license": "BSD-2-Clause", - "peer": true, "engines": { "node": ">=4.0" } @@ -2202,7 +2184,6 @@ "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", "dev": true, "license": "BSD-2-Clause", - "peer": true, "engines": { "node": ">=0.10.0" } @@ -2252,6 +2233,7 @@ "resolved": "https://registry.npmjs.org/express/-/express-5.2.1.tgz", "integrity": "sha512-hIS4idWWai69NezIdRt2xFVofaF4j+6INOpJlVOLDO8zXGpUVEVzIYk12UUi2JzjEzWL3IOAxcTubgz9Po0yXw==", "license": "MIT", + "peer": true, "dependencies": { "accepts": "^2.0.0", "body-parser": "^2.2.1", @@ -2319,16 +2301,14 @@ "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", "dev": true, - "license": "MIT", - "peer": true + "license": "MIT" }, "node_modules/fast-levenshtein": { "version": "2.0.6", "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==", "dev": true, - "license": "MIT", - "peer": true + "license": "MIT" }, "node_modules/fast-uri": { "version": "3.1.0", @@ -2370,7 +2350,6 @@ "integrity": "sha512-XXTUwCvisa5oacNGRP9SfNtYBNAMi+RPwBFmblZEF7N7swHYQS6/Zfk7SRwx4D5j3CH211YNRco1DEMNVfZCnQ==", "dev": true, "license": "MIT", - "peer": true, "dependencies": { "flat-cache": "^4.0.0" }, @@ -2405,7 +2384,6 @@ "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", "dev": true, "license": "MIT", - "peer": true, "dependencies": { "locate-path": "^6.0.0", "path-exists": "^4.0.0" @@ -2423,7 +2401,6 @@ "integrity": 
"sha512-f7ccFPK3SXFHpx15UIGyRJ/FJQctuKZ0zVuN3frBo4HnK3cay9VEW0R6yPYFHC0AgqhukPzKjq22t5DmAyqGyw==", "dev": true, "license": "MIT", - "peer": true, "dependencies": { "flatted": "^3.2.9", "keyv": "^4.5.4" @@ -2437,8 +2414,7 @@ "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.3.3.tgz", "integrity": "sha512-GX+ysw4PBCz0PzosHDepZGANEuFCMLrnRTiEy9McGjmkCQYwRq4A/X786G/fjM/+OjsWSU1ZrY5qyARZmO/uwg==", "dev": true, - "license": "ISC", - "peer": true + "license": "ISC" }, "node_modules/forwarded": { "version": "0.2.0", @@ -2525,7 +2501,6 @@ "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", "dev": true, "license": "ISC", - "peer": true, "dependencies": { "is-glob": "^4.0.3" }, @@ -2583,6 +2558,7 @@ "resolved": "https://registry.npmjs.org/hono/-/hono-4.12.0.tgz", "integrity": "sha512-NekXntS5M94pUfiVZ8oXXK/kkri+5WpX2/Ik+LVsl+uvw+soj4roXIsPqO+XsWrAw20mOzaXOZf3Q7PfB9A/IA==", "license": "MIT", + "peer": true, "engines": { "node": ">=16.9.0" } @@ -2629,7 +2605,6 @@ "integrity": "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==", "dev": true, "license": "MIT", - "peer": true, "engines": { "node": ">= 4" } @@ -2640,7 +2615,6 @@ "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", "dev": true, "license": "MIT", - "peer": true, "engines": { "node": ">=0.8.19" } @@ -2675,7 +2649,6 @@ "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", "dev": true, "license": "MIT", - "peer": true, "engines": { "node": ">=0.10.0" } @@ -2686,7 +2659,6 @@ "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", "dev": true, "license": "MIT", - "peer": true, "dependencies": { "is-extglob": "^2.1.1" }, @@ -2720,8 +2692,7 @@ "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz", "integrity": 
"sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==", "dev": true, - "license": "MIT", - "peer": true + "license": "MIT" }, "node_modules/json-schema-traverse": { "version": "1.0.0", @@ -2740,8 +2711,7 @@ "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", "integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==", "dev": true, - "license": "MIT", - "peer": true + "license": "MIT" }, "node_modules/keyv": { "version": "4.5.4", @@ -2749,7 +2719,6 @@ "integrity": "sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==", "dev": true, "license": "MIT", - "peer": true, "dependencies": { "json-buffer": "3.0.1" } @@ -2760,7 +2729,6 @@ "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==", "dev": true, "license": "MIT", - "peer": true, "dependencies": { "prelude-ls": "^1.2.1", "type-check": "~0.4.0" @@ -2775,7 +2743,6 @@ "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", "dev": true, "license": "MIT", - "peer": true, "dependencies": { "p-locate": "^5.0.0" }, @@ -2974,7 +2941,6 @@ "integrity": "sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==", "dev": true, "license": "MIT", - "peer": true, "dependencies": { "deep-is": "^0.1.3", "fast-levenshtein": "^2.0.6", @@ -2993,7 +2959,6 @@ "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", "dev": true, "license": "MIT", - "peer": true, "dependencies": { "yocto-queue": "^0.1.0" }, @@ -3010,7 +2975,6 @@ "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", "dev": true, "license": "MIT", - "peer": true, "dependencies": { "p-limit": "^3.0.2" }, @@ 
-3036,7 +3000,6 @@ "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", "dev": true, "license": "MIT", - "peer": true, "engines": { "node": ">=8" } @@ -3080,6 +3043,7 @@ "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", "dev": true, "license": "MIT", + "peer": true, "engines": { "node": ">=12" }, @@ -3157,7 +3121,6 @@ "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==", "dev": true, "license": "MIT", - "peer": true, "engines": { "node": ">= 0.8.0" } @@ -3181,7 +3144,6 @@ "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", "dev": true, "license": "MIT", - "peer": true, "engines": { "node": ">=6" } @@ -3633,7 +3595,6 @@ "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==", "dev": true, "license": "MIT", - "peer": true, "dependencies": { "prelude-ls": "^1.2.1" }, @@ -3661,6 +3622,7 @@ "integrity": "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==", "dev": true, "license": "Apache-2.0", + "peer": true, "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" @@ -3715,7 +3677,6 @@ "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", "dev": true, "license": "BSD-2-Clause", - "peer": true, "dependencies": { "punycode": "^2.1.0" } @@ -3742,6 +3703,7 @@ "integrity": "sha512-w+N7Hifpc3gRjZ63vYBXA56dvvRlNWRczTdmCBBa+CotUzAPf5b7YMdMR/8CQoeYE5LX3W4wj6RYTgonm1b9DA==", "dev": true, "license": "MIT", + "peer": true, "dependencies": { "esbuild": "^0.27.0", "fdir": "^6.5.0", @@ -4411,7 +4373,6 @@ "integrity": "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==", "dev": true, "license": "MIT", - "peer": true, "engines": { "node": ">=0.10.0" } @@ 
-4438,7 +4399,6 @@ "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", "dev": true, "license": "MIT", - "peer": true, "engines": { "node": ">=10" }, @@ -4451,6 +4411,7 @@ "resolved": "https://registry.npmjs.org/zod/-/zod-4.3.6.tgz", "integrity": "sha512-rftlrkhHZOcjDwkGlnUtZZkvaPHCsDATp4pGpuOOMDaTdDDXF91wuVDJoWoPsKX/3YPQ5fHuF3STjcYyKr+Qhg==", "license": "MIT", + "peer": true, "funding": { "url": "https://github.com/sponsors/colinhacks" } diff --git a/src/__tests__/alias.test.ts b/src/__tests__/alias.test.ts index 41b9270..e175e1c 100644 --- a/src/__tests__/alias.test.ts +++ b/src/__tests__/alias.test.ts @@ -17,7 +17,12 @@ vi.mock('fs', () => { writeFileSync: vi.fn(), renameSync: vi.fn(), mkdirSync: vi.fn(), - statSync: vi.fn() + statSync: vi.fn(), + openSync: vi.fn(() => 3), + writeSync: vi.fn(), + closeSync: vi.fn(), + unlinkSync: vi.fn(), + constants: { O_CREAT: 0x40, O_EXCL: 0x80, O_WRONLY: 0x01 } }; return { default: mock, ...mock }; }); @@ -198,7 +203,7 @@ describe('Alias Management', () => { saveAliases({ myAlias: 'some.path' }); - expect(fs.mkdirSync).toHaveBeenCalledWith(expect.any(String), { recursive: true }); + expect(fs.mkdirSync).toHaveBeenCalledWith(expect.any(String), { recursive: true, mode: 0o700 }); expect(fs.writeFileSync).toHaveBeenCalled(); }); diff --git a/src/__tests__/clipboard.test.ts b/src/__tests__/clipboard.test.ts index 1b5d848..97c91d5 100644 --- a/src/__tests__/clipboard.test.ts +++ b/src/__tests__/clipboard.test.ts @@ -42,9 +42,17 @@ describe('copyToClipboard', () => { expect(execSync).toHaveBeenCalledWith('xsel --clipboard --input', { input: 'hello' }); }); - it('throws on unsupported platform', () => { + it('uses clip on Windows', () => { Object.defineProperty(process, 'platform', { value: 'win32' }); - expect(() => copyToClipboard('hello')).toThrow('Clipboard not supported on platform: win32'); + copyToClipboard('hello'); + + expect(execSync).toHaveBeenCalledWith('clip', 
{ input: 'hello' }); + }); + + it('throws on unsupported platform', () => { + Object.defineProperty(process, 'platform', { value: 'freebsd' }); + + expect(() => copyToClipboard('hello')).toThrow('Clipboard not supported on platform: freebsd'); }); }); diff --git a/src/__tests__/commands.test.ts b/src/__tests__/commands.test.ts index 4e82f24..e098b69 100644 --- a/src/__tests__/commands.test.ts +++ b/src/__tests__/commands.test.ts @@ -36,7 +36,14 @@ vi.mock('fs', () => { writeFileSync: vi.fn(), renameSync: vi.fn(), mkdirSync: vi.fn(), - statSync: vi.fn() + statSync: vi.fn(), + openSync: vi.fn(() => 3), + writeSync: vi.fn(), + closeSync: vi.fn(), + unlinkSync: vi.fn(), + copyFileSync: vi.fn(), + rmdirSync: vi.fn(), + constants: { O_CREAT: 0x40, O_EXCL: 0x80, O_WRONLY: 0x01 } }; return { default: mock, ...mock }; }); diff --git a/src/__tests__/config.test.ts b/src/__tests__/config.test.ts index dc25e2d..fae7dc3 100644 --- a/src/__tests__/config.test.ts +++ b/src/__tests__/config.test.ts @@ -88,7 +88,7 @@ describe('Config', () => { expect(fs.writeFileSync).toHaveBeenCalledWith( '/mock/config.json.tmp', JSON.stringify({ colors: true, theme: 'dark' }, null, 2), - 'utf8' + { encoding: 'utf8', mode: 0o600 } ); expect(fs.renameSync).toHaveBeenCalledWith( '/mock/config.json.tmp', diff --git a/src/__tests__/confirm.test.ts b/src/__tests__/confirm.test.ts index fdedd97..0b59528 100644 --- a/src/__tests__/confirm.test.ts +++ b/src/__tests__/confirm.test.ts @@ -16,7 +16,12 @@ vi.mock('fs', () => { writeFileSync: vi.fn(), renameSync: vi.fn(), mkdirSync: vi.fn(), - statSync: vi.fn() + statSync: vi.fn(), + openSync: vi.fn(() => 3), + writeSync: vi.fn(), + closeSync: vi.fn(), + unlinkSync: vi.fn(), + constants: { O_CREAT: 0x40, O_EXCL: 0x80, O_WRONLY: 0x01 } }; return { default: mock, ...mock }; }); @@ -105,7 +110,7 @@ describe('Confirm Metadata', () => { saveConfirmKeys({ 'my.key': true }); - expect(fs.mkdirSync).toHaveBeenCalledWith(expect.any(String), { recursive: true }); + 
expect(fs.mkdirSync).toHaveBeenCalledWith(expect.any(String), { recursive: true, mode: 0o700 }); expect(fs.writeFileSync).toHaveBeenCalled(); }); diff --git a/src/__tests__/mcp-server.test.ts b/src/__tests__/mcp-server.test.ts index 5e81d2a..b56867e 100644 --- a/src/__tests__/mcp-server.test.ts +++ b/src/__tests__/mcp-server.test.ts @@ -4,7 +4,7 @@ type ToolHandler = (params: any) => Promise; const { toolHandlers, mockExecSync, mockFiles, mockWrittenFiles, - mockData, mockAliases, mockConfig, + mockData, mockAliases, mockConfig, mockConfirmKeys, } = vi.hoisted(() => ({ toolHandlers: {} as Record, mockExecSync: vi.fn(), @@ -13,6 +13,7 @@ const { mockData: {} as Record, mockAliases: {} as Record, mockConfig: { colors: true, theme: 'default' } as Record, + mockConfirmKeys: {} as Record, })); vi.mock('@modelcontextprotocol/sdk/server/mcp.js', () => { @@ -49,6 +50,11 @@ vi.mock('fs', () => { delete mockWrittenFiles[src]; } }), + openSync: vi.fn(() => 3), + writeSync: vi.fn(), + closeSync: vi.fn(), + unlinkSync: vi.fn(), + constants: { O_CREAT: 0x40, O_EXCL: 0x80, O_WRONLY: 0x01 }, }; return { default: mock, ...mock }; }); @@ -113,11 +119,22 @@ vi.mock('../alias', () => ({ removeAliasesForKey: vi.fn(), })); +// Mock confirm +vi.mock('../confirm', () => ({ + hasConfirm: vi.fn((key: string) => mockConfirmKeys[key] === true), + loadConfirmKeys: vi.fn(() => ({ ...mockConfirmKeys })), + saveConfirmKeys: vi.fn((c: any) => { + Object.keys(mockConfirmKeys).forEach(k => delete mockConfirmKeys[k]); + Object.assign(mockConfirmKeys, c); + }), +})); + vi.mock('../utils/paths', () => ({ ensureDataDirectoryExists: vi.fn(), getDataFilePath: vi.fn(() => '/mock/entries.json'), getAliasFilePath: vi.fn(() => '/mock/aliases.json'), getConfigFilePath: vi.fn(() => '/mock/config.json'), + getConfirmFilePath: vi.fn(() => '/mock/confirm.json'), })); vi.mock('../formatting', () => ({ @@ -149,6 +166,7 @@ vi.mock('../utils/deepMerge', () => ({ function resetMocks() { Object.keys(mockData).forEach(k 
=> delete mockData[k]); Object.keys(mockAliases).forEach(k => delete mockAliases[k]); + Object.keys(mockConfirmKeys).forEach(k => delete mockConfirmKeys[k]); Object.keys(mockConfig).forEach(k => delete mockConfig[k]); Object.assign(mockConfig, { colors: true, theme: 'default' }); Object.keys(mockFiles).forEach(k => delete mockFiles[k]); @@ -175,6 +193,25 @@ describe('MCP Server Tools', () => { expect(result.content[0].text).toContain('Set: server.ip = 10.0.0.1'); expect(result.isError).toBeUndefined(); }); + + it('masks plaintext in response when encrypt is true', async () => { + const result = await toolHandlers['codex_set']({ + key: 'api.secret', value: 'mysecret', encrypt: true, password: 'pass', + }); + expect(result.isError).toBeUndefined(); + expect(result.content[0].text).toContain('[encrypted]'); + expect(result.content[0].text).not.toContain('mysecret'); + }); + + it('masks plaintext in response when encrypt is true with alias', async () => { + const result = await toolHandlers['codex_set']({ + key: 'api.secret', value: 'mysecret', encrypt: true, password: 'pass', alias: 'sec', + }); + expect(result.isError).toBeUndefined(); + expect(result.content[0].text).toContain('[encrypted]'); + expect(result.content[0].text).toContain('Alias set: sec ->'); + expect(result.content[0].text).not.toContain('mysecret'); + }); }); describe('codex_get', () => { @@ -489,7 +526,7 @@ describe('MCP Server Tools', () => { Object.assign(mockAliases, { x: 'y' }); const result = await toolHandlers['codex_export']({ type: 'all', pretty: undefined }); const parsed = JSON.parse(result.content[0].text); - expect(parsed).toEqual({ entries: { a: '1' }, aliases: { x: 'y' } }); + expect(parsed).toEqual({ entries: { a: '1' }, aliases: { x: 'y' }, confirm: {} }); }); it('pretty-prints when requested', async () => { @@ -567,7 +604,7 @@ describe('MCP Server Tools', () => { const result = await toolHandlers['codex_import']({ type: 'all', json, merge: undefined, }); - 
expect(result.content[0].text).toContain('Entries and aliases imported successfully'); + expect(result.content[0].text).toContain('Entries, aliases, and confirm keys imported successfully'); expect(mockData).toEqual({ new: 'data' }); expect(mockAliases).toEqual({ new: 'alias.path' }); }); @@ -579,7 +616,7 @@ describe('MCP Server Tools', () => { const result = await toolHandlers['codex_import']({ type: 'all', json, merge: true, }); - expect(result.content[0].text).toContain('Entries and aliases merged successfully'); + expect(result.content[0].text).toContain('Entries, aliases, and confirm keys merged successfully'); expect(mockData).toEqual({ existing: 'data', added: 'data' }); expect(mockAliases).toEqual({ existing: 'alias.path', added: 'alias.path' }); }); @@ -610,7 +647,7 @@ describe('MCP Server Tools', () => { // Import const result = await toolHandlers['codex_import']({ type: 'all', json, merge: undefined }); - expect(result.content[0].text).toContain('Entries and aliases imported successfully'); + expect(result.content[0].text).toContain('Entries, aliases, and confirm keys imported successfully'); expect(mockData).toEqual({ server: { ip: '10.0.0.1' } }); expect(mockAliases).toEqual({ srv: 'server.ip' }); }); @@ -635,7 +672,7 @@ describe('MCP Server Tools', () => { Object.assign(mockData, { a: '1' }); Object.assign(mockAliases, { srv: 'server.ip' }); const result = await toolHandlers['codex_reset']({ type: 'all' }); - expect(result.content[0].text).toContain('Entries and aliases reset to empty state'); + expect(result.content[0].text).toContain('Entries, aliases, and confirm keys reset to empty state'); expect(mockData).toEqual({}); expect(mockAliases).toEqual({}); }); diff --git a/src/__tests__/storage.test.ts b/src/__tests__/storage.test.ts index 414070a..765df95 100644 --- a/src/__tests__/storage.test.ts +++ b/src/__tests__/storage.test.ts @@ -7,7 +7,12 @@ vi.mock('fs', () => { readFileSync: vi.fn(), writeFileSync: vi.fn(), renameSync: vi.fn(), - statSync: 
vi.fn() + statSync: vi.fn(), + openSync: vi.fn(() => 3), + writeSync: vi.fn(), + closeSync: vi.fn(), + unlinkSync: vi.fn(), + constants: { O_CREAT: 0x40, O_EXCL: 0x80, O_WRONLY: 0x01 } }; return { default: mock, ...mock }; }); @@ -141,7 +146,7 @@ describe('Storage', () => { expect(fs.writeFileSync).toHaveBeenCalledWith( '/mock/entries.json.tmp', JSON.stringify({ key: 'value' }, null, 2), - 'utf8' + { encoding: 'utf8', mode: 0o600 } ); expect(fs.renameSync).toHaveBeenCalledWith( '/mock/entries.json.tmp', diff --git a/src/commands/data-management.ts b/src/commands/data-management.ts index 6687bfc..03ca5a3 100644 --- a/src/commands/data-management.ts +++ b/src/commands/data-management.ts @@ -9,6 +9,7 @@ import { validateDataType, confirmOrAbort, getInvalidDataTypeMessage, printSucce import { deepMerge } from '../utils/deepMerge'; import { maskEncryptedValues } from '../utils/crypto'; import { debug } from '../utils/debug'; +import { createAutoBackup } from '../utils/autoBackup'; export function exportData(type: string, options: ExportOptions): void { debug('exportData called', { type, options }); @@ -22,21 +23,30 @@ export function exportData(type: string, options: ExportOptions): void { const timestamp = new Date().toISOString().replace(/:/g, '-').split('.')[0]; const indent = options.pretty ? 2 : 0; + // When exporting 'all' with -o, suffix the filename per type to avoid overwriting + const getOutputFile = (typeName: string, defaultName: string): string => { + if (!options.output) return path.join(defaultDir, defaultName); + if (type !== 'all') return options.output; + const ext = path.extname(options.output); + const base = options.output.slice(0, options.output.length - ext.length); + return `${base}-${typeName}${ext || '.json'}`; + }; + if (type === 'entries' || type === 'all') { - const outputFile = options.output ?? 
path.join(defaultDir, `codexcli-entries-${timestamp}.json`); - fs.writeFileSync(outputFile, JSON.stringify(maskEncryptedValues(loadData()), null, indent), 'utf8'); + const outputFile = getOutputFile('entries', `codexcli-entries-${timestamp}.json`); + fs.writeFileSync(outputFile, JSON.stringify(maskEncryptedValues(loadData()), null, indent), { encoding: 'utf8', mode: 0o600 }); printSuccess(`Entries exported to: ${color.cyan(outputFile)}`); } if (type === 'aliases' || type === 'all') { - const outputFile = options.output ?? path.join(defaultDir, `codexcli-aliases-${timestamp}.json`); - fs.writeFileSync(outputFile, JSON.stringify(loadAliases(), null, indent), 'utf8'); + const outputFile = getOutputFile('aliases', `codexcli-aliases-${timestamp}.json`); + fs.writeFileSync(outputFile, JSON.stringify(loadAliases(), null, indent), { encoding: 'utf8', mode: 0o600 }); printSuccess(`Aliases exported to: ${color.cyan(outputFile)}`); } - if (type === 'all') { - const outputFile = options.output ?? path.join(defaultDir, `codexcli-confirm-${timestamp}.json`); - fs.writeFileSync(outputFile, JSON.stringify(loadConfirmKeys(), null, indent), 'utf8'); + if (type === 'confirm' || type === 'all') { + const outputFile = getOutputFile('confirm', `codexcli-confirm-${timestamp}.json`); + fs.writeFileSync(outputFile, JSON.stringify(loadConfirmKeys(), null, indent), { encoding: 'utf8', mode: 0o600 }); printSuccess(`Confirm keys exported to: ${color.cyan(outputFile)}`); } } catch (error) { @@ -82,6 +92,11 @@ export async function importData(type: string, file: string, options: ImportOpti const validData = importedData as Record; + // Auto-backup before destructive import (replace, not merge) + if (!options.merge) { + createAutoBackup('pre-import'); + } + if (type === 'entries' || type === 'all') { const currentData = options.merge ? loadData() : {}; @@ -110,8 +125,7 @@ export async function importData(type: string, file: string, options: ImportOpti printSuccess(`Aliases ${options.merge ? 
'merged' : 'imported'} successfully`); } - if (type === 'all') { - // Import confirm keys — values must all be true + if (type === 'confirm' || type === 'all') { const currentConfirm = options.merge ? loadConfirmKeys() : {}; const newConfirm = options.merge ? { ...currentConfirm, ...(validData as Record) } @@ -140,6 +154,9 @@ export async function resetData(type: string, options: ResetOptions): Promise = {}; + for (const [k, v] of Object.entries(flat)) { + if (isEncrypted(v)) { + result[k] = '[encrypted]'; + } else { + try { result[k] = interpolate(v); } catch { result[k] = v; } + } + } + console.log(JSON.stringify(result, null, 2)); + } + return; + } + + const val = getValue(key); + if (val === undefined) { + console.error(JSON.stringify({ error: `Entry '${key}' not found` })); + process.exitCode = 1; + return; + } + if (typeof val === 'object' && val !== null) { + const flat = flattenObject({ [key]: val }); + const result: Record = {}; + for (const [k, v] of Object.entries(flat)) { + if (isEncrypted(v)) { + result[k] = '[encrypted]'; + } else { + try { result[k] = interpolate(v); } catch { result[k] = v; } + } + } + console.log(JSON.stringify(result, null, 2)); + } else { + const strVal = String(val); + let displayVal: string; + if (isEncrypted(strVal)) { + displayVal = '[encrypted]'; + } else { + try { displayVal = interpolate(strVal); } catch { displayVal = strVal; } + } + console.log(JSON.stringify({ [key]: displayVal })); + } + return; + } + if (!key) { // -a → aliases only if (options.aliases) { @@ -364,6 +418,86 @@ export async function getEntry(key?: string, options: GetOptions = {}): Promise< displayEntries({ [key]: displayValue }, aliasMap); } +export async function editEntry(key: string, options: { decrypt?: boolean } = {}): Promise { + debug('editEntry called', { key, options }); + try { + const editor = process.env.VISUAL || process.env.EDITOR; + if (!editor) { + printError('No editor configured. 
Set $EDITOR or $VISUAL environment variable.'); + process.exitCode = 1; + return; + } + + let value = getValue(key); + + if (value === undefined) { + printError(`Entry '${key}' not found.`); + process.exitCode = 1; + return; + } + + if (typeof value !== 'string') { + printError(`Entry '${key}' is a subtree, not a single value. Cannot edit.`); + process.exitCode = 1; + return; + } + + let password: string | undefined; + if (isEncrypted(value)) { + if (!options.decrypt) { + printError(`Entry '${key}' is encrypted. Use --decrypt to edit.`); + process.exitCode = 1; + return; + } + password = await askPassword('Password: '); + try { + value = decryptValue(value, password); + } catch { + printError('Decryption failed. Wrong password or corrupted data.'); + process.exitCode = 1; + return; + } + } + + const tmpFile = path.join(os.tmpdir(), `codexcli-edit-${Date.now()}.tmp`); + fs.writeFileSync(tmpFile, value, { encoding: 'utf8', mode: 0o600 }); + + try { + const isWindows = process.platform === 'win32'; + const shell = isWindows ? 'cmd' : (process.env.SHELL ?? '/bin/sh'); + const shellArgs = isWindows + ? 
['/c', `${editor} "%CODEX_TMPFILE%"`] + : ['-c', `${editor} "$CODEX_TMPFILE"`]; + const result = spawnSync(shell, shellArgs, { + stdio: 'inherit', + env: { ...process.env, CODEX_TMPFILE: tmpFile }, + }); + if (result.error) throw result.error; + if (result.status !== 0 && result.status !== null) { + throw new Error(`Editor exited with code ${result.status}`); + } + const newValue = fs.readFileSync(tmpFile, 'utf8'); + + if (newValue === value) { + console.log('No changes made.'); + return; + } + + let storedValue = newValue; + if (password) { + storedValue = encryptValue(newValue, password); + } + + setValue(key, storedValue); + printSuccess(`Entry '${key}' updated successfully.`); + } finally { + try { fs.unlinkSync(tmpFile); } catch { /* ignore cleanup errors */ } + } + } catch (error) { + handleError('Failed to edit entry:', error); + } +} + export async function removeEntry(key: string, force = false): Promise { debug('removeEntry called', { key, force }); diff --git a/src/commands/helpers.ts b/src/commands/helpers.ts index 8d1bc07..13a4408 100644 --- a/src/commands/helpers.ts +++ b/src/commands/helpers.ts @@ -88,7 +88,7 @@ export function displayAliases(aliases: Record, options?: { tree } if (Object.keys(aliases).length === 0) { - console.log(`No aliases found. Add one with "${getBinaryName()} alias set "`); + console.log(`No aliases found. Add one with "${getBinaryName()} set -a "`); return; } @@ -161,14 +161,14 @@ export function askPassword(prompt: string): Promise { }); } -export const VALID_DATA_TYPES = ['entries', 'aliases', 'all'] as const; +export const VALID_DATA_TYPES = ['entries', 'aliases', 'confirm', 'all'] as const; export function validateDataType(type: string): boolean { return (VALID_DATA_TYPES as readonly string[]).includes(type); } export function getInvalidDataTypeMessage(type: string): string { - return `Invalid type: ${type}. Must be 'entries', 'aliases', or 'all'`; + return `Invalid type: ${type}. 
Must be 'entries', 'aliases', 'confirm', or 'all'`; } export async function confirmOrAbort(prompt: string): Promise { diff --git a/src/commands/index.ts b/src/commands/index.ts index 9a2a005..4a69a8a 100644 --- a/src/commands/index.ts +++ b/src/commands/index.ts @@ -1,4 +1,4 @@ -export { runCommand, setEntry, getEntry, removeEntry, renameEntry } from './entries'; +export { runCommand, setEntry, getEntry, editEntry, removeEntry, renameEntry } from './entries'; export { searchEntries } from './search'; export { exportData, importData, resetData } from './data-management'; export { handleConfig, configSet } from './config-commands'; diff --git a/src/commands/search.ts b/src/commands/search.ts index 3e0f74d..c917e54 100644 --- a/src/commands/search.ts +++ b/src/commands/search.ts @@ -95,6 +95,14 @@ export function searchEntries(searchTerm: string, options: SearchOptions = {}): const totalMatches = Object.keys(dataMatches).length + Object.keys(aliasMatches).length; + if (options.json) { + const result: { entries?: Record, aliases?: Record } = {}; + if (!options.aliases) result.entries = dataMatches; + if (!options.entries) result.aliases = aliasMatches; + console.log(JSON.stringify(result, null, 2)); + return; + } + if (totalMatches === 0) { console.log(`No matches found for '${searchTerm}'.`); return; diff --git a/src/completions.ts b/src/completions.ts index 11bafdd..ff5ecaa 100644 --- a/src/completions.ts +++ b/src/completions.ts @@ -64,6 +64,8 @@ const FLAG_DESCRIPTIONS: Record = { '--source': 'Show source/raw output', '--confirm': 'Require confirmation to run', '--no-confirm': 'Remove confirmation requirement', + '--json': 'Output as JSON', + '-j': 'Output as JSON', }; const GLOBAL_FLAGS: Record = { @@ -91,6 +93,7 @@ const CLI_TREE: Record = { '--decrypt': FLAG_DESCRIPTIONS['--decrypt'], '-d': FLAG_DESCRIPTIONS['-d'], '--copy': FLAG_DESCRIPTIONS['--copy'], '-c': FLAG_DESCRIPTIONS['-c'], '--aliases': FLAG_DESCRIPTIONS['--aliases'], '-a': 
FLAG_DESCRIPTIONS['--aliases'], + '--json': FLAG_DESCRIPTIONS['--json'], '-j': FLAG_DESCRIPTIONS['-j'], }, argType: 'dataKey', description: 'Retrieve entries', @@ -103,6 +106,7 @@ const CLI_TREE: Record = { '--decrypt': FLAG_DESCRIPTIONS['--decrypt'], '-d': FLAG_DESCRIPTIONS['-d'], '--copy': FLAG_DESCRIPTIONS['--copy'], '-c': FLAG_DESCRIPTIONS['-c'], '--aliases': FLAG_DESCRIPTIONS['--aliases'], '-a': FLAG_DESCRIPTIONS['--aliases'], + '--json': FLAG_DESCRIPTIONS['--json'], '-j': FLAG_DESCRIPTIONS['-j'], }, argType: 'dataKey', description: 'Retrieve entries', @@ -132,6 +136,7 @@ const CLI_TREE: Record = { '--entries': FLAG_DESCRIPTIONS['--entries'], '-e': FLAG_DESCRIPTIONS['--entries'], '--aliases': FLAG_DESCRIPTIONS['--aliases'], '-a': FLAG_DESCRIPTIONS['--aliases'], '--tree': FLAG_DESCRIPTIONS['--tree'], '-t': FLAG_DESCRIPTIONS['-t'], + '--json': FLAG_DESCRIPTIONS['--json'], '-j': FLAG_DESCRIPTIONS['-j'], }, argType: null, description: 'Find entries by key or value', @@ -141,10 +146,21 @@ const CLI_TREE: Record = { '--entries': FLAG_DESCRIPTIONS['--entries'], '-e': FLAG_DESCRIPTIONS['--entries'], '--aliases': FLAG_DESCRIPTIONS['--aliases'], '-a': FLAG_DESCRIPTIONS['--aliases'], '--tree': FLAG_DESCRIPTIONS['--tree'], '-t': FLAG_DESCRIPTIONS['-t'], + '--json': FLAG_DESCRIPTIONS['--json'], '-j': FLAG_DESCRIPTIONS['-j'], }, argType: null, description: 'Find entries by key or value', }, + edit: { + flags: { '--decrypt': FLAG_DESCRIPTIONS['--decrypt'], '-d': FLAG_DESCRIPTIONS['-d'] }, + argType: 'dataKey', + description: 'Edit entry in $EDITOR', + }, + e: { + flags: { '--decrypt': FLAG_DESCRIPTIONS['--decrypt'], '-d': FLAG_DESCRIPTIONS['-d'] }, + argType: 'dataKey', + description: 'Edit entry in $EDITOR', + }, rename: { flags: { '--alias': FLAG_DESCRIPTIONS['--alias'], '-a': 'Rename alias', '--set-alias': 'Set alias on renamed key' }, argType: 'dataKey', diff --git a/src/formatting.ts b/src/formatting.ts index c056eb4..69811ac 100644 --- a/src/formatting.ts +++ 
b/src/formatting.ts @@ -116,13 +116,14 @@ export function showHelp(): void { cmd('get', 'g', '[key]', 'Retrieve entries or specific data'); cmd('run', 'r', '', 'Execute stored command(s) (: compose, && chain)'); cmd('find', 'f', '', 'Find entries by key or value'); + cmd('edit', 'e', '', 'Open an entry in $EDITOR for editing'); cmd('rename', 'rn', ' ', 'Rename an entry key or alias'); cmd('remove', 'rm', '', 'Remove an entry and its alias'); - cmd('config', '', '[setting] [value]', 'View or change configuration settings'); + cmd('config', '', '', 'View or change configuration settings'); cmd('data', '', '', 'Manage stored data (export, import, reset)'); console.log(); console.log('SUBCOMMANDS:'); - console.log(` ${color.green('config')} info, examples, completions `); + console.log(` ${color.green('config')} set, get, info, examples, completions `); console.log(` ${color.green('data')} export , import , reset `); console.log(); console.log(` Use --help with any command for details (e.g. 
${bin} set --help)`); @@ -151,6 +152,7 @@ export function showHelp(): void { opt(`${color.yellow('--decrypt')}, ${color.yellow('-d')}`, 'Decrypt an encrypted value'); opt(`${color.yellow('--copy')}, ${color.yellow('-c')}`, 'Copy value to clipboard'); opt(`${color.yellow('--aliases')}, ${color.yellow('-a')}`, 'Show aliases only'); + opt(`${color.yellow('--json')}, ${color.yellow('-j')}`, 'Output as JSON (for scripting)'); console.log('\n' + color.boldColors.magenta('OPTIONS (run):')); opt(`${color.yellow('--yes')}, ${color.yellow('-y')}`, 'Skip confirmation prompt (for entries marked --confirm)'); @@ -161,6 +163,10 @@ export function showHelp(): void { opt(`${color.yellow('--entries')}, ${color.yellow('-e')}`, 'Search only in data entries'); opt(`${color.yellow('--aliases')}, ${color.yellow('-a')}`, 'Search only in aliases'); opt(`${color.yellow('--tree')}, ${color.yellow('-t')}`, 'Display results in a tree structure'); + opt(`${color.yellow('--json')}, ${color.yellow('-j')}`, 'Output as JSON (for scripting)'); + + console.log('\n' + color.boldColors.magenta('OPTIONS (edit):')); + opt(`${color.yellow('--decrypt')}, ${color.yellow('-d')}`, 'Decrypt an encrypted value before editing'); console.log('\n' + color.boldColors.magenta('OPTIONS (rename):')); opt(`${color.yellow('--alias')}, ${color.yellow('-a')}`, 'Rename an alias instead of an entry key'); @@ -202,7 +208,6 @@ export function showExamples(): void { section('RETRIEVING DATA:'); ex(`${b} ${g('get')}`, '# List all entries and aliases'); - ex(`${b} ${g('get')} ${y('-e')}`, '# List entries only (no aliases)'); ex(`${b} ${g('get')} ${y('-a')}`, '# List aliases only'); ex(`${b} ${g('get')} ${c('server.ip')}`, '# Get a specific value'); ex(`${b} ${g('get')} ${c('server')}`, '# Get everything under a namespace'); @@ -221,8 +226,6 @@ export function showExamples(): void { section('SEARCHING:'); ex(`${b} ${g('find')} 192.168`, '# Search keys and values'); - ex(`${b} ${g('find')} server ${y('-k')}`, '# Search only in 
keys'); - ex(`${b} ${g('find')} production ${y('-v')}`, '# Search only in values'); ex(`${b} ${g('find')} prod ${y('-e')}`, '# Search data entries only (skip aliases)'); ex(`${b} ${g('find')} ip ${y('-a')}`, '# Search aliases only'); ex(`${b} ${g('find')} server ${y('-t')}`, '# Show results as a tree'); diff --git a/src/index.ts b/src/index.ts index f5a5531..b0749cc 100644 --- a/src/index.ts +++ b/src/index.ts @@ -72,13 +72,27 @@ codexCLI } } } else if (valueArray.length === 0) { - // Allow no value when -a or --confirm/--no-confirm is provided (metadata-only update) - if (!options.alias && options.confirm === undefined) { + // Read from stdin if piped (non-TTY) + if (!process.stdin.isTTY) { + const chunks: Buffer[] = []; + for await (const chunk of process.stdin) { + chunks.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk as string)); + } + const stdinValue = Buffer.concat(chunks).toString('utf8').trimEnd(); + if (stdinValue.length > 0) { + value = stdinValue; + } else if (!options.alias && options.confirm === undefined) { + printError('No input received from stdin.'); + process.exitCode = 1; + return; + } + // value stays undefined — intentional for alias-only or confirm-only updates + } else if (!options.alias && options.confirm === undefined) { + // Allow no value when -a or --confirm/--no-confirm is provided (metadata-only update) printError('Missing value. 
Provide a value or use --prompt (-p) to enter it interactively.'); process.exitCode = 1; return; } - value = undefined; } else { value = valueArray.join(' '); } @@ -99,7 +113,8 @@ codexCLI .option('-d, --decrypt', 'Decrypt an encrypted value (prompts for password)') .option('-c, --copy', 'Copy value to clipboard') .option('-a, --aliases', 'Show aliases only') - .action(async (key: string | undefined, options: { tree?: boolean, raw?: boolean, source?: boolean, decrypt?: boolean, copy?: boolean, aliases?: boolean }) => { + .option('-j, --json', 'Output as JSON (for scripting)') + .action(async (key: string | undefined, options: { tree?: boolean, raw?: boolean, source?: boolean, decrypt?: boolean, copy?: boolean, aliases?: boolean, json?: boolean }) => { if (key) { key = resolveKey(key.replace(/:$/, '')); } @@ -127,14 +142,26 @@ codexCLI .option('-e, --entries', 'Search only in data entries') .option('-a, --aliases', 'Search only in aliases') .option('-t, --tree', 'Display results in a hierarchical tree structure') - .action(async (term: string, options: { entries?: boolean, aliases?: boolean, tree?: boolean }) => { + .option('-j, --json', 'Output as JSON (for scripting)') + .action(async (term: string, options: { entries?: boolean, aliases?: boolean, tree?: boolean, json?: boolean }) => { await withPager(() => commands.searchEntries(term, { entries: options.entries, aliases: options.aliases, - tree: options.tree + tree: options.tree, + json: options.json })); }); +// Edit command +codexCLI + .command('edit ') + .alias('e') + .description('Open an entry in $EDITOR for editing') + .option('-d, --decrypt', 'Decrypt an encrypted value before editing') + .action(async (key: string, options: { decrypt?: boolean }) => { + await commands.editEntry(resolveKey(key.replace(/:$/, '')), options); + }); + // Rename command codexCLI .command('rename ') diff --git a/src/mcp-server.ts b/src/mcp-server.ts index bdaa0cd..e62d093 100644 --- a/src/mcp-server.ts +++ b/src/mcp-server.ts @@ 
-22,11 +22,12 @@ import { import { ensureDataDirectoryExists, } from "./utils/paths"; +import { hasConfirm, loadConfirmKeys, saveConfirmKeys } from "./confirm"; import { loadConfig, getConfigSetting, setConfigSetting, VALID_CONFIG_KEYS } from "./config"; import { deepMerge } from "./utils/deepMerge"; import { version } from "../package.json"; import { formatTree } from "./formatting"; -import { isEncrypted, maskEncryptedValues } from "./utils/crypto"; +import { isEncrypted, maskEncryptedValues, encryptValue, decryptValue } from "./utils/crypto"; import { interpolate, interpolateObject } from "./utils/interpolate"; function textResponse(text: string) { @@ -45,12 +46,19 @@ const server = new McpServer({ server.tool( "codex_set", "Set an entry in the CodexCLI data store", - { key: z.string().describe("Dot-notation key (e.g. server.prod.ip)"), value: z.string().describe("Value to store"), alias: z.string().optional().describe("Create an alias for this key") }, - async ({ key, value, alias }) => { + { key: z.string().describe("Dot-notation key (e.g. 
server.prod.ip)"), value: z.string().describe("Value to store"), alias: z.string().optional().describe("Create an alias for this key"), encrypt: z.boolean().optional().describe("Encrypt the value with the provided password"), password: z.string().optional().describe("Password for encryption (required when encrypt is true)") }, + async ({ key, value, alias, encrypt, password }) => { try { ensureDataDirectoryExists(); const resolved = resolveKey(key); - setValue(resolved, value); + let storedValue = value; + if (encrypt) { + if (!password) { + return errorResponse("Password is required when encrypt is true."); + } + storedValue = encryptValue(value, password); + } + setValue(resolved, storedValue); if (alias) { const aliases = loadAliases(); // Enforce one alias per entry: remove any existing alias for the same target @@ -61,9 +69,9 @@ server.tool( } aliases[alias] = resolved; saveAliases(aliases); - return textResponse(`Set: ${resolved} = ${value}\nAlias set: ${alias} -> ${resolved}`); + return textResponse(`Set: ${resolved} = ${encrypt ? '[encrypted]' : value}\nAlias set: ${alias} -> ${resolved}`); } - return textResponse(`Set: ${resolved} = ${value}`); + return textResponse(`Set: ${resolved} = ${encrypt ? 
'[encrypted]' : value}`); } catch (err) { return errorResponse(`Error setting entry: ${String(err)}`); } @@ -78,8 +86,10 @@ server.tool( key: z.string().optional().describe("Dot-notation key to retrieve (omit for all entries)"), format: z.enum(["flat", "tree"]).optional().describe("Output format: flat (default) or tree"), aliases_only: z.boolean().optional().describe("Show aliases only"), + decrypt: z.boolean().optional().describe("Decrypt an encrypted value"), + password: z.string().optional().describe("Password for decryption (required when decrypt is true)"), }, - async ({ key, format, aliases_only }) => { + async ({ key, format, aliases_only, decrypt: decryptOpt, password }) => { try { const data = loadData(); const keyToAliasMap = buildKeyToAliasMap(); @@ -128,7 +138,19 @@ server.tool( const strVal = String(value); let display: string | number | boolean; if (isEncrypted(strVal)) { - display = '[encrypted]'; + if (decryptOpt) { + if (!password) { + return errorResponse("Password is required when decrypt is true."); + } + try { + const decrypted = decryptValue(strVal, password); + display = decrypted; + } catch { + return errorResponse("Decryption failed. Wrong password or corrupted data."); + } + } else { + display = '[encrypted]'; + } } else if (typeof value === 'string') { try { display = interpolate(strVal); } catch { display = value; } } else { @@ -311,8 +333,9 @@ server.tool( { key: z.string().describe("Dot-notation key (or alias) whose value is a shell command"), dry: z.boolean().optional().describe("If true, return the command without executing it"), + force: z.boolean().optional().describe("If true, skip the confirm check for entries marked --confirm"), }, - async ({ key, dry }) => { + async ({ key, dry, force }) => { const resolvedKey = resolveKey(key); const value = getValue(resolvedKey); @@ -327,6 +350,13 @@ server.tool( return errorResponse(`Value at '${key}' is encrypted. 
Decryption is not supported via MCP.`); } + // Respect confirm metadata: refuse unless --force or --dry + if (hasConfirm(resolvedKey) && !force && !dry) { + return errorResponse( + `Entry '${key}' requires confirmation (--confirm). Pass force: true to execute.` + ); + } + let command = value; try { command = interpolate(value); @@ -412,7 +442,7 @@ server.tool( "codex_export", "Export entries and/or aliases as JSON text", { - type: z.enum(["entries", "aliases", "all"]).describe("What to export"), + type: z.enum(["entries", "aliases", "confirm", "all"]).describe("What to export"), pretty: z.boolean().optional().describe("Pretty-print the JSON (default false)"), }, async ({ type, pretty }) => { @@ -420,10 +450,14 @@ server.tool( const indent = pretty ? 2 : 0; if (type === "all") { - const combined = { entries: maskEncryptedValues(loadData()), aliases: loadAliases() }; + const combined = { entries: maskEncryptedValues(loadData()), aliases: loadAliases(), confirm: loadConfirmKeys() }; return textResponse(JSON.stringify(combined, null, indent)); } + if (type === "confirm") { + return textResponse(JSON.stringify(loadConfirmKeys(), null, indent)); + } + const content = type === "entries" ? maskEncryptedValues(loadData()) : loadAliases(); return textResponse(JSON.stringify(content, null, indent)); } catch (err) { @@ -437,7 +471,7 @@ server.tool( "codex_import", "Import entries and/or aliases from a JSON string", { - type: z.enum(["entries", "aliases", "all"]).describe("What to import"), + type: z.enum(["entries", "aliases", "confirm", "all"]).describe("What to import"), json: z.string().describe("JSON string to import"), merge: z.boolean().optional().describe("Merge with existing data instead of replacing (default false)"), }, @@ -480,10 +514,23 @@ server.tool( const currentAliases = merge ? loadAliases() : {}; saveAliases(merge ? 
{ ...currentAliases, ...(aliasesObj as Record) } : aliasesObj as Record); + + // Import confirm keys if present; reset to empty when replacing and key is absent + const confirmVal = obj.confirm; + if (confirmVal && typeof confirmVal === "object" && !Array.isArray(confirmVal)) { + const currentConfirm = merge ? loadConfirmKeys() : {}; + saveConfirmKeys(merge ? { ...currentConfirm, ...(confirmVal as Record) } : confirmVal as Record); + } else if (!merge) { + saveConfirmKeys({}); + } } else if (type === "entries") { const current = merge ? loadData() : {}; const newData = merge ? deepMerge(current, obj) : obj; saveData(newData as CodexData); + } else if (type === "confirm") { + const currentConfirm = merge ? loadConfirmKeys() : {}; + const newConfirm = merge ? { ...currentConfirm, ...(obj as Record) } : obj; + saveConfirmKeys(newConfirm as Record); } else { if (Object.values(obj).some(v => typeof v !== "string")) { return errorResponse("Alias values must all be strings (dot-notation paths)."); @@ -496,8 +543,9 @@ server.tool( saveAliases(newAliases as Record); } + const typeLabel = { all: "Entries, aliases, and confirm keys", entries: "Entries", aliases: "Aliases", confirm: "Confirm keys" }[type]; return textResponse( - `${type === "all" ? "Entries and aliases" : type === "entries" ? "Entries" : "Aliases"} ${merge ? "merged" : "imported"} successfully.` + `${typeLabel} ${merge ? 
"merged" : "imported"} successfully.` ); } catch (err) { return errorResponse(`Error importing: ${String(err)}`); @@ -510,7 +558,7 @@ server.tool( "codex_reset", "Reset entries and/or aliases to empty state", { - type: z.enum(["entries", "aliases", "all"]).describe("What to reset"), + type: z.enum(["entries", "aliases", "confirm", "all"]).describe("What to reset"), }, async ({ type }) => { try { @@ -520,8 +568,12 @@ server.tool( if (type === "aliases" || type === "all") { saveAliases({}); } + if (type === "confirm" || type === "all") { + saveConfirmKeys({}); + } + const typeLabel = { all: "Entries, aliases, and confirm keys", entries: "Entries", aliases: "Aliases", confirm: "Confirm keys" }[type]; return textResponse( - `${type === "all" ? "Entries and aliases" : type === "entries" ? "Entries" : "Aliases"} reset to empty state.` + `${typeLabel} reset to empty state.` ); } catch (err) { return errorResponse(`Error resetting: ${String(err)}`); diff --git a/src/types.ts b/src/types.ts index 404bf01..1e60e6e 100644 --- a/src/types.ts +++ b/src/types.ts @@ -9,12 +9,14 @@ export interface GetOptions { decrypt?: boolean | undefined; copy?: boolean | undefined; aliases?: boolean | undefined; + json?: boolean | undefined; } export interface SearchOptions { entries?: boolean | undefined; aliases?: boolean | undefined; tree?: boolean | undefined; + json?: boolean | undefined; } export interface ExportOptions { diff --git a/src/utils/atomicWrite.ts b/src/utils/atomicWrite.ts index b1ef5c2..fcc9850 100644 --- a/src/utils/atomicWrite.ts +++ b/src/utils/atomicWrite.ts @@ -4,9 +4,10 @@ import fs from 'fs'; * Write a file atomically by writing to a temporary file first, * then renaming into place. On POSIX systems, rename is atomic, * so the target file is never left in a partial/corrupt state. + * Files are created with mode 0600 (owner read/write only). 
*/ export function atomicWriteFileSync(filePath: string, content: string): void { const tmpPath = filePath + '.tmp'; - fs.writeFileSync(tmpPath, content, 'utf8'); + fs.writeFileSync(tmpPath, content, { encoding: 'utf8', mode: 0o600 }); fs.renameSync(tmpPath, filePath); } diff --git a/src/utils/autoBackup.ts b/src/utils/autoBackup.ts new file mode 100644 index 0000000..5dded66 --- /dev/null +++ b/src/utils/autoBackup.ts @@ -0,0 +1,65 @@ +import fs from 'fs'; +import path from 'path'; +import { getDataDirectory } from './paths'; +import { debug } from './debug'; + +/** + * Create automatic backups of data files before destructive operations. + * Backups are stored in a `.backups` subdirectory within the data directory. + */ +export function createAutoBackup(label: string): string | null { + const dataDir = getDataDirectory(); + const backupDir = path.join(dataDir, '.backups'); + + try { + if (!fs.existsSync(backupDir)) { + fs.mkdirSync(backupDir, { recursive: true, mode: 0o700 }); + } + + const timestamp = new Date().toISOString().replace(/:/g, '-').replace(/\..+/, ''); + const backupSubDir = path.join(backupDir, `${label}-${timestamp}`); + fs.mkdirSync(backupSubDir, { mode: 0o700 }); + + const filesToBackup = ['entries.json', 'aliases.json', 'confirm.json']; + let backedUp = 0; + + for (const file of filesToBackup) { + const src = path.join(dataDir, file); + if (fs.existsSync(src)) { + const dest = path.join(backupSubDir, file); + fs.copyFileSync(src, dest); + fs.chmodSync(dest, 0o600); + backedUp++; + } + } + + if (backedUp === 0) { + // Nothing to back up — remove the empty directory + try { fs.rmSync(backupSubDir); } catch { /* ignore */ } + return null; + } + + debug(`Auto-backup created: ${backupSubDir} (${backedUp} files)`); + + // Rotate: keep only the 10 most recent backups + try { + const allBackups = fs.readdirSync(backupDir) + .filter(name => fs.statSync(path.join(backupDir, name)).isDirectory()) + .sort(); + if (allBackups.length > 10) { + const toRemove 
= allBackups.slice(0, allBackups.length - 10); + for (const old of toRemove) { + fs.rmSync(path.join(backupDir, old), { recursive: true, force: true }); + } + debug(`Removed ${toRemove.length} old backup(s)`); + } + } catch (cleanupErr) { + debug(`Backup cleanup failed: ${cleanupErr}`); + } + + return backupSubDir; + } catch (error) { + debug(`Auto-backup failed: ${error}`); + return null; + } +} diff --git a/src/utils/clipboard.ts b/src/utils/clipboard.ts index c559fa2..5a0c8c0 100644 --- a/src/utils/clipboard.ts +++ b/src/utils/clipboard.ts @@ -17,6 +17,8 @@ export function copyToClipboard(text: string): void { } catch { cmd = 'xsel --clipboard --input'; } + } else if (platform === 'win32') { + cmd = 'clip'; } else { throw new Error(`Clipboard not supported on platform: ${platform}`); } diff --git a/src/utils/fileLock.ts b/src/utils/fileLock.ts new file mode 100644 index 0000000..15cac89 --- /dev/null +++ b/src/utils/fileLock.ts @@ -0,0 +1,86 @@ +import fs from 'fs'; +import { debug } from './debug'; + +const LOCK_STALE_MS = 10_000; // Consider lock stale after 10 seconds +// Reusable buffer for Atomics.wait()-based sleep (avoids per-call allocation) +const _sleepBuf = new Int32Array(new SharedArrayBuffer(4)); + +/** + * Acquire an advisory file lock using a .lock file. + * Retries with backoff if the lock is held by another process. + * Automatically breaks stale locks (older than LOCK_STALE_MS). 
 */
export function acquireLock(filePath: string, maxRetries = 5): void {
  const lockPath = filePath + '.lock';

  // Up to maxRetries + 1 attempts; the total backoff sleep is ~2^maxRetries - 1 ms
  // (≈31 ms for the default of 5), far below LOCK_STALE_MS.
  // NOTE(review): a *live* holder that keeps the lock longer than ~31 ms but
  // less than 10 s exhausts the retries; withFileLock then proceeds without a
  // lock. Confirm that tolerance is intended.
  for (let attempt = 0; attempt <= maxRetries; attempt++) {
    try {
      // O_CREAT | O_EXCL: fail if file already exists (atomic)
      const fd = fs.openSync(lockPath, fs.constants.O_CREAT | fs.constants.O_EXCL | fs.constants.O_WRONLY);
      // Record the holder's pid so leftover locks can be diagnosed by hand.
      fs.writeSync(fd, String(process.pid));
      fs.closeSync(fd);
      return; // lock acquired
    } catch (err: unknown) {
      if (err && typeof err === 'object' && 'code' in err && (err as { code: string }).code === 'EEXIST') {
        // Lock file exists — check if stale
        try {
          const stat = fs.statSync(lockPath);
          if (Date.now() - stat.mtimeMs > LOCK_STALE_MS) {
            // Stale lock — remove and retry immediately.
            // TOCTOU note: another process may unlink+recreate between our
            // unlink and the next openSync, but that's fine — the O_CREAT|O_EXCL
            // re-acquire is atomic so we'll just loop again.
            // NOTE(review): the converse race is the risky one — another
            // process can break the same stale lock and acquire a *fresh* one
            // before our unlinkSync runs, in which case we delete its live
            // lock. Re-checking mtime just before unlink narrows but does not
            // close this window; confirm it is acceptable for advisory use.
            try { fs.unlinkSync(lockPath); } catch { /* another process may have removed it */ }
            continue;
          }
        } catch {
          // Lock file disappeared — retry
          continue;
        }

        if (attempt < maxRetries) {
          // Sleep with exponential backoff (1ms, 2ms, 4ms, 8ms, 16ms).
          // Atomics.wait blocks the thread synchronously; Node (unlike
          // browsers) permits this on the main thread.
          const waitMs = Math.pow(2, attempt);
          Atomics.wait(_sleepBuf, 0, 0, waitMs);
          continue;
        }

        throw new Error(`Unable to acquire lock on ${filePath} after ${maxRetries} retries.`);
      }
      // Not EEXIST (e.g. permission error) — locking is broken, surface it.
      throw err;
    }
  }
}

/**
 * Release an advisory file lock.
 *
 * NOTE(review): unlinks unconditionally, without comparing the recorded pid,
 * so a process whose stale lock was broken could release another holder's
 * fresh lock — confirm this matches the advisory-only intent.
 */
export function releaseLock(filePath: string): void {
  const lockPath = filePath + '.lock';
  try {
    fs.unlinkSync(lockPath);
  } catch {
    // Lock file already removed — ignore
  }
}

/**
 * Execute a function while holding a file lock.
 * Falls back to running without a lock if locking fails (e.g., in test environments).
 */
export function withFileLock<T>(filePath: string, fn: () => T): T {
  let locked = false;
  try {
    acquireLock(filePath);
    locked = true;
  } catch (err) {
    // Best-effort: log and run unlocked rather than failing the operation.
    debug(`Lock acquisition failed for ${filePath}, proceeding without lock: ${err}`);
  }
  try {
    return fn();
  } finally {
    if (locked) {
      // Only release a lock we actually acquired — never delete someone else's.
      releaseLock(filePath);
    }
  }
}

// --- src/utils/paths.ts ---

/**
 * Ensure the CodexCLI data directory exists and return its path.
 */
export function ensureDataDirectoryExists(): string {
  const dataDir = getDataDirectory();

  if (!fs.existsSync(dataDir)) {
    // 0700: only the owner may list/enter the directory (it may hold secrets).
    // NOTE(review): mode applies only on creation; a pre-existing directory
    // keeps whatever permissions it already has — confirm that's acceptable.
    fs.mkdirSync(dataDir, { recursive: true, mode: 0o700 });
  }

  return dataDir;
}

// --- src/utils/saveJsonSorted.ts ---
import { atomicWriteFileSync } from './atomicWrite';
import { withFileLock } from './fileLock';

/**
 * Serialize `obj` to pretty-printed JSON with top-level keys sorted
 * (stable output for clean diffs) and write it atomically under a file lock.
 *
 * NOTE(review): the lock covers only this write; a caller doing
 * load-modify-save is still racy across processes unless it holds the lock
 * across the whole sequence — verify call sites.
 */
export function saveJsonSorted(filePath: string, obj: Record<string, unknown>): void {
  withFileLock(filePath, () => {
    // Sort top-level keys only; nested objects keep their insertion order.
    const sorted = Object.fromEntries(
      Object.entries(obj).sort(([a], [b]) => a.localeCompare(b))
    );
    atomicWriteFileSync(filePath, JSON.stringify(sorted, null, 2));
  });
}