diff --git a/AGENTS.md b/AGENTS.md index c291745..b4dfe32 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -69,4 +69,4 @@ - **Type**: ES Module package for opencode plugin system - **Target**: Bun runtime, ES2021+ -- **Purpose**: Sync global opencode config across machines via GitHub +- **Purpose**: Sync global opencode config across machines via GitHub, with optional secrets support (e.g., 1Password backend) diff --git a/docs/1Password.md b/docs/1Password.md new file mode 100644 index 0000000..e7cad7a --- /dev/null +++ b/docs/1Password.md @@ -0,0 +1,194 @@ +# Fork Feature: 1Password Secrets Backend (NO secrets in git) + +## Problem +Upstream `opencode-synced` can sync OpenCode secrets by committing: +- `~/.local/share/opencode/auth.json` +- `~/.local/share/opencode/mcp-auth.json` + +We want to keep using the plugin for config/sessions, but **never store tokens in git**. + +## Goal +Add an optional secrets backend to this plugin: +- `auth.json` and `mcp-auth.json` are stored in **1Password** (as opaque blobs) +- they are **pulled** from 1Password after syncing config +- they are **pushed** back to 1Password when changed +- they are **never committed** to git (even if `includeSecrets: true`) +- keep everything else working exactly like upstream + +## Non-goals +- Don’t parse or reinterpret `auth.json` / `mcp-auth.json` structure. +- Don’t implement a filesystem watcher daemon. Keep it command-based. +- Don’t leak secrets in logs/errors. + +## Configuration (add to opencode-synced.jsonc) +Add a new optional config block: + +```jsonc +{ + "includeSecrets": false, + "secretsBackend": { + "type": "1password", + "vault": "Personal", + "documents": { + "authJson": "opencode-auth.json", + "mcpAuthJson": "opencode-mcp-auth.json" + } + } +} +``` + +Rules: + +If secretsBackend.type is missing, run upstream behavior. + +If type === "1password", auth.json and mcp-auth.json must NOT be included in git sync, regardless of includeSecrets. 
+

1Password Storage Approach
Use 1Password Document items to store the raw files.

Required CLI operations (execute via child process; never print file contents):

op document get <title> --vault <vault> --out-file <path>

op document create <path> --vault <vault> --title <title>

op document edit <title> --vault <vault> <path>

Implementation Plan (do in order)
1) Locate current secret sync logic
Search the repo for:

includeSecrets

auth.json / mcp-auth.json

extraSecretPaths

/sync-pull /sync-push
Identify the exact function(s) that assemble the list of paths to copy/commit.

2) Add config typing + validation
Extend config types to include secretsBackend.

Validate:

vault required

documents.authJson required

documents.mcpAuthJson required

documents.authJson and documents.mcpAuthJson must be unique

3) Add a SecretsBackend interface
Internal interface:

pull(): Promise<void> // 1P -> local files

push(): Promise<void> // local files -> 1P

status(): Promise<string> (optional)

4) Implement OnePasswordBackend
Implementation rules:

Use child_process to call op.

Detect if op is installed; return a clear, non-secret error.

For pull:

op document get <title> --vault <vault> --out-file <path>

atomically write to target path (write temp + rename)

set restrictive perms (0600) where possible

If document is missing, do not fail hard; just skip.

For push:

if local file doesn’t exist: skip.

create doc if missing; otherwise edit doc. 
+ +Files to manage (XDG-aware): + +Linux/macOS: ~/.local/share/opencode/auth.json and ~/.local/share/opencode/mcp-auth.json + +Windows: %LOCALAPPDATA%\opencode\auth.json and %LOCALAPPDATA%\opencode\mcp-auth.json + +5) Wire backend into sync lifecycle +Hook points: + +After /sync-pull applies repo changes -> call backend.pull() + +After /sync-push successfully commits/pushes (or when no repo changes) -> call backend.push() + +Add explicit commands: + +/sync-secrets-pull + +/sync-secrets-push + +/sync-secrets-status + +6) Enforce “never commit auth files” +When secretsBackend.type === "1password": + +Ensure the git sync path list excludes: + +~/.local/share/opencode/auth.json + +~/.local/share/opencode/mcp-auth.json + +Additionally: + +Detect if these files are already tracked in the sync repo. + +If yes: stop and print remediation instructions (remove + rewrite history). + +7) Change detection (recommended) +Add lightweight hashing: + +compute SHA256 of local auth.json and mcp-auth.json + +store last pushed hash in plugin state + +only call backend.push() when changed (avoid unnecessary 1P calls) + +8) QA / Acceptance Tests (manual) +Machine A: + +Configure secretsBackend=1password + +Run /sync-secrets-push (creates docs if missing) + +Run /sync-push (must NOT commit auth files) + +Machine B: + +/sync-link then /sync-pull + +/sync-secrets-pull + +Verify OpenCode is authenticated without manual token copy + +Update tokens: + +Run opencode auth login or OpenCode /connect (updates auth.json) + +Run /sync-secrets-push + +On machine B run /sync-secrets-pull and verify updated auth works + +Security Constraints (strict) +Never print secrets. + +Never write secrets into the repo. + +Never include secrets in thrown error messages. + +Ensure local auth files are chmod 0600 where supported. + +If 1Password backend fails, do not destroy local auth files. 
diff --git a/opencode.json b/opencode.json index 0967ef4..720ece5 100644 --- a/opencode.json +++ b/opencode.json @@ -1 +1,3 @@ -{} +{ + "$schema": "https://opencode.ai/config.json" +} diff --git a/package.json b/package.json index 4bf337d..6b5f16c 100644 --- a/package.json +++ b/package.json @@ -22,9 +22,7 @@ "publishConfig": { "access": "public" }, - "files": [ - "dist" - ], + "files": ["dist"], "dependencies": { "@opencode-ai/plugin": "1.0.85" }, @@ -51,8 +49,6 @@ "prepare": "husky" }, "lint-staged": { - "*.{js,ts,json}": [ - "biome check --write --no-errors-on-unmatched" - ] + "*.{js,ts,json}": ["biome check --write --no-errors-on-unmatched"] } } diff --git a/src/command/sync-secrets-pull.md b/src/command/sync-secrets-pull.md new file mode 100644 index 0000000..a3bcb1b --- /dev/null +++ b/src/command/sync-secrets-pull.md @@ -0,0 +1,5 @@ +--- +description: Pull secrets from the configured backend +--- + +Use the opencode_sync tool with command "secrets-pull". diff --git a/src/command/sync-secrets-push.md b/src/command/sync-secrets-push.md new file mode 100644 index 0000000..4b4e834 --- /dev/null +++ b/src/command/sync-secrets-push.md @@ -0,0 +1,5 @@ +--- +description: Push secrets to the configured backend +--- + +Use the opencode_sync tool with command "secrets-push". diff --git a/src/command/sync-secrets-status.md b/src/command/sync-secrets-status.md new file mode 100644 index 0000000..67c15ad --- /dev/null +++ b/src/command/sync-secrets-status.md @@ -0,0 +1,5 @@ +--- +description: Show secrets backend status +--- + +Use the opencode_sync tool with command "secrets-status". 
diff --git a/src/index.ts b/src/index.ts index c6d5d2b..1f314ce 100644 --- a/src/index.ts +++ b/src/index.ts @@ -117,7 +117,18 @@ export const opencodeConfigSync: Plugin = async (ctx) => { description: 'Manage opencode config sync with a GitHub repo', args: { command: tool.schema - .enum(['status', 'init', 'link', 'pull', 'push', 'enable-secrets', 'resolve']) + .enum([ + 'status', + 'init', + 'link', + 'pull', + 'push', + 'enable-secrets', + 'resolve', + 'secrets-pull', + 'secrets-push', + 'secrets-status', + ]) .describe('Sync command to execute'), repo: tool.schema.string().optional().describe('Repo owner/name or URL'), owner: tool.schema.string().optional().describe('Repo owner'), @@ -182,6 +193,15 @@ export const opencodeConfigSync: Plugin = async (ctx) => { if (args.command === 'push') { return await service.push(); } + if (args.command === 'secrets-pull') { + return await service.secretsPull(); + } + if (args.command === 'secrets-push') { + return await service.secretsPush(); + } + if (args.command === 'secrets-status') { + return await service.secretsStatus(); + } if (args.command === 'enable-secrets') { return await service.enableSecrets({ extraSecretPaths: args.extraSecretPaths, diff --git a/src/sync/config.test.ts b/src/sync/config.test.ts index e8b925c..66b8894 100644 --- a/src/sync/config.test.ts +++ b/src/sync/config.test.ts @@ -3,11 +3,12 @@ import os from 'node:os'; import path from 'node:path'; import { describe, expect, it } from 'vitest'; - +import type { SyncConfig } from './config.js'; import { canCommitMcpSecrets, chmodIfExists, deepMerge, + normalizeSecretsBackend, normalizeSyncConfig, parseJsonc, stripOverrides, @@ -77,6 +78,44 @@ describe('normalizeSyncConfig', () => { const normalized = normalizeSyncConfig({}); expect(normalized.includeModelFavorites).toBe(true); }); + + it('defaults extra path lists when omitted', () => { + const normalized = normalizeSyncConfig({ includeSecrets: true }); + expect(normalized.extraSecretPaths).toEqual([]); 
+ expect(normalized.extraConfigPaths).toEqual([]); + }); +}); + +describe('normalizeSecretsBackend', () => { + it('returns undefined when backend is missing', () => { + expect(normalizeSecretsBackend(undefined)).toBeUndefined(); + }); + + it('preserves unknown backend types for validation', () => { + const unknownBackend = { type: 'unknown' } as unknown as SyncConfig['secretsBackend']; + expect(normalizeSecretsBackend(unknownBackend)).toEqual({ type: 'unknown' }); + }); + + it('normalizes 1password documents', () => { + const raw = { + type: '1password', + vault: 'Personal', + documents: { + authJson: 'auth.json', + mcpAuthJson: 'mcp-auth.json', + extra: 'ignored', + }, + } as unknown as SyncConfig['secretsBackend']; + + expect(normalizeSecretsBackend(raw)).toEqual({ + type: '1password', + vault: 'Personal', + documents: { + authJson: 'auth.json', + mcpAuthJson: 'mcp-auth.json', + }, + }); + }); }); describe('canCommitMcpSecrets', () => { diff --git a/src/sync/config.ts b/src/sync/config.ts index cdc6b2d..b0d4af5 100644 --- a/src/sync/config.ts +++ b/src/sync/config.ts @@ -10,6 +10,20 @@ export interface SyncRepoConfig { branch?: string; } +export type KnownSecretsBackendType = '1password'; +export type SecretsBackendType = KnownSecretsBackendType | (string & {}); + +export interface SecretsBackendDocuments { + authJson?: string; + mcpAuthJson?: string; +} + +export interface SecretsBackendConfig { + type: SecretsBackendType; + vault?: string; + documents?: SecretsBackendDocuments; +} + export interface SyncConfig { repo?: SyncRepoConfig; localRepoPath?: string; @@ -18,14 +32,27 @@ export interface SyncConfig { includeSessions?: boolean; includePromptStash?: boolean; includeModelFavorites?: boolean; + secretsBackend?: SecretsBackendConfig; extraSecretPaths?: string[]; extraConfigPaths?: string[]; } +export interface NormalizedSyncConfig extends SyncConfig { + includeSecrets: boolean; + includeMcpSecrets: boolean; + includeSessions: boolean; + includePromptStash: 
boolean; + includeModelFavorites: boolean; + secretsBackend?: SecretsBackendConfig; + extraSecretPaths: string[]; + extraConfigPaths: string[]; +} + export interface SyncState { lastPull?: string; lastPush?: string; lastRemoteUpdate?: string; + lastSecretsHash?: string; } export async function pathExists(filePath: string): Promise { @@ -47,7 +74,36 @@ export async function chmodIfExists(filePath: string, mode: number): Promise { + if (!value || typeof value !== 'object') return false; + return Object.getPrototypeOf(value) === Object.prototype; +} + +export function normalizeSecretsBackend( + input: SyncConfig['secretsBackend'] +): SecretsBackendConfig | undefined { + if (!input || typeof input !== 'object') return undefined; + + const type = typeof input.type === 'string' ? input.type : undefined; + if (!type) return undefined; + + if (type !== '1password') { + return { type }; + } + + const vault = typeof input.vault === 'string' ? input.vault : undefined; + const documentsInput = isPlainObject(input.documents) ? input.documents : {}; + + const documents: SecretsBackendDocuments = { + authJson: typeof documentsInput.authJson === 'string' ? documentsInput.authJson : undefined, + mcpAuthJson: + typeof documentsInput.mcpAuthJson === 'string' ? documentsInput.mcpAuthJson : undefined, + }; + + return { type: '1password', vault, documents }; +} + +export function normalizeSyncConfig(config: SyncConfig): NormalizedSyncConfig { const includeSecrets = Boolean(config.includeSecrets); const includeModelFavorites = config.includeModelFavorites !== false; return { @@ -56,6 +112,7 @@ export function normalizeSyncConfig(config: SyncConfig): SyncConfig { includeSessions: Boolean(config.includeSessions), includePromptStash: Boolean(config.includePromptStash), includeModelFavorites, + secretsBackend: normalizeSecretsBackend(config.secretsBackend), extraSecretPaths: Array.isArray(config.extraSecretPaths) ? 
config.extraSecretPaths : [], extraConfigPaths: Array.isArray(config.extraConfigPaths) ? config.extraConfigPaths : [], localRepoPath: config.localRepoPath, @@ -67,7 +124,13 @@ export function canCommitMcpSecrets(config: SyncConfig): boolean { return Boolean(config.includeSecrets) && Boolean(config.includeMcpSecrets); } -export async function loadSyncConfig(locations: SyncLocations): Promise { +export function hasSecretsBackend(config: SyncConfig | NormalizedSyncConfig): boolean { + return Boolean(config.secretsBackend); +} + +export async function loadSyncConfig( + locations: SyncLocations +): Promise { if (!(await pathExists(locations.syncConfigPath))) { return null; } @@ -109,6 +172,14 @@ export async function writeState(locations: SyncLocations, state: SyncState): Pr await writeJsonFile(locations.statePath, state, { jsonc: false }); } +export async function updateState( + locations: SyncLocations, + update: Partial +): Promise { + const existing = await loadState(locations); + await writeState(locations, { ...existing, ...update }); +} + export function applyOverridesToRuntimeConfig( config: Record, overrides: Record @@ -266,11 +337,6 @@ export async function writeJsonFile( } } -export function isPlainObject(value: unknown): value is Record { - if (!value || typeof value !== 'object') return false; - return Object.getPrototypeOf(value) === Object.prototype; -} - export function hasOwn(target: Record, key: string): boolean { return Object.hasOwn(target, key); } diff --git a/src/sync/paths.test.ts b/src/sync/paths.test.ts index 06764f4..9ede236 100644 --- a/src/sync/paths.test.ts +++ b/src/sync/paths.test.ts @@ -1,6 +1,7 @@ import { describe, expect, it } from 'vitest'; import type { SyncConfig } from './config.js'; +import { normalizeSyncConfig } from './config.js'; import { buildSyncPlan, resolveSyncLocations, resolveXdgPaths } from './paths.js'; describe('resolveXdgPaths', () => { @@ -50,7 +51,7 @@ describe('buildSyncPlan', () => { extraConfigPaths: 
['/home/test/.config/opencode/custom.json'], }; - const plan = buildSyncPlan(config, locations, '/repo', 'linux'); + const plan = buildSyncPlan(normalizeSyncConfig(config), locations, '/repo', 'linux'); const secretItems = plan.items.filter((item) => item.isSecret); expect(secretItems.length).toBe(0); @@ -58,6 +59,34 @@ describe('buildSyncPlan', () => { expect(plan.extraConfigs.allowlist.length).toBe(1); }); + it('includes opencode-synced config file in items', () => { + const env = { HOME: '/home/test' } as NodeJS.ProcessEnv; + const locations = resolveSyncLocations(env, 'linux'); + const config: SyncConfig = { + repo: { owner: 'acme', name: 'config' }, + includeSecrets: false, + }; + + const plan = buildSyncPlan(normalizeSyncConfig(config), locations, '/repo', 'linux'); + const syncItem = plan.items.find((item) => item.localPath === locations.syncConfigPath); + + expect(syncItem).toBeTruthy(); + }); + + it('filters sync config from extra config paths', () => { + const env = { HOME: '/home/test' } as NodeJS.ProcessEnv; + const locations = resolveSyncLocations(env, 'linux'); + const config: SyncConfig = { + repo: { owner: 'acme', name: 'config' }, + includeSecrets: false, + extraConfigPaths: [locations.syncConfigPath], + }; + + const plan = buildSyncPlan(normalizeSyncConfig(config), locations, '/repo', 'linux'); + + expect(plan.extraConfigs.allowlist.length).toBe(0); + }); + it('includes secrets when includeSecrets is true', () => { const env = { HOME: '/home/test' } as NodeJS.ProcessEnv; const locations = resolveSyncLocations(env, 'linux'); @@ -68,7 +97,7 @@ describe('buildSyncPlan', () => { extraConfigPaths: ['/home/test/.config/opencode/custom.json'], }; - const plan = buildSyncPlan(config, locations, '/repo', 'linux'); + const plan = buildSyncPlan(normalizeSyncConfig(config), locations, '/repo', 'linux'); const secretItems = plan.items.filter((item) => item.isSecret); expect(secretItems.length).toBe(2); @@ -76,6 +105,35 @@ describe('buildSyncPlan', () => { 
expect(plan.extraConfigs.allowlist.length).toBe(1); }); + it('excludes auth files when using 1password backend', () => { + const env = { HOME: '/home/test' } as NodeJS.ProcessEnv; + const locations = resolveSyncLocations(env, 'linux'); + const config: SyncConfig = { + repo: { owner: 'acme', name: 'config' }, + includeSecrets: true, + secretsBackend: { + type: '1password', + vault: 'Personal', + documents: { + authJson: 'opencode-auth.json', + mcpAuthJson: 'opencode-mcp-auth.json', + }, + }, + }; + + const plan = buildSyncPlan(normalizeSyncConfig(config), locations, '/repo', 'linux'); + + const authItem = plan.items.find((item) => + item.localPath.endsWith('/.local/share/opencode/auth.json') + ); + const mcpItem = plan.items.find((item) => + item.localPath.endsWith('/.local/share/opencode/mcp-auth.json') + ); + + expect(authItem).toBeUndefined(); + expect(mcpItem).toBeUndefined(); + }); + it('includes model favorites by default and allows disabling', () => { const env = { HOME: '/home/test' } as NodeJS.ProcessEnv; const locations = resolveSyncLocations(env, 'linux'); @@ -84,7 +142,7 @@ describe('buildSyncPlan', () => { includeSecrets: false, }; - const plan = buildSyncPlan(config, locations, '/repo', 'linux'); + const plan = buildSyncPlan(normalizeSyncConfig(config), locations, '/repo', 'linux'); const favoritesItem = plan.items.find((item) => item.localPath.endsWith('/.local/state/opencode/model.json') ); @@ -92,7 +150,7 @@ describe('buildSyncPlan', () => { expect(favoritesItem).toBeTruthy(); const disabledPlan = buildSyncPlan( - { ...config, includeModelFavorites: false }, + normalizeSyncConfig({ ...config, includeModelFavorites: false }), locations, '/repo', 'linux' diff --git a/src/sync/paths.ts b/src/sync/paths.ts index 6440394..995118d 100644 --- a/src/sync/paths.ts +++ b/src/sync/paths.ts @@ -1,7 +1,7 @@ import crypto from 'node:crypto'; import path from 'node:path'; - -import type { SyncConfig } from './config.js'; +import type { NormalizedSyncConfig, 
SyncConfig } from './config.js'; +import { hasSecretsBackend } from './config.js'; export interface XdgPaths { homeDir: string; @@ -167,7 +167,7 @@ export function resolveRepoRoot(config: SyncConfig | null, locations: SyncLocati } export function buildSyncPlan( - config: SyncConfig, + config: NormalizedSyncConfig, locations: SyncLocations, repoRoot: string, platform: NodeJS.Platform = process.platform @@ -185,6 +185,9 @@ export function buildSyncPlan( const configManifestPath = path.join(repoConfigRoot, 'extra-manifest.json'); const items: SyncItem[] = []; + const usingSecretsBackend = hasSecretsBackend(config); + const authJsonPath = path.join(dataRoot, 'auth.json'); + const mcpAuthJsonPath = path.join(dataRoot, 'mcp-auth.json'); const addFile = (name: string, isSecret: boolean, isConfigFile: boolean): void => { items.push({ @@ -199,6 +202,7 @@ export function buildSyncPlan( addFile(DEFAULT_CONFIG_NAME, false, true); addFile(DEFAULT_CONFIGC_NAME, false, true); addFile(DEFAULT_AGENTS_NAME, false, false); + addFile(DEFAULT_SYNC_CONFIG_NAME, false, false); for (const dirName of CONFIG_DIRS) { items.push({ @@ -221,22 +225,24 @@ export function buildSyncPlan( } if (config.includeSecrets) { - items.push( - { - localPath: path.join(dataRoot, 'auth.json'), - repoPath: path.join(repoDataRoot, 'auth.json'), - type: 'file', - isSecret: true, - isConfigFile: false, - }, - { - localPath: path.join(dataRoot, 'mcp-auth.json'), - repoPath: path.join(repoDataRoot, 'mcp-auth.json'), - type: 'file', - isSecret: true, - isConfigFile: false, - } - ); + if (!usingSecretsBackend) { + items.push( + { + localPath: authJsonPath, + repoPath: path.join(repoDataRoot, 'auth.json'), + type: 'file', + isSecret: true, + isConfigFile: false, + }, + { + localPath: mcpAuthJsonPath, + repoPath: path.join(repoDataRoot, 'mcp-auth.json'), + type: 'file', + isSecret: true, + isConfigFile: false, + } + ); + } if (config.includeSessions) { for (const dirName of SESSION_DIRS) { @@ -263,16 +269,29 @@ export 
function buildSyncPlan( } } + const extraSecretPaths = config.includeSecrets ? config.extraSecretPaths : []; + const filteredExtraSecrets = usingSecretsBackend + ? extraSecretPaths.filter( + (entry) => + !isSamePath(entry, authJsonPath, locations.xdg.homeDir, platform) && + !isSamePath(entry, mcpAuthJsonPath, locations.xdg.homeDir, platform) + ) + : extraSecretPaths; + const extraSecrets = buildExtraPathPlan( - config.includeSecrets ? config.extraSecretPaths : [], + filteredExtraSecrets, locations, repoExtraDir, manifestPath, platform ); + const extraConfigPaths = (config.extraConfigPaths ?? []).filter( + (entry) => !isSamePath(entry, locations.syncConfigPath, locations.xdg.homeDir, platform) + ); + const extraConfigs = buildExtraPathPlan( - config.extraConfigPaths, + extraConfigPaths, locations, repoConfigExtraDir, configManifestPath, diff --git a/src/sync/secrets-backend.test.ts b/src/sync/secrets-backend.test.ts new file mode 100644 index 0000000..a94092b --- /dev/null +++ b/src/sync/secrets-backend.test.ts @@ -0,0 +1,123 @@ +import { mkdir, mkdtemp, rm, writeFile } from 'node:fs/promises'; +import os from 'node:os'; +import path from 'node:path'; + +import { describe, expect, it } from 'vitest'; + +import { normalizeSyncConfig } from './config.js'; +import { resolveSyncLocations } from './paths.js'; +import { computeSecretsHash, resolveSecretsBackendConfig } from './secrets-backend.js'; + +describe('resolveSecretsBackendConfig', () => { + it('returns none when backend is missing', () => { + const resolution = resolveSecretsBackendConfig(normalizeSyncConfig({})); + expect(resolution.state).toBe('none'); + }); + + it('rejects unsupported backend types', () => { + const resolution = resolveSecretsBackendConfig( + normalizeSyncConfig({ + secretsBackend: { + type: 'vaultpass', + }, + }) + ); + + expect(resolution.state).toBe('invalid'); + if (resolution.state === 'invalid') { + expect(resolution.error).toContain('Unsupported'); + } + }); + + it('validates required 
vault', () => { + const resolution = resolveSecretsBackendConfig( + normalizeSyncConfig({ + secretsBackend: { + type: '1password', + documents: { + authJson: 'opencode-auth.json', + mcpAuthJson: 'opencode-mcp-auth.json', + }, + }, + }) + ); + + expect(resolution.state).toBe('invalid'); + if (resolution.state === 'invalid') { + expect(resolution.error).toContain('vault'); + } + }); + + it('requires unique document names', () => { + const resolution = resolveSecretsBackendConfig( + normalizeSyncConfig({ + secretsBackend: { + type: '1password', + vault: 'Personal', + documents: { + authJson: 'shared.json', + mcpAuthJson: 'SHARED.json', + }, + }, + }) + ); + + expect(resolution.state).toBe('invalid'); + if (resolution.state === 'invalid') { + expect(resolution.error).toContain('unique'); + } + }); + + it('returns ok when valid', () => { + const resolution = resolveSecretsBackendConfig( + normalizeSyncConfig({ + secretsBackend: { + type: '1password', + vault: 'Personal', + documents: { + authJson: 'opencode-auth.json', + mcpAuthJson: 'opencode-mcp-auth.json', + }, + }, + }) + ); + + expect(resolution.state).toBe('ok'); + if (resolution.state === 'ok') { + expect(resolution.config.vault).toBe('Personal'); + expect(resolution.config.authJson).toBe('opencode-auth.json'); + } + }); +}); + +describe('computeSecretsHash', () => { + it('changes when auth files change', async () => { + const root = await mkdtemp(path.join(os.tmpdir(), 'opencode-sync-')); + const env = { + HOME: root, + XDG_DATA_HOME: path.join(root, 'data'), + XDG_CONFIG_HOME: path.join(root, 'config'), + XDG_STATE_HOME: path.join(root, 'state'), + } as NodeJS.ProcessEnv; + + try { + const locations = resolveSyncLocations(env, 'linux'); + const dataRoot = path.join(locations.xdg.dataDir, 'opencode'); + const authPath = path.join(dataRoot, 'auth.json'); + const mcpPath = path.join(dataRoot, 'mcp-auth.json'); + + await mkdir(dataRoot, { recursive: true }); + + const emptyHash = await 
computeSecretsHash(locations); + await writeFile(authPath, 'first'); + const authHash = await computeSecretsHash(locations); + await writeFile(mcpPath, 'second'); + const bothHash = await computeSecretsHash(locations); + + expect(emptyHash).not.toBe(authHash); + expect(authHash).not.toBe(bothHash); + } finally { + await rm(root, { recursive: true, force: true }); + } + }); +}); diff --git a/src/sync/secrets-backend.ts b/src/sync/secrets-backend.ts new file mode 100644 index 0000000..4cfeb23 --- /dev/null +++ b/src/sync/secrets-backend.ts @@ -0,0 +1,437 @@ +import crypto from 'node:crypto'; +import { promises as fs } from 'node:fs'; +import path from 'node:path'; + +import type { PluginInput } from '@opencode-ai/plugin'; +import type { NormalizedSyncConfig, SecretsBackendConfig } from './config.js'; +import { chmodIfExists, pathExists } from './config.js'; +import { SyncCommandError } from './errors.js'; +import type { SyncLocations } from './paths.js'; + +type Shell = PluginInput['$']; + +export interface OnePasswordConfig { + type: '1password'; + vault: string; + authJson: string; + mcpAuthJson: string; +} + +export interface SecretsBackend { + pull: () => Promise; + push: () => Promise; + status: () => Promise; +} + +export type SecretsBackendResolution = + | { state: 'none' } + | { state: 'invalid'; error: string } + | { state: 'ok'; config: OnePasswordConfig }; + +type DocumentIndex = Map; + +interface VaultDocumentEntry { + id: string; + title: string; +} + +export function resolveSecretsBackendConfig( + config: NormalizedSyncConfig +): SecretsBackendResolution { + const backend = config.secretsBackend; + if (!backend) { + return { state: 'none' }; + } + const backendType = backend.type as string; + if (backendType !== '1password') { + return { + state: 'invalid', + error: `Unsupported secrets backend type "${backendType}".`, + }; + } + return resolveOnePasswordConfig(backend); +} + +export function resolveAuthFilePaths(locations: SyncLocations): { + authPath: 
string; + mcpAuthPath: string; +} { + const dataRoot = path.join(locations.xdg.dataDir, 'opencode'); + return { + authPath: path.join(dataRoot, 'auth.json'), + mcpAuthPath: path.join(dataRoot, 'mcp-auth.json'), + }; +} + +export function resolveRepoAuthPaths(repoRoot: string): { + authRepoPath: string; + mcpAuthRepoPath: string; +} { + const repoDataRoot = path.join(repoRoot, 'data'); + return { + authRepoPath: path.join(repoDataRoot, 'auth.json'), + mcpAuthRepoPath: path.join(repoDataRoot, 'mcp-auth.json'), + }; +} + +export async function computeSecretsHash(locations: SyncLocations): Promise { + const { authPath, mcpAuthPath } = resolveAuthFilePaths(locations); + return await hashFiles([authPath, mcpAuthPath]); +} + +export function createSecretsBackend(options: { + $: Shell; + locations: SyncLocations; + config: OnePasswordConfig; +}): SecretsBackend { + const backendType = options.config.type as string; + if (backendType === '1password') { + return createOnePasswordBackend(options); + } + throw new SyncCommandError(`Unsupported secrets backend type "${backendType}".`); +} + +function resolveOnePasswordConfig(backend: SecretsBackendConfig): SecretsBackendResolution { + const vault = backend.vault?.trim(); + if (!vault) { + return { + state: 'invalid', + error: 'secretsBackend.vault is required for type "1password".', + }; + } + + const documents = backend.documents ?? 
{}; + const authJson = documents.authJson?.trim(); + const mcpAuthJson = documents.mcpAuthJson?.trim(); + + if (!authJson || !mcpAuthJson) { + return { + state: 'invalid', + error: + 'secretsBackend.documents.authJson and secretsBackend.documents.mcpAuthJson ' + + 'are required for type "1password".', + }; + } + + if (normalizeDocumentName(authJson) === normalizeDocumentName(mcpAuthJson)) { + return { + state: 'invalid', + error: + 'secretsBackend.documents.authJson and secretsBackend.documents.mcpAuthJson must be unique.', + }; + } + + return { + state: 'ok', + config: { + type: '1password', + vault, + authJson, + mcpAuthJson, + }, + }; +} + +function normalizeDocumentName(name: string): string { + return name.trim().toLowerCase(); +} + +function createOnePasswordBackend(options: { + $: Shell; + locations: SyncLocations; + config: OnePasswordConfig; +}): SecretsBackend { + const { $, locations, config } = options; + const { authPath, mcpAuthPath } = resolveAuthFilePaths(locations); + + const pull = async (): Promise => { + await ensureOpAvailable($); + const index = await listVaultDocuments($, config.vault); + await pullDocument($, config.vault, config.authJson, authPath, index); + await pullDocument($, config.vault, config.mcpAuthJson, mcpAuthPath, index); + }; + + const push = async (): Promise => { + await ensureOpAvailable($); + const existing = await Promise.all([pathExists(authPath), pathExists(mcpAuthPath)]); + if (!existing.some(Boolean)) { + return; + } + const index = await listVaultDocuments($, config.vault); + await pushDocument($, config.vault, config.authJson, authPath, index); + await pushDocument($, config.vault, config.mcpAuthJson, mcpAuthPath, index); + }; + + const status = async (): Promise => { + await ensureOpAvailable($); + return `1Password backend configured for vault "${config.vault}".`; + }; + + return { pull, push, status }; +} + +async function ensureOpAvailable($: Shell): Promise { + try { + await $`op --version`.quiet(); + } catch { 
+ throw new SyncCommandError('1Password CLI not found. Install it and sign in with `op signin`.'); + } +} + +async function listVaultDocuments($: Shell, vault: string): Promise { + let output: string; + try { + output = await $`op item list --vault ${vault} --categories Document --format json` + .quiet() + .text(); + } catch (error) { + throw new SyncCommandError(`1Password document list failed: ${formatShellError(error)}`); + } + + let parsed: unknown; + try { + parsed = JSON.parse(output) as unknown; + } catch { + throw new SyncCommandError('1Password document list returned invalid JSON.'); + } + + if (!Array.isArray(parsed)) { + throw new SyncCommandError('1Password document list returned unexpected data.'); + } + + const index: DocumentIndex = new Map(); + for (const entry of parsed) { + if (!entry || typeof entry !== 'object') continue; + const record = entry as { id?: unknown; title?: unknown }; + const id = typeof record.id === 'string' ? record.id : ''; + const title = typeof record.title === 'string' ? record.title : ''; + if (!id || !title) continue; + const key = normalizeDocumentName(title); + const existing = index.get(key); + const item = { id, title }; + if (existing) { + existing.push(item); + } else { + index.set(key, [item]); + } + } + + return index; +} + +function lookupDocument( + index: DocumentIndex, + documentName: string +): { + state: 'missing' | 'duplicate' | 'ok'; + count: number; +} { + const key = normalizeDocumentName(documentName); + const matches = index.get(key) ?? 
[]; + if (matches.length === 0) { + return { state: 'missing', count: 0 }; + } + if (matches.length > 1) { + return { state: 'duplicate', count: matches.length }; + } + return { state: 'ok', count: 1 }; +} + +async function pullDocument( + $: Shell, + vault: string, + documentName: string, + targetPath: string, + index: DocumentIndex +): Promise { + const lookup = lookupDocument(index, documentName); + if (lookup.state === 'missing') { + return; + } + if (lookup.state === 'duplicate') { + throw new SyncCommandError( + `Multiple documents named "${documentName}" found in vault "${vault}". ` + + 'Rename them to be unique.' + ); + } + + const { tempDir, tempPath } = await createTempPath(targetPath); + try { + try { + await opDocumentGet($, vault, documentName, tempPath); + } catch (error) { + const retryLookup = await lookupDocumentWithRetry($, vault, documentName); + if (!retryLookup) { + throw error; + } + if (retryLookup.state === 'missing') { + return; + } + if (retryLookup.state === 'duplicate') { + throw new SyncCommandError( + `Multiple documents named "${documentName}" found in vault "${vault}". ` + + 'Rename them to be unique.' + ); + } + throw error; + } + await replaceFile(tempPath, targetPath); + } finally { + await fs.rm(tempDir, { recursive: true, force: true }); + } +} + +async function pushDocument( + $: Shell, + vault: string, + documentName: string, + sourcePath: string, + index: DocumentIndex +): Promise { + if (!(await pathExists(sourcePath))) { + return; + } + + const lookup = lookupDocument(index, documentName); + if (lookup.state === 'duplicate') { + throw new SyncCommandError( + `Multiple documents named "${documentName}" found in vault "${vault}". ` + + 'Rename them to be unique.' 
+ ); + } + + if (lookup.state === 'missing') { + try { + await opDocumentCreate($, vault, documentName, sourcePath); + } catch (createError) { + throw new SyncCommandError(`1Password create failed: ${formatShellError(createError)}`); + } + return; + } + + try { + await opDocumentEdit($, vault, documentName, sourcePath); + } catch (error) { + const retryLookup = await lookupDocumentWithRetry($, vault, documentName); + if (!retryLookup) { + throw error; + } + if (retryLookup.state === 'missing') { + try { + await opDocumentCreate($, vault, documentName, sourcePath); + } catch (createError) { + throw new SyncCommandError(`1Password create failed: ${formatShellError(createError)}`); + } + return; + } + if (retryLookup.state === 'duplicate') { + throw new SyncCommandError( + `Multiple documents named "${documentName}" found in vault "${vault}". ` + + 'Rename them to be unique.' + ); + } + throw error; + } +} + +async function opDocumentGet( + $: Shell, + vault: string, + name: string, + outFile: string +): Promise { + try { + await $`op document get ${name} --vault ${vault} --out-file ${outFile}`.quiet(); + } catch (error) { + throw new SyncCommandError(`1Password download failed: ${formatShellError(error)}`); + } +} + +async function opDocumentCreate( + $: Shell, + vault: string, + name: string, + sourcePath: string +): Promise { + await $`op document create --vault ${vault} ${sourcePath} --title ${name}`.quiet(); +} + +async function opDocumentEdit( + $: Shell, + vault: string, + name: string, + sourcePath: string +): Promise { + try { + await $`op document edit ${name} --vault ${vault} ${sourcePath}`.quiet(); + } catch (error) { + throw new SyncCommandError(`1Password update failed: ${formatShellError(error)}`); + } +} + +async function lookupDocumentWithRetry( + $: Shell, + vault: string, + documentName: string +): Promise<{ state: 'missing' | 'duplicate' | 'ok'; count: number } | null> { + try { + const retryIndex = await listVaultDocuments($, vault); + return 
lookupDocument(retryIndex, documentName); + } catch { + return null; + } +} + +async function createTempPath(targetPath: string): Promise<{ tempDir: string; tempPath: string }> { + const targetDir = path.dirname(targetPath); + await fs.mkdir(targetDir, { recursive: true }); + const tempDir = await fs.mkdtemp(path.join(targetDir, '.opencode-synced-')); + const tempPath = path.join(tempDir, path.basename(targetPath)); + return { tempDir, tempPath }; +} + +async function replaceFile(sourcePath: string, targetPath: string): Promise { + await fs.mkdir(path.dirname(targetPath), { recursive: true }); + await chmodIfExists(sourcePath, 0o600); + try { + await fs.rename(sourcePath, targetPath); + } catch (error) { + const maybeErrno = error as NodeJS.ErrnoException; + if (maybeErrno.code !== 'EXDEV') { + throw error; + } + await fs.copyFile(sourcePath, targetPath); + await fs.unlink(sourcePath); + } + await chmodIfExists(targetPath, 0o600); +} + +async function hashFiles(paths: string[]): Promise { + const hash = crypto.createHash('sha256'); + for (const filePath of paths) { + hash.update(filePath); + hash.update('\0'); + const exists = await pathExists(filePath); + hash.update(exists ? 
'1' : '0'); + if (exists) { + const data = await fs.readFile(filePath); + hash.update(data); + } + hash.update('\0'); + } + return hash.digest('hex'); +} + +function formatShellError(error: unknown): string { + if (!error) return 'Unknown error'; + if (typeof error === 'string') return error; + if (error instanceof Error && error.message) return error.message; + + const maybe = error as { stderr?: string; message?: string }; + const parts = [maybe.stderr, maybe.message].filter( + (value): value is string => typeof value === 'string' && value.length > 0 + ); + if (parts.length > 0) return parts.join('\n'); + + return String(error); +} diff --git a/src/sync/service.ts b/src/sync/service.ts index df7071b..b9d4b65 100644 --- a/src/sync/service.ts +++ b/src/sync/service.ts @@ -3,13 +3,15 @@ import path from 'node:path'; import type { PluginInput } from '@opencode-ai/plugin'; import { syncLocalToRepo, syncRepoToLocal } from './apply.js'; import { generateCommitMessage } from './commit.js'; +import type { NormalizedSyncConfig } from './config.js'; import { canCommitMcpSecrets, + hasSecretsBackend, loadOverrides, loadState, loadSyncConfig, normalizeSyncConfig, - writeState, + updateState, writeSyncConfig, } from './config.js'; import { SyncCommandError, SyncConfigMissingError } from './errors.js'; @@ -31,6 +33,13 @@ import { resolveRepoBranch, resolveRepoIdentifier, } from './repo.js'; +import { + computeSecretsHash, + createSecretsBackend, + resolveRepoAuthPaths, + resolveSecretsBackendConfig, + type SecretsBackend, +} from './secrets-backend.js'; import { createLogger, extractTextFromResponse, @@ -72,6 +81,9 @@ export interface SyncService { link: (_options: LinkOptions) => Promise; pull: () => Promise; push: () => Promise; + secretsPull: () => Promise; + secretsPush: () => Promise; + secretsStatus: () => Promise; enableSecrets: (_options?: { extraSecretPaths?: string[]; includeMcpSecrets?: boolean; @@ -116,6 +128,128 @@ export function createSyncService(ctx: 
SyncServiceContext): SyncService { fn ); + const resolveSecretsBackend = (config: NormalizedSyncConfig): SecretsBackend | null => { + const resolution = resolveSecretsBackendConfig(config); + if (resolution.state === 'none') { + return null; + } + + if (resolution.state === 'invalid') { + throw new SyncCommandError(resolution.error); + } + + return createSecretsBackend({ $: ctx.$, locations, config: resolution.config }); + }; + + const ensureAuthFilesNotTracked = async ( + repoRoot: string, + config: NormalizedSyncConfig + ): Promise => { + if (!hasSecretsBackend(config)) return; + + const { authRepoPath, mcpAuthRepoPath } = resolveRepoAuthPaths(repoRoot); + const tracked: string[] = []; + const authRelPath = toRepoRelativePath(repoRoot, authRepoPath); + const mcpRelPath = toRepoRelativePath(repoRoot, mcpAuthRepoPath); + + if (await isRepoPathTracked(ctx.$, repoRoot, authRelPath)) { + tracked.push(authRelPath); + } + if (await isRepoPathTracked(ctx.$, repoRoot, mcpRelPath)) { + tracked.push(mcpRelPath); + } + + if (tracked.length === 0) return; + + const trackedList = tracked.join(', '); + throw new SyncCommandError( + `Sync repo already tracks secret auth files (${trackedList}). ` + + 'Remove them and rewrite history before enabling a secrets backend.' 
+ ); + }; + + const computeSecretsHashSafe = async (): Promise => { + try { + return await computeSecretsHash(locations); + } catch (error) { + log.warn('Failed to compute secrets hash', { error: formatError(error) }); + return null; + } + }; + + const updateSecretsHashState = async (): Promise => { + const hash = await computeSecretsHashSafe(); + if (!hash) return; + await updateState(locations, { lastSecretsHash: hash }); + }; + + const pushSecretsWithBackend = async (backend: SecretsBackend): Promise<'skipped' | 'pushed'> => { + const hash = await computeSecretsHashSafe(); + if (hash) { + const state = await loadState(locations); + if (state.lastSecretsHash === hash) { + log.debug('Secrets unchanged; skipping secrets push'); + return 'skipped'; + } + } + + await backend.push(); + if (hash) { + await updateState(locations, { lastSecretsHash: hash }); + } + return 'pushed'; + }; + + const runSecretsPullIfConfigured = async (config: NormalizedSyncConfig): Promise => { + const backend = resolveSecretsBackend(config); + if (!backend) return; + await backend.pull(); + await updateSecretsHashState(); + }; + + const runSecretsPushIfConfigured = async ( + config: NormalizedSyncConfig + ): Promise<'not_configured' | 'skipped' | 'pushed'> => { + const backend = resolveSecretsBackend(config); + if (!backend) return 'not_configured'; + return await pushSecretsWithBackend(backend); + }; + + const secretsBackendNotConfiguredMessage = + 'Secrets backend not configured. 
Add secretsBackend to opencode-synced.jsonc.'; + + const resolveSecretsBackendForCommand = async (): Promise< + { backend: SecretsBackend } | { message: string } + > => { + const config = await getConfigOrThrow(locations); + const resolution = resolveSecretsBackendConfig(config); + if (resolution.state === 'none') { + return { + message: secretsBackendNotConfiguredMessage, + }; + } + if (resolution.state === 'invalid') { + throw new SyncCommandError(resolution.error); + } + return { + backend: createSecretsBackend({ + $: ctx.$, + locations, + config: resolution.config, + }), + }; + }; + + const runSecretsCommand = async ( + action: (backend: SecretsBackend) => Promise + ): Promise => { + const resolved = await resolveSecretsBackendForCommand(); + if ('message' in resolved) { + return resolved.message; + } + return await action(resolved.backend); + }; + return { startupSync: () => skipIfBusy(async () => { @@ -141,7 +275,11 @@ export function createSyncService(ctx: SyncServiceContext): SyncService { return; } try { - await runStartup(ctx, locations, config, log); + assertValidSecretsBackend(config); + await runStartup(ctx, locations, config, log, { + ensureAuthFilesNotTracked, + runSecretsPullIfConfigured, + }); } catch (error) { log.error('Startup sync failed', { error: formatError(error) }); await showToast(ctx.client, formatError(error), 'error'); @@ -153,6 +291,8 @@ export function createSyncService(ctx: SyncServiceContext): SyncService { return 'opencode-synced is not configured. Run /sync-init to set it up.'; } + assertValidSecretsBackend(config); + const repoRoot = resolveRepoRoot(config, locations); const state = await loadState(locations); let repoStatus: string[] = []; @@ -177,6 +317,7 @@ export function createSyncService(ctx: SyncServiceContext): SyncService { const includeSessions = config.includeSessions ? 'enabled' : 'disabled'; const includePromptStash = config.includePromptStash ? 
'enabled' : 'disabled'; const includeModelFavorites = config.includeModelFavorites ? 'enabled' : 'disabled'; + const secretsBackend = config.secretsBackend?.type ?? 'none'; const lastPull = state.lastPull ?? 'never'; const lastPush = state.lastPush ?? 'never'; @@ -194,6 +335,7 @@ export function createSyncService(ctx: SyncServiceContext): SyncService { `Repo: ${repoIdentifier}`, `Branch: ${branch}`, `Secrets: ${includeSecrets}`, + `Secrets backend: ${secretsBackend}`, `MCP secrets: ${includeMcpSecrets}`, `Sessions: ${includeSessions}`, `Prompt stash: ${includePromptStash}`, @@ -237,7 +379,7 @@ export function createSyncService(ctx: SyncServiceContext): SyncService { const branch = resolveRepoBranch(config); await commitAll(ctx.$, repoRoot, 'Initial sync from opencode-synced'); await pushBranch(ctx.$, repoRoot, branch); - await writeState(locations, { lastPush: new Date().toISOString() }); + await updateState(locations, { lastPush: new Date().toISOString() }); } } @@ -293,7 +435,7 @@ export function createSyncService(ctx: SyncServiceContext): SyncService { const plan = buildSyncPlan(config, locations, repoRoot); await syncRepoToLocal(plan, overrides); - await writeState(locations, { + await updateState(locations, { lastPull: new Date().toISOString(), lastRemoteUpdate: new Date().toISOString(), }); @@ -320,6 +462,7 @@ export function createSyncService(ctx: SyncServiceContext): SyncService { const repoRoot = resolveRepoRoot(config, locations); await ensureRepoCloned(ctx.$, config, repoRoot); await ensureSecretsPolicy(ctx, config); + await ensureAuthFilesNotTracked(repoRoot, config); const branch = await resolveBranch(ctx, config, repoRoot); @@ -338,8 +481,9 @@ export function createSyncService(ctx: SyncServiceContext): SyncService { const overrides = await loadOverrides(locations); const plan = buildSyncPlan(config, locations, repoRoot); await syncRepoToLocal(plan, overrides); + await runSecretsPullIfConfigured(config); - await writeState(locations, { + await 
updateState(locations, { lastPull: new Date().toISOString(), lastRemoteUpdate: new Date().toISOString(), }); @@ -353,6 +497,7 @@ export function createSyncService(ctx: SyncServiceContext): SyncService { const repoRoot = resolveRepoRoot(config, locations); await ensureRepoCloned(ctx.$, config, repoRoot); await ensureSecretsPolicy(ctx, config); + await ensureAuthFilesNotTracked(repoRoot, config); const branch = await resolveBranch(ctx, config, repoRoot); const preDirty = await hasLocalChanges(ctx.$, repoRoot); @@ -371,19 +516,66 @@ export function createSyncService(ctx: SyncServiceContext): SyncService { const dirty = await hasLocalChanges(ctx.$, repoRoot); if (!dirty) { - return 'No local changes to push.'; + try { + const secretsResult = await runSecretsPushIfConfigured(config); + if (secretsResult === 'pushed') { + return 'No local changes to push. Secrets updated.'; + } + if (secretsResult === 'skipped') { + return 'No local changes to push. Secrets unchanged.'; + } + return 'No local changes to push.'; + } catch (error) { + log.warn('Secrets push failed after sync check', { error: formatError(error) }); + return `No local changes to push. Secrets push failed: ${formatError(error)}`; + } } const message = await generateCommitMessage({ client: ctx.client, $: ctx.$ }, repoRoot); await commitAll(ctx.$, repoRoot, message); await pushBranch(ctx.$, repoRoot, branch); - await writeState(locations, { + let secretsFailure: string | null = null; + try { + await runSecretsPushIfConfigured(config); + } catch (error) { + secretsFailure = formatError(error); + log.warn('Secrets push failed after repo push', { error: secretsFailure }); + } + + await updateState(locations, { lastPush: new Date().toISOString(), }); + if (secretsFailure) { + return `Pushed changes: ${message}. 
Secrets push failed: ${secretsFailure}`; + } return `Pushed changes: ${message}`; }), + secretsPull: () => + runExclusive(() => + runSecretsCommand(async (backend) => { + await backend.pull(); + await updateSecretsHashState(); + return 'Pulled secrets from 1Password.'; + }) + ), + secretsPush: () => + runExclusive(() => + runSecretsCommand(async (backend) => { + const result = await pushSecretsWithBackend(backend); + if (result === 'skipped') { + return 'Secrets unchanged; skipping 1Password push.'; + } + return 'Pushed secrets to 1Password.'; + }) + ), + secretsStatus: () => + runExclusive(() => + runSecretsCommand(async (backend) => { + return await backend.status(); + }) + ), enableSecrets: (options?: { extraSecretPaths?: string[]; includeMcpSecrets?: boolean }) => runExclusive(async () => { const config = await getConfigOrThrow(locations); @@ -439,17 +631,47 @@ export function createSyncService(ctx: SyncServiceContext): SyncService { }; } +function assertValidSecretsBackend(config: NormalizedSyncConfig): void { + const resolution = resolveSecretsBackendConfig(config); + if (resolution.state === 'invalid') { + throw new SyncCommandError(resolution.error); + } +} + +async function isRepoPathTracked( + $: Shell, + repoRoot: string, + repoRelativePath: string +): Promise { + const safePath = repoRelativePath.split(path.sep).join('/'); + try { + await $`git -C ${repoRoot} ls-files --error-unmatch ${safePath}`.quiet(); + return true; + } catch { + return false; + } +} + +function toRepoRelativePath(repoRoot: string, absolutePath: string): string { + return path.relative(repoRoot, absolutePath).split(path.sep).join('/'); +} + async function runStartup( ctx: SyncServiceContext, locations: ReturnType, config: ReturnType, - log: Logger + log: Logger, + options: { + ensureAuthFilesNotTracked: (repoRoot: string, config: NormalizedSyncConfig) => Promise; + runSecretsPullIfConfigured: (config: NormalizedSyncConfig) => Promise; + } ): Promise { const repoRoot = 
resolveRepoRoot(config, locations); log.debug('Starting sync', { repoRoot }); await ensureRepoCloned(ctx.$, config, repoRoot); await ensureSecretsPolicy(ctx, config); + await options.ensureAuthFilesNotTracked(repoRoot, config); const branch = await resolveBranch(ctx, config, repoRoot); log.debug('Resolved branch', { branch }); @@ -470,7 +692,8 @@ async function runStartup( const overrides = await loadOverrides(locations); const plan = buildSyncPlan(config, locations, repoRoot); await syncRepoToLocal(plan, overrides); - await writeState(locations, { + await options.runSecretsPullIfConfigured(config); + await updateState(locations, { lastPull: new Date().toISOString(), lastRemoteUpdate: new Date().toISOString(), }); @@ -494,7 +717,7 @@ async function runStartup( log.info('Pushing local changes', { message }); await commitAll(ctx.$, repoRoot, message); await pushBranch(ctx.$, repoRoot, branch); - await writeState(locations, { + await updateState(locations, { lastPush: new Date().toISOString(), }); } @@ -508,6 +731,7 @@ async function getConfigOrThrow( 'Missing opencode-synced config. Run /sync-init to set it up.' ); } + assertValidSecretsBackend(config); return config; }