diff --git a/TODO.md b/TODO.md index b5f71afb..164c7083 100644 --- a/TODO.md +++ b/TODO.md @@ -57,6 +57,30 @@ the item when done — git log is the history. Replace per-broker computation with one shared derive call; brokers emit raw fields (qty, markPrice, multiplier, side, avgCost) and downstream math is contract-uniform. +- [ ] Native Anthropic full provider — replaces `agent-sdk` for the + api-key chat path so non-subscription Anthropic credentials + (Claude API, MiniMax, GLM, Kimi, DeepSeek) stop spawning a + Claude Code subprocess every chat turn. Subscription credentials + (loginMethod=claudeai) physically need agent-sdk and stay there. + Shape: parallel to `CodexProvider` (~270 lines) — uses + `@anthropic-ai/sdk` directly, manual tool loop with + tool_use/tool_result content blocks, streaming events, history + serialization, Vercel→Anthropic tool format conversion. Then + wires into `GenerateRouter` (likely as new backend value + `anthropic-native`, or replaces `agent-sdk` for non-claudeai + profiles via the preset's chat adapter declaration once + preset-driven chat routing lands). Cleans up the per-vendor + `/v1` baseUrl hack in preset-catalog along the way (native SDK + hits `/v1/messages` by default, all four Anthropic-compat + vendors accept that path). ~4-6h focused work. +- [ ] Native OpenAI Chat Completions full provider — companion to the + Anthropic native work. Reuses the `openai` SDK we already have + (codex provider uses `client.responses.stream()`; this would + use `client.chat.completions.stream()`). Lets us drop + `vercel-openai` adapter entirely and gives Custom + OpenAI-compat + third parties (Together, Groq, vLLM, LM Studio, Ollama) a + proper light chat path. Same structural shape as the Anthropic + one. ~3-4h. - [ ] Unified config hot-reload. Right now every consumer of a config section has to solve "did the user edit this?" 
on its own — Telegram/MCP-Ask via `reconnectConnectors`, opentypebb via lazy diff --git a/package.json b/package.json index f2e7836a..4c9a67ed 100644 --- a/package.json +++ b/package.json @@ -1,13 +1,15 @@ { "name": "open-alice", - "version": "0.10.0-beta.0", + "version": "0.10.0-beta.1", "description": "File-based trading agent engine", "type": "module", "scripts": { "dev": "tsx watch src/main.ts", "dev:ui": "pnpm --filter open-alice-ui dev", "predev": "turbo run build --filter=@traderalice/opentypebb --filter=@traderalice/ibkr", + "prebuild": "tsx scripts/build-migration-index.ts", "build": "turbo run build && tsup src/main.ts --format esm --dts", + "build:migration-index": "tsx scripts/build-migration-index.ts", "start": "node dist/main.js", "test": "vitest run", "test:e2e": "vitest run --config vitest.e2e.config.ts", diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 0f294778..ddb31675 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -236,6 +236,9 @@ importers: lightweight-charts: specifier: ^5.1.0 version: 5.1.0 + lucide-react: + specifier: ^1.14.0 + version: 1.14.0(react@19.2.4) marked: specifier: ^15.0.12 version: 15.0.12 @@ -2497,6 +2500,11 @@ packages: lru-cache@5.1.1: resolution: {integrity: sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==} + lucide-react@1.14.0: + resolution: {integrity: sha512-+1mdWcfSJVUsaTIjN9zoezmUhfXo5l0vP7ekBMPo3jcS/aIkxHnXqAPsByszMZx/Y8oQBRJxJx5xg+RH3urzxA==} + peerDependencies: + react: ^16.5.1 || ^17.0.0 || ^18.0.0 || ^19.0.0 + lz-string@1.5.0: resolution: {integrity: sha512-h5bgJWpxJNswbU7qCrV0tIKQCaS3blPDrqKWx+QxzuzL1zGUzij9XCWLrSLsJPu5t+eWA/ycetzYAO5IOMcWAQ==} hasBin: true @@ -5460,6 +5468,10 @@ snapshots: dependencies: yallist: 3.1.1 + lucide-react@1.14.0(react@19.2.4): + dependencies: + react: 19.2.4 + lz-string@1.5.0: {} magic-string@0.30.21: diff --git a/scripts/build-migration-index.ts b/scripts/build-migration-index.ts new file mode 100644 index 00000000..0c7185b6 --- 
/dev/null +++ b/scripts/build-migration-index.ts @@ -0,0 +1,40 @@ +/** + * Generate src/migrations/INDEX.md from REGISTRY metadata. + * + * Run via: pnpm build:migration-index + * + * Wired to the `prebuild` hook so `pnpm build` always regenerates. + * INDEX.md is committed to the repo — a PR that adds a migration + * without an INDEX.md update is a visible red flag for reviewers. + */ +import { writeFileSync } from 'node:fs' +import { resolve, dirname } from 'node:path' +import { fileURLToPath } from 'node:url' +import { REGISTRY } from '../src/migrations/registry.js' + +const here = fileURLToPath(import.meta.url) +const repoRoot = resolve(dirname(here), '..') +const out = resolve(repoRoot, 'src/migrations/INDEX.md') + +function escape(s: string): string { + return s.replace(/\|/g, '\\|').replace(/\n/g, ' ') +} + +const rows = REGISTRY.map(m => + `| \`${m.id}\` | ${m.appVersion} | ${m.introducedAt} | ${m.affects.join(', ')} | ${escape(m.summary)} |`, +) + +const md = ` + + +# Migration Index + +Each row corresponds to one migration in \`src/migrations/\`. The runner applies pending migrations in this order on every boot, recording applied IDs in \`data/config/_meta.json\`. Migrations are idempotent in their body in addition to the journal-level guard. + +| ID | App Version | Date | Affects | Summary | +|----|-------------|------|---------|---------| +${rows.join('\n')} +` + +writeFileSync(out, md) +console.log(`Wrote ${out} (${REGISTRY.length} migrations)`) diff --git a/src/ai-providers/preset-catalog.ts b/src/ai-providers/preset-catalog.ts index d1a2d299..81c0d0d4 100644 --- a/src/ai-providers/preset-catalog.ts +++ b/src/ai-providers/preset-catalog.ts @@ -12,6 +12,7 @@ */ import { z } from 'zod' +import type { SdkAdapterDeclaration, SdkAdapterId } from './sdk-adapters.js' // ==================== Types ==================== @@ -25,6 +26,20 @@ export interface EndpointOption { label: string } +/** + * Adapter declaration block for a preset. 
`available` lists every SDK + * adapter the preset's credential can drive, each with a builder that + * maps the credential into that SDK's standard config shape. + * + * `test` names the adapter used by the wizard's "Test" button — pick + * the lightest available so non-subscription presets skip the heavy + * agent-sdk subprocess. + */ +export interface PresetSdkAdapters { + available: SdkAdapterDeclaration[] + test: SdkAdapterId +} + export interface PresetDef { id: string label: string @@ -36,6 +51,9 @@ export interface PresetDef { models?: ModelOption[] endpoints?: EndpointOption[] writeOnlyFields?: string[] + /** Internal — not exposed to the wizard JSON Schema. Drives the + * test-path adapter selection in GenerateRouter.askForTest. */ + sdkAdapters?: PresetSdkAdapters } // ==================== Official: Claude ==================== @@ -57,6 +75,12 @@ export const CLAUDE_OAUTH: PresetDef = { { id: 'claude-opus-4-6', label: 'Claude Opus 4.6' }, { id: 'claude-sonnet-4-6', label: 'Claude Sonnet 4.6' }, ], + sdkAdapters: { + available: [ + { id: 'agent-sdk', config: () => ({ loginMethod: 'claudeai' }) }, + ], + test: 'agent-sdk', + }, } export const CLAUDE_API: PresetDef = { @@ -79,6 +103,13 @@ export const CLAUDE_API: PresetDef = { { id: 'claude-haiku-4-5', label: 'Claude Haiku 4.5' }, ], writeOnlyFields: ['apiKey'], + sdkAdapters: { + available: [ + { id: 'vercel-anthropic', config: (c) => ({ apiKey: c.apiKey, baseURL: c.baseUrl }) }, + { id: 'agent-sdk', config: (c) => ({ apiKey: c.apiKey, baseUrl: c.baseUrl, loginMethod: 'api-key' }) }, + ], + test: 'vercel-anthropic', + }, } // ==================== Official: OpenAI Codex ==================== @@ -99,6 +130,12 @@ export const CODEX_OAUTH: PresetDef = { { id: 'gpt-5.4', label: 'GPT 5.4' }, { id: 'gpt-5.4-mini', label: 'GPT 5.4 Mini' }, ], + sdkAdapters: { + available: [ + { id: 'codex', config: () => ({ loginMethod: 'codex-oauth' }) }, + ], + test: 'codex', + }, } export const CODEX_API: PresetDef = { @@ -118,6 
+155,13 @@ export const CODEX_API: PresetDef = { { id: 'gpt-5.4-mini', label: 'GPT 5.4 Mini' }, ], writeOnlyFields: ['apiKey'], + sdkAdapters: { + available: [ + { id: 'vercel-openai', config: (c) => ({ apiKey: c.apiKey, baseURL: c.baseUrl }) }, + { id: 'codex', config: (c) => ({ apiKey: c.apiKey, baseUrl: c.baseUrl, loginMethod: 'api-key' }) }, + ], + test: 'vercel-openai', + }, } // ==================== Official: Gemini ==================== @@ -139,6 +183,12 @@ export const GEMINI: PresetDef = { { id: 'gemini-2.5-flash', label: 'Gemini 2.5 Flash' }, ], writeOnlyFields: ['apiKey'], + sdkAdapters: { + available: [ + { id: 'vercel-google', config: (c) => ({ apiKey: c.apiKey, baseURL: c.baseUrl }) }, + ], + test: 'vercel-google', + }, } // ==================== Third-party: MiniMax ==================== @@ -165,6 +215,16 @@ export const MINIMAX: PresetDef = { { id: 'MiniMax-M2.7', label: 'MiniMax M2.7' }, ], writeOnlyFields: ['apiKey'], + sdkAdapters: { + available: [ + // MiniMax serves Anthropic API at `/anthropic/v1/messages`. + // @ai-sdk/anthropic appends `/messages` directly, so the + // preset must append `/v1` to the user's baseUrl. + { id: 'vercel-anthropic', config: (c) => ({ apiKey: c.apiKey, baseURL: c.baseUrl ? `${c.baseUrl}/v1` : undefined }) }, + { id: 'agent-sdk', config: (c) => ({ apiKey: c.apiKey, baseUrl: c.baseUrl, loginMethod: 'api-key' }) }, + ], + test: 'vercel-anthropic', + }, } // ==================== Third-party: GLM (Zhipu) ==================== @@ -194,6 +254,14 @@ export const GLM: PresetDef = { { id: 'glm-4.5-air', label: 'GLM 4.5 Air' }, ], writeOnlyFields: ['apiKey'], + sdkAdapters: { + available: [ + // GLM serves Anthropic API at `/anthropic/v1/messages` (path probe). + { id: 'vercel-anthropic', config: (c) => ({ apiKey: c.apiKey, baseURL: c.baseUrl ? 
`${c.baseUrl}/v1` : undefined }) }, + { id: 'agent-sdk', config: (c) => ({ apiKey: c.apiKey, baseUrl: c.baseUrl, loginMethod: 'api-key' }) }, + ], + test: 'vercel-anthropic', + }, } // ==================== Third-party: Kimi (Moonshot) ==================== @@ -226,6 +294,14 @@ export const KIMI: PresetDef = { { id: 'kimi-k2.5', label: 'Kimi K2.5' }, ], writeOnlyFields: ['apiKey'], + sdkAdapters: { + available: [ + // Moonshot serves Anthropic API at `/anthropic/v1/messages` (path probe). + { id: 'vercel-anthropic', config: (c) => ({ apiKey: c.apiKey, baseURL: c.baseUrl ? `${c.baseUrl}/v1` : undefined }) }, + { id: 'agent-sdk', config: (c) => ({ apiKey: c.apiKey, baseUrl: c.baseUrl, loginMethod: 'api-key' }) }, + ], + test: 'vercel-anthropic', + }, } // ==================== Third-party: DeepSeek ==================== @@ -252,6 +328,15 @@ export const DEEPSEEK: PresetDef = { { id: 'deepseek-v4-flash', label: 'DeepSeek V4 Flash (cheap/fast)' }, ], writeOnlyFields: ['apiKey'], + sdkAdapters: { + available: [ + // DeepSeek serves Anthropic API at `/anthropic/messages` (no /v1 + // segment), unlike MiniMax/GLM/Kimi which need /v1 appended. 
+ { id: 'vercel-anthropic', config: (c) => ({ apiKey: c.apiKey, baseURL: c.baseUrl }) }, + { id: 'agent-sdk', config: (c) => ({ apiKey: c.apiKey, baseUrl: c.baseUrl, loginMethod: 'api-key' }) }, + ], + test: 'vercel-anthropic', + }, } // ==================== Custom ==================== diff --git a/src/ai-providers/sdk-adapters.spec.ts b/src/ai-providers/sdk-adapters.spec.ts new file mode 100644 index 00000000..94b7c556 --- /dev/null +++ b/src/ai-providers/sdk-adapters.spec.ts @@ -0,0 +1,321 @@ +import { describe, it, expect, vi, beforeEach } from 'vitest' +import { + resolveTestAdapter, + invokeAdapter, + SDK_INVOKERS, + SDK_ADAPTER_LABELS, + getSdkAdapterInfo, + type SdkAdapterDeclaration, +} from './sdk-adapters.js' +import { PRESET_CATALOG } from './preset-catalog.js' +import type { Credential, ResolvedProfile } from '../core/config.js' + +// ==================== Mocked SDK packages — vercel-* invokers ==================== + +const mockGenerateText = vi.fn().mockResolvedValue({ text: 'mock response' }) +const mockAnthropicClient = vi.fn().mockReturnValue('anthropic-model-instance') +const mockOpenAIClient = vi.fn().mockReturnValue('openai-model-instance') +const mockGoogleClient = vi.fn().mockReturnValue('google-model-instance') + +vi.mock('@ai-sdk/anthropic', () => ({ + createAnthropic: vi.fn().mockImplementation(() => mockAnthropicClient), +})) +vi.mock('@ai-sdk/openai', () => ({ + createOpenAI: vi.fn().mockImplementation(() => mockOpenAIClient), +})) +vi.mock('@ai-sdk/google', () => ({ + createGoogleGenerativeAI: vi.fn().mockImplementation(() => mockGoogleClient), +})) +vi.mock('ai', () => ({ generateText: mockGenerateText })) + +beforeEach(() => { + vi.clearAllMocks() + mockGenerateText.mockResolvedValue({ text: 'mock response' }) +}) + +// ==================== Preset declarations ==================== + +describe('PRESET_CATALOG sdkAdapters declarations', () => { + it('every non-Custom preset declares sdkAdapters', () => { + for (const preset of 
PRESET_CATALOG) { + if (preset.id === 'custom') continue + expect(preset.sdkAdapters, `preset ${preset.id} missing sdkAdapters`).toBeDefined() + } + }) + + it("each preset's `test` adapter exists in `available`", () => { + for (const preset of PRESET_CATALOG) { + if (!preset.sdkAdapters) continue + const ids = preset.sdkAdapters.available.map((a) => a.id) + expect(ids, `preset ${preset.id}`).toContain(preset.sdkAdapters.test) + } + }) + + it('Custom preset declares no sdkAdapters (fallback path)', () => { + const custom = PRESET_CATALOG.find((p) => p.id === 'custom')! + expect(custom.sdkAdapters).toBeUndefined() + }) +}) + +// ==================== resolveTestAdapter — preset path ==================== + +describe('resolveTestAdapter (preset path)', () => { + function profile(preset: string, overrides: Partial = {}): ResolvedProfile { + return { backend: 'agent-sdk', model: 'm', preset, ...overrides } + } + + it('DeepSeek preset → vercel-anthropic with credential.baseUrl mapped to baseURL', () => { + const decl = resolveTestAdapter(profile('deepseek'), PRESET_CATALOG) + expect(decl.id).toBe('vercel-anthropic') + + const cred: Credential = { vendor: 'deepseek', authType: 'api-key', apiKey: 'k', baseUrl: 'https://api.deepseek.com/anthropic' } + expect(decl.config(cred)).toEqual({ apiKey: 'k', baseURL: 'https://api.deepseek.com/anthropic' }) + }) + + it('Claude OAuth preset → agent-sdk with loginMethod claudeai', () => { + const decl = resolveTestAdapter(profile('claude-oauth'), PRESET_CATALOG) + expect(decl.id).toBe('agent-sdk') + + const cred: Credential = { vendor: 'anthropic', authType: 'subscription' } + expect(decl.config(cred)).toEqual({ loginMethod: 'claudeai' }) + }) + + it('Claude API preset → vercel-anthropic (lighter than agent-sdk)', () => { + const decl = resolveTestAdapter(profile('claude-api'), PRESET_CATALOG) + expect(decl.id).toBe('vercel-anthropic') + }) + + it('Codex OAuth preset → codex with codex-oauth', () => { + const decl = 
resolveTestAdapter(profile('codex-oauth', { backend: 'codex' }), PRESET_CATALOG) + expect(decl.id).toBe('codex') + const cred: Credential = { vendor: 'openai', authType: 'subscription' } + expect(decl.config(cred)).toEqual({ loginMethod: 'codex-oauth' }) + }) + + it('Codex API preset → vercel-openai', () => { + const decl = resolveTestAdapter(profile('codex-api', { backend: 'codex' }), PRESET_CATALOG) + expect(decl.id).toBe('vercel-openai') + }) + + it('Gemini preset → vercel-google', () => { + const decl = resolveTestAdapter(profile('gemini', { backend: 'vercel-ai-sdk' }), PRESET_CATALOG) + expect(decl.id).toBe('vercel-google') + }) + + it('MiniMax preset → vercel-anthropic with /v1 appended to baseUrl', () => { + const decl = resolveTestAdapter(profile('minimax'), PRESET_CATALOG) + expect(decl.id).toBe('vercel-anthropic') + const cred: Credential = { vendor: 'minimax', authType: 'api-key', apiKey: 'k', baseUrl: 'https://api.minimaxi.com/anthropic' } + expect(decl.config(cred)).toEqual({ + apiKey: 'k', + baseURL: 'https://api.minimaxi.com/anthropic/v1', // MiniMax's path is /anthropic/v1/messages + }) + }) + + it('DeepSeek preset does NOT append /v1 (path is /anthropic/messages)', () => { + const decl = resolveTestAdapter(profile('deepseek'), PRESET_CATALOG) + const cred: Credential = { vendor: 'deepseek', authType: 'api-key', apiKey: 'k', baseUrl: 'https://api.deepseek.com/anthropic' } + expect(decl.config(cred)).toEqual({ + apiKey: 'k', + baseURL: 'https://api.deepseek.com/anthropic', // unchanged + }) + }) + + it('agent-sdk fallback config uses baseUrl (not baseURL)', () => { + // Agent SDK's standard field name is baseUrl (lowercase u) + const decl = resolveTestAdapter(profile('claude-api'), PRESET_CATALOG) + const altDecl = decl.id === 'vercel-anthropic' + ? PRESET_CATALOG.find(p => p.id === 'claude-api')!.sdkAdapters!.available.find(a => a.id === 'agent-sdk')! 
+ : decl + const cred: Credential = { vendor: 'anthropic', authType: 'api-key', apiKey: 'k', baseUrl: 'https://x' } + const cfg = altDecl.config(cred) as { baseUrl?: string; baseURL?: string } + expect(cfg.baseUrl).toBe('https://x') + expect(cfg.baseURL).toBeUndefined() + }) +}) + +// ==================== resolveTestAdapter — fallback synthesis ==================== + +describe('resolveTestAdapter (fallback)', () => { + it('Custom preset with backend=agent-sdk → synthesized agent-sdk decl', () => { + const profile: ResolvedProfile = { + backend: 'agent-sdk', model: 'm', preset: 'custom', loginMethod: 'api-key', + } + const decl = resolveTestAdapter(profile, PRESET_CATALOG) + expect(decl.id).toBe('agent-sdk') + const cred: Credential = { vendor: 'custom', authType: 'api-key', apiKey: 'k', baseUrl: 'https://x' } + expect(decl.config(cred)).toEqual({ apiKey: 'k', baseUrl: 'https://x', loginMethod: 'api-key' }) + }) + + it('Custom preset with backend=codex → synthesized codex decl', () => { + const profile: ResolvedProfile = { backend: 'codex', model: 'm', preset: 'custom', loginMethod: 'api-key' } + const decl = resolveTestAdapter(profile, PRESET_CATALOG) + expect(decl.id).toBe('codex') + }) + + it('Custom preset with backend=vercel-ai-sdk + provider=openai → vercel-openai', () => { + const profile: ResolvedProfile = { backend: 'vercel-ai-sdk', model: 'm', preset: 'custom', provider: 'openai' } + const decl = resolveTestAdapter(profile, PRESET_CATALOG) + expect(decl.id).toBe('vercel-openai') + }) + + it('Custom + provider=google → vercel-google', () => { + const profile: ResolvedProfile = { backend: 'vercel-ai-sdk', model: 'm', preset: 'custom', provider: 'google' } + expect(resolveTestAdapter(profile, PRESET_CATALOG).id).toBe('vercel-google') + }) + + it('Profile with no preset field → falls back via backend', () => { + const profile: ResolvedProfile = { backend: 'agent-sdk', model: 'm', loginMethod: 'claudeai' } + const decl = resolveTestAdapter(profile, 
PRESET_CATALOG) + expect(decl.id).toBe('agent-sdk') + expect(decl.config({ vendor: 'anthropic', authType: 'subscription' })).toEqual({ + apiKey: undefined, baseUrl: undefined, loginMethod: 'claudeai', + }) + }) +}) + +// ==================== invokeAdapter end-to-end ==================== + +describe('invokeAdapter (vercel-* invokers wire correctly)', () => { + it('vercel-anthropic invoker calls createAnthropic + generateText with the right args', async () => { + const decl: SdkAdapterDeclaration = { + id: 'vercel-anthropic', + config: (c) => ({ apiKey: c.apiKey, baseURL: c.baseUrl }), + } + const cred: Credential = { vendor: 'deepseek', authType: 'api-key', apiKey: 'sk-deep', baseUrl: 'https://api.deepseek.com/anthropic' } + + const result = await invokeAdapter(decl, cred, 'deepseek-v4-pro', 'Hi', { providers: {} }) + + expect(result.text).toBe('mock response') + const { createAnthropic } = await import('@ai-sdk/anthropic') + expect(createAnthropic).toHaveBeenCalledWith({ + apiKey: 'sk-deep', + baseURL: 'https://api.deepseek.com/anthropic', + }) + expect(mockAnthropicClient).toHaveBeenCalledWith('deepseek-v4-pro') + expect(mockGenerateText).toHaveBeenCalledWith({ + model: 'anthropic-model-instance', + prompt: 'Hi', + }) + }) + + it('vercel-openai invoker calls createOpenAI', async () => { + const decl: SdkAdapterDeclaration = { + id: 'vercel-openai', + config: (c) => ({ apiKey: c.apiKey, baseURL: c.baseUrl }), + } + const cred: Credential = { vendor: 'openai', authType: 'api-key', apiKey: 'sk-oa' } + + await invokeAdapter(decl, cred, 'gpt-5.4', 'Hi', { providers: {} }) + + const { createOpenAI } = await import('@ai-sdk/openai') + expect(createOpenAI).toHaveBeenCalledWith({ apiKey: 'sk-oa', baseURL: undefined }) + }) + + it('vercel-google invoker calls createGoogleGenerativeAI', async () => { + const decl: SdkAdapterDeclaration = { + id: 'vercel-google', + config: (c) => ({ apiKey: c.apiKey, baseURL: c.baseUrl }), + } + const cred: Credential = { vendor: 'google', 
authType: 'api-key', apiKey: 'sk-google' } + + await invokeAdapter(decl, cred, 'gemini-2.5-flash', 'Hi', { providers: {} }) + + const { createGoogleGenerativeAI } = await import('@ai-sdk/google') + expect(createGoogleGenerativeAI).toHaveBeenCalledWith({ apiKey: 'sk-google', baseURL: undefined }) + }) + + it('agent-sdk invoker delegates to deps.providers["agent-sdk"].ask', async () => { + const mockAsk = vi.fn().mockResolvedValue({ text: 'agent-sdk reply', media: [] }) + const fakeProvider = { ask: mockAsk } as never + + const decl: SdkAdapterDeclaration = { + id: 'agent-sdk', + config: () => ({ loginMethod: 'claudeai' }), + } + const cred: Credential = { vendor: 'anthropic', authType: 'subscription' } + + const result = await invokeAdapter(decl, cred, 'claude-opus-4-7', 'Hi', { + providers: { 'agent-sdk': fakeProvider }, + }) + + expect(result.text).toBe('agent-sdk reply') + expect(mockAsk).toHaveBeenCalledWith('Hi', expect.objectContaining({ + backend: 'agent-sdk', + model: 'claude-opus-4-7', + loginMethod: 'claudeai', + })) + }) + + it('agent-sdk invoker throws when provider not registered', async () => { + const decl: SdkAdapterDeclaration = { + id: 'agent-sdk', + config: () => ({ loginMethod: 'api-key' }), + } + await expect(invokeAdapter(decl, { vendor: 'anthropic', authType: 'api-key' }, 'm', 'Hi', { providers: {} })) + .rejects.toThrow(/agent-sdk provider not registered/) + }) + + it('codex invoker delegates to deps.providers["codex"].ask', async () => { + const mockAsk = vi.fn().mockResolvedValue({ text: 'codex reply', media: [] }) + const decl: SdkAdapterDeclaration = { + id: 'codex', + config: () => ({ loginMethod: 'codex-oauth' }), + } + await invokeAdapter(decl, { vendor: 'openai', authType: 'subscription' }, 'gpt-5.4', 'Hi', { + providers: { 'codex': { ask: mockAsk } as never }, + }) + expect(mockAsk).toHaveBeenCalledWith('Hi', expect.objectContaining({ backend: 'codex' })) + }) +}) + +// ==================== SDK_INVOKERS surface ==================== + 
+describe('SDK_INVOKERS registry', () => { + it('has all five adapter ids', () => { + expect(Object.keys(SDK_INVOKERS).sort()).toEqual([ + 'agent-sdk', 'codex', 'vercel-anthropic', 'vercel-google', 'vercel-openai', + ]) + }) +}) + +// ==================== getSdkAdapterInfo ==================== + +describe('getSdkAdapterInfo', () => { + const info = getSdkAdapterInfo() + + it('returns one entry per adapter id', () => { + expect(info.length).toBe(5) + expect(info.map(a => a.id).sort()).toEqual([ + 'agent-sdk', 'codex', 'vercel-anthropic', 'vercel-google', 'vercel-openai', + ]) + }) + + it('label and description match SDK_ADAPTER_LABELS', () => { + for (const a of info) { + expect(a.label).toBe(SDK_ADAPTER_LABELS[a.id].label) + expect(a.description).toBe(SDK_ADAPTER_LABELS[a.id].description) + } + }) + + it('agent-sdk lists every preset that registers it as available', () => { + const agentSdk = info.find(a => a.id === 'agent-sdk')! + const expectedPresetIds = PRESET_CATALOG + .filter(p => p.sdkAdapters?.available.some(decl => decl.id === 'agent-sdk')) + .map(p => p.id) + expect(agentSdk.presets.map(p => p.presetId).sort()).toEqual(expectedPresetIds.sort()) + }) + + it('marks isTestDefault correctly per preset', () => { + const vercelAnthropic = info.find(a => a.id === 'vercel-anthropic')! + // Find preset where vercel-anthropic is the test default + const deepseek = vercelAnthropic.presets.find(p => p.presetId === 'deepseek') + expect(deepseek?.isTestDefault).toBe(true) + }) + + it('Custom preset (no sdkAdapters) is excluded from all adapter lists', () => { + for (const a of info) { + expect(a.presets.find(p => p.presetId === 'custom')).toBeUndefined() + } + }) +}) diff --git a/src/ai-providers/sdk-adapters.ts b/src/ai-providers/sdk-adapters.ts new file mode 100644 index 00000000..13a11c5d --- /dev/null +++ b/src/ai-providers/sdk-adapters.ts @@ -0,0 +1,266 @@ +/** + * SDK adapters — credential / implementation separation. 
+ * + * The preset is the registry: each preset declares the list of SDK + * adapters its credential can drive, along with a builder per adapter + * that maps the credential into the SDK's standardized config shape. + * + * This avoids reverse-lookup ("provider, please dig fields out of the + * credential record") — different SDKs have different field names + * (vercel uses `baseURL`, agent-sdk uses `baseUrl`), so a uniform + * "credential.X → sdk.Y" mapping doesn't exist. The preset spells it + * out explicitly per adapter. + * + * Test path: each preset declares a `test` adapter id. The runtime + * looks up the matching declaration, runs the config builder against + * the credential, and dispatches to SDK_INVOKERS[id] with the typed + * config. + * + * vercel-* invokers are bare-minimum: no tools, no system prompt, no + * media. agent-sdk and codex invokers delegate to existing providers + * (subscription auth physically requires the heavy harness). + */ + +import type { Credential, ResolvedProfile } from '../core/config.js' +import type { AIProvider, ProviderResult } from './types.js' +import { PRESET_CATALOG } from './preset-catalog.js' + +// ==================== Adapter ids and typed configs ==================== + +export type SdkAdapterId = + | 'agent-sdk' + | 'codex' + | 'vercel-anthropic' + | 'vercel-openai' + | 'vercel-google' + +/** Display labels + one-line descriptions, surfaced in the AI Provider page. 
*/ +export const SDK_ADAPTER_LABELS: Record = { + 'agent-sdk': { + label: 'Claude Agent SDK', + description: 'Heavy subprocess; required for Claude Pro/Max subscription auth.', + }, + 'codex': { + label: 'Codex (OpenAI Responses)', + description: 'OpenAI Responses API via official SDK; required for ChatGPT subscription.', + }, + 'vercel-anthropic': { + label: 'Vercel Anthropic', + description: 'Lightweight HTTP via @ai-sdk/anthropic — direct Messages API call.', + }, + 'vercel-openai': { + label: 'Vercel OpenAI', + description: 'Lightweight HTTP via @ai-sdk/openai — direct Chat Completions call.', + }, + 'vercel-google': { + label: 'Vercel Google', + description: 'Lightweight HTTP via @ai-sdk/google — Gemini API.', + }, +} + +/** Endpoint payload shape for GET /api/config/sdk-adapters. */ +export interface SdkAdapterInfo { + id: SdkAdapterId + label: string + description: string + /** Presets that register this adapter as available, in catalog order. */ + presets: Array<{ presetId: string; presetLabel: string; isTestDefault: boolean }> +} + +/** Compute the SDK adapter info list — used by both the route handler and tests. */ +export function getSdkAdapterInfo(): SdkAdapterInfo[] { + const ids: SdkAdapterId[] = ['agent-sdk', 'codex', 'vercel-anthropic', 'vercel-openai', 'vercel-google'] + return ids.map((id) => ({ + id, + label: SDK_ADAPTER_LABELS[id].label, + description: SDK_ADAPTER_LABELS[id].description, + presets: PRESET_CATALOG.flatMap((preset) => { + if (!preset.sdkAdapters) return [] + const isAvailable = preset.sdkAdapters.available.some((a) => a.id === id) + if (!isAvailable) return [] + return [{ + presetId: preset.id, + presetLabel: preset.label, + isTestDefault: preset.sdkAdapters.test === id, + }] + }), + })) +} + +/** + * SDK config shape per adapter — field names match each SDK's own + * standard. Don't normalize; pass through as-is. 
+ */ +export interface SdkConfigByAdapter { + 'agent-sdk': { apiKey?: string; baseUrl?: string; loginMethod: 'api-key' | 'claudeai' } + 'codex': { apiKey?: string; baseUrl?: string; loginMethod: 'api-key' | 'codex-oauth' } + 'vercel-anthropic': { apiKey?: string; baseURL?: string } + 'vercel-openai': { apiKey?: string; baseURL?: string } + 'vercel-google': { apiKey?: string; baseURL?: string } +} + +/** Discriminated union: narrowing on `id` types `config`'s return automatically. */ +export type SdkAdapterDeclaration = { + [K in SdkAdapterId]: { + id: K + /** Map a credential into this SDK's typed config. */ + config: (cred: Credential) => SdkConfigByAdapter[K] + } +}[SdkAdapterId] + +// ==================== Invoker registry ==================== + +export interface SdkInvokerDeps { + /** Map of registered AIProvider instances keyed by backend. Heavy + * invokers (agent-sdk, codex) delegate via this. */ + providers: Record +} + +export interface SdkInvoker { + invoke( + config: SdkConfigByAdapter[K], + model: string, + prompt: string, + deps: SdkInvokerDeps, + ): Promise +} + +export const SDK_INVOKERS: { [K in SdkAdapterId]: SdkInvoker } = { + 'vercel-anthropic': { + async invoke(config, model, prompt) { + const { createAnthropic } = await import('@ai-sdk/anthropic') + const { generateText } = await import('ai') + const client = createAnthropic({ + apiKey: config.apiKey, + baseURL: config.baseURL || undefined, + }) + const result = await generateText({ model: client(model), prompt }) + return { text: result.text ?? '', media: [] } + }, + }, + + 'vercel-openai': { + async invoke(config, model, prompt) { + const { createOpenAI } = await import('@ai-sdk/openai') + const { generateText } = await import('ai') + const client = createOpenAI({ + apiKey: config.apiKey, + baseURL: config.baseURL || undefined, + }) + const result = await generateText({ model: client(model), prompt }) + return { text: result.text ?? 
'', media: [] } + }, + }, + + 'vercel-google': { + async invoke(config, model, prompt) { + const { createGoogleGenerativeAI } = await import('@ai-sdk/google') + const { generateText } = await import('ai') + const client = createGoogleGenerativeAI({ + apiKey: config.apiKey, + baseURL: config.baseURL || undefined, + }) + const result = await generateText({ model: client(model), prompt }) + return { text: result.text ?? '', media: [] } + }, + }, + + 'agent-sdk': { + async invoke(config, model, prompt, deps) { + const provider = deps.providers['agent-sdk'] + if (!provider) throw new Error('agent-sdk provider not registered') + return provider.ask(prompt, { + backend: 'agent-sdk', + model, + apiKey: config.apiKey, + baseUrl: config.baseUrl, + loginMethod: config.loginMethod, + }) + }, + }, + + 'codex': { + async invoke(config, model, prompt, deps) { + const provider = deps.providers['codex'] + if (!provider) throw new Error('codex provider not registered') + return provider.ask(prompt, { + backend: 'codex', + model, + apiKey: config.apiKey, + baseUrl: config.baseUrl, + loginMethod: config.loginMethod, + }) + }, + }, +} + +// ==================== Test-path resolver ==================== + +/** + * Given a profile and the preset catalog, return the adapter + * declaration to use for the test path. + * + * Preset path (built-in presets): use the preset's declared `test` + * adapter. + * + * Fallback path (Custom preset, or legacy profiles without preset): + * synthesize a declaration from `profile.backend` + `profile.provider`, + * mapping the credential's inline fields directly. This preserves + * current behavior for users who configured Custom + agent-sdk, etc. 
+ */ +export function resolveTestAdapter( + profile: ResolvedProfile, + presets: Array<{ id: string; sdkAdapters?: { available: SdkAdapterDeclaration[]; test: SdkAdapterId } }>, +): SdkAdapterDeclaration { + if (profile.preset) { + const preset = presets.find((p) => p.id === profile.preset) + if (preset?.sdkAdapters) { + const found = preset.sdkAdapters.available.find((a) => a.id === preset.sdkAdapters!.test) + if (found) return found + } + } + + // Fallback synthesis — honor the profile's own backend/provider + if (profile.backend === 'agent-sdk') { + const lm = (profile.loginMethod as 'api-key' | 'claudeai' | undefined) ?? 'api-key' + return { + id: 'agent-sdk', + config: (c) => ({ apiKey: c.apiKey, baseUrl: c.baseUrl, loginMethod: lm }), + } + } + if (profile.backend === 'codex') { + const lm = (profile.loginMethod as 'api-key' | 'codex-oauth' | undefined) ?? 'codex-oauth' + return { + id: 'codex', + config: (c) => ({ apiKey: c.apiKey, baseUrl: c.baseUrl, loginMethod: lm }), + } + } + // vercel-ai-sdk fallback — picks vercel-* adapter from profile.provider + if (profile.provider === 'openai') { + return { id: 'vercel-openai', config: (c) => ({ apiKey: c.apiKey, baseURL: c.baseUrl }) } + } + if (profile.provider === 'google') { + return { id: 'vercel-google', config: (c) => ({ apiKey: c.apiKey, baseURL: c.baseUrl }) } + } + return { id: 'vercel-anthropic', config: (c) => ({ apiKey: c.apiKey, baseURL: c.baseUrl }) } +} + +/** + * Run an adapter declaration end-to-end: build the SDK config from + * the credential, then dispatch to the invoker. + * + * Use of `as never` here is the standard discriminated-union TS dance — + * at runtime, decl.id matches the invoker's expected config type, but + * the compiler can't narrow the cross-product. Tests verify dispatch. 
+ */ +export async function invokeAdapter( + decl: SdkAdapterDeclaration, + credential: Credential, + model: string, + prompt: string, + deps: SdkInvokerDeps, +): Promise { + const config = decl.config(credential) + const invoker = SDK_INVOKERS[decl.id] + return (invoker as SdkInvoker).invoke(config as never, model, prompt, deps) +} diff --git a/src/core/agent-center.ts b/src/core/agent-center.ts index 081b6d8b..e132a605 100644 --- a/src/core/agent-center.ts +++ b/src/core/agent-center.ts @@ -15,6 +15,8 @@ import type { AskOptions, ProviderResult, ProviderEvent, GenerateOpts } from './ai-provider-manager.js' import type { ResolvedProfile } from './config.js' import { GenerateRouter, StreamableResult } from './ai-provider-manager.js' +import { resolveProfile, resolveCredential } from './config.js' +import { profileToCredential } from './credential-inference.js' import type { ISessionStore, ContentBlock } from './session.js' import type { CompactionConfig } from './compaction.js' import { compactIfNeeded } from './compaction.js' @@ -59,14 +61,28 @@ export class AgentCenter { return this.router.ask(prompt) } - /** Test a saved profile by sending a prompt to its provider. */ + /** + * Test a saved profile by sending a prompt via the preset's declared + * test adapter (lightest available SDK that can drive the credential). + * Uses the stored credential when the profile carries `credentialSlug`, + * otherwise synthesizes one from the profile's inline fields. + */ async testProfile(profileSlug: string, prompt = 'Hi'): Promise { - return this.router.askWithProfileSlug(prompt, profileSlug) + const profile = await resolveProfile(profileSlug) + const credential = profile.credentialSlug + ? await resolveCredential(profile.credentialSlug) + : profileToCredential(profile) + return this.router.askForTest(prompt, profile, credential) } - /** Test an unsaved profile (inline data). Used for pre-save connection testing. 
*/ + /** + * Test an unsaved profile (inline data from the wizard). Synthesizes + * a credential from the profile's inline fields and routes through + * the preset's declared test adapter. + */ async testWithProfile(profile: ResolvedProfile, prompt = 'Hi'): Promise { - return this.router.askWithProfile(prompt, profile) + const credential = profileToCredential(profile) + return this.router.askForTest(prompt, profile, credential) } /** Prompt with session history — full orchestration pipeline. */ diff --git a/src/core/ai-provider-manager.ts b/src/core/ai-provider-manager.ts index 85908033..a3bdaeda 100644 --- a/src/core/ai-provider-manager.ts +++ b/src/core/ai-provider-manager.ts @@ -7,8 +7,10 @@ */ import { resolveProfile } from './config.js' -import type { ResolvedProfile } from './config.js' +import type { Credential, ResolvedProfile } from './config.js' import type { ProviderEvent, ProviderResult, AIProvider } from '../ai-providers/types.js' +import { invokeAdapter, resolveTestAdapter } from '../ai-providers/sdk-adapters.js' +import { PRESET_CATALOG } from '../ai-providers/preset-catalog.js' export type { ProviderEvent, ProviderResult, AIProvider, @@ -98,7 +100,9 @@ export interface AskOptions { /** Resolves profile → AIProvider instance + resolved config. */ export class GenerateRouter { - private providers: Record + /** Public so SDK adapter invokers can delegate to the heavy providers + * (agent-sdk / codex) without re-wiring. Treated as readonly by callers. */ + public readonly providers: Record constructor( vercel: AIProvider, @@ -136,4 +140,22 @@ export class GenerateRouter { if (!provider) throw new Error(`No provider registered for backend: ${profile.backend}`) return provider.ask(prompt, profile) } + + /** + * Test-path entry — preset-driven SDK adapter selection. 
+ * + * Looks up the profile's preset in PRESET_CATALOG, picks the + * declared `test` adapter, builds the SDK config from the credential + * via the preset's mapping function, and dispatches to the matching + * invoker. Falls back to honoring `profile.backend` when no preset + * is registered (Custom or legacy data). + */ + async askForTest( + prompt: string, + profile: ResolvedProfile, + credential: Credential, + ): Promise { + const decl = resolveTestAdapter(profile, PRESET_CATALOG) + return invokeAdapter(decl, credential, profile.model, prompt, { providers: this.providers }) + } } diff --git a/src/core/config.spec.ts b/src/core/config.spec.ts index 176b8ced..04025b59 100644 --- a/src/core/config.spec.ts +++ b/src/core/config.spec.ts @@ -27,6 +27,12 @@ import { writeUTAsConfig, aiProviderSchema, profileSchema, + resolveProfile, + resolveCredential, + deleteCredential, + credentialSchema, + extractCredentialFromProfile, + type Profile, } from './config.js' const mockReadFile = vi.mocked(readFile) @@ -323,4 +329,188 @@ describe('profileSchema', () => { it('rejects unknown backend', () => { expect(() => profileSchema.parse({ backend: 'unknown', label: 'X', model: 'y' })).toThrow() }) + + it('accepts credentialSlug', () => { + const result = profileSchema.parse({ + backend: 'agent-sdk', model: 'claude-opus-4-7', loginMethod: 'api-key', + credentialSlug: 'anthropic-1', + }) + if (result.backend === 'agent-sdk') { + expect(result.credentialSlug).toBe('anthropic-1') + } + }) +}) + +// ==================== credentialSchema ==================== + +describe('credentialSchema', () => { + it('validates api-key credential', () => { + const result = credentialSchema.parse({ vendor: 'anthropic', authType: 'api-key', apiKey: 'sk-x' }) + expect(result.vendor).toBe('anthropic') + expect(result.authType).toBe('api-key') + }) + + it('validates subscription credential without apiKey', () => { + const result = credentialSchema.parse({ vendor: 'anthropic', authType: 'subscription' }) 
+ expect(result.apiKey).toBeUndefined() + }) + + it('rejects unknown vendor', () => { + expect(() => credentialSchema.parse({ vendor: 'fake', authType: 'api-key' })).toThrow() + }) +}) + +// ==================== resolveProfile (with credential join) ==================== + +describe('resolveProfile', () => { + it('returns inline shape when credentialSlug is absent', async () => { + fileReturns({ + profiles: { default: { backend: 'agent-sdk', model: 'claude-opus-4-7', loginMethod: 'api-key', apiKey: 'inline-key' } }, + activeProfile: 'default', + }) + const r = await resolveProfile() + expect(r.apiKey).toBe('inline-key') + expect(r.baseUrl).toBeUndefined() + }) + + it('joins credential when credentialSlug is set and inline is absent', async () => { + fileReturns({ + credentials: { 'anthropic-1': { vendor: 'anthropic', authType: 'api-key', apiKey: 'cred-key', baseUrl: 'https://api.example/' } }, + profiles: { default: { backend: 'agent-sdk', model: 'm', loginMethod: 'api-key', credentialSlug: 'anthropic-1' } }, + activeProfile: 'default', + }) + const r = await resolveProfile() + expect(r.apiKey).toBe('cred-key') + expect(r.baseUrl).toBe('https://api.example/') + }) + + it('inline value wins over credential value (transitional fallback semantics)', async () => { + fileReturns({ + credentials: { 'anthropic-1': { vendor: 'anthropic', authType: 'api-key', apiKey: 'cred-key' } }, + profiles: { + default: { + backend: 'agent-sdk', model: 'm', loginMethod: 'api-key', + apiKey: 'inline-wins', + credentialSlug: 'anthropic-1', + }, + }, + activeProfile: 'default', + }) + const r = await resolveProfile() + expect(r.apiKey).toBe('inline-wins') + }) + + it('throws when credentialSlug references missing credential', async () => { + fileReturns({ + credentials: {}, + profiles: { default: { backend: 'agent-sdk', model: 'm', loginMethod: 'api-key', credentialSlug: 'ghost' } }, + activeProfile: 'default', + }) + await expect(resolveProfile()).rejects.toThrow(/missing credential/) + }) 
+}) + +// ==================== resolveCredential / deleteCredential ==================== + +describe('resolveCredential', () => { + it('returns the credential by slug', async () => { + fileReturns({ + credentials: { 'openai-1': { vendor: 'openai', authType: 'api-key', apiKey: 'sk-oa' } }, + profiles: { default: { backend: 'agent-sdk', model: 'm', loginMethod: 'claudeai' } }, + activeProfile: 'default', + }) + const c = await resolveCredential('openai-1') + expect(c.vendor).toBe('openai') + expect(c.apiKey).toBe('sk-oa') + }) + + it('throws when slug is unknown', async () => { + fileReturns({ + credentials: {}, + profiles: { default: { backend: 'agent-sdk', model: 'm', loginMethod: 'claudeai' } }, + activeProfile: 'default', + }) + await expect(resolveCredential('nope')).rejects.toThrow(/Unknown credential/) + }) +}) + +describe('deleteCredential', () => { + it('errors when a profile still references the credential', async () => { + fileReturns({ + credentials: { 'anthropic-1': { vendor: 'anthropic', authType: 'api-key', apiKey: 'k' } }, + profiles: { + default: { backend: 'agent-sdk', model: 'm', loginMethod: 'api-key', credentialSlug: 'anthropic-1' }, + }, + activeProfile: 'default', + }) + await expect(deleteCredential('anthropic-1')).rejects.toThrow(/referenced by profile/) + }) + + it('deletes when no profile references it', async () => { + fileReturns({ + credentials: { 'orphan-1': { vendor: 'openai', authType: 'api-key', apiKey: 'k' } }, + profiles: { default: { backend: 'agent-sdk', model: 'm', loginMethod: 'claudeai' } }, + activeProfile: 'default', + }) + await expect(deleteCredential('orphan-1')).resolves.toBeUndefined() + expect(mockWriteFile).toHaveBeenCalled() + }) +}) + +// ==================== extractCredentialFromProfile ==================== + +describe('extractCredentialFromProfile', () => { + it('passes through profile when credentialSlug already set', () => { + const profile = { backend: 'agent-sdk', model: 'm', loginMethod: 'api-key', apiKey: 
'k', credentialSlug: 'existing' } as Profile + const out = extractCredentialFromProfile(profile, {}) + expect(out.profile).toBe(profile) + expect(out.credentials).toEqual({}) + }) + + it('passes through profile when nothing extractable (no apiKey, not subscription)', () => { + const profile = { backend: 'agent-sdk', model: 'm', loginMethod: 'api-key' } as Profile + const out = extractCredentialFromProfile(profile, {}) + expect(out.profile.credentialSlug).toBeUndefined() + expect(out.credentials).toEqual({}) + }) + + it('creates a new credential and links via slug when no match exists', () => { + const profile = { + backend: 'agent-sdk', model: 'm', loginMethod: 'api-key', + apiKey: 'sk-deep', baseUrl: 'https://api.deepseek.com/anthropic', + } as Profile + const out = extractCredentialFromProfile(profile, {}) + expect(out.profile.credentialSlug).toBe('deepseek-1') + expect(out.credentials['deepseek-1']).toEqual({ + vendor: 'deepseek', + authType: 'api-key', + apiKey: 'sk-deep', + baseUrl: 'https://api.deepseek.com/anthropic', + }) + }) + + it('reuses existing credential slug when fields match (dedup)', () => { + const existing = { + 'deepseek-1': { vendor: 'deepseek' as const, authType: 'api-key' as const, apiKey: 'sk-d', baseUrl: 'https://api.deepseek.com/anthropic' }, + } + const profile = { + backend: 'agent-sdk', model: 'm', loginMethod: 'api-key', + apiKey: 'sk-d', baseUrl: 'https://api.deepseek.com/anthropic', + } as Profile + const out = extractCredentialFromProfile(profile, existing) + expect(out.profile.credentialSlug).toBe('deepseek-1') + expect(out.credentials).toBe(existing) // reference equality — no new entry + }) + + it('generates next available slug when vendor matches but fields differ', () => { + const existing = { + 'anthropic-1': { vendor: 'anthropic' as const, authType: 'api-key' as const, apiKey: 'sk-1' }, + } + const profile = { + backend: 'agent-sdk', model: 'm', loginMethod: 'api-key', apiKey: 'sk-2', + } as Profile + const out = 
extractCredentialFromProfile(profile, existing) + expect(out.profile.credentialSlug).toBe('anthropic-2') + expect(out.credentials['anthropic-2'].apiKey).toBe('sk-2') + }) }) diff --git a/src/core/config.ts b/src/core/config.ts index b713e05f..ef17de55 100644 --- a/src/core/config.ts +++ b/src/core/config.ts @@ -2,6 +2,12 @@ import { z } from 'zod' import { readFile, writeFile, mkdir, unlink, rm } from 'fs/promises' import { resolve } from 'path' import { newsCollectorSchema } from '../domain/news/config.js' +import { runMigrations } from '../migrations/runner.js' +import { + inferVendor as inferVendorFromProfile, + inferAuthType as inferAuthTypeFromProfile, + hasExtractableCredential, +} from './credential-inference.js' const CONFIG_DIR = resolve('data/config') @@ -41,11 +47,38 @@ const apiKeysSchema = z.object({ google: z.string().optional(), }) +// ==================== Credential layer (introduced by 0002) ==================== + +export const credentialVendorEnum = z.enum([ + 'anthropic', 'openai', 'google', + 'minimax', 'glm', 'kimi', 'deepseek', 'custom', +]) +export type CredentialVendor = z.infer + +export const credentialAuthTypeEnum = z.enum(['api-key', 'subscription']) +export type CredentialAuthType = z.infer + +export const credentialSchema = z.object({ + vendor: credentialVendorEnum, + authType: credentialAuthTypeEnum, + /** Present for api-key credentials; absent for subscription credentials. */ + apiKey: z.string().optional(), + /** Optional region / custom endpoint. */ + baseUrl: z.string().optional(), +}) +export type Credential = z.infer + const baseProfileFields = { /** Preset ID this profile was created from (for constraint enforcement on edit). */ preset: z.string().optional(), baseUrl: z.string().optional(), apiKey: z.string().optional(), + /** + * Pointer into aiProviderSchema.credentials. 
When present, resolveProfile() + * joins the credential's apiKey/baseUrl into the resolved shape (profile's + * own inline values still win if set — transitional). + */ + credentialSlug: z.string().optional(), } export const agentSdkProfileSchema = z.object({ @@ -77,6 +110,12 @@ export type Profile = z.infer export const aiProviderSchema = z.object({ apiKeys: apiKeysSchema.default({}), + /** + * Credentials by slug — extracted from profiles by 0002_extract_credentials. + * Profile's `credentialSlug` points here. Inline credential fields on the + * profile remain as transitional fallback. + */ + credentials: z.record(z.string(), credentialSchema).default({}), profiles: z.record( z.string(), profileSchema, @@ -353,159 +392,14 @@ async function parseAndSeed(filename: string, schema: z.ZodType, raw: unkn } export async function loadConfig(): Promise { + // Run pending migrations before reading any section. Each migration is + // recorded in data/config/_meta.json; the runner is a no-op when nothing + // is pending. See src/migrations/INDEX.md for the full list. 
+ await runMigrations() + const files = ['engine.json', 'agent.json', 'crypto.json', 'securities.json', 'market-data.json', 'compaction.json', 'ai-provider-manager.json', 'heartbeat.json', 'snapshot.json', 'connectors.json', 'news.json', 'tools.json', 'webhook.json'] as const const raws = await Promise.all(files.map((f) => loadJsonFile(f))) - // TODO: remove all migration blocks before v1.0 — no stable release yet, breaking changes are fine - // ---------- Migration: flat ai-provider config → profile-based ---------- - const aiProviderRaw = raws[6] as Record | undefined - if (aiProviderRaw && 'backend' in aiProviderRaw && !('profiles' in aiProviderRaw)) { - // Legacy flat format detected — convert to profile-based - - // Step 1: handle very old format (model.json + api-keys.json) - if (!('model' in aiProviderRaw)) { - const oldModel = await loadJsonFile('model.json') as Record | undefined - const oldKeys = await loadJsonFile('api-keys.json') as Record | undefined - if (oldModel) Object.assign(aiProviderRaw, { provider: oldModel.provider, model: oldModel.model, ...(oldModel.baseUrl ? { baseUrl: oldModel.baseUrl } : {}) }) - if (oldKeys) aiProviderRaw.apiKeys = oldKeys - await removeJsonFile('model.json') - await removeJsonFile('api-keys.json') - } - - // Step 2: handle claude-code → agent-sdk alias - if (aiProviderRaw.backend === 'claude-code') { - aiProviderRaw.backend = 'agent-sdk' - aiProviderRaw.loginMethod = aiProviderRaw.loginMethod ?? 'claudeai' - } - - // Step 3: build default profile from flat config - const legacy = aiProviderLegacySchema.parse(aiProviderRaw) - const defaultProfile: Record = { label: 'Default' } - if (legacy.backend === 'agent-sdk') { - defaultProfile.backend = 'agent-sdk' - defaultProfile.model = legacy.model - defaultProfile.loginMethod = legacy.loginMethod === 'codex-oauth' ? 
'api-key' : legacy.loginMethod - } else if (legacy.backend === 'codex') { - defaultProfile.backend = 'codex' - defaultProfile.model = legacy.model - defaultProfile.loginMethod = legacy.loginMethod === 'claudeai' ? 'codex-oauth' : legacy.loginMethod - } else { - defaultProfile.backend = 'vercel-ai-sdk' - defaultProfile.provider = legacy.provider - defaultProfile.model = legacy.model - } - if (legacy.baseUrl) defaultProfile.baseUrl = legacy.baseUrl - - // Step 4: migrate subchannel inline overrides → named profiles - const oldSubchannels = await loadJsonFile('web-subchannels.json') as Array> | undefined - const profiles: Record = { default: defaultProfile } - const newSubchannels: Array> = [] - - if (oldSubchannels) { - for (const ch of oldSubchannels) { - const sub: Record = { id: ch.id, label: ch.label } - if (ch.systemPrompt) sub.systemPrompt = ch.systemPrompt - if (ch.disabledTools) sub.disabledTools = ch.disabledTools - - const provider = ch.provider as string | undefined - const override = provider === 'vercel-ai-sdk' ? ch.vercelAiSdk - : provider === 'agent-sdk' ? ch.agentSdk - : provider === 'codex' ? 
ch.codex - : undefined - - if (provider && override) { - const slug = `${ch.id}-${provider}` - profiles[slug] = { backend: provider, label: `${ch.label}`, ...(override as object) } - sub.profile = slug - } else if (provider) { - // Provider set but no override — create a profile with just the backend - const slug = `${ch.id}-${provider}` - profiles[slug] = { ...defaultProfile, backend: provider, label: `${ch.label}` } - sub.profile = slug - } - - newSubchannels.push(sub) - } - await writeFile(resolve(CONFIG_DIR, 'web-subchannels.json'), JSON.stringify(newSubchannels, null, 2) + '\n') - } - - // Step 5: write new format - const migrated = { apiKeys: legacy.apiKeys, profiles, activeProfile: 'default' } - raws[6] = migrated - await mkdir(CONFIG_DIR, { recursive: true }) - await writeFile(resolve(CONFIG_DIR, 'ai-provider-manager.json'), JSON.stringify(migrated, null, 2) + '\n') - } else if (aiProviderRaw && !('backend' in aiProviderRaw) && !('profiles' in aiProviderRaw)) { - // Very old format (no backend, no profiles) — handle model.json merge first - const oldModel = await loadJsonFile('model.json') as Record | undefined - const oldKeys = await loadJsonFile('api-keys.json') as Record | undefined - const migrated = { - apiKeys: oldKeys ?? {}, - profiles: { - default: { - backend: 'agent-sdk', - label: 'Default', - model: (oldModel?.model as string) ?? 'claude-opus-4-7', - loginMethod: 'claudeai', - provider: (oldModel?.provider as string) ?? 
'anthropic', - }, - }, - activeProfile: 'default', - } - raws[6] = migrated - await mkdir(CONFIG_DIR, { recursive: true }) - await writeFile(resolve(CONFIG_DIR, 'ai-provider-manager.json'), JSON.stringify(migrated, null, 2) + '\n') - await removeJsonFile('model.json') - await removeJsonFile('api-keys.json') - } - - // ---------- Migration: distribute global apiKeys into profiles ---------- - const aiConfigAfterMigration = raws[6] as Record | undefined - if (aiConfigAfterMigration && 'apiKeys' in aiConfigAfterMigration && 'profiles' in aiConfigAfterMigration) { - const keys = aiConfigAfterMigration.apiKeys as Record | undefined - const profiles = aiConfigAfterMigration.profiles as Record> - if (keys && Object.values(keys).some(Boolean)) { - let changed = false - for (const profile of Object.values(profiles)) { - if (profile.apiKey) continue // already has a key, don't overwrite - const vendor = profile.backend === 'codex' ? 'openai' - : profile.backend === 'agent-sdk' ? 'anthropic' - : (profile.provider as string) ?? 
'anthropic' - const globalKey = keys[vendor] - if (globalKey) { - profile.apiKey = globalKey - changed = true - } - } - if (changed) { - delete aiConfigAfterMigration.apiKeys - raws[6] = aiConfigAfterMigration - await mkdir(CONFIG_DIR, { recursive: true }) - await writeFile(resolve(CONFIG_DIR, 'ai-provider-manager.json'), JSON.stringify(aiConfigAfterMigration, null, 2) + '\n') - } - } - } - - // ---------- Migration: consolidate old telegram.json + engine port fields ---------- - const connectorsRaw = raws[9] as Record | undefined - if (connectorsRaw === undefined) { - const oldTelegram = await loadJsonFile('telegram.json') - const oldEngine = raws[0] as Record | undefined - const migrated: Record = {} - if (oldTelegram && typeof oldTelegram === 'object') { - migrated.telegram = { ...(oldTelegram as Record), enabled: true } - } - if (oldEngine) { - if (oldEngine.webPort !== undefined) migrated.web = { port: oldEngine.webPort } - if (oldEngine.mcpPort !== undefined) migrated.mcp = { port: oldEngine.mcpPort } - if (oldEngine.askMcpPort !== undefined) migrated.mcpAsk = { enabled: true, port: oldEngine.askMcpPort } - const { mcpPort: _m, askMcpPort: _a, webPort: _w, ...cleanEngine } = oldEngine - raws[0] = cleanEngine - await mkdir(CONFIG_DIR, { recursive: true }) - await writeFile(resolve(CONFIG_DIR, 'engine.json'), JSON.stringify(cleanEngine, null, 2) + '\n') - } - raws[9] = Object.keys(migrated).length > 0 ? migrated : undefined - } - return { engine: await parseAndSeed(files[0], engineSchema, raws[0]), agent: await parseAndSeed(files[1], agentSchema, raws[1]), @@ -797,17 +691,81 @@ export interface ResolvedProfile { baseUrl?: string loginMethod?: string provider?: string + /** Pointer into AIProviderConfig.credentials. Preserved on the resolved + * shape so callers can fetch the credential separately when needed. */ + credentialSlug?: string } -/** Resolve a profile by slug. API key comes from the profile directly. */ +/** + * Resolve a profile by slug. 
When the profile carries a `credentialSlug`, + * the referenced credential's apiKey/baseUrl are joined into the resolved + * shape — but profile-level inline values still win when present, so the + * 0002 migration can safely leave inline fields in place as transitional + * fallback. The returned `ResolvedProfile` shape is unchanged. + */ export async function resolveProfile(slug?: string): Promise { const config = await readAIProviderConfig() const key = slug ?? config.activeProfile const profile = config.profiles[key] if (!profile) throw new Error(`Unknown AI provider profile: "${key}"`) + + if (profile.credentialSlug) { + const cred = config.credentials[profile.credentialSlug] + if (!cred) { + throw new Error( + `Profile "${key}" references missing credential "${profile.credentialSlug}"`, + ) + } + return { + ...profile, + apiKey: profile.apiKey ?? cred.apiKey, + baseUrl: profile.baseUrl ?? cred.baseUrl, + } + } return { ...profile } } +// ==================== Credential Helpers ==================== + +/** Read a credential by slug. Throws if missing. */ +export async function resolveCredential(slug: string): Promise { + const config = await readAIProviderConfig() + const cred = config.credentials[slug] + if (!cred) throw new Error(`Unknown credential: "${slug}"`) + return { ...cred } +} + +/** Read all credentials as a slug-keyed map. */ +export async function readCredentials(): Promise> { + const config = await readAIProviderConfig() + return { ...config.credentials } +} + +/** Write a single credential (create or update). */ +export async function writeCredential(slug: string, credential: Credential): Promise { + const config = await readAIProviderConfig() + const validated = credentialSchema.parse(credential) + config.credentials[slug] = validated + await mkdir(CONFIG_DIR, { recursive: true }) + await writeFile(resolve(CONFIG_DIR, 'ai-provider-manager.json'), JSON.stringify(config, null, 2) + '\n') +} + +/** Delete a credential. 
Errors if any profile still references it. */ +export async function deleteCredential(slug: string): Promise { + const config = await readAIProviderConfig() + const referencingProfiles = Object.entries(config.profiles) + .filter(([, p]) => p.credentialSlug === slug) + .map(([slug]) => slug) + if (referencingProfiles.length > 0) { + throw new Error( + `Cannot delete credential "${slug}" — referenced by profile(s): ${referencingProfiles.join(', ')}`, + ) + } + delete config.credentials[slug] + await mkdir(CONFIG_DIR, { recursive: true }) + await writeFile(resolve(CONFIG_DIR, 'ai-provider-manager.json'), JSON.stringify(config, null, 2) + '\n') +} + /** Get the active profile slug. */ export async function getActiveProfileSlug(): Promise { const config = await readAIProviderConfig() @@ -823,19 +781,94 @@ export async function setActiveProfile(slug: string): Promise { await writeFile(resolve(CONFIG_DIR, 'ai-provider-manager.json'), JSON.stringify(updated, null, 2) + '\n') } -/** Write a single profile (create or update). */ +/** + * Eagerly extract a credential from a profile's inline fields and link + * the profile to it. Dedupes against existing credentials (same vendor + + * authType + apiKey + baseUrl reuses the existing slug). Returns the + * possibly-updated profile and credentials map. + * + * Used by writeProfile (and the 0003 backfill migration) so new profiles + * never land with inline-only credentials. Idempotent — profiles already + * carrying credentialSlug are passed through unchanged. 
+ */ +export function extractCredentialFromProfile( + profile: Profile, + existing: Record, +): { profile: Profile; credentials: Record } { + if (profile.credentialSlug) return { profile, credentials: existing } + if (!hasExtractableCredential(profile)) return { profile, credentials: existing } + + const vendor = inferVendorFromProfile(profile) + const authType = inferAuthTypeFromProfile(profile) + const cred: Credential = { vendor, authType } + if (profile.apiKey) cred.apiKey = profile.apiKey + if (profile.baseUrl) cred.baseUrl = profile.baseUrl + + // Dedupe against existing — same vendor/auth/apiKey/baseUrl reuses the slug + const match = Object.entries(existing).find(([, c]) => + c.vendor === cred.vendor && + c.authType === cred.authType && + c.apiKey === cred.apiKey && + c.baseUrl === cred.baseUrl + ) + if (match) { + return { + profile: { ...profile, credentialSlug: match[0] } as Profile, + credentials: existing, + } + } + + // Generate a fresh slug + const taken = new Set(Object.keys(existing)) + let n = 1 + while (taken.has(`${vendor}-${n}`)) n++ + const slug = `${vendor}-${n}` + + return { + profile: { ...profile, credentialSlug: slug } as Profile, + credentials: { ...existing, [slug]: cred }, + } +} + +/** + * Write a single profile (create or update). Eagerly extracts inline + * credential fields into the credentials map and links via + * credentialSlug — keeps the credentials map complete as new profiles + * land via the wizard. + */ export async function writeProfile(slug: string, profile: Profile): Promise { const config = await readAIProviderConfig() - config.profiles[slug] = profile + const { profile: extractedProfile, credentials } = extractCredentialFromProfile( + profile, + config.credentials, + ) + config.profiles[slug] = extractedProfile + config.credentials = credentials await mkdir(CONFIG_DIR, { recursive: true }) await writeFile(resolve(CONFIG_DIR, 'ai-provider-manager.json'), JSON.stringify(config, null, 2) + '\n') } -/** Delete a profile. 
Cannot delete the active profile. */ +/** + * Delete a profile. Cannot delete the active profile. If the deleted + * profile was the last one referencing its credential, the credential + * is garbage-collected too — keeps credentials map free of orphans. + */ export async function deleteProfile(slug: string): Promise { const config = await readAIProviderConfig() if (config.activeProfile === slug) throw new Error('Cannot delete the active profile') + const removedCredSlug = config.profiles[slug]?.credentialSlug delete config.profiles[slug] + + // GC: if the removed profile's credential is no longer referenced, drop it + if (removedCredSlug) { + const stillReferenced = Object.values(config.profiles).some( + (p) => p.credentialSlug === removedCredSlug, + ) + if (!stillReferenced) { + delete config.credentials[removedCredSlug] + } + } + await mkdir(CONFIG_DIR, { recursive: true }) await writeFile(resolve(CONFIG_DIR, 'ai-provider-manager.json'), JSON.stringify(config, null, 2) + '\n') } diff --git a/src/core/credential-inference.spec.ts b/src/core/credential-inference.spec.ts new file mode 100644 index 00000000..9aea2de2 --- /dev/null +++ b/src/core/credential-inference.spec.ts @@ -0,0 +1,113 @@ +import { describe, it, expect } from 'vitest' +import { + inferVendor, + inferAuthType, + hasExtractableCredential, + profileToCredential, + type ProfileLike, +} from './credential-inference.js' + +describe('inferVendor', () => { + it('codex backend → openai', () => { + expect(inferVendor({ backend: 'codex', loginMethod: 'codex-oauth' })).toBe('openai') + expect(inferVendor({ backend: 'codex', loginMethod: 'api-key' })).toBe('openai') + }) + + it('agent-sdk + claudeai → anthropic', () => { + expect(inferVendor({ backend: 'agent-sdk', loginMethod: 'claudeai' })).toBe('anthropic') + }) + + it('agent-sdk + GLM baseUrl → glm', () => { + expect(inferVendor({ backend: 'agent-sdk', loginMethod: 'api-key', baseUrl: 'https://open.bigmodel.cn/api/anthropic' })).toBe('glm') + 
expect(inferVendor({ backend: 'agent-sdk', loginMethod: 'api-key', baseUrl: 'https://api.z.ai/api/anthropic' })).toBe('glm') + }) + + it('agent-sdk + MiniMax baseUrl → minimax', () => { + expect(inferVendor({ backend: 'agent-sdk', loginMethod: 'api-key', baseUrl: 'https://api.minimaxi.com/anthropic' })).toBe('minimax') + expect(inferVendor({ backend: 'agent-sdk', loginMethod: 'api-key', baseUrl: 'https://api.minimax.io/anthropic' })).toBe('minimax') + }) + + it('agent-sdk + Kimi baseUrl → kimi', () => { + expect(inferVendor({ backend: 'agent-sdk', loginMethod: 'api-key', baseUrl: 'https://api.moonshot.cn/anthropic' })).toBe('kimi') + expect(inferVendor({ backend: 'agent-sdk', loginMethod: 'api-key', baseUrl: 'https://api.moonshot.ai/anthropic' })).toBe('kimi') + }) + + it('agent-sdk + DeepSeek baseUrl → deepseek', () => { + expect(inferVendor({ backend: 'agent-sdk', loginMethod: 'api-key', baseUrl: 'https://api.deepseek.com/anthropic' })).toBe('deepseek') + }) + + it('agent-sdk + api-key + no recognized baseUrl → anthropic', () => { + expect(inferVendor({ backend: 'agent-sdk', loginMethod: 'api-key' })).toBe('anthropic') + expect(inferVendor({ backend: 'agent-sdk', loginMethod: 'api-key', baseUrl: 'https://api.anthropic.com' })).toBe('anthropic') + }) + + it('vercel-ai-sdk uses provider field', () => { + expect(inferVendor({ backend: 'vercel-ai-sdk', provider: 'google' })).toBe('google') + expect(inferVendor({ backend: 'vercel-ai-sdk', provider: 'openai' })).toBe('openai') + expect(inferVendor({ backend: 'vercel-ai-sdk', provider: 'anthropic' })).toBe('anthropic') + expect(inferVendor({ backend: 'vercel-ai-sdk', provider: 'unknown' })).toBe('anthropic') + }) + + it('unknown backend → custom', () => { + expect(inferVendor({ backend: 'something-else' } as ProfileLike)).toBe('custom') + expect(inferVendor({} as ProfileLike)).toBe('custom') + }) +}) + +describe('inferAuthType', () => { + it('claudeai or codex-oauth → subscription', () => { + expect(inferAuthType({ 
loginMethod: 'claudeai' })).toBe('subscription') + expect(inferAuthType({ loginMethod: 'codex-oauth' })).toBe('subscription') + }) + + it('api-key or absent → api-key', () => { + expect(inferAuthType({ loginMethod: 'api-key' })).toBe('api-key') + expect(inferAuthType({})).toBe('api-key') + }) +}) + +describe('hasExtractableCredential', () => { + it('returns true when apiKey present', () => { + expect(hasExtractableCredential({ apiKey: 'k' })).toBe(true) + }) + + it('returns true for subscription loginMethods even without apiKey', () => { + expect(hasExtractableCredential({ loginMethod: 'claudeai' })).toBe(true) + expect(hasExtractableCredential({ loginMethod: 'codex-oauth' })).toBe(true) + }) + + it('returns false when no apiKey and not subscription', () => { + expect(hasExtractableCredential({})).toBe(false) + expect(hasExtractableCredential({ loginMethod: 'api-key' })).toBe(false) + }) +}) + +describe('profileToCredential', () => { + it('builds credential from inline profile fields', () => { + const cred = profileToCredential({ + backend: 'agent-sdk', + model: 'm', + loginMethod: 'api-key', + apiKey: 'sk-deep', + baseUrl: 'https://api.deepseek.com/anthropic', + }) + expect(cred).toEqual({ + vendor: 'deepseek', + authType: 'api-key', + apiKey: 'sk-deep', + baseUrl: 'https://api.deepseek.com/anthropic', + }) + }) + + it('omits apiKey/baseUrl when absent (subscription)', () => { + const cred = profileToCredential({ + backend: 'agent-sdk', + model: 'claude-opus-4-7', + loginMethod: 'claudeai', + }) + expect(cred).toEqual({ + vendor: 'anthropic', + authType: 'subscription', + }) + }) +}) diff --git a/src/core/credential-inference.ts b/src/core/credential-inference.ts new file mode 100644 index 00000000..37f76c39 --- /dev/null +++ b/src/core/credential-inference.ts @@ -0,0 +1,77 @@ +/** + * Credential inference helpers — shared between the 0002 migration + * and the runtime test path. 
+ * + * Single source of truth for "given a profile-shaped record, what + * vendor + authType does it represent?". The migration uses this to + * extract credentials from inline profile fields. The test path uses + * it to synthesize a Credential when the profile body comes from the + * wizard and has no `credentialSlug` yet. + */ + +import type { Credential, CredentialAuthType, CredentialVendor, ResolvedProfile } from './config.js' + +/** Profile-like record — works on both raw migration data and ResolvedProfile. */ +export interface ProfileLike { + backend?: string + loginMethod?: string + apiKey?: string + baseUrl?: string + provider?: string +} + +const VENDORS_BY_BASEURL: Array<[RegExp, CredentialVendor]> = [ + [/bigmodel\.cn|z\.ai/i, 'glm'], + [/minimaxi\.com|minimax\.io/i, 'minimax'], + [/moonshot\.cn|moonshot\.ai/i, 'kimi'], + [/deepseek\.com/i, 'deepseek'], +] + +export function inferVendor(profile: ProfileLike): CredentialVendor { + const { backend, loginMethod } = profile + const baseUrl = profile.baseUrl ?? '' + + if (backend === 'codex') return 'openai' + + if (backend === 'agent-sdk' && loginMethod === 'claudeai') return 'anthropic' + + if (backend === 'agent-sdk') { + for (const [pattern, vendor] of VENDORS_BY_BASEURL) { + if (pattern.test(baseUrl)) return vendor + } + return 'anthropic' + } + + if (backend === 'vercel-ai-sdk') { + const provider = profile.provider + if (provider === 'openai' || provider === 'google' || provider === 'anthropic') return provider + return 'anthropic' + } + + return 'custom' +} + +export function inferAuthType(profile: ProfileLike): CredentialAuthType { + if (profile.loginMethod === 'claudeai' || profile.loginMethod === 'codex-oauth') { + return 'subscription' + } + return 'api-key' +} + +/** Whether the profile's inline fields contain a credential to extract. 
*/ +export function hasExtractableCredential(profile: ProfileLike): boolean { + if (profile.apiKey) return true + if (profile.loginMethod === 'claudeai' || profile.loginMethod === 'codex-oauth') return true + return false +} + +/** Build a Credential from a ResolvedProfile's inline credential fields. */ +export function profileToCredential(profile: ResolvedProfile): Credential { + const cred: Credential = { + vendor: inferVendor(profile), + authType: inferAuthType(profile), + } + if (profile.apiKey) cred.apiKey = profile.apiKey + if (profile.baseUrl) cred.baseUrl = profile.baseUrl + return cred +} diff --git a/src/core/version.spec.ts b/src/core/version.spec.ts new file mode 100644 index 00000000..8d937c76 --- /dev/null +++ b/src/core/version.spec.ts @@ -0,0 +1,205 @@ +import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest' +import { + compareVersions, + getCurrentVersion, + getRepoSlug, + fetchLatestRelease, + getVersionInfo, + _resetCacheForTest, +} from './version.js' + +describe('compareVersions', () => { + it('compares core versions numerically', () => { + expect(compareVersions('1.0.0', '1.0.0')).toBe(0) + expect(compareVersions('1.0.1', '1.0.0')).toBeGreaterThan(0) + expect(compareVersions('1.0.0', '1.0.1')).toBeLessThan(0) + expect(compareVersions('2.0.0', '1.99.99')).toBeGreaterThan(0) + expect(compareVersions('1.10.0', '1.9.0')).toBeGreaterThan(0) // not lexicographic + }) + + it('treats release as greater than prerelease for same core', () => { + expect(compareVersions('1.0.0', '1.0.0-beta.0')).toBeGreaterThan(0) + expect(compareVersions('1.0.0-beta.0', '1.0.0')).toBeLessThan(0) + }) + + it('compares prerelease tags lexicographically', () => { + expect(compareVersions('1.0.0-beta.1', '1.0.0-beta.0')).toBeGreaterThan(0) + expect(compareVersions('1.0.0-alpha', '1.0.0-beta')).toBeLessThan(0) + }) + + it('strips a leading v', () => { + expect(compareVersions('v1.2.3', '1.2.3')).toBe(0) + expect(compareVersions('v1.2.4', 
'v1.2.3')).toBeGreaterThan(0) + }) + + it('handles missing parts as zero', () => { + expect(compareVersions('1', '1.0.0')).toBe(0) + expect(compareVersions('1.2', '1.2.0')).toBe(0) + }) +}) + +describe('getCurrentVersion', () => { + it('returns a non-empty version string from package.json', () => { + const v = getCurrentVersion() + expect(typeof v).toBe('string') + expect(v.length).toBeGreaterThan(0) + }) +}) + +describe('getRepoSlug', () => { + it('parses owner/repo from package.json repository url', () => { + const slug = getRepoSlug() + expect(slug).not.toBeNull() + expect(slug?.owner).toBeTruthy() + expect(slug?.repo).toBeTruthy() + }) +}) + +describe('fetchLatestRelease (mocked fetch)', () => { + const origFetch = globalThis.fetch + + beforeEach(() => { + _resetCacheForTest() + }) + + afterEach(() => { + globalThis.fetch = origFetch + }) + + it('returns the parsed release on success (array shape, takes first non-draft)', async () => { + globalThis.fetch = vi.fn().mockResolvedValue({ + ok: true, + status: 200, + statusText: 'OK', + json: async () => ([ + { + tag_name: 'v1.2.3', + html_url: 'https://github.com/owner/repo/releases/tag/v1.2.3', + body: '## Changelog', + published_at: '2026-05-09T00:00:00Z', + draft: false, + prerelease: true, + }, + ]), + }) as unknown as typeof fetch + + const { result, error } = await fetchLatestRelease() + expect(error).toBeNull() + expect(result?.version).toBe('1.2.3') // leading v stripped + expect(result?.url).toContain('github.com') + expect(result?.body).toBe('## Changelog') + }) + + it('skips draft releases', async () => { + globalThis.fetch = vi.fn().mockResolvedValue({ + ok: true, status: 200, statusText: 'OK', + json: async () => ([ + { tag_name: 'v2.0.0', html_url: 'x', body: '', published_at: '', draft: true, prerelease: false }, + { tag_name: 'v1.0.0', html_url: 'y', body: '', published_at: '', draft: false, prerelease: false }, + ]), + }) as unknown as typeof fetch + + const { result } = await fetchLatestRelease() 
+ expect(result?.version).toBe('1.0.0') // first non-draft + }) + + it('accepts prereleases as updates', async () => { + globalThis.fetch = vi.fn().mockResolvedValue({ + ok: true, status: 200, statusText: 'OK', + json: async () => ([ + { tag_name: 'v0.10.0-beta.0', html_url: 'x', body: '', published_at: '', draft: false, prerelease: true }, + ]), + }) as unknown as typeof fetch + + const { result } = await fetchLatestRelease() + expect(result?.version).toBe('0.10.0-beta.0') + }) + + it('returns error when no published releases found', async () => { + globalThis.fetch = vi.fn().mockResolvedValue({ + ok: true, status: 200, statusText: 'OK', + json: async () => ([]), + }) as unknown as typeof fetch + const { result, error } = await fetchLatestRelease() + expect(result).toBeNull() + expect(error).toContain('No published releases') + }) + + it('returns error and caches it on HTTP failure', async () => { + globalThis.fetch = vi.fn().mockResolvedValue({ + ok: false, status: 404, statusText: 'Not Found', + json: async () => ([]), + }) as unknown as typeof fetch + + const r1 = await fetchLatestRelease() + expect(r1.error).toContain('404') + + const fetchSpy = globalThis.fetch as unknown as ReturnType + const callsBefore = fetchSpy.mock.calls.length + const r2 = await fetchLatestRelease() + const callsAfter = fetchSpy.mock.calls.length + expect(callsAfter).toBe(callsBefore) + expect(r2.error).toBe(r1.error) + }) + + it('caches success responses', async () => { + const fetchMock = vi.fn().mockResolvedValue({ + ok: true, status: 200, statusText: 'OK', + json: async () => ([{ tag_name: 'v1.0.0', html_url: 'x', body: '', published_at: '', draft: false, prerelease: false }]), + }) + globalThis.fetch = fetchMock as unknown as typeof fetch + + await fetchLatestRelease() + await fetchLatestRelease() + await fetchLatestRelease() + expect(fetchMock).toHaveBeenCalledTimes(1) + }) + + it('handles network errors gracefully', async () => { + globalThis.fetch = 
vi.fn().mockRejectedValue(new Error('ECONNREFUSED')) as unknown as typeof fetch + const { result, error } = await fetchLatestRelease() + expect(result).toBeNull() + expect(error).toContain('ECONNREFUSED') + }) +}) + +describe('getVersionInfo', () => { + beforeEach(() => { _resetCacheForTest() }) + + it('reports hasUpdate=true when latest > current', async () => { + globalThis.fetch = vi.fn().mockResolvedValue({ + ok: true, status: 200, statusText: 'OK', + json: async () => ([{ + tag_name: 'v999.999.999', + html_url: 'https://example.com', + body: '', + published_at: '2026-05-09T00:00:00Z', + draft: false, prerelease: false, + }]), + }) as unknown as typeof fetch + + const info = await getVersionInfo() + expect(info.latest).toBe('999.999.999') + expect(info.hasUpdate).toBe(true) + expect(info.error).toBeNull() + }) + + it('reports hasUpdate=false when latest = current', async () => { + const current = getCurrentVersion() + globalThis.fetch = vi.fn().mockResolvedValue({ + ok: true, status: 200, statusText: 'OK', + json: async () => ([{ tag_name: current, html_url: 'x', body: '', published_at: '', draft: false, prerelease: false }]), + }) as unknown as typeof fetch + + const info = await getVersionInfo() + expect(info.hasUpdate).toBe(false) + }) + + it('returns error when GitHub fetch fails', async () => { + globalThis.fetch = vi.fn().mockRejectedValue(new Error('boom')) as unknown as typeof fetch + const info = await getVersionInfo() + expect(info.latest).toBeNull() + expect(info.hasUpdate).toBe(false) + expect(info.error).toContain('boom') + }) +}) diff --git a/src/core/version.ts b/src/core/version.ts new file mode 100644 index 00000000..aadc7b4c --- /dev/null +++ b/src/core/version.ts @@ -0,0 +1,213 @@ +/** + * App version awareness — current version + latest GitHub release. + * + * The current version comes from package.json#version (read once at + * module load). 
The latest version comes from the GitHub Releases API + * (cached in-memory with a TTL — GitHub unauthenticated rate limit is + * 60 req/h per IP, so we don't want to hit it on every UI load). + * + * The repo owner+name is derived from package.json#repository.url so + * fork users don't poll the upstream repo. + * + * Self-hosted source distribution: when the user sees "update + * available" they manually run `git pull && pnpm build` and restart. + * Auto-execute is out of scope (Electron will handle that path + * differently when packaging lands). + */ + +import { readFileSync } from 'node:fs' +import { resolve, dirname } from 'node:path' +import { fileURLToPath } from 'node:url' + +// ==================== Current version (from package.json) ==================== + +interface PackageJson { + version?: string + repository?: { url?: string } | string +} + +let _packageJson: PackageJson | null = null + +function readPackageJson(): PackageJson { + if (_packageJson !== null) return _packageJson + try { + const here = fileURLToPath(import.meta.url) + const repoRoot = resolve(dirname(here), '..', '..') + _packageJson = JSON.parse(readFileSync(resolve(repoRoot, 'package.json'), 'utf-8')) as PackageJson + } catch { + _packageJson = {} + } + return _packageJson +} + +export function getCurrentVersion(): string { + return readPackageJson().version ?? '0.0.0' +} + +/** Parse owner+repo from `git+https://github.com//.git` style URLs. */ +export function getRepoSlug(): { owner: string; repo: string } | null { + const repository = readPackageJson().repository + const url = typeof repository === 'string' ? repository : repository?.url ?? 
'' + const match = url.match(/github\.com[/:]([^/]+)\/([^/.]+)/i) + if (!match) return null + return { owner: match[1], repo: match[2] } +} + +// ==================== Semver comparison (minimal) ==================== + +interface ParsedVersion { + core: number[] + pre: string | null +} + +function parseVersion(s: string): ParsedVersion { + const stripped = s.replace(/^v/, '') + const dashIdx = stripped.indexOf('-') + const core = dashIdx === -1 ? stripped : stripped.slice(0, dashIdx) + const pre = dashIdx === -1 ? null : stripped.slice(dashIdx + 1) + const coreNums = core.split('.').map((n) => parseInt(n, 10) || 0) + while (coreNums.length < 3) coreNums.push(0) + return { core: coreNums.slice(0, 3), pre } +} + +/** + * Compare two semver-style versions. Returns negative if a<b, zero if + * equal, positive if a>b. Handles the common cases (MAJOR.MINOR.PATCH-PRERELEASE) + * — not a full RFC-compliant comparator, but enough for "is the remote + * release newer than ours". + */ +export function compareVersions(a: string, b: string): number { + const A = parseVersion(a) + const B = parseVersion(b) + for (let i = 0; i < 3; i++) { + if (A.core[i] !== B.core[i]) return A.core[i] - B.core[i] + } + // Cores equal — release > prerelease + if (A.pre === null && B.pre === null) return 0 + if (A.pre === null) return 1 + if (B.pre === null) return -1 + // Both prereleases — lexicographic comparison + return A.pre < B.pre ? -1 : A.pre > B.pre ? 1 : 0 +} + +// ==================== Latest release (cached GitHub fetch) ==================== + +export interface LatestRelease { + version: string + url: string + body: string + publishedAt: string +} + +interface CacheEntry { + fetchedAt: number + result: LatestRelease | null + error: string | null +} + +const SUCCESS_TTL_MS = 60 * 60 * 1000 // 1h +const ERROR_TTL_MS = 5 * 60 * 1000 // 5min + +let cache: CacheEntry | null = null + +/** + * Fetch the latest GitHub release. Returns null + error string when the + * API is unreachable / rate-limited / repo has no releases. 
Result + * (success or failure) is cached so a flapping UI doesn't burn the + * rate limit. + */ +export async function fetchLatestRelease(opts?: { + /** Force re-fetch even if cache is fresh. */ + force?: boolean +}): Promise<{ result: LatestRelease | null; error: string | null }> { + const now = Date.now() + if (!opts?.force && cache) { + const ttl = cache.error ? ERROR_TTL_MS : SUCCESS_TTL_MS + if (now - cache.fetchedAt < ttl) { + return { result: cache.result, error: cache.error } + } + } + + const slug = getRepoSlug() + if (!slug) { + cache = { fetchedAt: now, result: null, error: 'Could not derive repo slug from package.json' } + return { result: null, error: cache.error } + } + + try { + // Use /releases (not /releases/latest) — the latter excludes + // prerelease tags by default. We accept prereleases as valid + // updates because most active projects (including this one) + // ship -beta/-rc versions before stable. Drafts are still + // skipped explicitly. + const url = `https://api.github.com/repos/${slug.owner}/${slug.repo}/releases?per_page=10` + const res = await fetch(url, { + headers: { 'Accept': 'application/vnd.github+json' }, + signal: AbortSignal.timeout(10_000), + }) + if (!res.ok) { + const error = `GitHub API ${res.status} ${res.statusText}` + cache = { fetchedAt: now, result: null, error } + return { result: null, error } + } + type ReleaseRow = { tag_name?: string; html_url?: string; body?: string; published_at?: string; draft?: boolean; prerelease?: boolean } + const list = await res.json() as ReleaseRow[] + // GitHub returns newest-first by default. Take the first non-draft. + const data = Array.isArray(list) ? list.find((r) => !r.draft && r.tag_name) : null + if (!data || !data.tag_name) { + cache = { fetchedAt: now, result: null, error: 'No published releases found' } + return { result: null, error: cache.error } + } + const result: LatestRelease = { + version: data.tag_name.replace(/^v/, ''), + url: data.html_url ?? 
`https://github.com/${slug.owner}/${slug.repo}/releases`, + body: data.body ?? '', + publishedAt: data.published_at ?? '', + } + cache = { fetchedAt: now, result, error: null } + return { result, error: null } + } catch (err) { + const error = err instanceof Error ? err.message : String(err) + cache = { fetchedAt: now, result: null, error } + return { result: null, error } + } +} + +/** Reset the in-memory cache. Test-only. */ +export function _resetCacheForTest(): void { + cache = null +} + +// ==================== Combined view ==================== + +export interface VersionInfo { + current: string + latest: string | null + hasUpdate: boolean + releaseUrl: string | null + releaseNotes: string | null + publishedAt: string | null + error: string | null +} + +export async function getVersionInfo(opts?: { force?: boolean }): Promise<VersionInfo> { + const current = getCurrentVersion() + const { result, error } = await fetchLatestRelease(opts) + if (!result) { + return { + current, latest: null, hasUpdate: false, + releaseUrl: null, releaseNotes: null, publishedAt: null, + error, + } + } + const hasUpdate = compareVersions(result.version, current) > 0 + return { + current, + latest: result.version, + hasUpdate, + releaseUrl: result.url, + releaseNotes: result.body, + publishedAt: result.publishedAt, + error: null, + } +} diff --git a/src/migrations/0001_initial_unified.spec.ts b/src/migrations/0001_initial_unified.spec.ts new file mode 100644 index 00000000..7e5f14ef --- /dev/null +++ b/src/migrations/0001_initial_unified.spec.ts @@ -0,0 +1,178 @@ +import { describe, it, expect } from 'vitest' +import type { MigrationContext } from './types.js' +import { migration } from './0001_initial_unified/index.js' + +function makeMemoryContext(initial: Record<string, unknown> = {}): { + ctx: MigrationContext + files: Map<string, unknown> +} { + const files = new Map(Object.entries(initial)) + const ctx: MigrationContext = { + async readJson<T>(filename: string): Promise<T | undefined> { + const v = files.get(filename) + return v === 
undefined ? undefined : JSON.parse(JSON.stringify(v)) + }, + async writeJson(filename: string, data: unknown): Promise { + files.set(filename, JSON.parse(JSON.stringify(data))) + }, + async removeJson(filename: string): Promise { + files.delete(filename) + }, + configDir(): string { + return '/virtual/config' + }, + } + return { ctx, files } +} + +describe('0001_initial_unified', () => { + it('fresh install (no config) — no-op, leaves no files', async () => { + const { ctx, files } = makeMemoryContext() + await migration.up(ctx) + expect(files.size).toBe(0) + }) + + it('already-current data (profile-based, no apiKeys, with connectors) — no-op', async () => { + const initial = { + 'ai-provider-manager.json': { + profiles: { + default: { backend: 'agent-sdk', model: 'claude-opus-4-7', loginMethod: 'claudeai' }, + }, + activeProfile: 'default', + }, + 'connectors.json': { web: { port: 3002 } }, + } + const { ctx, files } = makeMemoryContext(initial) + const before = JSON.stringify([...files.entries()]) + await migration.up(ctx) + const after = JSON.stringify([...files.entries()]) + expect(after).toBe(before) + }) + + it('migrates flat ai-provider config → profile-based', async () => { + const { ctx, files } = makeMemoryContext({ + 'ai-provider-manager.json': { + backend: 'agent-sdk', + model: 'claude-opus-4-7', + loginMethod: 'claudeai', + provider: 'anthropic', + }, + }) + + await migration.up(ctx) + + const after = files.get('ai-provider-manager.json') as Record + expect(after.activeProfile).toBe('default') + expect(after.profiles).toBeDefined() + const profiles = after.profiles as Record> + expect(profiles.default.backend).toBe('agent-sdk') + expect(profiles.default.loginMethod).toBe('claudeai') + expect(profiles.default.model).toBe('claude-opus-4-7') + }) + + it('renames claude-code → agent-sdk during flat migration', async () => { + const { ctx, files } = makeMemoryContext({ + 'ai-provider-manager.json': { + backend: 'claude-code', + model: 'claude-opus-4-7', + 
}, + }) + + await migration.up(ctx) + + const after = files.get('ai-provider-manager.json') as Record + const profiles = after.profiles as Record> + expect(profiles.default.backend).toBe('agent-sdk') + expect(profiles.default.loginMethod).toBe('claudeai') + }) + + it('distributes global apiKeys into profiles', async () => { + const { ctx, files } = makeMemoryContext({ + 'ai-provider-manager.json': { + apiKeys: { anthropic: 'sk-ant', openai: 'sk-oa' }, + profiles: { + a: { backend: 'agent-sdk', model: 'claude-opus-4-7', loginMethod: 'api-key' }, + b: { backend: 'codex', model: 'gpt-5.4', loginMethod: 'api-key' }, + }, + activeProfile: 'a', + }, + }) + + await migration.up(ctx) + + const after = files.get('ai-provider-manager.json') as Record + const profiles = after.profiles as Record> + expect(profiles.a.apiKey).toBe('sk-ant') + expect(profiles.b.apiKey).toBe('sk-oa') + expect(after.apiKeys).toBeUndefined() // removed after distribution + }) + + it('does NOT overwrite profile.apiKey when one already exists', async () => { + const { ctx, files } = makeMemoryContext({ + 'ai-provider-manager.json': { + apiKeys: { anthropic: 'sk-ant' }, + profiles: { + a: { backend: 'agent-sdk', model: 'm', loginMethod: 'api-key', apiKey: 'sk-existing' }, + }, + activeProfile: 'a', + }, + }) + + await migration.up(ctx) + + const after = files.get('ai-provider-manager.json') as Record + const profiles = after.profiles as Record> + expect(profiles.a.apiKey).toBe('sk-existing') + }) + + it('consolidates telegram.json + engine port fields → connectors.json', async () => { + const { ctx, files } = makeMemoryContext({ + 'telegram.json': { botToken: 'tg-token' }, + 'engine.json': { pairs: ['BTC/USD'], webPort: 3010, mcpPort: 3011 }, + }) + + await migration.up(ctx) + + const connectors = files.get('connectors.json') as Record> + expect(connectors.telegram).toEqual({ botToken: 'tg-token', enabled: true }) + expect(connectors.web).toEqual({ port: 3010 }) + expect(connectors.mcp).toEqual({ port: 
3011 }) + + const engine = files.get('engine.json') as Record + expect(engine.webPort).toBeUndefined() + expect(engine.mcpPort).toBeUndefined() + expect(engine.pairs).toEqual(['BTC/USD']) + }) + + it('skips connectors consolidation when connectors.json already exists', async () => { + const { ctx, files } = makeMemoryContext({ + 'telegram.json': { botToken: 'tg-token' }, + 'connectors.json': { web: { port: 9999 } }, // already migrated + }) + + await migration.up(ctx) + + const connectors = files.get('connectors.json') as Record> + expect(connectors.web).toEqual({ port: 9999 }) // unchanged + expect(connectors.telegram).toBeUndefined() + }) + + it('idempotent: second run produces same state', async () => { + const initial = { + 'ai-provider-manager.json': { + backend: 'agent-sdk', + model: 'claude-opus-4-7', + loginMethod: 'claudeai', + apiKeys: { anthropic: 'sk-ant' }, + }, + } + const { ctx, files } = makeMemoryContext(initial) + + await migration.up(ctx) + const afterFirst = JSON.stringify([...files.entries()]) + await migration.up(ctx) + const afterSecond = JSON.stringify([...files.entries()]) + + expect(afterSecond).toBe(afterFirst) + }) +}) diff --git a/src/migrations/0001_initial_unified/index.ts b/src/migrations/0001_initial_unified/index.ts new file mode 100644 index 00000000..d1f1fd8c --- /dev/null +++ b/src/migrations/0001_initial_unified/index.ts @@ -0,0 +1,216 @@ +/** + * 0001_initial_unified — roll-up of pre-framework ad-hoc migrations. + * + * Body is the four migration ifblocks that previously lived inline in + * config.ts:loadConfig(), copied here verbatim and adapted to use + * MigrationContext for IO. Each ifblock is its own structural-detection + * guard that short-circuits when its precondition isn't met, so the + * combined body is naturally idempotent against already-current data. 
+ * + * Covers: + * - very-old format (no backend, no profiles) → profile-based + * - flat ai-provider config → profile-based + * - claude-code → agent-sdk alias rename + * - subchannel inline overrides → named profiles + * - global apiKeys → distributed into per-profile apiKey + * - telegram.json + engine port fields → connectors.json + * + * Rationale for not splitting into 5 named migrations: their upstream + * boundaries are lost. Different historical user installs took + * different paths through these blocks; no journal of "which user + * came through which transition" exists. Splitting would translate + * ambiguity into false precision. + */ + +import type { Migration, MigrationContext } from '../types.js' + +export const migration: Migration = { + id: '0001_initial_unified', + appVersion: '0.10.0-beta.1', + introducedAt: '2026-05-09', + affects: ['*'], + summary: + 'Roll-up of pre-framework ad-hoc migrations: model.json/api-keys.json merge, claude-code alias, flat → profile-based, subchannel overrides → named profiles, apiKeys distribution, telegram + engine port consolidation', + up: async (ctx) => { + await migrateAIProviderShape(ctx) + await distributeApiKeys(ctx) + await consolidateConnectorsConfig(ctx) + }, +} + +// ==================== Block 1+2: ai-provider shape migration ==================== + +async function migrateAIProviderShape(ctx: MigrationContext): Promise { + const aiProviderRaw = await ctx.readJson>('ai-provider-manager.json') + + // Block 1: flat ai-provider config → profile-based + if (aiProviderRaw && 'backend' in aiProviderRaw && !('profiles' in aiProviderRaw)) { + // Step 1: handle very old format (model.json + api-keys.json) + if (!('model' in aiProviderRaw)) { + const oldModel = await ctx.readJson>('model.json') + const oldKeys = await ctx.readJson>('api-keys.json') + if (oldModel) { + Object.assign(aiProviderRaw, { + provider: oldModel.provider, + model: oldModel.model, + ...(oldModel.baseUrl ? 
{ baseUrl: oldModel.baseUrl } : {}), + }) + } + if (oldKeys) aiProviderRaw.apiKeys = oldKeys + await ctx.removeJson('model.json') + await ctx.removeJson('api-keys.json') + } + + // Step 2: claude-code → agent-sdk alias + if (aiProviderRaw.backend === 'claude-code') { + aiProviderRaw.backend = 'agent-sdk' + aiProviderRaw.loginMethod = aiProviderRaw.loginMethod ?? 'claudeai' + } + + // Step 3: build default profile from flat config + const backend = aiProviderRaw.backend as string + const defaultProfile: Record = { label: 'Default' } + if (backend === 'agent-sdk') { + defaultProfile.backend = 'agent-sdk' + defaultProfile.model = aiProviderRaw.model + defaultProfile.loginMethod = + aiProviderRaw.loginMethod === 'codex-oauth' + ? 'api-key' + : aiProviderRaw.loginMethod ?? 'api-key' + } else if (backend === 'codex') { + defaultProfile.backend = 'codex' + defaultProfile.model = aiProviderRaw.model + defaultProfile.loginMethod = + aiProviderRaw.loginMethod === 'claudeai' + ? 'codex-oauth' + : aiProviderRaw.loginMethod ?? 'codex-oauth' + } else { + defaultProfile.backend = 'vercel-ai-sdk' + defaultProfile.provider = aiProviderRaw.provider ?? 'anthropic' + defaultProfile.model = aiProviderRaw.model + } + if (aiProviderRaw.baseUrl) defaultProfile.baseUrl = aiProviderRaw.baseUrl + + // Step 4: subchannel inline overrides → named profiles + const oldSubchannels = await ctx.readJson>>('web-subchannels.json') + const profiles: Record = { default: defaultProfile } + const newSubchannels: Array> = [] + + if (oldSubchannels) { + for (const ch of oldSubchannels) { + const sub: Record = { id: ch.id, label: ch.label } + if (ch.systemPrompt) sub.systemPrompt = ch.systemPrompt + if (ch.disabledTools) sub.disabledTools = ch.disabledTools + + const provider = ch.provider as string | undefined + const override = + provider === 'vercel-ai-sdk' ? ch.vercelAiSdk + : provider === 'agent-sdk' ? ch.agentSdk + : provider === 'codex' ? 
ch.codex + : undefined + + if (provider && override) { + const slug = `${ch.id}-${provider}` + profiles[slug] = { backend: provider, label: `${ch.label}`, ...(override as object) } + sub.profile = slug + } else if (provider) { + const slug = `${ch.id}-${provider}` + profiles[slug] = { ...defaultProfile, backend: provider, label: `${ch.label}` } + sub.profile = slug + } + + newSubchannels.push(sub) + } + await ctx.writeJson('web-subchannels.json', newSubchannels) + } + + // Step 5: write new format + const apiKeys = (aiProviderRaw.apiKeys as Record) ?? {} + await ctx.writeJson('ai-provider-manager.json', { + apiKeys, + profiles, + activeProfile: 'default', + }) + return + } + + // Block 2: very-old format (no backend, no profiles) — only when the file exists at all + if (aiProviderRaw && !('backend' in aiProviderRaw) && !('profiles' in aiProviderRaw)) { + const oldModel = await ctx.readJson>('model.json') + const oldKeys = await ctx.readJson>('api-keys.json') + const migrated = { + apiKeys: oldKeys ?? {}, + profiles: { + default: { + backend: 'agent-sdk', + label: 'Default', + model: (oldModel?.model as string) ?? 'claude-opus-4-7', + loginMethod: 'claudeai', + provider: (oldModel?.provider as string) ?? 
'anthropic', + }, + }, + activeProfile: 'default', + } + await ctx.writeJson('ai-provider-manager.json', migrated) + await ctx.removeJson('model.json') + await ctx.removeJson('api-keys.json') + } +} + +// ==================== Block 3: distribute global apiKeys into profiles ==================== + +async function distributeApiKeys(ctx: MigrationContext): Promise { + const aiConfig = await ctx.readJson>('ai-provider-manager.json') + if (!aiConfig || !('apiKeys' in aiConfig) || !('profiles' in aiConfig)) return + + const keys = aiConfig.apiKeys as Record | undefined + const profiles = aiConfig.profiles as Record> + + if (!keys || !Object.values(keys).some(Boolean)) return + + let changed = false + for (const profile of Object.values(profiles)) { + if (profile.apiKey) continue // already has a key, don't overwrite + const vendor = + profile.backend === 'codex' ? 'openai' + : profile.backend === 'agent-sdk' ? 'anthropic' + : (profile.provider as string) ?? 'anthropic' + const globalKey = keys[vendor] + if (globalKey) { + profile.apiKey = globalKey + changed = true + } + } + + if (changed) { + delete aiConfig.apiKeys + await ctx.writeJson('ai-provider-manager.json', aiConfig) + } +} + +// ==================== Block 4: consolidate telegram.json + engine port fields ==================== + +async function consolidateConnectorsConfig(ctx: MigrationContext): Promise { + const connectorsRaw = await ctx.readJson>('connectors.json') + if (connectorsRaw !== undefined) return // already migrated or not applicable + + const oldTelegram = await ctx.readJson>('telegram.json') + const oldEngine = await ctx.readJson>('engine.json') + const migrated: Record = {} + + if (oldTelegram && typeof oldTelegram === 'object') { + migrated.telegram = { ...oldTelegram, enabled: true } + } + if (oldEngine) { + if (oldEngine.webPort !== undefined) migrated.web = { port: oldEngine.webPort } + if (oldEngine.mcpPort !== undefined) migrated.mcp = { port: oldEngine.mcpPort } + if (oldEngine.askMcpPort 
!== undefined) migrated.mcpAsk = { enabled: true, port: oldEngine.askMcpPort } + const { mcpPort: _m, askMcpPort: _a, webPort: _w, ...cleanEngine } = oldEngine + void _m; void _a; void _w + await ctx.writeJson('engine.json', cleanEngine) + } + + if (Object.keys(migrated).length > 0) { + await ctx.writeJson('connectors.json', migrated) + } +} diff --git a/src/migrations/0002_extract_credentials.spec.ts b/src/migrations/0002_extract_credentials.spec.ts new file mode 100644 index 00000000..b126c471 --- /dev/null +++ b/src/migrations/0002_extract_credentials.spec.ts @@ -0,0 +1,179 @@ +import { describe, it, expect } from 'vitest' +import type { MigrationContext } from './types.js' +import { migration } from './0002_extract_credentials/index.js' + +function makeMemoryContext(initial: Record = {}): { + ctx: MigrationContext + files: Map +} { + const files = new Map(Object.entries(initial)) + const ctx: MigrationContext = { + async readJson(filename: string): Promise { + const v = files.get(filename) + return v === undefined ? undefined : JSON.parse(JSON.stringify(v)) + }, + async writeJson(filename: string, data: unknown): Promise { + files.set(filename, JSON.parse(JSON.stringify(data))) + }, + async removeJson(filename: string): Promise { + files.delete(filename) + }, + configDir(): string { + return '/virtual/config' + }, + } + return { ctx, files } +} + +function setup(profiles: Record>) { + return makeMemoryContext({ + 'ai-provider-manager.json': { + profiles, + activeProfile: Object.keys(profiles)[0] ?? 
'default', + }, + }) +} + +function getCfg(files: Map) { + return files.get('ai-provider-manager.json') as { + profiles: Record> + credentials?: Record + } +} + +describe('0002_extract_credentials — vendor inference', () => { + it('codex + codex-oauth → openai/subscription', async () => { + const { ctx, files } = setup({ + a: { backend: 'codex', loginMethod: 'codex-oauth', model: 'gpt-5.4' }, + }) + await migration.up(ctx) + const cfg = getCfg(files) + expect(cfg.profiles.a.credentialSlug).toBe('openai-1') + expect(cfg.credentials!['openai-1']).toEqual({ vendor: 'openai', authType: 'subscription' }) + }) + + it('codex + api-key → openai/api-key', async () => { + const { ctx, files } = setup({ + a: { backend: 'codex', loginMethod: 'api-key', model: 'gpt-5.4', apiKey: 'sk-oa' }, + }) + await migration.up(ctx) + const cfg = getCfg(files) + expect(cfg.credentials!['openai-1']).toEqual({ vendor: 'openai', authType: 'api-key', apiKey: 'sk-oa' }) + }) + + it('agent-sdk + claudeai → anthropic/subscription', async () => { + const { ctx, files } = setup({ + a: { backend: 'agent-sdk', loginMethod: 'claudeai', model: 'claude-opus-4-7' }, + }) + await migration.up(ctx) + const cfg = getCfg(files) + expect(cfg.credentials!['anthropic-1']).toEqual({ vendor: 'anthropic', authType: 'subscription' }) + }) + + it('agent-sdk + api-key + GLM baseUrl → glm', async () => { + const { ctx, files } = setup({ + a: { backend: 'agent-sdk', loginMethod: 'api-key', model: 'glm-4.7', apiKey: 'k', baseUrl: 'https://open.bigmodel.cn/api/anthropic' }, + }) + await migration.up(ctx) + const cfg = getCfg(files) + expect(cfg.credentials!['glm-1'].vendor).toBe('glm') + expect(cfg.credentials!['glm-1'].baseUrl).toBe('https://open.bigmodel.cn/api/anthropic') + }) + + it('agent-sdk + MiniMax baseUrl → minimax', async () => { + const { ctx, files } = setup({ + a: { backend: 'agent-sdk', loginMethod: 'api-key', model: 'M', apiKey: 'k', baseUrl: 'https://api.minimaxi.com/anthropic' }, + }) + await 
migration.up(ctx) + expect(getCfg(files).credentials!['minimax-1'].vendor).toBe('minimax') + }) + + it('agent-sdk + Kimi baseUrl → kimi', async () => { + const { ctx, files } = setup({ + a: { backend: 'agent-sdk', loginMethod: 'api-key', model: 'k', apiKey: 'k', baseUrl: 'https://api.moonshot.cn/anthropic' }, + }) + await migration.up(ctx) + expect(getCfg(files).credentials!['kimi-1'].vendor).toBe('kimi') + }) + + it('agent-sdk + DeepSeek baseUrl → deepseek', async () => { + const { ctx, files } = setup({ + a: { backend: 'agent-sdk', loginMethod: 'api-key', model: 'd', apiKey: 'k', baseUrl: 'https://api.deepseek.com/anthropic' }, + }) + await migration.up(ctx) + expect(getCfg(files).credentials!['deepseek-1'].vendor).toBe('deepseek') + }) + + it('agent-sdk + api-key + no recognized baseUrl → anthropic', async () => { + const { ctx, files } = setup({ + a: { backend: 'agent-sdk', loginMethod: 'api-key', model: 'm', apiKey: 'k' }, + }) + await migration.up(ctx) + expect(getCfg(files).credentials!['anthropic-1'].vendor).toBe('anthropic') + }) + + it('vercel-ai-sdk uses profile.provider', async () => { + const { ctx, files } = setup({ + a: { backend: 'vercel-ai-sdk', provider: 'google', model: 'gemini-2.5-flash', apiKey: 'k' }, + }) + await migration.up(ctx) + expect(getCfg(files).credentials!['google-1'].vendor).toBe('google') + }) +}) + +describe('0002_extract_credentials — slug + state', () => { + it('preserves inline apiKey/baseUrl/loginMethod on profile (transitional)', async () => { + const { ctx, files } = setup({ + a: { backend: 'agent-sdk', loginMethod: 'api-key', model: 'm', apiKey: 'sk', baseUrl: 'https://api.example/' }, + }) + await migration.up(ctx) + const profile = getCfg(files).profiles.a + expect(profile.apiKey).toBe('sk') + expect(profile.baseUrl).toBe('https://api.example/') + expect(profile.loginMethod).toBe('api-key') + expect(profile.credentialSlug).toBe('anthropic-1') + }) + + it('skips profiles with no extractable credential', async () => { + 
const { ctx, files } = setup({ + a: { backend: 'agent-sdk', loginMethod: 'api-key', model: 'm' }, // no apiKey, no subscription + }) + await migration.up(ctx) + const cfg = getCfg(files) + expect(cfg.profiles.a.credentialSlug).toBeUndefined() + // credentials field gets initialized to {} on first run + expect(cfg.credentials).toEqual({}) + }) + + it('generates unique slugs across multiple profiles of same vendor', async () => { + const { ctx, files } = setup({ + a: { backend: 'agent-sdk', loginMethod: 'api-key', model: 'm', apiKey: 'k1' }, + b: { backend: 'agent-sdk', loginMethod: 'api-key', model: 'm', apiKey: 'k2' }, + c: { backend: 'agent-sdk', loginMethod: 'claudeai', model: 'm' }, + }) + await migration.up(ctx) + const cfg = getCfg(files) + const slugs = Object.values(cfg.profiles).map(p => p.credentialSlug) + expect(new Set(slugs).size).toBe(3) // all distinct + expect(slugs.every(s => typeof s === 'string' && s.startsWith('anthropic-'))).toBe(true) + }) + + it('idempotent — second run is a no-op', async () => { + const { ctx, files } = setup({ + a: { backend: 'agent-sdk', loginMethod: 'api-key', model: 'm', apiKey: 'k' }, + }) + + await migration.up(ctx) + const afterFirst = JSON.stringify(files.get('ai-provider-manager.json')) + await migration.up(ctx) + const afterSecond = JSON.stringify(files.get('ai-provider-manager.json')) + + expect(afterSecond).toBe(afterFirst) + }) + + it('no-op when ai-provider-manager.json absent', async () => { + const { ctx, files } = makeMemoryContext() + await migration.up(ctx) + expect(files.size).toBe(0) + }) +}) diff --git a/src/migrations/0002_extract_credentials/index.ts b/src/migrations/0002_extract_credentials/index.ts new file mode 100644 index 00000000..61c0ee4e --- /dev/null +++ b/src/migrations/0002_extract_credentials/index.ts @@ -0,0 +1,97 @@ +/** + * 0002_extract_credentials — peel credential storage off Profile. + * + * Today every Profile carries inline `apiKey` / `baseUrl` / + * `loginMethod`. 
This migration adds a top-level `credentials` map + * keyed by slug, infers a credential record per profile, and links + * the profile to the credential via `credentialSlug`. Inline fields + * are LEFT IN PLACE as a transitional fallback so providers don't + * need to change in this round (resolveProfile() joins the two and + * returns the same ResolvedProfile shape). + * + * Vendor / authType inference rules: see src/core/credential-inference.ts. + * + * In-body idempotency: if the credentials map exists and every + * profile either has credentialSlug or has nothing to extract, no-op. + */ + +import type { Migration, MigrationContext } from '../types.js' +import { + inferVendor, + inferAuthType, + hasExtractableCredential, + type ProfileLike, +} from '../../core/credential-inference.js' + +interface RawProfile extends Record, ProfileLike { + credentialSlug?: string +} + +interface CredentialRecord { + vendor: string + authType: 'api-key' | 'subscription' + apiKey?: string + baseUrl?: string +} + +function generateSlug(vendor: string, taken: Set): string { + let n = 1 + while (taken.has(`${vendor}-${n}`)) n++ + return `${vendor}-${n}` +} + +export const migration: Migration = { + id: '0002_extract_credentials', + appVersion: '0.10.0-beta.1', + introducedAt: '2026-05-09', + affects: ['ai-provider-manager.json'], + summary: + 'Extract apiKey/baseUrl from profiles into top-level credentials map; profiles gain credentialSlug pointer (inline fields kept as fallback)', + rationale: + 'Decouple credentials (vendor + auth) from SDK choice (backend) and use-case (model). 
Foundation for vendor-shaped preset catalog and internal SDK routing.', + up: async (ctx: MigrationContext) => { + const aiConfig = await ctx.readJson<{ + profiles?: Record + credentials?: Record + activeProfile?: string + apiKeys?: Record + }>('ai-provider-manager.json') + + if (!aiConfig || !aiConfig.profiles) return + + // In-body idempotency check + const profilesArr = Object.values(aiConfig.profiles) + if ( + aiConfig.credentials !== undefined && + profilesArr.every((p) => p.credentialSlug !== undefined || !hasExtractableCredential(p)) + ) { + return + } + + const credentials: Record = aiConfig.credentials ?? {} + const taken = new Set(Object.keys(credentials)) + let changed = false + + for (const profile of profilesArr) { + if (profile.credentialSlug) continue + if (!hasExtractableCredential(profile)) continue + + const vendor = inferVendor(profile) + const authType = inferAuthType(profile) + const cred: CredentialRecord = { vendor, authType } + if (profile.apiKey) cred.apiKey = profile.apiKey + if (profile.baseUrl) cred.baseUrl = profile.baseUrl + + const slug = generateSlug(vendor, taken) + taken.add(slug) + credentials[slug] = cred + profile.credentialSlug = slug + changed = true + } + + if (!changed && aiConfig.credentials !== undefined) return + + aiConfig.credentials = credentials + await ctx.writeJson('ai-provider-manager.json', aiConfig) + }, +} diff --git a/src/migrations/0003_backfill_credentials.spec.ts b/src/migrations/0003_backfill_credentials.spec.ts new file mode 100644 index 00000000..4825dece --- /dev/null +++ b/src/migrations/0003_backfill_credentials.spec.ts @@ -0,0 +1,121 @@ +import { describe, it, expect } from 'vitest' +import type { MigrationContext } from './types.js' +import { migration } from './0003_backfill_credentials/index.js' + +function makeMemoryContext(initial: Record = {}): { + ctx: MigrationContext + files: Map +} { + const files = new Map(Object.entries(initial)) + const ctx: MigrationContext = { + async readJson(filename: 
string): Promise { + const v = files.get(filename) + return v === undefined ? undefined : JSON.parse(JSON.stringify(v)) + }, + async writeJson(filename: string, data: unknown): Promise { + files.set(filename, JSON.parse(JSON.stringify(data))) + }, + async removeJson(filename: string): Promise { + files.delete(filename) + }, + configDir(): string { return '/virtual/config' }, + } + return { ctx, files } +} + +function getCfg(files: Map) { + return files.get('ai-provider-manager.json') as { + profiles: Record> + credentials?: Record + } +} + +describe('0003_backfill_credentials', () => { + it('backfills a profile that has inline fields but no credentialSlug', async () => { + const { ctx, files } = makeMemoryContext({ + 'ai-provider-manager.json': { + credentials: { + 'anthropic-1': { vendor: 'anthropic', authType: 'subscription' }, + }, + profiles: { + 'Old': { backend: 'agent-sdk', model: 'claude', loginMethod: 'claudeai', credentialSlug: 'anthropic-1' }, + 'New': { + backend: 'agent-sdk', model: 'm', loginMethod: 'api-key', + apiKey: 'sk-deep', baseUrl: 'https://api.deepseek.com/anthropic', + }, + }, + activeProfile: 'Old', + }, + }) + + await migration.up(ctx) + const cfg = getCfg(files) + expect(cfg.profiles.New.credentialSlug).toBe('deepseek-1') + expect(cfg.credentials!['deepseek-1']).toEqual({ + vendor: 'deepseek', + authType: 'api-key', + apiKey: 'sk-deep', + baseUrl: 'https://api.deepseek.com/anthropic', + }) + // Existing credential untouched + expect(cfg.credentials!['anthropic-1']).toEqual({ vendor: 'anthropic', authType: 'subscription' }) + }) + + it('reuses existing credential slug when fields match (dedup)', async () => { + const { ctx, files } = makeMemoryContext({ + 'ai-provider-manager.json': { + credentials: { + 'deepseek-1': { vendor: 'deepseek', authType: 'api-key', apiKey: 'sk-d', baseUrl: 'https://api.deepseek.com/anthropic' }, + }, + profiles: { + // Same key + url — should reuse deepseek-1 + 'NewOne': { backend: 'agent-sdk', model: 'a', 
loginMethod: 'api-key', apiKey: 'sk-d', baseUrl: 'https://api.deepseek.com/anthropic' }, + }, + activeProfile: 'NewOne', + }, + }) + + await migration.up(ctx) + const cfg = getCfg(files) + expect(cfg.profiles.NewOne.credentialSlug).toBe('deepseek-1') + expect(Object.keys(cfg.credentials!)).toEqual(['deepseek-1']) // no duplicate created + }) + + it('no-op when every profile is already linked', async () => { + const initial = { + 'ai-provider-manager.json': { + credentials: { 'a-1': { vendor: 'anthropic', authType: 'subscription' } }, + profiles: { 'P': { backend: 'agent-sdk', model: 'm', loginMethod: 'claudeai', credentialSlug: 'a-1' } }, + activeProfile: 'P', + }, + } + const { ctx, files } = makeMemoryContext(initial) + const before = JSON.stringify([...files.entries()]) + await migration.up(ctx) + const after = JSON.stringify([...files.entries()]) + expect(after).toBe(before) + }) + + it('no-op when ai-provider-manager.json missing', async () => { + const { ctx, files } = makeMemoryContext() + await migration.up(ctx) + expect(files.size).toBe(0) + }) + + it('idempotent — second run does not change file', async () => { + const { ctx, files } = makeMemoryContext({ + 'ai-provider-manager.json': { + profiles: { + 'P': { backend: 'agent-sdk', model: 'm', loginMethod: 'api-key', apiKey: 'k' }, + }, + activeProfile: 'P', + }, + }) + + await migration.up(ctx) + const afterFirst = JSON.stringify(files.get('ai-provider-manager.json')) + await migration.up(ctx) + const afterSecond = JSON.stringify(files.get('ai-provider-manager.json')) + expect(afterSecond).toBe(afterFirst) + }) +}) diff --git a/src/migrations/0003_backfill_credentials/index.ts b/src/migrations/0003_backfill_credentials/index.ts new file mode 100644 index 00000000..3cc364fa --- /dev/null +++ b/src/migrations/0003_backfill_credentials/index.ts @@ -0,0 +1,114 @@ +/** + * 0003_backfill_credentials — backfill credentials for profiles that + * landed between 0002 and the writeProfile eager-extraction change. 
+ * + * Body is the same shape as 0002 (extract inline credential fields → + * credentials map + link via credentialSlug, with dedup). 0002 ran + * once at framework adoption; profiles added via the wizard *after* + * that point arrived without credentialSlug because writeProfile was + * still pass-through. This migration cleans those up. + * + * After this lands, the eager extraction in writeProfile prevents + * future drift, so further backfills shouldn't be needed. + * + * In-body idempotency: if every profile already has credentialSlug or + * has nothing extractable, no-op. + */ + +import type { Migration, MigrationContext } from '../types.js' +import { + inferVendor, + inferAuthType, + hasExtractableCredential, + type ProfileLike, +} from '../../core/credential-inference.js' + +interface RawProfile extends Record, ProfileLike { + credentialSlug?: string +} + +interface CredentialRecord { + vendor: string + authType: 'api-key' | 'subscription' + apiKey?: string + baseUrl?: string +} + +function generateSlug(vendor: string, taken: Set): string { + let n = 1 + while (taken.has(`${vendor}-${n}`)) n++ + return `${vendor}-${n}` +} + +/** Match against existing credentials by all distinguishing fields. 
*/ +function findExistingSlug( + cred: CredentialRecord, + existing: Record, +): string | null { + for (const [slug, c] of Object.entries(existing)) { + if ( + c.vendor === cred.vendor && + c.authType === cred.authType && + c.apiKey === cred.apiKey && + c.baseUrl === cred.baseUrl + ) { + return slug + } + } + return null +} + +export const migration: Migration = { + id: '0003_backfill_credentials', + appVersion: '0.10.0-beta.1', + introducedAt: '2026-05-09', + affects: ['ai-provider-manager.json'], + summary: + 'Backfill credentials for profiles added between 0002 and writeProfile going eager (catches DeepSeek and similar stragglers)', + rationale: + 'Companion to writeProfile eager extraction; cleans up the gap between 0002 and the AI Provider page redesign.', + up: async (ctx: MigrationContext) => { + const aiConfig = await ctx.readJson<{ + profiles?: Record + credentials?: Record + activeProfile?: string + }>('ai-provider-manager.json') + + if (!aiConfig || !aiConfig.profiles) return + + const profilesArr = Object.values(aiConfig.profiles) + if (profilesArr.every((p) => p.credentialSlug !== undefined || !hasExtractableCredential(p))) { + return + } + + const credentials: Record = aiConfig.credentials ?? 
{} + let changed = false + + for (const profile of profilesArr) { + if (profile.credentialSlug) continue + if (!hasExtractableCredential(profile)) continue + + const cred: CredentialRecord = { + vendor: inferVendor(profile), + authType: inferAuthType(profile), + } + if (profile.apiKey) cred.apiKey = profile.apiKey + if (profile.baseUrl) cred.baseUrl = profile.baseUrl + + const existingSlug = findExistingSlug(cred, credentials) + if (existingSlug) { + profile.credentialSlug = existingSlug + } else { + const slug = generateSlug(cred.vendor, new Set(Object.keys(credentials))) + credentials[slug] = cred + profile.credentialSlug = slug + } + changed = true + } + + if (!changed) return + + aiConfig.credentials = credentials + await ctx.writeJson('ai-provider-manager.json', aiConfig) + }, +} diff --git a/src/migrations/INDEX.md b/src/migrations/INDEX.md new file mode 100644 index 00000000..e9910842 --- /dev/null +++ b/src/migrations/INDEX.md @@ -0,0 +1,12 @@ + + + +# Migration Index + +Each row corresponds to one migration in `src/migrations/`. The runner applies pending migrations in this order on every boot, recording applied IDs in `data/config/_meta.json`. Migrations are idempotent in their body in addition to the journal-level guard. 
+ +| ID | App Version | Date | Affects | Summary | +|----|-------------|------|---------|---------| +| `0001_initial_unified` | 0.10.0-beta.1 | 2026-05-09 | * | Roll-up of pre-framework ad-hoc migrations: model.json/api-keys.json merge, claude-code alias, flat → profile-based, subchannel overrides → named profiles, apiKeys distribution, telegram + engine port consolidation | +| `0002_extract_credentials` | 0.10.0-beta.1 | 2026-05-09 | ai-provider-manager.json | Extract apiKey/baseUrl from profiles into top-level credentials map; profiles gain credentialSlug pointer (inline fields kept as fallback) | +| `0003_backfill_credentials` | 0.10.0-beta.1 | 2026-05-09 | ai-provider-manager.json | Backfill credentials for profiles added between 0002 and writeProfile going eager (catches DeepSeek and similar stragglers) | diff --git a/src/migrations/registry.ts b/src/migrations/registry.ts new file mode 100644 index 00000000..704b93da --- /dev/null +++ b/src/migrations/registry.ts @@ -0,0 +1,23 @@ +/** + * Ordered registry of all migrations. + * + * Order is determined by array position — keep entries in numeric ID + * order. Never reorder a migration that has already shipped; the + * journal records ids, so reordering would cause runners to try to + * apply already-applied work in a different order. + * + * Adding a migration: import it here and append. The + * `pnpm build:migration-index` script regenerates + * `src/migrations/INDEX.md` from this list at build time. 
+ */ + +import type { Migration } from './types.js' +import { migration as migration_0001_initial_unified } from './0001_initial_unified/index.js' +import { migration as migration_0002_extract_credentials } from './0002_extract_credentials/index.js' +import { migration as migration_0003_backfill_credentials } from './0003_backfill_credentials/index.js' + +export const REGISTRY: Migration[] = [ + migration_0001_initial_unified, + migration_0002_extract_credentials, + migration_0003_backfill_credentials, +] diff --git a/src/migrations/runner.spec.ts b/src/migrations/runner.spec.ts new file mode 100644 index 00000000..965db696 --- /dev/null +++ b/src/migrations/runner.spec.ts @@ -0,0 +1,154 @@ +import { describe, it, expect, vi } from 'vitest' +import type { Migration, MigrationContext, ConfigMeta } from './types.js' +import { runMigrations } from './runner.js' + +/** Create an in-memory MigrationContext over a virtual config dir. */ +function makeMemoryContext(initial: Record = {}): { + ctx: MigrationContext + files: Map +} { + const files = new Map(Object.entries(initial)) + const ctx: MigrationContext = { + async readJson(filename: string): Promise { + // Round-trip through JSON to mimic real disk semantics + const v = files.get(filename) + return v === undefined ? undefined : JSON.parse(JSON.stringify(v)) + }, + async writeJson(filename: string, data: unknown): Promise { + files.set(filename, JSON.parse(JSON.stringify(data))) + }, + async removeJson(filename: string): Promise { + files.delete(filename) + }, + configDir(): string { + return '/virtual/config' + }, + } + return { ctx, files } +} + +function readMeta(files: Map): ConfigMeta | undefined { + return files.get('_meta.json') as ConfigMeta | undefined +} + +function makeMigration(id: string, body?: (ctx: MigrationContext) => Promise): Migration { + return { + id, + appVersion: '0.0.0', + introducedAt: '2026-01-01', + affects: ['*'], + summary: `test migration ${id}`, + up: body ?? 
(async () => { /* no-op */ }), + } +} + +describe('runMigrations', () => { + it('applies all migrations on empty journal', async () => { + const { ctx, files } = makeMemoryContext() + const calls: string[] = [] + const registry = [ + makeMigration('0001_a', async () => { calls.push('a') }), + makeMigration('0002_b', async () => { calls.push('b') }), + ] + + await runMigrations({ ctx, registry, snapshot: async () => null }) + + expect(calls).toEqual(['a', 'b']) + const meta = readMeta(files)! + expect(meta.appliedMigrations.map(m => m.id)).toEqual(['0001_a', '0002_b']) + }) + + it('skips migrations already in journal', async () => { + const { ctx, files } = makeMemoryContext({ + '_meta.json': { + appVersion: '0.0.0', + appliedMigrations: [{ id: '0001_a', appliedAt: 'x', appVersion: '0.0.0' }], + }, + }) + const calls: string[] = [] + const registry = [ + makeMigration('0001_a', async () => { calls.push('a') }), + makeMigration('0002_b', async () => { calls.push('b') }), + ] + + await runMigrations({ ctx, registry, snapshot: async () => null }) + + expect(calls).toEqual(['b']) // 0001_a skipped + const meta = readMeta(files)! + expect(meta.appliedMigrations.map(m => m.id)).toEqual(['0001_a', '0002_b']) + }) + + it('halts on failure; journal is NOT updated for the failed migration', async () => { + const { ctx, files } = makeMemoryContext() + const registry = [ + makeMigration('0001_a'), + makeMigration('0002_b', async () => { throw new Error('boom') }), + makeMigration('0003_c'), + ] + + const consoleErrSpy = vi.spyOn(console, 'error').mockImplementation(() => {}) + const consoleLogSpy = vi.spyOn(console, 'log').mockImplementation(() => {}) + + await expect(runMigrations({ ctx, registry, snapshot: async () => null })) + .rejects.toThrow('boom') + + const meta = readMeta(files)! 
+ expect(meta.appliedMigrations.map(m => m.id)).toEqual(['0001_a']) // 0002 NOT recorded + consoleErrSpy.mockRestore() + consoleLogSpy.mockRestore() + }) + + it('idempotent: second run is a no-op when nothing pending', async () => { + const { ctx, files } = makeMemoryContext() + const calls: string[] = [] + const registry = [makeMigration('0001_a', async () => { calls.push('a') })] + + await runMigrations({ ctx, registry, snapshot: async () => null }) + await runMigrations({ ctx, registry, snapshot: async () => null }) + + expect(calls).toEqual(['a']) // body ran exactly once + const meta = readMeta(files)! + expect(meta.appliedMigrations).toHaveLength(1) + }) + + it('seeds empty meta when _meta.json missing', async () => { + const { ctx, files } = makeMemoryContext() + const registry = [makeMigration('0001_a')] + + await runMigrations({ ctx, registry, snapshot: async () => null }) + + const meta = readMeta(files)! + expect(meta.appliedMigrations).toHaveLength(1) + expect(meta.appVersion).toBeDefined() + }) + + it('calls snapshot for each pending migration with pre-{id} label', async () => { + const { ctx } = makeMemoryContext() + const labels: string[] = [] + const registry = [ + makeMigration('0001_a'), + makeMigration('0002_b'), + ] + + await runMigrations({ + ctx, + registry, + snapshot: async (label) => { labels.push(label); return null }, + }) + + expect(labels).toEqual(['pre-0001_a', 'pre-0002_b']) + }) + + it('writes appliedAt as ISO timestamp and appVersion on each entry', async () => { + const { ctx, files } = makeMemoryContext() + const registry = [makeMigration('0001_a')] + + await runMigrations({ ctx, registry, snapshot: async () => null }) + + const meta = readMeta(files)! 
+ const entry = meta.appliedMigrations[0] + expect(entry.id).toBe('0001_a') + expect(entry.appliedAt).toMatch(/^\d{4}-\d{2}-\d{2}T/) + expect(entry.appVersion).toBeDefined() + }) +}) diff --git a/src/migrations/runner.ts b/src/migrations/runner.ts new file mode 100644 index 00000000..b0c6ee59 --- /dev/null +++ b/src/migrations/runner.ts @@ -0,0 +1,154 @@ +/** + * Migration runner — apply pending migrations recorded in + * data/config/_meta.json. + * + * Snapshot before each migration: copies data/config/ to + * data/_backup/{ts}-pre-{id}/config/. If a migration throws, the + * journal is NOT updated for that id and startup halts; the user + * can restore from the snapshot manually. + * + * Larger trees (data/sessions/, data/news-collector/, etc.) are NOT + * snapshotted by default. A migration that touches them must declare + * the directory in `affects` and surface a user warning ahead of + * time (warning UI is out of scope for the framework). + */ + +import { readFile, writeFile, mkdir, unlink, cp } from 'node:fs/promises' +import { readFileSync } from 'node:fs' +import { resolve, dirname } from 'node:path' +import { fileURLToPath } from 'node:url' +import type { Migration, MigrationContext, ConfigMeta } from './types.js' +import { REGISTRY } from './registry.js' + +const CONFIG_DIR = resolve('data/config') +const BACKUP_DIR = resolve('data/_backup') +const META_FILENAME = '_meta.json' + +// ==================== App version ==================== + +let _appVersion: string | null = null + +export function getAppVersion(): string { + if (_appVersion !== null) return _appVersion + try { + const here = fileURLToPath(import.meta.url) + // src/migrations/runner.ts → walk up to repo root + const repoRoot = resolve(dirname(here), '..', '..') + const pkg = JSON.parse(readFileSync(resolve(repoRoot, 'package.json'), 'utf-8')) + _appVersion = (pkg.version as string) ?? 
'0.0.0' + } catch { + _appVersion = '0.0.0' + } + return _appVersion +} + +// ==================== Default context ==================== + +export function makeDefaultContext(): MigrationContext { + return { + async readJson(filename: string): Promise { + try { + return JSON.parse(await readFile(resolve(CONFIG_DIR, filename), 'utf-8')) + } catch (err: unknown) { + if (isENOENT(err)) return undefined + throw err + } + }, + async writeJson(filename: string, data: unknown): Promise { + await mkdir(CONFIG_DIR, { recursive: true }) + await writeFile(resolve(CONFIG_DIR, filename), JSON.stringify(data, null, 2) + '\n') + }, + async removeJson(filename: string): Promise { + try { await unlink(resolve(CONFIG_DIR, filename)) } catch (err) { + if (!isENOENT(err)) throw err + } + }, + configDir(): string { + return CONFIG_DIR + }, + } +} + +function isENOENT(err: unknown): boolean { + return err instanceof Error && (err as NodeJS.ErrnoException).code === 'ENOENT' +} + +// ==================== Journal ==================== + +async function readMeta(ctx: MigrationContext): Promise { + const existing = await ctx.readJson(META_FILENAME) + if (existing && Array.isArray(existing.appliedMigrations)) { + return { + appVersion: existing.appVersion ?? getAppVersion(), + appliedMigrations: existing.appliedMigrations, + } + } + return { appVersion: getAppVersion(), appliedMigrations: [] } +} + +async function writeMeta(ctx: MigrationContext, meta: ConfigMeta): Promise { + await ctx.writeJson(META_FILENAME, meta) +} + +// ==================== Snapshot ==================== + +/** Copy data/config/ to data/_backup/{ts}-{label}/config/. Returns path or null if config dir doesn't exist. 
*/ +async function defaultSnapshot(label: string): Promise { + const ts = new Date().toISOString().replace(/[:.]/g, '-') + const target = resolve(BACKUP_DIR, `${ts}-${label}`, 'config') + try { + await mkdir(dirname(target), { recursive: true }) + await cp(CONFIG_DIR, target, { recursive: true, errorOnExist: false }) + return target + } catch (err: unknown) { + if (isENOENT(err)) return null + throw err + } +} + +// ==================== Runner ==================== + +export interface RunnerOpts { + /** Override the default file-system context (used in tests). */ + ctx?: MigrationContext + /** Override the default registry (used in tests). */ + registry?: Migration[] + /** Override the snapshot strategy (used in tests). */ + snapshot?: (label: string) => Promise +} + +export async function runMigrations(opts: RunnerOpts = {}): Promise { + const ctx = opts.ctx ?? makeDefaultContext() + const registry = opts.registry ?? REGISTRY + const snapshot = opts.snapshot ?? defaultSnapshot + + const meta = await readMeta(ctx) + const applied = new Set(meta.appliedMigrations.map(m => m.id)) + const pending = registry.filter(m => !applied.has(m.id)) + + if (pending.length === 0) return + + for (const m of pending) { + let snapshotPath: string | null = null + try { + snapshotPath = await snapshot(`pre-${m.id}`) + await m.up(ctx) + meta.appliedMigrations.push({ + id: m.id, + appliedAt: new Date().toISOString(), + appVersion: getAppVersion(), + }) + meta.appVersion = getAppVersion() + await writeMeta(ctx, meta) + console.log( + `[migration] applied ${m.id} (snapshot: ${snapshotPath ?? ''})`, + ) + } catch (err) { + console.error( + `[migration] FAILED ${m.id} — data may be in partial state. ` + + `Snapshot: ${snapshotPath ?? ''}. Error: ${err instanceof Error ? 
err.message : String(err)}`, + ) + throw err + } + } +} diff --git a/src/migrations/types.ts b/src/migrations/types.ts new file mode 100644 index 00000000..5285fedc --- /dev/null +++ b/src/migrations/types.ts @@ -0,0 +1,56 @@ +/** + * Migration framework types. + * + * A Migration is a versioned, idempotent transformation of the user's + * config directory. The runner applies pending migrations in registry + * order and records each in the journal at data/config/_meta.json. + * + * Two-layer idempotency: + * 1. Journal — runner never re-applies a recorded id. + * 2. In-body self-check — each up() must be a no-op when data is + * already at target shape, in case the journal is corrupted / + * hand-edited / a previous run partially completed. + */ + +export interface MigrationContext { + /** Read a JSON file from the config dir. Returns undefined if missing. */ + readJson(filename: string): Promise + /** Write a JSON file to the config dir, creating dirs as needed. */ + writeJson(filename: string, data: unknown): Promise + /** Remove a JSON file from the config dir. No-op if missing. */ + removeJson(filename: string): Promise + /** Absolute path to the config directory. */ + configDir(): string +} + +export interface Migration { + /** Stable identifier with sequential prefix, e.g. '0002_extract_credentials'. */ + id: string + /** Semver of the release that ships this migration. */ + appVersion: string + /** ISO date (YYYY-MM-DD) when this migration was added. */ + introducedAt: string + /** Config filenames touched. Use ['*'] for cross-cutting migrations. */ + affects: string[] + /** One-line semantic summary, used by INDEX.md generator. */ + summary: string + /** Optional pointer to a design doc. */ + rationale?: string + /** + * Apply the migration. Body MUST be idempotent — return as a no-op + * when data is already at target shape. 
+ */ + up: (ctx: MigrationContext) => Promise +} + +export interface AppliedMigration { + id: string + appliedAt: string + appVersion: string +} + +/** Shape of data/config/_meta.json. */ +export interface ConfigMeta { + appVersion: string + appliedMigrations: AppliedMigration[] +} diff --git a/src/webui/plugin.ts b/src/webui/plugin.ts index 49bc813e..cdd931a1 100644 --- a/src/webui/plugin.ts +++ b/src/webui/plugin.ts @@ -27,6 +27,7 @@ import { createPersonaRoutes } from './routes/persona.js' import { createNewsRoutes } from './routes/news.js' import { createMarketRoutes } from './routes/market.js' import { createNotificationsRoutes } from './routes/notifications.js' +import { createVersionRoutes } from './routes/version.js' import { mountOpenTypeBB } from '../server/opentypebb.js' import { buildSDKCredentials } from '../domain/market-data/credential-map.js' @@ -118,6 +119,7 @@ export class WebPlugin implements Plugin { app.route('/api/notifications', createNotificationsRoutes({ notificationsStore: ctx.notificationsStore, })) + app.route('/api/version', createVersionRoutes()) // ==================== Mount opentypebb (market data HTTP) ==================== // opentypebb is Alice's first-class market-data package; its router is diff --git a/src/webui/routes/config.ts b/src/webui/routes/config.ts index 27ffe1c4..c3935a1a 100644 --- a/src/webui/routes/config.ts +++ b/src/webui/routes/config.ts @@ -6,6 +6,7 @@ import { } from '../../core/config.js' import type { EngineContext } from '../../core/types.js' import { BUILTIN_PRESETS } from '../../ai-providers/presets.js' +import { getSdkAdapterInfo } from '../../ai-providers/sdk-adapters.js' interface ConfigRouteOpts { ctx?: EngineContext @@ -27,16 +28,23 @@ export function createConfigRoutes(opts?: ConfigRouteOpts) { // ==================== Profile CRUD ==================== - /** GET /profiles — list all profiles */ + /** GET /profiles — list profiles + credentials map + active profile slug */ app.get('/profiles', async 
(c) => { try { const config = await readAIProviderConfig() - return c.json({ profiles: config.profiles, activeProfile: config.activeProfile }) + return c.json({ + profiles: config.profiles, + credentials: config.credentials, + activeProfile: config.activeProfile, + }) } catch (err) { return c.json({ error: String(err) }, 500) } }) + /** GET /sdk-adapters — list SDK adapters with their preset associations */ + app.get('/sdk-adapters', (c) => c.json({ adapters: getSdkAdapterInfo() })) + /** POST /profiles — create a new profile */ app.post('/profiles', async (c) => { try { diff --git a/src/webui/routes/version.ts b/src/webui/routes/version.ts new file mode 100644 index 00000000..09a1aaf3 --- /dev/null +++ b/src/webui/routes/version.ts @@ -0,0 +1,22 @@ +/** + * Version awareness route — exposes current app version + latest GitHub + * release for the UI's update banner. + * + * GET /api/version → VersionInfo (see core/version.ts) + * + * Response is cheap because the GitHub fetch is cached server-side. 
+ */ + +import { Hono } from 'hono' +import { getVersionInfo } from '../../core/version.js' + +export function createVersionRoutes() { + const app = new Hono() + + app.get('/', async (c) => { + const info = await getVersionInfo() + return c.json(info) + }) + + return app +} diff --git a/ui/package.json b/ui/package.json index bd3c3f90..407da4b1 100644 --- a/ui/package.json +++ b/ui/package.json @@ -12,6 +12,7 @@ "dompurify": "^3.4.0", "highlight.js": "^11.11.1", "lightweight-charts": "^5.1.0", + "lucide-react": "^1.14.0", "marked": "^15.0.12", "marked-highlight": "^2.2.1", "react": "^19.1.0", diff --git a/ui/src/App.tsx b/ui/src/App.tsx index e7fab6b0..1f9edd6d 100644 --- a/ui/src/App.tsx +++ b/ui/src/App.tsx @@ -4,6 +4,7 @@ import { ActivityBar } from './components/ActivityBar' import { Sidebar } from './components/Sidebar' import { TabHost } from './components/TabHost' import { ChannelConfigModal } from './components/ChannelConfigModal' +import { UpdateBanner } from './components/UpdateBanner' import { ChannelsProvider, useChannels } from './contexts/ChannelsContext' import { findSectionForActivity } from './sections' import { UrlAdopter } from './tabs/UrlAdopter' @@ -85,36 +86,39 @@ function AppShell() { ) return ( -
- setSidebarOpen(false)} /> +
+ +
+ setSidebarOpen(false)} /> - - {showSidebarPanel && section && ( - <> - - : undefined} - > - - - - - - )} - - {mainContent} - - + + {showSidebarPanel && section && ( + <> + + : undefined} + > + + + + + + )} + + {mainContent} + + - - + + +
) } diff --git a/ui/src/api/config.ts b/ui/src/api/config.ts index 5b516454..999e39d6 100644 --- a/ui/src/api/config.ts +++ b/ui/src/api/config.ts @@ -1,5 +1,5 @@ import { headers } from './client' -import type { AppConfig, Profile, Preset } from './types' +import type { AppConfig, Profile, Preset, Credential, SdkAdapterInfo } from './types' export const configApi = { async load(): Promise { @@ -29,12 +29,22 @@ export const configApi = { return res.json() }, - async getProfiles(): Promise<{ profiles: Record; activeProfile: string }> { + async getProfiles(): Promise<{ + profiles: Record + credentials: Record + activeProfile: string + }> { const res = await fetch('/api/config/profiles') if (!res.ok) throw new Error('Failed to load profiles') return res.json() }, + async getSdkAdapters(): Promise<{ adapters: SdkAdapterInfo[] }> { + const res = await fetch('/api/config/sdk-adapters') + if (!res.ok) throw new Error('Failed to load SDK adapters') + return res.json() + }, + async createProfile(slug: string, profile: Profile): Promise<{ slug: string; profile: Profile }> { const res = await fetch('/api/config/profiles', { method: 'POST', diff --git a/ui/src/api/index.ts b/ui/src/api/index.ts index f4c242f5..76df19c6 100644 --- a/ui/src/api/index.ts +++ b/ui/src/api/index.ts @@ -20,6 +20,7 @@ import { brainApi } from './brain' import { topologyApi } from './topology' import { marketApi } from './market' import { notificationsApi } from './notifications' +import { versionApi } from './version' export const api = { chat: chatApi, config: configApi, @@ -39,6 +40,7 @@ export const api = { topology: topologyApi, market: marketApi, notifications: notificationsApi, + version: versionApi, } // Re-export all types for convenience diff --git a/ui/src/api/types.ts b/ui/src/api/types.ts index fb76c604..3c4b3709 100644 --- a/ui/src/api/types.ts +++ b/ui/src/api/types.ts @@ -1,3 +1,22 @@ +// ==================== Version / Update awareness ==================== + +export interface 
VersionInfo { + /** App version from package.json. */ + current: string + /** Latest release tag from GitHub, or null if fetch failed / no releases. */ + latest: string | null + /** True when latest > current (semver). */ + hasUpdate: boolean + /** GitHub release page URL — UI links to this for changelog. */ + releaseUrl: string | null + /** Markdown release body. */ + releaseNotes: string | null + /** ISO timestamp when the release was published. */ + publishedAt: string | null + /** Non-null when fetch failed (rate limit, network, etc.). */ + error: string | null +} + // ==================== AI Provider Profiles ==================== export type AIBackend = 'agent-sdk' | 'codex' | 'vercel-ai-sdk' @@ -10,6 +29,41 @@ export interface Profile { provider?: string // vercel-ai-sdk only baseUrl?: string apiKey?: string + /** Pointer into the credentials map. Set eagerly by writeProfile. */ + credentialSlug?: string +} + +// ==================== AI Provider Credentials ==================== + +export type CredentialVendor = + | 'anthropic' | 'openai' | 'google' + | 'minimax' | 'glm' | 'kimi' | 'deepseek' + | 'custom' + +export type CredentialAuthType = 'api-key' | 'subscription' + +export interface Credential { + vendor: CredentialVendor + authType: CredentialAuthType + apiKey?: string + baseUrl?: string +} + +// ==================== SDK Adapters ==================== + +export type SdkAdapterId = + | 'agent-sdk' | 'codex' + | 'vercel-anthropic' | 'vercel-openai' | 'vercel-google' + +export interface SdkAdapterInfo { + id: SdkAdapterId + label: string + description: string + presets: Array<{ + presetId: string + presetLabel: string + isTestDefault: boolean + }> } // ==================== AI Provider Presets ==================== diff --git a/ui/src/api/version.ts b/ui/src/api/version.ts new file mode 100644 index 00000000..e25c3324 --- /dev/null +++ b/ui/src/api/version.ts @@ -0,0 +1,9 @@ +import type { VersionInfo } from './types' + +export const versionApi = { + async 
get(): Promise { + const res = await fetch('/api/version') + if (!res.ok) throw new Error(`Failed to fetch version info: ${res.status}`) + return res.json() + }, +} diff --git a/ui/src/components/ActivityBar.tsx b/ui/src/components/ActivityBar.tsx index 31871454..d97832ed 100644 --- a/ui/src/components/ActivityBar.tsx +++ b/ui/src/components/ActivityBar.tsx @@ -1,4 +1,4 @@ -import { type ReactNode } from 'react' +import { type LucideIcon, MessageSquare, LineChart, GitBranch, BarChart3, Newspaper, Notebook, Zap, Settings, Code2 } from 'lucide-react' import { type Page } from '../App' import { useWorkspace } from '../tabs/store' import type { ActivitySection, ViewSpec } from '../tabs/types' @@ -31,7 +31,7 @@ interface ActivityBarProps { interface NavLeaf { page: Page label: string - icon: (active: boolean) => ReactNode + icon: LucideIcon /** * What tab opens when this ActivityBar item is clicked. * @@ -57,127 +57,29 @@ const NAV_SECTIONS: NavSection[] = [ { sectionLabel: '', items: [ - { - page: 'chat', - label: 'Chat', - icon: (active) => ( - - - - ), - }, - { - page: 'portfolio', - label: 'Portfolio', - defaultTab: { kind: 'portfolio', params: {} }, - icon: (active) => ( - - - - - - - ), - }, - { - page: 'trading-as-git', - label: 'Trading as Git', - icon: (active) => ( - - - - - - - - - ), - }, - { - page: 'market', - label: 'Market', - icon: (active) => ( - - - - - - - ), - }, - { - page: 'news', - label: 'News', - defaultTab: { kind: 'news', params: {} }, - icon: (active) => ( - - - - - - - ), - }, - { - page: 'diary', - label: 'Diary', - defaultTab: { kind: 'diary', params: {} }, - icon: (active) => ( - - - - - ), - }, + { page: 'chat', label: 'Chat', icon: MessageSquare }, + { page: 'portfolio', label: 'Portfolio', icon: LineChart, defaultTab: { kind: 'portfolio', params: {} } }, + { page: 'trading-as-git', label: 'Trading as Git', icon: GitBranch }, + { page: 'market', label: 'Market', icon: BarChart3 }, + { page: 'news', label: 'News', icon: Newspaper, 
defaultTab: { kind: 'news', params: {} } }, + { page: 'diary', label: 'Diary', icon: Notebook, defaultTab: { kind: 'diary', params: {} } }, ], }, { sectionLabel: 'Agent', items: [ - { - page: 'automation', - label: 'Automation', - defaultTab: { kind: 'automation', params: { section: 'flow' } }, - icon: (active) => ( - - - - ), - }, + { page: 'automation', label: 'Automation', icon: Zap, defaultTab: { kind: 'automation', params: { section: 'flow' } } }, ], }, { sectionLabel: 'System', items: [ - { - page: 'settings', - label: 'Settings', - icon: (active) => ( - - - - - ), - }, - { - page: 'dev' as const, - label: 'Dev', - icon: (active: boolean) => ( - - - - - ), - }, + { page: 'settings', label: 'Settings', icon: Settings }, + { page: 'dev', label: 'Dev', icon: Code2 }, ], }, ] -// ==================== Helpers ==================== - -/** Style for active indicator */ -const INDICATOR_STYLE = { background: '#58a6ff' } - // ==================== ActivityBar ==================== export function ActivityBar({ open, onClose }: ActivityBarProps) { @@ -230,6 +132,7 @@ export function ActivityBar({ open, onClose }: ActivityBarProps) { {section.items.map((item) => { const sec = activitySectionFor(item.page) const isActive = selectedSidebar === sec + const Icon = item.icon const handleClick = () => { onClose() if (selectedSidebar === sec) { @@ -258,13 +161,16 @@ export function ActivityBar({ open, onClose }: ActivityBarProps) { : 'text-text-muted hover:text-text hover:bg-bg-tertiary/50 md:hover:bg-bg-secondary' }`} > + {/* Active indicator — left vertical bar, desktop only */} diff --git a/ui/src/components/ChatChannelListContainer.tsx b/ui/src/components/ChatChannelListContainer.tsx index 2158e556..7d6348c2 100644 --- a/ui/src/components/ChatChannelListContainer.tsx +++ b/ui/src/components/ChatChannelListContainer.tsx @@ -39,7 +39,7 @@ export function ChatChannelListContainer() { trail={ unreadCount > 0 ? ( {unreadCount > 99 ? 
'99+' : unreadCount} diff --git a/ui/src/components/ChatMessage.tsx b/ui/src/components/ChatMessage.tsx index 55d22c0a..af924217 100644 --- a/ui/src/components/ChatMessage.tsx +++ b/ui/src/components/ChatMessage.tsx @@ -234,7 +234,7 @@ export function StreamingToolGroup({ tools }: StreamingToolGroupProps) { {tool.status === 'running' ? ( ) : ( - + )} diff --git a/ui/src/components/ContextMenu.tsx b/ui/src/components/ContextMenu.tsx index a1ce16b9..47f719d9 100644 --- a/ui/src/components/ContextMenu.tsx +++ b/ui/src/components/ContextMenu.tsx @@ -86,7 +86,7 @@ export function ContextMenu({ anchor, items, onClose }: ContextMenuProps) { } const disabled = item.disabled === true const colorClass = item.danger - ? 'text-red-400 hover:bg-red-400/10' + ? 'text-red hover:bg-red/10' : 'text-text hover:bg-bg-tertiary' return ( + {!hideDot && ( + + )} + updated {ago} + + ) +} + +function formatAgo(d: Date): string { + const sec = Math.max(0, Math.floor((Date.now() - d.getTime()) / 1000)) + if (sec < 5) return 'just now' + if (sec < 60) return `${sec}s ago` + if (sec < 3600) return `${Math.floor(sec / 60)}m ago` + return `${Math.floor(sec / 3600)}h ago` +} diff --git a/ui/src/components/MarketSidebar.tsx b/ui/src/components/MarketSidebar.tsx index cf9c94ac..89b5e96b 100644 --- a/ui/src/components/MarketSidebar.tsx +++ b/ui/src/components/MarketSidebar.tsx @@ -8,7 +8,7 @@ import { SidebarRow } from './SidebarRow' const ASSET_CLASS_COLORS: Record = { equity: 'bg-accent/15 text-accent', crypto: 'bg-amber-500/15 text-amber-400', - currency: 'bg-emerald-500/15 text-emerald-400', + currency: 'bg-green/15 text-green', commodity: 'bg-purple-500/15 text-purple-400', } diff --git a/ui/src/components/PageHeader.tsx b/ui/src/components/PageHeader.tsx index 7784f14f..82619a06 100644 --- a/ui/src/components/PageHeader.tsx +++ b/ui/src/components/PageHeader.tsx @@ -1,22 +1,44 @@ import type { ReactNode } from 'react' +import { LiveIndicator } from './LiveIndicator' interface PageHeaderProps 
{ title: string description?: ReactNode right?: ReactNode + /** Show a pulsing "data is live" indicator next to the title and a + * relative-time microcopy ("updated 14s ago") in the description row. + * Pass the timestamp of the last successful refresh; pass `null` to + * show the pulse without a time (pre-first-load). */ + live?: { lastUpdated: Date | null } } -export function PageHeader({ title, description, right }: PageHeaderProps) { +export function PageHeader({ title, description, right, live }: PageHeaderProps) { return (
-
-
-

{title}

- {description && ( -

{description}

+
+
+
+

{title}

+ {live && ( + + )} +
+ {(description || live) && ( +
+ {description && {description}} + {live && ( + <> + {description && ·} + + + )} +
)}
- {right &&
{right}
} + {right &&
{right}
}
) diff --git a/ui/src/components/PushApprovalPanel.tsx b/ui/src/components/PushApprovalPanel.tsx index 42ed703a..8cb1cdbd 100644 --- a/ui/src/components/PushApprovalPanel.tsx +++ b/ui/src/components/PushApprovalPanel.tsx @@ -94,8 +94,8 @@ function timeAgo(iso: string): string { function statusColor(status: string): string { switch (status) { case 'submitted': return 'text-blue-400' - case 'filled': return 'text-green-400' - case 'rejected': return 'text-red-400' + case 'filled': return 'text-green' + case 'rejected': return 'text-red' case 'user-rejected': return 'text-orange-400' case 'cancelled': return 'text-text-muted' default: return 'text-text-muted' @@ -226,7 +226,7 @@ export function PushApprovalPanel() {
{text} @@ -261,7 +261,7 @@ export function PushApprovalPanel() {
{text} @@ -300,7 +300,7 @@ export function PushApprovalPanel() { @@ -315,17 +315,17 @@ export function PushApprovalPanel() {
Last push
{lastResult.data.submitted.length > 0 && ( - {lastResult.data.submitted.length} submitted + {lastResult.data.submitted.length} submitted )} {lastResult.data.rejected.length > 0 && ( <> {lastResult.data.submitted.length > 0 && ', '} - {lastResult.data.rejected.length} rejected + {lastResult.data.rejected.length} rejected )}
{lastResult.data.rejected.map((r, i) => ( -
{r.error || 'Unknown error'}
+
{r.error || 'Unknown error'}
))}
diff --git a/ui/src/components/TabStrip.tsx b/ui/src/components/TabStrip.tsx index dd36a7fd..c28433d6 100644 --- a/ui/src/components/TabStrip.tsx +++ b/ui/src/components/TabStrip.tsx @@ -1,4 +1,5 @@ import { useState, type MouseEvent, type WheelEvent } from 'react' +import { X } from 'lucide-react' import { useChannels } from '../contexts/ChannelsContext' import { useWorkspace } from '../tabs/store' import { getView } from '../tabs/registry' @@ -165,9 +166,7 @@ function TabButton({ title, active, onSelect, onClose, onContextMenu }: TabButto className="w-4 h-4 rounded flex items-center justify-center text-text-muted/60 hover:text-text hover:bg-bg-tertiary" aria-label={`Close ${title}`} > - - - +
) diff --git a/ui/src/components/UpdateBanner.tsx b/ui/src/components/UpdateBanner.tsx new file mode 100644 index 00000000..3599f2bf --- /dev/null +++ b/ui/src/components/UpdateBanner.tsx @@ -0,0 +1,97 @@ +import { useEffect, useState } from 'react' +import { api } from '../api' +import type { VersionInfo } from '../api/types' + +const SKIP_STORAGE_KEY = 'openalice.update.skipVersion' + +/** + * Top-of-app banner shown when GitHub Releases reports a version newer + * than the running app's package.json. + * + * Three actions for the user: + * - "Release notes" — opens the GitHub release page (changelog) + * - "Skip this version" — persists in localStorage; never bug user + * about THIS specific version again. They'll see the banner again + * when a newer version is released. + * - "×" close — session-only dismiss (until next page load). + * + * Self-hosted source distribution: when the user wants to actually + * update, they run `git pull && pnpm build && restart` in their + * terminal. We don't auto-execute (Electron auto-update will + * eventually handle that path natively). + */ +export function UpdateBanner() { + const [info, setInfo] = useState(null) + const [sessionDismissed, setSessionDismissed] = useState(false) + + useEffect(() => { + api.version.get().then(setInfo).catch(() => {}) + }, []) + + if (!info || !info.hasUpdate || !info.latest) return null + if (sessionDismissed) return null + + const skippedVersion = (() => { + try { return localStorage.getItem(SKIP_STORAGE_KEY) } catch { return null } + })() + if (skippedVersion === info.latest) return null + + const handleSkip = () => { + try { localStorage.setItem(SKIP_STORAGE_KEY, info.latest!) } catch { /* ignore */ } + setSessionDismissed(true) + } + const handleDismiss = () => { + setSessionDismissed(true) + } + + return ( +
+ + + + + + + + + v{info.latest} is available + {' '}(you have v{info.current}) + {info.publishedAt && ( + · released {info.publishedAt.slice(0, 10)} + )} + + + Run git pull && pnpm build to update + + {info.releaseUrl && ( + + Release notes → + + )} + + +
+ ) +} + diff --git a/ui/src/components/credentials/CredentialCard.tsx b/ui/src/components/credentials/CredentialCard.tsx new file mode 100644 index 00000000..69f12de1 --- /dev/null +++ b/ui/src/components/credentials/CredentialCard.tsx @@ -0,0 +1,181 @@ +import { useState } from 'react' +import type { Credential, Profile, Preset, SdkAdapterId } from '../../api/types' + +export type TestState = { status: 'idle' | 'testing' | 'ok' | 'fail'; error?: string } + +export interface CredentialCardProfile { + slug: string + profile: Profile + isActive: boolean + testState: TestState +} + +export interface CredentialCardProps { + slug: string + credential: Credential + profiles: CredentialCardProfile[] + presets: Preset[] + /** Adapter ids this credential's preset can drive, with star on test default. */ + availableAdapters: Array<{ id: SdkAdapterId; isTestDefault: boolean }> + /** Whether this card is the active selection on the credentials side. */ + selected: boolean + /** Whether an SDK is selected on the other side and this credential + * is NOT in its compatible set. Renders with reduced opacity. 
*/ + dimmed: boolean + onSelect: () => void + onSetActive: (profileSlug: string) => void + onTest: (profileSlug: string, profile: Profile) => void + onEditProfile: (profileSlug: string) => void + onModelChange: (profileSlug: string, model: string) => Promise +} + +const VENDOR_LABEL: Record = { + anthropic: 'Anthropic', + openai: 'OpenAI', + google: 'Google', + minimax: 'MiniMax', + glm: 'GLM (Zhipu)', + kimi: 'Kimi (Moonshot)', + deepseek: 'DeepSeek', + custom: 'Custom', +} + +const AUTH_LABEL: Record = { + 'api-key': 'API key', + 'subscription': 'subscription', +} + +function getModelOptions(profile: Profile, presets: Preset[]): Array<{ id: string; label: string }> { + const preset = presets.find(p => p.id === profile.preset) + if (!preset) return [] + const props = preset.schema?.properties as Record }> | undefined + const oneOf = props?.model?.oneOf + if (!oneOf) return [] + return oneOf.map(o => ({ id: o.const, label: o.title })) +} + +export function CredentialCard({ + slug, + credential, + profiles, + presets, + availableAdapters, + selected, + dimmed, + onSelect, + onSetActive, + onTest, + onEditProfile, + onModelChange, +}: CredentialCardProps) { + const [expanded, setExpanded] = useState(profiles.length > 0) + + const vendorLabel = VENDOR_LABEL[credential.vendor] ?? credential.vendor + const authLabel = AUTH_LABEL[credential.authType] ?? credential.authType + + return ( +
{ if (e.key === 'Enter' || e.key === ' ') onSelect() }} + className={`rounded-xl border bg-bg transition-all overflow-hidden ${ + selected ? 'border-accent ring-2 ring-accent/30' : 'border-border' + } ${dimmed ? 'opacity-30' : ''}`} + > +
+
+

+ {vendorLabel} ({authLabel}) +

+ +
+ + {availableAdapters.length > 0 && ( +
+ {availableAdapters.map(a => ( + + {a.id}{a.isTestDefault && } + + ))} +
+ )} +
+ + {expanded && profiles.length > 0 && ( +
+
Profiles
+ {profiles.map(({ slug: pSlug, profile, isActive, testState }) => { + const modelOptions = getModelOptions(profile, presets) + const canSwitchModel = modelOptions.length > 1 && modelOptions.some(o => o.id === profile.model) + const ts = testState + const testLabel = ts.status === 'testing' ? 'Testing…' : ts.status === 'ok' ? 'OK' : ts.status === 'fail' ? 'Failed' : 'Test' + const testColor = + ts.status === 'ok' ? 'text-green border-green/40' + : ts.status === 'fail' ? 'text-red border-red/40' + : 'text-text-muted border-border hover:text-text hover:bg-bg-tertiary' + return ( +
e.stopPropagation()}> +
+
+ {pSlug} + {isActive && Active} +
+ {canSwitchModel ? ( + + ) : ( +

{profile.model || 'Auto'}

+ )} +
+
+ + {!isActive && ( + + )} + +
+
+ ) + })} +
+ )} + +
+ slug: {slug} +
+
+ ) +} diff --git a/ui/src/components/credentials/SdkAdapterCard.tsx b/ui/src/components/credentials/SdkAdapterCard.tsx new file mode 100644 index 00000000..fa681963 --- /dev/null +++ b/ui/src/components/credentials/SdkAdapterCard.tsx @@ -0,0 +1,83 @@ +import type { SdkAdapterInfo } from '../../api/types' + +export interface SdkAdapterCardProps { + adapter: SdkAdapterInfo + /** + * Map of presetId → credential slug for presets the user has at least + * one credential configured for. Used to render solid (configured) vs + * hollow (not yet configured) dots. + */ + configuredPresetMap: Record + /** Whether this card is the active selection on the SDKs side. */ + selected: boolean + /** Whether a credential is selected on the other side and this adapter + * is NOT in its compatible set. Renders with reduced opacity. */ + dimmed: boolean + onSelect: () => void + onConfigurePreset: (presetId: string) => void +} + +export function SdkAdapterCard({ + adapter, + configuredPresetMap, + selected, + dimmed, + onSelect, + onConfigurePreset, +}: SdkAdapterCardProps) { + const isTestDefault = adapter.presets.some(p => p.isTestDefault) + + return ( +
{ if (e.key === 'Enter' || e.key === ' ') onSelect() }} + className={`p-4 rounded-xl border bg-bg cursor-pointer transition-all ${ + selected ? 'border-accent ring-2 ring-accent/30' : 'border-border' + } ${dimmed ? 'opacity-30' : ''}`} + > +
+

{adapter.label}

+ {isTestDefault && ( + ★ test default + )} +
+

{adapter.description}

+ + {adapter.presets.length > 0 && ( +
+
Compatible credentials
+ {adapter.presets.map((preset) => { + const isConfigured = preset.presetId in configuredPresetMap + return ( +
+ + + {preset.presetLabel} + {preset.isTestDefault && } + + {!isConfigured && ( + + )} +
+ ) + })} +
+ )} +
+ ) +} diff --git a/ui/src/components/market/FinancialStatementsPanel.tsx b/ui/src/components/market/FinancialStatementsPanel.tsx index 6516a880..7495cb4a 100644 --- a/ui/src/components/market/FinancialStatementsPanel.tsx +++ b/ui/src/components/market/FinancialStatementsPanel.tsx @@ -136,7 +136,7 @@ export function FinancialStatementsPanel({ symbol }: Props) { } > {loading && !entry &&
Loading…
} - {entry?.error &&
{entry.error}
} + {entry?.error &&
{entry.error}
} {!entry?.error && rows.length === 0 && !loading && (
No data.
)} diff --git a/ui/src/components/market/KeyMetricsPanel.tsx b/ui/src/components/market/KeyMetricsPanel.tsx index 4c074d71..5dfeac45 100644 --- a/ui/src/components/market/KeyMetricsPanel.tsx +++ b/ui/src/components/market/KeyMetricsPanel.tsx @@ -93,7 +93,7 @@ export function KeyMetricsPanel({ symbol }: Props) { return ( {loading &&
Loading…
} - {error && !loading &&
{error}
} + {error && !loading &&
{error}
} {!loading && !error && data && (
{rows.map(([k, v]) => ( diff --git a/ui/src/components/market/ProfilePanel.tsx b/ui/src/components/market/ProfilePanel.tsx index 9417e959..9f3f4b47 100644 --- a/ui/src/components/market/ProfilePanel.tsx +++ b/ui/src/components/market/ProfilePanel.tsx @@ -49,7 +49,7 @@ export function ProfilePanel({ symbol }: Props) { return ( {loading &&
Loading…
} - {error && !loading &&
{error}
} + {error && !loading &&
{error}
} {!loading && !error && profile && (
diff --git a/ui/src/components/market/QuoteHeader.tsx b/ui/src/components/market/QuoteHeader.tsx index 8fa0646d..0fd491cc 100644 --- a/ui/src/components/market/QuoteHeader.tsx +++ b/ui/src/components/market/QuoteHeader.tsx @@ -58,7 +58,7 @@ export function QuoteHeader({ symbol }: Props) { {fmtNumber(lastPrice)} {change != null && changePct != null && ( - + {up ? '+' : ''}{fmtNumber(change)} ({up ? '+' : ''}{fmtPercent(changePct)}) )} @@ -80,7 +80,7 @@ export function QuoteHeader({ symbol }: Props) {
- {error &&
{error}
} + {error &&
{error}
}
) } diff --git a/ui/src/components/market/SearchBox.tsx b/ui/src/components/market/SearchBox.tsx index 6f35b7e7..f4da9ac9 100644 --- a/ui/src/components/market/SearchBox.tsx +++ b/ui/src/components/market/SearchBox.tsx @@ -5,7 +5,7 @@ import { marketApi, type SearchResult, type AssetClass } from '../../api/market' const ASSET_CLASS_COLORS: Record = { equity: 'bg-accent/15 text-accent', crypto: 'bg-amber-500/15 text-amber-400', - currency: 'bg-emerald-500/15 text-emerald-400', + currency: 'bg-green/15 text-green', commodity: 'bg-purple-500/15 text-purple-400', } diff --git a/ui/src/components/market/TradeableContractsPanel.tsx b/ui/src/components/market/TradeableContractsPanel.tsx index b296bd74..95ab10cd 100644 --- a/ui/src/components/market/TradeableContractsPanel.tsx +++ b/ui/src/components/market/TradeableContractsPanel.tsx @@ -53,7 +53,7 @@ export function TradeableContractsPanel({ symbol, assetClass }: Props) { return ( {loading &&
Searching brokers…
} - {error && !loading &&
{error}
} + {error && !loading &&
{error}
} {!loading && !error && utasConfigured === 0 && (
diff --git a/ui/src/index.css b/ui/src/index.css index fcaae97f..3e3bf09a 100644 --- a/ui/src/index.css +++ b/ui/src/index.css @@ -31,6 +31,62 @@ body, height: 100%; } +/* ==================== Typography scale ==================== + Five semantic sizes covering the entire app. New code should + prefer these over arbitrary `text-[Npx]` so size choices stay + consistent and a future global tweak is one edit. + - .text-display hero numbers (UTA NLV, Portfolio total) + - .text-title page H2 / dialog title + - .text-body primary body text, table rows, buttons + - .text-caption secondary descriptions, subtitles + - .text-micro badges, timestamps, table metadata */ + +.text-display { + @apply text-[28px] md:text-[36px] font-bold leading-tight tabular-nums; +} +.text-title { + @apply text-[16px] font-semibold leading-snug; +} +.text-body { + @apply text-[13px] leading-relaxed; +} +.text-caption { + @apply text-[12px] leading-relaxed; +} +.text-micro { + @apply text-[11px] tracking-wide; +} + +/* ==================== Live-data pulse ==================== + Small breathing dot used in PageHeader to communicate "this + surface is auto-refreshing". 2.4s cycle so it reads as ambient + not anxious. Color is the success/green token to keep the + association with "alive + healthy". 
*/ + +@keyframes live-pulse { + 0%, 100% { + opacity: 0.55; + transform: scale(0.92); + } + 50% { + opacity: 1; + transform: scale(1.08); + } +} +.live-pulse { + animation: live-pulse 2.4s ease-in-out infinite; +} +.live-pulse::after { + content: ''; + position: absolute; + inset: -3px; + border-radius: 9999px; + background: var(--color-green); + opacity: 0.3; + animation: live-pulse 2.4s ease-in-out infinite; + animation-delay: 0.3s; +} + /* ==================== Button utility classes ==================== */ .btn-primary { diff --git a/ui/src/pages/AIProviderPage.tsx b/ui/src/pages/AIProviderPage.tsx index 327899c4..922303eb 100644 --- a/ui/src/pages/AIProviderPage.tsx +++ b/ui/src/pages/AIProviderPage.tsx @@ -1,11 +1,14 @@ -import { useState, useEffect, useRef } from 'react' +import { useState, useEffect, useMemo, useRef } from 'react' import { api, type Profile, type AIBackend, type Preset } from '../api' +import type { Credential, SdkAdapterInfo, SdkAdapterId } from '../api/types' import { SaveIndicator } from '../components/SaveIndicator' import { Field, inputClass } from '../components/form' import { useSchemaForm, type SchemaField } from '../hooks/useSchemaForm' import type { SaveStatus } from '../hooks/useAutoSave' import { PageHeader } from '../components/PageHeader' import { PageLoading } from '../components/StateViews' +import { CredentialCard, type TestState as CredTestState } from '../components/credentials/CredentialCard' +import { SdkAdapterCard } from '../components/credentials/SdkAdapterCard' // ==================== Icons ==================== @@ -33,21 +36,110 @@ function getModelOptions(profile: Profile, presets: Preset[]): Array<{ id: strin type TestState = { status: 'idle' | 'testing' | 'ok' | 'fail'; error?: string } +type Selection = { kind: 'credential'; slug: string } | { kind: 'sdk'; id: SdkAdapterId } | null + export function AIProviderPage() { const [profiles, setProfiles] = useState | null>(null) + const [credentials, setCredentials] = 
useState>({}) const [activeProfile, setActiveProfile] = useState('') const [presets, setPresets] = useState([]) + const [adapters, setAdapters] = useState([]) const [editingSlug, setEditingSlug] = useState(null) const [showCreate, setShowCreate] = useState(false) const [testStates, setTestStates] = useState>({}) + const [selection, setSelection] = useState(null) useEffect(() => { - api.config.getProfiles().then(({ profiles: p, activeProfile: a }) => { - setProfiles(p); setActiveProfile(a) + api.config.getProfiles().then(({ profiles: p, credentials: c, activeProfile: a }) => { + setProfiles(p); setCredentials(c); setActiveProfile(a) }).catch(() => {}) api.config.getPresets().then(({ presets: p }) => setPresets(p)).catch(() => {}) + api.config.getSdkAdapters().then(({ adapters: a }) => setAdapters(a)).catch(() => {}) }, []) + // ============== Derived data ============== + + /** presetId → adapter ids it can drive (test default first). */ + const presetToAdapters = useMemo(() => { + const map: Record> = {} + for (const a of adapters) { + for (const p of a.presets) { + if (!map[p.presetId]) map[p.presetId] = [] + map[p.presetId].push({ id: a.id, isTestDefault: p.isTestDefault }) + } + } + // Sort: test default first + for (const list of Object.values(map)) { + list.sort((x, y) => Number(y.isTestDefault) - Number(x.isTestDefault)) + } + return map + }, [adapters]) + + /** Group profiles by their credentialSlug. Profiles without slug land under '__inline'. */ + const profilesByCredential = useMemo(() => { + const map: Record> = {} + if (!profiles) return map + for (const [slug, profile] of Object.entries(profiles)) { + const key = profile.credentialSlug ?? '__inline' + if (!map[key]) map[key] = [] + map[key].push([slug, profile]) + } + return map + }, [profiles]) + + /** Adapter ids reachable from a credential — derived via the credential's profiles' presets. 
*/ + const credentialToAdapters = useMemo(() => { + const map: Record> = {} + for (const credSlug of Object.keys(credentials)) { + const profilesUsing = profilesByCredential[credSlug] ?? [] + const seen = new Map() + for (const [, profile] of profilesUsing) { + if (!profile.preset) continue + for (const a of presetToAdapters[profile.preset] ?? []) { + // Take test-default if any path marks it + if (!seen.has(a.id) || a.isTestDefault) seen.set(a.id, a.isTestDefault) + } + } + map[credSlug] = [...seen.entries()] + .map(([id, isTestDefault]) => ({ id, isTestDefault })) + .sort((x, y) => Number(y.isTestDefault) - Number(x.isTestDefault)) + } + return map + }, [credentials, profilesByCredential, presetToAdapters]) + + /** Map presetId → credential slug (any one) where the user has a credential matching that preset. */ + const configuredPresetMap = useMemo(() => { + const map: Record = {} + if (!profiles) return map + for (const [slug, profile] of Object.entries(profiles)) { + if (profile.preset && profile.credentialSlug) map[profile.preset] = profile.credentialSlug + } + return map + }, [profiles]) + + // ============== Selection & dim logic ============== + + const isCredentialDimmed = (credSlug: string): boolean => { + if (!selection || selection.kind !== 'sdk') return false + const compatible = credentialToAdapters[credSlug] ?? [] + return !compatible.some(a => a.id === selection.id) + } + + const isAdapterDimmed = (adapterId: SdkAdapterId): boolean => { + if (!selection || selection.kind !== 'credential') return false + const compatible = credentialToAdapters[selection.slug] ?? [] + return !compatible.some(a => a.id === adapterId) + } + + const toggleSelectCredential = (slug: string) => { + setSelection(s => (s && s.kind === 'credential' && s.slug === slug ? null : { kind: 'credential', slug })) + } + const toggleSelectAdapter = (id: SdkAdapterId) => { + setSelection(s => (s && s.kind === 'sdk' && s.id === id ? 
null : { kind: 'sdk', id })) + } + + // ============== Profile actions ============== + const handleSetActive = async (slug: string) => { try { await api.config.setActiveProfile(slug); setActiveProfile(slug) } catch {} } @@ -56,20 +148,23 @@ export function AIProviderPage() { if (!profiles) return try { await api.config.deleteProfile(slug) - const updated = { ...profiles }; delete updated[slug] - setProfiles(updated); setEditingSlug(null) + const { profiles: p, credentials: c, activeProfile: a } = await api.config.getProfiles() + setProfiles(p); setCredentials(c); setActiveProfile(a) + setEditingSlug(null) } catch {} } const handleCreateSave = async (name: string, profile: Profile) => { await api.config.createProfile(name, profile) - setProfiles((p) => p ? { ...p, [name]: profile } : p) - // Don't close modal here — let the modal handle test + close + // Re-fetch so credentialSlug + credentials map reflect server-side eager extraction + const { profiles: p, credentials: c, activeProfile: a } = await api.config.getProfiles() + setProfiles(p); setCredentials(c); setActiveProfile(a) } const handleProfileUpdate = async (slug: string, profile: Profile) => { await api.config.updateProfile(slug, profile) - setProfiles((p) => p ? { ...p, [slug]: profile } : p) + const { profiles: p, credentials: c } = await api.config.getProfiles() + setProfiles(p); setCredentials(c) } const handleTest = async (slug: string, profile: Profile) => { @@ -89,7 +184,10 @@ export function AIProviderPage() { } } - const handleInlineModelChange = async (slug: string, profile: Profile, newModel: string) => { + const handleInlineModelChange = async (slug: string, newModel: string) => { + if (!profiles) return + const profile = profiles[slug] + if (!profile) return const updated = { ...profile, model: newModel } setProfiles((p) => p ? { ...p, [slug]: updated } : p) try { @@ -99,68 +197,105 @@ export function AIProviderPage() { } } - if (!profiles) return
+ if (!profiles) return
+ + // Profiles that have NO credentialSlug (transitional / inline-only) — render as a fallback group + const inlineProfiles = profilesByCredential['__inline'] ?? [] return (
- +
-
- {Object.entries(profiles).map(([slug, profile]) => { - const isActive = slug === activeProfile - const modelOptions = getModelOptions(profile, presets) - const canSwitchModel = modelOptions.length > 1 && modelOptions.some(o => o.id === profile.model) - return ( -
-
{BACKEND_ICONS[profile.backend]}
-
-
- {slug} - {isActive && Active} +
setSelection(null)} + > + {/* ============== Credentials column ============== */} +
e.stopPropagation()}> +
+

Credentials

+ +
+
+ {Object.entries(credentials).map(([credSlug, cred]) => { + const profilesUsing = profilesByCredential[credSlug] ?? [] + const sel = selection?.kind === 'credential' && selection.slug === credSlug + return ( + ({ + slug: pSlug, + profile, + isActive: pSlug === activeProfile, + testState: testStates[pSlug] ?? { status: 'idle' as const } as CredTestState, + }))} + presets={presets} + availableAdapters={credentialToAdapters[credSlug] ?? []} + selected={sel} + dimmed={isCredentialDimmed(credSlug)} + onSelect={() => toggleSelectCredential(credSlug)} + onSetActive={handleSetActive} + onTest={handleTest} + onEditProfile={(pSlug) => setEditingSlug(pSlug)} + onModelChange={async (pSlug, model) => handleInlineModelChange(pSlug, model)} + /> + ) + })} + + {inlineProfiles.length > 0 && ( +
+
Inline-only profiles (no credential record)
+
+ {inlineProfiles.map(([slug]) => slug).join(', ')} +
+
+ These profiles still work via inline fallback. They'll be linked to credential records the next time they're saved or after the 0003 migration runs.
- {canSwitchModel ? ( -
- - -
- ) : ( -

{profile.model || 'Auto (subscription plan)'}

- )} -
-
- {(() => { - const ts = testStates[slug] ?? { status: 'idle' as const } - const label = ts.status === 'testing' ? 'Testing…' : ts.status === 'ok' ? 'OK' : ts.status === 'fail' ? 'Failed' : 'Test' - const color = ts.status === 'ok' - ? 'text-green border-green/40' - : ts.status === 'fail' - ? 'text-red border-red/40' - : 'text-text-muted border-border hover:text-text hover:bg-bg-tertiary' - return ( - - ) - })()} - {!isActive && } -
-
- ) - })} - + )} + + {Object.keys(credentials).length === 0 && inlineProfiles.length === 0 && ( + + )} +
+ + + {/* ============== SDKs column ============== */} +
e.stopPropagation()}> +
+

Available SDKs

+ read-only +
+
+ {adapters.map((adapter) => { + const sel = selection?.kind === 'sdk' && selection.id === adapter.id + return ( + toggleSelectAdapter(adapter.id)} + onConfigurePreset={() => setShowCreate(true)} + /> + ) + })} +
+
diff --git a/ui/src/pages/DevPage.tsx b/ui/src/pages/DevPage.tsx index fb61b9fd..8e37e9a6 100644 --- a/ui/src/pages/DevPage.tsx +++ b/ui/src/pages/DevPage.tsx @@ -746,7 +746,7 @@ function ToolExecutePanel({ detail, result, onResult }: ToolExecutePanelProps) { {result && result.name === detail.name && (
- + {result.data.isError ? 'ERROR' : 'OK'} {result.durationMs}ms diff --git a/ui/src/pages/NotificationsInboxPage.tsx b/ui/src/pages/NotificationsInboxPage.tsx index ce728da7..819b114e 100644 --- a/ui/src/pages/NotificationsInboxPage.tsx +++ b/ui/src/pages/NotificationsInboxPage.tsx @@ -7,7 +7,7 @@ import type { NotificationEntry, NotificationSource } from '../api/notifications const SOURCE_COLORS: Record = { heartbeat: 'bg-purple/15 text-purple', cron: 'bg-accent/15 text-accent', - task: 'bg-emerald-500/15 text-emerald-400', + task: 'bg-green/15 text-green', manual: 'bg-amber-500/15 text-amber-400', } diff --git a/ui/src/pages/PortfolioPage.tsx b/ui/src/pages/PortfolioPage.tsx index 38d8805b..e169b1a7 100644 --- a/ui/src/pages/PortfolioPage.tsx +++ b/ui/src/pages/PortfolioPage.tsx @@ -7,6 +7,9 @@ import { EmptyState } from '../components/StateViews' import { EquityCurve } from '../components/EquityCurve' import { SnapshotDetail } from '../components/SnapshotDetail' import { Toggle } from '../components/Toggle' +import { Metric, signFromDelta } from '../components/Metric' +import { Sparkline } from '../components/Sparkline' +import { fmt, fmtPnl, fmtNum, fmtPctSigned } from '../lib/format' // ==================== Types ==================== @@ -43,6 +46,60 @@ interface PortfolioData { const EMPTY: PortfolioData = { equity: null, accounts: [], fxRates: [] } +const CUTOFF_24H_MS = 24 * 60 * 60 * 1000 + +interface CurveSummary { + total: { values: number[]; firstAtCutoff: number | null; latest: number | null } + perAccount: Record +} + +/** Trailing-24h baseline + sparkline values, both at the aggregate level + * and per-account. Drives the today-PnL delta in the hero plus the + * per-account mini sparklines in AccountStrip. 
*/ +function summarizeAggregateCurve(points: EquityCurvePoint[]): CurveSummary { + const sorted = [...points].sort((a, b) => a.timestamp.localeCompare(b.timestamp)) + const cutoff = Date.now() - CUTOFF_24H_MS + + const totalValues: number[] = [] + let totalFirstAtCutoff: number | null = null + let totalLatest: number | null = null + const perAccountValues = new Map() + const perAccountFirst = new Map() + const perAccountLatest = new Map() + + for (const p of sorted) { + const t = new Date(p.timestamp).getTime() + const totalN = Number(p.equity) + if (Number.isFinite(totalN)) { + totalValues.push(totalN) + totalLatest = totalN + if (t >= cutoff && totalFirstAtCutoff == null) totalFirstAtCutoff = totalN + } + for (const [id, raw] of Object.entries(p.accounts ?? {})) { + const n = Number(raw) + if (!Number.isFinite(n)) continue + let arr = perAccountValues.get(id) + if (!arr) { arr = []; perAccountValues.set(id, arr) } + arr.push(n) + perAccountLatest.set(id, n) + if (t >= cutoff && !perAccountFirst.has(id)) perAccountFirst.set(id, n) + } + } + + const perAccount: CurveSummary['perAccount'] = {} + for (const [id, values] of perAccountValues) { + perAccount[id] = { + values, + firstAtCutoff: perAccountFirst.get(id) ?? null, + latest: perAccountLatest.get(id) ?? null, + } + } + return { + total: { values: totalValues, firstAtCutoff: totalFirstAtCutoff, latest: totalLatest }, + perAccount, + } +} + // ==================== Page ==================== export function PortfolioPage() { @@ -56,6 +113,10 @@ export function PortfolioPage() { const [selectedSnapshot, setSelectedSnapshot] = useState(null) const [snapshotEnabled, setSnapshotEnabled] = useState(true) const [snapshotEvery, setSnapshotEvery] = useState('15m') + // Aggregate curve (all UTAs, full per-account breakdown) — shared between + // hero today-PnL delta and per-account sparklines. Distinct from + // curvePoints which follows the user's chart-account selection. 
+ const [aggregateCurve, setAggregateCurve] = useState(null) const snapshotConfig = useMemo(() => ({ enabled: snapshotEnabled, every: snapshotEvery }), [snapshotEnabled, snapshotEvery]) const saveSnapshotConfig = useCallback(async (d: Record) => { @@ -63,7 +124,10 @@ export function PortfolioPage() { }, []) const { status: snapshotSaveStatus } = useAutoSave({ data: snapshotConfig, save: saveSnapshotConfig }) - // Fetch curve data for a specific account or all + // Fetch curve data for the user's chart-pane selection (single account + // or 'all'). Distinct from aggregate-curve — that one is always fetched + // 'all' so per-account derivations stay consistent regardless of the + // chart pane state. const fetchCurveData = useCallback(async (accountId: string | 'all') => { if (accountId === 'all') { const result = await api.trading.equityCurve({ limit: 200 }).catch(() => ({ points: [] })) @@ -82,11 +146,13 @@ export function PortfolioPage() { const refresh = useCallback(async () => { setLoading(true) - const [result, configResult] = await Promise.all([ + const [result, configResult, aggregateResult] = await Promise.all([ fetchPortfolioData(), api.config.load().catch(() => null), + api.trading.equityCurve({ limit: 1500 }).catch(() => ({ points: [] as EquityCurvePoint[] })), ]) setData(result) + setAggregateCurve(summarizeAggregateCurve(aggregateResult.points)) if (configResult?.snapshot) { setSnapshotEnabled(configResult.snapshot.enabled) setSnapshotEvery(configResult.snapshot.every) @@ -152,12 +218,13 @@ export function PortfolioPage() {
Live portfolio overview across all trading accounts.{lastRefresh && Updated {lastRefresh.toLocaleTimeString()}}} + description="Live portfolio overview across all trading accounts." + live={{ lastUpdated: lastRefresh }} right={ @@ -169,7 +236,7 @@ export function PortfolioPage() {
{/* Main column */}
- + {curvePoints.length > 0 && ( 0 && ( - + )} {allPositions.length > 0 && ( @@ -266,7 +336,10 @@ async function fetchPortfolioData(): Promise { // ==================== Hero Metrics ==================== -function HeroMetrics({ equity }: { equity: AggregatedEquity | null }) { +function HeroMetrics({ equity, curve }: { + equity: AggregatedEquity | null + curve: { values: number[]; firstAtCutoff: number | null; latest: number | null } | null +}) { if (!equity) { return (
@@ -275,24 +348,46 @@ function HeroMetrics({ equity }: { equity: AggregatedEquity | null }) { ) } - return ( -
-
- - - - -
-
- ) -} + const total = Number(equity.totalEquity) + const cash = Number(equity.totalCash) + const unrealized = Number(equity.totalUnrealizedPnL) + const realized = Number(equity.totalRealizedPnL) + + // Today PnL — same shape as TradingPage hero. Suppress when no baseline + // is available yet (fresh portfolio with no 24h history). + let todayDelta: { value: string; sign: 'up' | 'down' | 'flat' } | undefined + if (curve && curve.latest != null && curve.firstAtCutoff != null) { + const delta = curve.latest - curve.firstAtCutoff + const pct = curve.firstAtCutoff !== 0 ? (delta / curve.firstAtCutoff) * 100 : 0 + todayDelta = { + value: `${fmtPnl(delta, 'USD')} (${fmtPctSigned(pct)}) today`, + sign: signFromDelta(delta), + } + } -function HeroItem({ label, value, pnl }: { label: string; value: string; pnl?: number }) { - const color = pnl == null ? 'text-text' : pnl >= 0 ? 'text-green' : 'text-red' return ( -
-

{label}

-

{value}

+
+ +
+ + + +
) } @@ -305,33 +400,64 @@ const HEALTH_DOT: Record = { offline: 'bg-red', } -function AccountStrip({ sources }: { sources: Array<{ id: string; label: string; provider: string; equity: string; unrealizedPnL: number; error?: string; health?: string; disabled?: boolean }> }) { +function AccountStrip({ sources, perAccountCurve }: { + sources: Array<{ id: string; label: string; provider: string; equity: string; unrealizedPnL: number; error?: string; health?: string; disabled?: boolean }> + perAccountCurve: Record +}) { return ( -
+
{sources.map(s => { const isDisabled = s.disabled const isOffline = s.health === 'offline' && !isDisabled const dotColor = isDisabled ? 'bg-text-muted/40' : (HEALTH_DOT[s.health ?? 'healthy'] ?? 'bg-text-muted') + + const curve = perAccountCurve[s.id] + const todayDelta = curve && curve.latest != null && curve.firstAtCutoff != null + ? curve.latest - curve.firstAtCutoff + : null + const showSpark = !isDisabled && !isOffline && curve && curve.values.length >= 2 + return ( -
-
- {s.label} - {isDisabled - ? Disabled - : isOffline - ? Reconnecting... - : <> - {fmt(Number(s.equity))} - {s.unrealizedPnL !== 0 && ( - = 0 ? 'text-green' : 'text-red'}> - {fmtPnl(s.unrealizedPnL)} +
+
+
+
+ {s.label} + {!isDisabled && !isOffline && ( + {fmt(Number(s.equity))} + )} +
+
+ {isDisabled + ? Disabled + : isOffline + ? Reconnecting… + : ( + + {todayDelta != null && Number.isFinite(todayDelta) ? ( + = 0 ? 'text-green' : 'text-red'}> + {todayDelta >= 0 ? '▲' : '▼'} {fmtPnl(todayDelta)} today + + ) : s.unrealizedPnL !== 0 ? ( + = 0 ? 'text-green' : 'text-red'}> + {fmtPnl(s.unrealizedPnL)} unrealized + + ) : ( + + )} - )} - - } - {s.error && !isOffline && !isDisabled && {s.error}} + ) + } + {s.error && !isOffline && !isDisabled && {s.error}} +
+
+ {showSpark && ( +
+ +
+ )}
) })} @@ -437,7 +563,7 @@ function PositionsTable({ positions, fxRates }: { positions: PositionWithAccount {(() => { const cost = Number(p.avgCost) * Number(p.quantity) const pct = cost > 0 ? (Number(p.unrealizedPnL) / cost) * 100 : 0 - return `${pct >= 0 ? '+' : ''}${pct.toFixed(2)}%` + return fmtPctSigned(pct) })()} @@ -542,8 +668,6 @@ function TradeLog({ commits }: { commits: CommitWithAccount[] }) { ) } -// ==================== Formatting Helpers ==================== - // ==================== Snapshot Settings ==================== const INTERVAL_PRESETS = [ @@ -606,32 +730,3 @@ function SnapshotSettings({ enabled, every, onEnabledChange, onEveryChange, save
) } - -// ==================== Formatting Helpers ==================== - -const CURRENCY_SYMBOLS: Record = { - USD: '$', HKD: 'HK$', EUR: '€', GBP: '£', JPY: '¥', - CNY: '¥', CNH: '¥', CAD: 'C$', AUD: 'A$', CHF: 'CHF ', - SGD: 'S$', KRW: '₩', INR: '₹', TWD: 'NT$', BRL: 'R$', -} - -function currencySymbol(currency?: string): string { - if (!currency) return '$' - return CURRENCY_SYMBOLS[currency.toUpperCase()] ?? `${currency} ` -} - -function fmt(n: number, currency?: string): string { - const sym = currencySymbol(currency) - return n >= 1000 ? `${sym}${n.toLocaleString('en-US', { minimumFractionDigits: 2, maximumFractionDigits: 2 })}` - : `${sym}${n.toFixed(2)}` -} - -function fmtPnl(n: number, currency?: string): string { - const sign = n >= 0 ? '+' : '' - return `${sign}${fmt(n, currency)}` -} - -function fmtNum(n: number): string { - return n >= 1 ? n.toLocaleString('en-US', { maximumFractionDigits: 4 }) - : n.toPrecision(4) -} diff --git a/ui/src/pages/TradingPage.tsx b/ui/src/pages/TradingPage.tsx index 173c5cc5..ab7da0ac 100644 --- a/ui/src/pages/TradingPage.tsx +++ b/ui/src/pages/TradingPage.tsx @@ -95,6 +95,7 @@ export function TradingPage() { const [presets, setPresets] = useState([]) const [equity, setEquity] = useState(null) const [curve, setCurve] = useState(null) + const [lastUpdated, setLastUpdated] = useState(null) useEffect(() => { api.trading.getBrokerPresets().then(r => setPresets(r.presets)).catch(() => {}) @@ -112,6 +113,7 @@ export function TradingPage() { ]) if (eq) setEquity(eq) setCurve(summarizeCurve(cv.points)) + setLastUpdated(new Date()) } catch { // Don't surface — aggregates are nice-to-have, the page still renders // from useTradingConfig if the equity endpoint is down. @@ -136,7 +138,11 @@ export function TradingPage() { return (
- + 0 ? { lastUpdated } : undefined} + />
diff --git a/ui/src/pages/UTADetailPage.tsx b/ui/src/pages/UTADetailPage.tsx index d1a160f7..3e054eb6 100644 --- a/ui/src/pages/UTADetailPage.tsx +++ b/ui/src/pages/UTADetailPage.tsx @@ -39,6 +39,7 @@ export function UTADetailPage({ spec }: UTADetailPageProps) { const [orderMode, setOrderMode] = useState(null) const [dataError, setDataError] = useState(null) const [expandedSnapshot, setExpandedSnapshot] = useState(null) + const [lastUpdated, setLastUpdated] = useState(null) useEffect(() => { api.trading.getBrokerPresets().then(r => setPresets(r.presets)).catch(() => {}) @@ -61,6 +62,7 @@ export function UTADetailPage({ spec }: UTADetailPageProps) { setAccount(acct) setPositions(pos.positions) setOrders(ord.orders) + setLastUpdated(new Date()) } catch (err) { setDataError(err instanceof Error ? err.message : String(err)) } @@ -156,6 +158,7 @@ export function UTADetailPage({ spec }: UTADetailPageProps) {
← Trading