From e335032d902155c7bd64c7678a291042d7a20e3d Mon Sep 17 00:00:00 2001 From: Griffen Fargo <3642037+gfargo@users.noreply.github.com> Date: Thu, 14 May 2026 08:42:56 -0400 Subject: [PATCH] fix(config/git): merge gitconfig overrides instead of replacing the service object MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Root-cause for the schema-validation warnings users (including the reporter) hit on every coco invocation when they have a [coco] section in ~/.gitconfig. The bug was in loadGitConfig — three related shape problems on the rebuilt service object: ## What was broken 1. **Service REPLACED, not merged.** When ~/.gitconfig had a [coco] section, the loader built a fresh service object from gitconfig keys and assigned it wholesale — wiping default fields like \`tokenLimit\`, \`temperature\`, \`maxConcurrent\`, \`minTokensForSummary\`, \`maxFileTokens\` whenever the gitconfig didn't spell them out. A minimal gitconfig (\`serviceProvider\` + \`serviceModel\` + \`serviceApiKey\`) would end up with a hollow service shape. 2. **requestOptions: { timeout: null, maxRetries: null }.** The loader did \`Number(gitConfigParsed.coco?.serviceRequestOptionsTimeout)\` unconditionally. When that key wasn't set, \`Number(undefined) === NaN\`, which JSON-serializes to \`null\`. The schema requires \`timeout: number\` — null fails. 3. **endpoint + baseURL always attached.** \`endpoint\` is for ollama, \`baseURL\` is for openai. The loader set both fields regardless of provider. Each provider variant in the schema has \`additionalProperties: false\`, so the irrelevant one was rejected on every anyOf branch. Combined effect: every coco command running against a repo with this gitconfig logged \`[coco] Warning: config validation issues detected\` and (before #949) actually crashed. 
## What this PR changes \`loadGitConfig\` builds an overrides object containing ONLY the keys the user actually set in gitconfig, then merges those on top of the existing service: \`\`\`ts const overrides: Record<string, unknown> = {} if (coco.serviceProvider) overrides.provider = coco.serviceProvider if (coco.serviceModel) overrides.model = coco.serviceModel const tokenLimit = numberOrUndefined(coco.serviceTokenLimit) if (tokenLimit !== undefined) overrides.tokenLimit = tokenLimit // ... same pattern for every numeric field service = { ...(service || {}), ...overrides } as LLMService \`\`\` Provider-specific keys (baseURL for openai, endpoint for ollama) only attach when the effective provider matches: \`\`\`ts const effectiveProvider = (coco.serviceProvider || service?.provider) as ... if (effectiveProvider === 'openai' && coco.serviceBaseURL) overrides.baseURL = coco.serviceBaseURL if (effectiveProvider === 'ollama' && coco.serviceEndpoint) overrides.endpoint = coco.serviceEndpoint \`\`\` \`requestOptions\` only attaches when at least one sub-field has a finite number value. A \`numberOrUndefined\` helper centralizes the "convert iff valid number, else undefined" logic so the same trap doesn't reappear on a new field. ## Tests +4 regression tests covering each bug: - preserves default service fields when gitconfig only sets a few keys - does not attach requestOptions when neither sub-field is set - attaches only the requestOptions sub-fields that gitconfig actually sets - does not attach provider-irrelevant fields (endpoint on openai) 1677 tests pass (no flaky failure this run), lint + tsc clean. ## Verified end-to-end Reproduced the original failure locally — a ~/.gitconfig with only serviceProvider / serviceModel / serviceApiKey + an empty \`{ "conventionalCommits": true }\` local config. Before this PR: validation warning + hollowed-out service. After this PR: validation passes, service has every default field intact, gitconfig's apiKey override applied correctly. 
## Follow-up not in this PR #949 made schema-validation failures non-fatal (warn + continue). That defense-in-depth still applies — this PR removes the warning's root cause, but if a future config drift surfaces a new schema mismatch, the user still sees a clear warning instead of a crash. Both layers are useful. --- src/lib/config/services/git.test.ts | 114 +++++++++++++++++++++++++++ src/lib/config/services/git.ts | 116 +++++++++++++++++++--------- 2 files changed, 194 insertions(+), 36 deletions(-) diff --git a/src/lib/config/services/git.test.ts b/src/lib/config/services/git.test.ts index 44156745..768f5fb1 100644 --- a/src/lib/config/services/git.test.ts +++ b/src/lib/config/services/git.test.ts @@ -119,6 +119,120 @@ describe('loadGitConfig', () => { expect(config.mode).toBe('stdout') expect(config.defaultBranch).toBe('test') }) + + it('preserves default service fields when gitconfig only sets a few keys', () => { + // Regression: previously the loader REPLACED the service object + // entirely, so a gitconfig that only specified serviceProvider + + // serviceApiKey would wipe the default tokenLimit, temperature, + // maxConcurrent, etc. — leaving a hollow service shape that + // failed downstream because all the LLM-call defaults were gone. 
+ const sparseGitConfig = ` +[coco] + serviceProvider = openai + serviceModel = gpt-4o + serviceApiKey = test-api-key +` + const defaultsOpenai: Partial<Config> = { + service: getDefaultServiceConfigFromAlias('openai'), + mode: 'stdout', + defaultBranch: 'test', + } + mockFs.existsSync.mockReturnValue(true) + mockFs.readFileSync.mockReturnValue(sparseGitConfig) + + const config = loadGitConfig(defaultsOpenai as Config) + const service = config.service as OpenAILLMService + + // gitconfig override applied + expect(service.model).toBe('gpt-4o') + if (service.authentication.type === 'APIKey') { + expect(service.authentication.credentials.apiKey).toBe('test-api-key') + } + // Defaults preserved — the previous bug wiped these + expect(service.tokenLimit).toBeDefined() + expect(service.temperature).toBeDefined() + expect(service.maxConcurrent).toBeDefined() + }) + + it('does not attach requestOptions when neither sub-field is set', () => { + // Regression: previously the loader built `requestOptions: { + // timeout: Number(undefined), maxRetries: Number(undefined) }` → + // `{ timeout: NaN, maxRetries: NaN }` → JSON-serializes to + // `{ timeout: null, maxRetries: null }` → schema validation + // rejected since timeout/maxRetries must be number. + const noRequestOptions = ` +[coco] + serviceProvider = openai + serviceModel = gpt-4.1-nano + serviceApiKey = sk-test +` + const defaultsOpenai: Partial<Config> = { + service: getDefaultServiceConfigFromAlias('openai'), + mode: 'stdout', + defaultBranch: 'test', + } + mockFs.existsSync.mockReturnValue(true) + mockFs.readFileSync.mockReturnValue(noRequestOptions) + + const config = loadGitConfig(defaultsOpenai as Config) + const service = config.service as OpenAILLMService + + // No spurious requestOptions object created from undefined keys. + expect(service.requestOptions).toBeUndefined() + }) + + it('attaches only the requestOptions sub-fields that gitconfig actually sets', () => { + // Partial set: only timeout, no maxRetries. 
Result should have + // timeout populated and maxRetries unset (not NaN). + const partialRequestOptions = ` +[coco] + serviceProvider = openai + serviceModel = gpt-4o + serviceApiKey = sk-test + serviceRequestOptionsTimeout = 15000 +` + const defaultsOpenai: Partial<Config> = { + service: getDefaultServiceConfigFromAlias('openai'), + mode: 'stdout', + defaultBranch: 'test', + } + mockFs.existsSync.mockReturnValue(true) + mockFs.readFileSync.mockReturnValue(partialRequestOptions) + + const config = loadGitConfig(defaultsOpenai as Config) + const service = config.service as OpenAILLMService + + expect(service.requestOptions?.timeout).toBe(15000) + expect(service.requestOptions?.maxRetries).toBeUndefined() + }) + + it('does not attach provider-irrelevant fields (endpoint on openai, baseURL on ollama)', () => { + // Regression: the loader unconditionally attached both endpoint + // AND baseURL regardless of provider. Each provider's schema + // variant has additionalProperties:false, so the irrelevant one + // (endpoint for openai, baseURL for ollama) would fail + // validation in its non-applicable anyOf branch. + const openaiGitConfig = ` +[coco] + serviceProvider = openai + serviceModel = gpt-4o + serviceApiKey = sk-test +` + mockFs.existsSync.mockReturnValue(true) + mockFs.readFileSync.mockReturnValue(openaiGitConfig) + + const defaultsOpenai: Partial<Config> = { + service: getDefaultServiceConfigFromAlias('openai'), + mode: 'stdout', + defaultBranch: 'test', + } + const config = loadGitConfig(defaultsOpenai as Config) + const service = config.service as OpenAILLMService + + // 'endpoint' belongs to OllamaLLMService — must not appear on an + // openai service. 
+ expect((service as unknown as { endpoint?: string }).endpoint).toBeUndefined() + }) }) describe('appendToGitConfig', () => { diff --git a/src/lib/config/services/git.ts b/src/lib/config/services/git.ts index 1f72d3e3..01ba63bc 100644 --- a/src/lib/config/services/git.ts +++ b/src/lib/config/services/git.ts @@ -40,43 +40,87 @@ export function loadGitConfig( if (gitConfigParsed.coco) { foundPath = gitConfigPath - service = { - provider: gitConfigParsed.coco?.serviceProvider, - model: gitConfigParsed.coco?.serviceModel, - tokenLimit: gitConfigParsed.coco?.serviceTokenLimit - ? Number(gitConfigParsed.coco.serviceTokenLimit) - : undefined, - temperature: gitConfigParsed.coco?.serviceTemperature - ? Number(gitConfigParsed.coco.serviceTemperature) - : undefined, - maxConcurrent: gitConfigParsed.coco?.serviceMaxConcurrent - ? Number(gitConfigParsed.coco.serviceMaxConcurrent) - : undefined, - minTokensForSummary: gitConfigParsed.coco?.serviceMinTokensForSummary - ? Number(gitConfigParsed.coco.serviceMinTokensForSummary) - : undefined, - maxFileTokens: gitConfigParsed.coco?.serviceMaxFileTokens - ? Number(gitConfigParsed.coco.serviceMaxFileTokens) - : undefined, - maxParsingAttempts: gitConfigParsed.coco?.serviceMaxParsingAttempts - ? Number(gitConfigParsed.coco.serviceMaxParsingAttempts) - : undefined, - baseURL: gitConfigParsed.coco?.serviceBaseURL, - authentication: { + // Build OVERRIDES from gitconfig — only include keys the user + // actually set. Then merge on top of the existing service + // (default or earlier layer) so unset fields keep their + // defaults instead of getting wiped. + // + // The previous behavior had three latent bugs: + // 1. Replaced the whole service object — wiped default + // tokenLimit / temperature / maxConcurrent / etc. + // 2. Constructed `requestOptions` unconditionally with + // `Number(undefined) === NaN` for unset sub-fields → + // JSON-serializes to `null` → schema rejects. + // 3. 
Attached `endpoint` and `baseURL` unconditionally → the + // per-provider schema variants (`additionalProperties: + // false` on each anyOf branch) reject the irrelevant one. + const coco = gitConfigParsed.coco + const numberOrUndefined = (raw: unknown): number | undefined => { + if (raw === undefined || raw === null || raw === '') return undefined + const n = Number(raw) + return Number.isFinite(n) ? n : undefined + } + const requestOptionsOverrides: { timeout?: number; maxRetries?: number } = {} + const timeout = numberOrUndefined(coco.serviceRequestOptionsTimeout) + const maxRetries = numberOrUndefined(coco.serviceRequestOptionsMaxRetries) + if (timeout !== undefined) requestOptionsOverrides.timeout = timeout + if (maxRetries !== undefined) requestOptionsOverrides.maxRetries = maxRetries + + const overrides: Record<string, unknown> = {} + if (coco.serviceProvider) overrides.provider = coco.serviceProvider + if (coco.serviceModel) overrides.model = coco.serviceModel + const tokenLimit = numberOrUndefined(coco.serviceTokenLimit) + if (tokenLimit !== undefined) overrides.tokenLimit = tokenLimit + const temperature = numberOrUndefined(coco.serviceTemperature) + if (temperature !== undefined) overrides.temperature = temperature + const maxConcurrent = numberOrUndefined(coco.serviceMaxConcurrent) + if (maxConcurrent !== undefined) overrides.maxConcurrent = maxConcurrent + const minTokensForSummary = numberOrUndefined(coco.serviceMinTokensForSummary) + if (minTokensForSummary !== undefined) overrides.minTokensForSummary = minTokensForSummary + const maxFileTokens = numberOrUndefined(coco.serviceMaxFileTokens) + if (maxFileTokens !== undefined) overrides.maxFileTokens = maxFileTokens + const maxParsingAttempts = numberOrUndefined(coco.serviceMaxParsingAttempts) + if (maxParsingAttempts !== undefined) overrides.maxParsingAttempts = maxParsingAttempts + + // Provider-specific keys only attach when relevant to the + // chosen (or pre-existing) provider — keeps the merged service + // shape 
consistent with whichever schema variant should match. + const effectiveProvider = (coco.serviceProvider || service?.provider) as + | 'openai' | 'ollama' | 'anthropic' | undefined + if (effectiveProvider === 'openai' && coco.serviceBaseURL) { + overrides.baseURL = coco.serviceBaseURL + } + if (effectiveProvider === 'ollama' && coco.serviceEndpoint) { + overrides.endpoint = coco.serviceEndpoint + } + if (coco.serviceFields) { + try { + overrides.fields = JSON.parse(coco.serviceFields) + } catch { + // Malformed JSON in serviceFields — skip rather than throw. + // The loader's job isn't to validate user input here, just + // to surface what's parseable. The schema validator runs + // later and will catch a missing required field if any. + } + } + // requestOptions only gets attached when at least one sub-field + // was actually set. Empty `{}` would still serialize cleanly + // but stays out of the merged shape for readability. + if (Object.keys(requestOptionsOverrides).length > 0) { + overrides.requestOptions = requestOptionsOverrides + } + // Authentication: only when an apiKey was provided. Default + // service ships with an empty-credential placeholder for users + // who'll set the key via env var; gitconfig's apiKey should + // override that path, not the structure itself. + if (coco.serviceApiKey) { + overrides.authentication = { type: 'APIKey', - credentials: { - apiKey: gitConfigParsed.coco?.serviceApiKey, - }, - }, - requestOptions: { - timeout: Number(gitConfigParsed.coco?.serviceRequestOptionsTimeout), - maxRetries: Number(gitConfigParsed.coco?.serviceRequestOptionsMaxRetries), - }, - endpoint: gitConfigParsed.coco?.serviceEndpoint, - fields: gitConfigParsed.coco?.serviceFields - ? JSON.parse(gitConfigParsed.coco?.serviceFields) - : undefined, - } as LLMService + credentials: { apiKey: coco.serviceApiKey }, + } + } + + service = { ...(service || {}), ...overrides } as LLMService } config = {