Skip to content

Commit 3c1094e

Browse files
xesrevinu authored and Git Agent committed
test: enhance llm error handling in cli tests
- Add tests for transient llm failures in git commit
- Implement retries for invalid llm JSON output

This update adds test cases to handle errors encountered with the LLM during git commit operations, ensuring stability and reliability.

Co-Authored-By: Git Agent <noreply@git-agent.dev>
1 parent 678c58d commit 3c1094e

2 files changed

Lines changed: 118 additions & 8 deletions

File tree

tests/cli-integration.test.ts

Lines changed: 94 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -627,6 +627,100 @@ describe.concurrent("CLI integration", () => {
627627
}),
628628
);
629629

630+
it.effect(
631+
"git commit retries transient llm failures in the http client layer",
632+
Effect.fn(function* () {
633+
const repo = yield* createGitRepo();
634+
yield* writeTextFile(repo, "src/app.ts", "export const value = 'base';\n");
635+
yield* gitCommitAll(repo, "chore: seed repo");
636+
yield* writeTextFile(repo, "src/app.ts", "export const value = 'next';\n");
637+
yield* gitCommitAll(repo, "feat(core): old message");
638+
639+
const llm = yield* startMockLlmServer([
640+
{
641+
status: 503,
642+
content: {
643+
error: {
644+
message: "temporary upstream failure",
645+
},
646+
},
647+
},
648+
{
649+
content: {
650+
title: "fix(core): update app value",
651+
bullets: ["Update app value output"],
652+
explanation: "Updates the app value in the working tree.",
653+
},
654+
},
655+
]);
656+
657+
const result = yield* runCli(
658+
[
659+
"commit",
660+
"--amend",
661+
"--api-key",
662+
"test-key",
663+
"--base-url",
664+
llm.baseUrl,
665+
"--model",
666+
"test-model",
667+
],
668+
{ cwd: repo },
669+
);
670+
671+
expect(result.exitCode).toBe(0);
672+
expect(llm.requests).toHaveLength(2);
673+
expect((yield* git(repo, ["log", "-1", "--format=%s"])).stdout.trim()).toBe(
674+
"fix(core): update app value",
675+
);
676+
}),
677+
);
678+
679+
it.effect(
680+
"git commit retries invalid llm json output",
681+
Effect.fn(function* () {
682+
const repo = yield* createGitRepo();
683+
yield* writeTextFile(repo, "src/app.ts", "export const value = 'base';\n");
684+
yield* gitCommitAll(repo, "chore: seed repo");
685+
yield* writeTextFile(repo, "src/app.ts", "export const value = 'next';\n");
686+
yield* gitCommitAll(repo, "feat(core): old message");
687+
688+
const llm = yield* startMockLlmServer([
689+
{
690+
content:
691+
'{"title":"fix(core): update app value","bullets":["Update app value output"],"explanation":"Updates the app value"',
692+
},
693+
{
694+
content: {
695+
title: "fix(core): update app value",
696+
bullets: ["Update app value output"],
697+
explanation: "Updates the app value in the working tree.",
698+
},
699+
},
700+
]);
701+
702+
const result = yield* runCli(
703+
[
704+
"commit",
705+
"--amend",
706+
"--api-key",
707+
"test-key",
708+
"--base-url",
709+
llm.baseUrl,
710+
"--model",
711+
"test-model",
712+
],
713+
{ cwd: repo },
714+
);
715+
716+
expect(result.exitCode).toBe(0);
717+
expect(llm.requests).toHaveLength(2);
718+
expect((yield* git(repo, ["log", "-1", "--format=%s"])).stdout.trim()).toBe(
719+
"fix(core): update app value",
720+
);
721+
}),
722+
);
723+
630724
it.effect(
631725
"git commit exits with hook-blocked status after repeated conventional hook failures",
632726

tests/integration/helpers.ts

Lines changed: 24 additions & 8 deletions
Original file line number | Diff line number | Diff line change
@@ -22,10 +22,12 @@ export interface MockLlmRequest {
2222
}
2323

2424
export interface MockLlmResponse {
25-
readonly content:
25+
readonly content?:
2626
| string
2727
| Record<string, unknown>
2828
| ((request: MockLlmRequest) => string | Record<string, unknown>);
29+
readonly headers?: Record<string, string>;
30+
readonly status?: number;
2931
}
3032

3133
export interface MockLlmServer {
@@ -203,12 +205,12 @@ export const runCli = (args: ReadonlyArray<string>, options: CliOptions) => {
203205
env: {
204206
PWD: options.cwd,
205207
XDG_CONFIG_HOME: isolatedConfigHome,
206-
OPENAI_COMPACT_API_KEY: undefined,
207-
OPENAI_COMPACT_API_BASE_URL: undefined,
208-
OPENAI_COMPACT_MODEL: undefined,
209-
GIT_AGENT_BUILD_API_KEY: undefined,
210-
GIT_AGENT_BUILD_BASE_URL: undefined,
211-
GIT_AGENT_BUILD_MODEL: undefined,
208+
OPENAI_COMPACT_API_KEY: "",
209+
OPENAI_COMPACT_API_BASE_URL: "",
210+
OPENAI_COMPACT_MODEL: "",
211+
GIT_AGENT_BUILD_API_KEY: "",
212+
GIT_AGENT_BUILD_BASE_URL: "",
213+
GIT_AGENT_BUILD_MODEL: "",
212214
...options.env,
213215
},
214216
});
@@ -262,7 +264,21 @@ export const startMockLlmServer = (responses: ReadonlyArray<MockLlmResponse>) =>
262264

263265
const content =
264266
typeof next.content === "function" ? next.content(requestInfo) : next.content;
265-
response.writeHead(200, { "content-type": "application/json" });
267+
const status = next.status ?? 200;
268+
const headers = {
269+
"content-type": "application/json",
270+
...next.headers,
271+
};
272+
if (status !== 200) {
273+
response.writeHead(status, headers);
274+
response.end(
275+
typeof content === "string"
276+
? content
277+
: JSON.stringify(content ?? { error: { message: "mock llm error" } }),
278+
);
279+
return;
280+
}
281+
response.writeHead(status, headers);
266282
const text = typeof content === "string" ? content : JSON.stringify(content);
267283
response.end(
268284
JSON.stringify({

0 commit comments

Comments (0)