diff --git a/bridge/__tests__/gitAdvancedHandlers.test.ts b/bridge/__tests__/gitAdvancedHandlers.test.ts new file mode 100644 index 0000000..281daf8 --- /dev/null +++ b/bridge/__tests__/gitAdvancedHandlers.test.ts @@ -0,0 +1,329 @@ +import { describe, expect, test, jest, beforeEach } from "@jest/globals"; +import { GitAdvancedHandlers } from "../src/handlers/gitAdvancedHandlers"; +import type { GitService } from "../src/services/gitService"; +import type { Rpc } from "../src/types"; + +// ─── Mock Factory ────────────────────────────────── + +function createMockRpc(): Rpc & { + _responses: any[]; + _errors: any[]; +} { + const responses: any[] = []; + const errors: any[] = []; + return { + sendResponse: jest.fn((id: number | string, payload: any) => { + responses.push({ id, payload }); + }), + sendError: jest.fn((id: number | string, err: any) => { + errors.push({ id, err }); + }), + _responses: responses, + _errors: errors, + }; +} + +function createMockGitService(): GitService { + return { + remoteList: jest.fn().mockResolvedValue([]), + remoteAdd: jest.fn().mockResolvedValue(undefined), + remoteRemove: jest.fn().mockResolvedValue(undefined), + remoteGetUrl: jest.fn().mockResolvedValue("https://github.com/test/repo.git"), + remoteSetUrl: jest.fn().mockResolvedValue(undefined), + push: jest.fn().mockResolvedValue("Everything up-to-date"), + pull: jest.fn().mockResolvedValue("Already up to date."), + fetch: jest.fn().mockResolvedValue(""), + revert: jest.fn().mockResolvedValue(""), + } as any; +} + +// ─── Tests ────────────────────────────────────────── + +let rpc: ReturnType; +let gitService: GitService; +let handlers: GitAdvancedHandlers; + +beforeEach(() => { + rpc = createMockRpc(); + gitService = createMockGitService(); + handlers = new GitAdvancedHandlers(rpc, undefined, gitService); +}); + +// ========================================== +// requireDir Validation +// ========================================== + +describe("GitAdvancedHandlers — requireDir validation", () => { + const handlerMethods = [ + "handleRemoteList", + "handleRemoteAdd", + "handleRemoteRemove", + "handleRemoteGetUrl", + "handleRemoteSetUrl", + "handlePush", + "handlePull", + "handleFetch", + "handleRevert", + ] as const; + + test("all handlers send BAD_REQUEST when dir is missing", async () => { + for (const method of handlerMethods) { + rpc = createMockRpc(); + gitService = createMockGitService(); + handlers = new GitAdvancedHandlers(rpc, undefined, gitService); + + await (handlers as any)[method]({}, 1); + expect(rpc.sendError).toHaveBeenCalledWith(1, { + code: "BAD_REQUEST", + message: expect.stringContaining("dir"), + }); + } + }); + + test("accepts dir, path, or cwd as directory param", async () => { + for (const key of ["dir", "path", "cwd"]) { + rpc = createMockRpc(); + gitService = createMockGitService(); + handlers = new GitAdvancedHandlers(rpc, undefined, gitService); + + await handlers.handleRemoteList({ [key]: "/repo" }, 1); + expect(rpc.sendResponse).toHaveBeenCalled(); + } + }); +}); + +// ========================================== +// REMOTE MANAGEMENT +// ========================================== + +describe("GitAdvancedHandlers — Remote Management", () => { + test("handleRemoteList returns remotes", async () => { + const mockRemotes = [ + { name: "origin", fetchUrl: "https://a.git", pushUrl: "https://a.git" }, + ]; + (gitService.remoteList as jest.Mock).mockResolvedValue(mockRemotes); + + await handlers.handleRemoteList({ dir: "/repo" }, 1); + 
expect(gitService.remoteList).toHaveBeenCalledWith("/repo"); + expect(rpc.sendResponse).toHaveBeenCalledWith(1, { + ok: true, + data: mockRemotes, + }); + }); + + test("handleRemoteAdd adds a remote", async () => { + await handlers.handleRemoteAdd( + { dir: "/repo", name: "upstream", url: "https://up.git" }, + 1 + ); + expect(gitService.remoteAdd).toHaveBeenCalledWith("/repo", "upstream", "https://up.git"); + expect(rpc.sendResponse).toHaveBeenCalledWith(1, { ok: true, data: null }); + }); + + test("handleRemoteAdd returns BAD_REQUEST when name missing", async () => { + await handlers.handleRemoteAdd({ dir: "/repo", url: "https://up.git" }, 1); + expect(rpc.sendError).toHaveBeenCalledWith(1, { + code: "BAD_REQUEST", + message: expect.stringContaining("name"), + }); + }); + + test("handleRemoteAdd returns BAD_REQUEST when url missing", async () => { + await handlers.handleRemoteAdd({ dir: "/repo", name: "origin" }, 1); + expect(rpc.sendError).toHaveBeenCalledWith(1, { + code: "BAD_REQUEST", + message: expect.stringContaining("url"), + }); + }); + + test("handleRemoteRemove removes a remote", async () => { + await handlers.handleRemoteRemove({ dir: "/repo", name: "origin" }, 1); + expect(gitService.remoteRemove).toHaveBeenCalledWith("/repo", "origin"); + expect(rpc.sendResponse).toHaveBeenCalledWith(1, { ok: true, data: null }); + }); + + test("handleRemoteRemove returns BAD_REQUEST when name missing", async () => { + await handlers.handleRemoteRemove({ dir: "/repo" }, 1); + expect(rpc.sendError).toHaveBeenCalledWith(1, { + code: "BAD_REQUEST", + message: expect.stringContaining("name"), + }); + }); + + test("handleRemoteGetUrl returns url", async () => { + await handlers.handleRemoteGetUrl({ dir: "/repo", name: "origin" }, 1); + expect(gitService.remoteGetUrl).toHaveBeenCalledWith("/repo", "origin"); + expect(rpc.sendResponse).toHaveBeenCalledWith(1, { + ok: true, + data: { url: "https://github.com/test/repo.git" }, + }); + }); + + test("handleRemoteGetUrl defaults to origin", async () => { + await handlers.handleRemoteGetUrl({ dir: "/repo" }, 1); + expect(gitService.remoteGetUrl).toHaveBeenCalledWith("/repo", "origin"); + }); + + test("handleRemoteSetUrl sets url", async () => { + await handlers.handleRemoteSetUrl( + { dir: "/repo", name: "origin", url: "https://new.git" }, + 1 + ); + expect(gitService.remoteSetUrl).toHaveBeenCalledWith("/repo", "origin", "https://new.git"); + expect(rpc.sendResponse).toHaveBeenCalledWith(1, { ok: true, data: null }); + }); + + test("handleRemoteSetUrl returns BAD_REQUEST when params missing", async () => { + await handlers.handleRemoteSetUrl({ dir: "/repo" }, 1); + expect(rpc.sendError).toHaveBeenCalledWith(1, { + code: "BAD_REQUEST", + message: expect.stringContaining("name"), + }); + }); +}); + +// ========================================== +// PUSH / PULL / FETCH +// ========================================== + +describe("GitAdvancedHandlers — Push / Pull / Fetch", () => { + test("handlePush pushes to remote", async () => { + await handlers.handlePush({ dir: "/repo" }, 1); + expect(gitService.push).toHaveBeenCalledWith("/repo", "origin", undefined, { + force: false, + setUpstream: false, + }); + expect(rpc.sendResponse).toHaveBeenCalledWith(1, { + ok: true, + data: { output: "Everything up-to-date" }, + }); + }); + + test("handlePush passes custom remote and branch", async () => { + await handlers.handlePush( + { dir: "/repo", remote: "upstream", branch: "main", force: true, setUpstream: true }, + 1 + ); + 
expect(gitService.push).toHaveBeenCalledWith("/repo", "upstream", "main", { + force: true, + setUpstream: true, + }); + }); + + test("handlePull pulls from remote", async () => { + await handlers.handlePull({ dir: "/repo" }, 1); + expect(gitService.pull).toHaveBeenCalledWith("/repo", "origin", undefined, { + rebase: false, + }); + expect(rpc.sendResponse).toHaveBeenCalledWith(1, { + ok: true, + data: { output: "Already up to date." }, + }); + }); + + test("handlePull passes rebase option", async () => { + await handlers.handlePull({ dir: "/repo", rebase: true }, 1); + expect(gitService.pull).toHaveBeenCalledWith("/repo", "origin", undefined, { + rebase: true, + }); + }); + + test("handleFetch fetches from remote", async () => { + await handlers.handleFetch({ dir: "/repo" }, 1); + expect(gitService.fetch).toHaveBeenCalledWith("/repo", undefined, { + prune: false, + all: false, + }); + }); + + test("handleFetch passes prune and all options", async () => { + await handlers.handleFetch({ dir: "/repo", prune: true, all: true }, 1); + expect(gitService.fetch).toHaveBeenCalledWith("/repo", undefined, { + prune: true, + all: true, + }); + }); +}); + +// ========================================== +// REVERT +// ========================================== + +describe("GitAdvancedHandlers — Revert", () => { + test("handleRevert reverts a commit by hash", async () => { + await handlers.handleRevert({ dir: "/repo", hash: "abc1234" }, 1); + expect(gitService.revert).toHaveBeenCalledWith("/repo", "abc1234", { + noCommit: false, + }); + expect(rpc.sendResponse).toHaveBeenCalledWith(1, { + ok: true, + data: { output: "" }, + }); + }); + + test("handleRevert accepts commitHash alias", async () => { + await handlers.handleRevert({ dir: "/repo", commitHash: "def5678" }, 1); + expect(gitService.revert).toHaveBeenCalledWith("/repo", "def5678", { + noCommit: false, + }); + }); + + test("handleRevert passes noCommit flag", async () => { + await handlers.handleRevert({ dir: "/repo", hash: "abc", noCommit: true }, 1); + expect(gitService.revert).toHaveBeenCalledWith("/repo", "abc", { + noCommit: true, + }); + }); + + test("handleRevert returns BAD_REQUEST when hash missing", async () => { + await handlers.handleRevert({ dir: "/repo" }, 1); + expect(rpc.sendError).toHaveBeenCalledWith(1, { + code: "BAD_REQUEST", + message: expect.stringContaining("hash"), + }); + }); +}); + +// ========================================== +// Error Forwarding +// ========================================== + +describe("GitAdvancedHandlers — Error Forwarding", () => { + test("all handlers forward service errors as GIT_ERROR", async () => { + const err = new Error("network failure"); + for (const key of Object.keys(gitService)) { + const val = (gitService as any)[key]; + if (typeof val?.mockRejectedValue === "function") { + val.mockRejectedValue(err); + } + } + + const testCases: [string, () => Promise][] = [ + ["remoteList", () => handlers.handleRemoteList({ dir: "/r" }, 1)], + [ + "remoteAdd", + () => handlers.handleRemoteAdd({ dir: "/r", name: "o", url: "u" }, 1), + ], + ["remoteRemove", () => handlers.handleRemoteRemove({ dir: "/r", name: "o" }, 1)], + ["remoteGetUrl", () => handlers.handleRemoteGetUrl({ dir: "/r" }, 1)], + [ + "remoteSetUrl", + () => handlers.handleRemoteSetUrl({ dir: "/r", name: "o", url: "u" }, 1), + ], + ["push", () => handlers.handlePush({ dir: "/r" }, 1)], + ["pull", () => handlers.handlePull({ dir: "/r" }, 1)], + ["fetch", () => handlers.handleFetch({ dir: "/r" }, 1)], + ["revert", () => handlers.handleRevert({ 
dir: "/r", hash: "abc" }, 1)], + ]; + + for (const [name, fn] of testCases) { + rpc = createMockRpc(); + handlers = new GitAdvancedHandlers(rpc, undefined, gitService); + await fn(); + expect(rpc.sendError).toHaveBeenCalledWith(1, { + code: "GIT_ERROR", + message: "network failure", + }); + } + }); +}); diff --git a/bridge/__tests__/gitHandlers.test.ts b/bridge/__tests__/gitHandlers.test.ts new file mode 100644 index 0000000..f9436b5 --- /dev/null +++ b/bridge/__tests__/gitHandlers.test.ts @@ -0,0 +1,504 @@ +import { describe, expect, test, jest, beforeEach } from "@jest/globals"; +import { GitHandlers } from "../src/handlers/gitHandlers"; +import type { GitService } from "../src/services/gitService"; +import type { Rpc } from "../src/types"; + +// ─── Mock Factory ────────────────────────────────── + +function createMockRpc(): Rpc & { + _responses: any[]; + _errors: any[]; +} { + const responses: any[] = []; + const errors: any[] = []; + return { + sendResponse: jest.fn((id: number | string, payload: any) => { + responses.push({ id, payload }); + }), + sendError: jest.fn((id: number | string, err: any) => { + errors.push({ id, err }); + }), + _responses: responses, + _errors: errors, + }; +} + +function createMockLogger(): any { + return { + info: jest.fn(), + warn: jest.fn(), + error: jest.fn(), + debug: jest.fn(), + child: jest.fn().mockReturnThis(), + }; +} + +function createMockGitService(): GitService { + return { + isGitInstalled: jest.fn().mockResolvedValue(true), + isRepo: jest.fn().mockResolvedValue(true), + init: jest.fn().mockResolvedValue(undefined), + getRepoRoot: jest.fn().mockResolvedValue("/repo"), + getStatus: jest.fn().mockResolvedValue({ + isGitRepo: true, + branch: "main", + isDirty: false, + stagedCount: 0, + unstagedCount: 0, + untrackedCount: 0, + headCommit: "abc12345", + aheadBehind: { ahead: 0, behind: 0 }, + }), + getChangedFiles: jest.fn().mockResolvedValue([]), + stageFiles: jest.fn().mockResolvedValue(undefined), + stageAll: jest.fn().mockResolvedValue(undefined), + unstageFiles: jest.fn().mockResolvedValue(undefined), + commit: jest.fn().mockResolvedValue("abc1234"), + commitFiles: jest.fn().mockResolvedValue("abc1234"), + log: jest.fn().mockResolvedValue([]), + fileLog: jest.fn().mockResolvedValue([]), + listBranches: jest.fn().mockResolvedValue([]), + createBranch: jest.fn().mockResolvedValue(undefined), + checkoutBranch: jest.fn().mockResolvedValue(undefined), + discardChanges: jest.fn().mockResolvedValue(undefined), + stash: jest.fn().mockResolvedValue(undefined), + stashPop: jest.fn().mockResolvedValue(undefined), + diff: jest.fn().mockResolvedValue("diff output"), + ensureGitignore: jest.fn().mockResolvedValue(true), + generateGitignore: jest.fn().mockReturnValue("# gitignore"), + // Advanced methods (present on GitService but not used by GitHandlers) + resolveRef: jest.fn(), + getFileAtRef: jest.fn(), + show: jest.fn(), + push: jest.fn(), + pull: jest.fn(), + fetch: jest.fn(), + revert: jest.fn(), + remoteList: jest.fn(), + remoteAdd: jest.fn(), + remoteRemove: jest.fn(), + remoteGetUrl: jest.fn(), + remoteSetUrl: jest.fn(), + createTag: jest.fn(), + deleteTag: jest.fn(), + listTags: jest.fn(), + merge: jest.fn(), + abortMerge: jest.fn(), + rebase: jest.fn(), + cherryPick: jest.fn(), + blame: jest.fn(), + stashList: jest.fn(), + stashApply: jest.fn(), + stashDrop: jest.fn(), + stashClear: jest.fn(), + clone: jest.fn(), + dryMerge: jest.fn(), + getMergeState: jest.fn(), + markResolved: jest.fn(), + getProtectedBranches: jest.fn(), + isProtectedBranch: 
jest.fn(), + deleteBranch: jest.fn(), + renameBranch: jest.fn(), + } as any; +} + +// ─── Tests ────────────────────────────────────────── + +let rpc: ReturnType; +let logger: any; +let gitService: GitService; +let handlers: GitHandlers; + +beforeEach(() => { + rpc = createMockRpc(); + logger = createMockLogger(); + gitService = createMockGitService(); + handlers = new GitHandlers(rpc, logger, gitService); +}); + +// ========================================== +// requireDir Validation +// ========================================== + +describe("GitHandlers — requireDir validation", () => { + const handlerNames: [string, (p: any, id: number) => Promise][] = []; + + beforeEach(() => { + handlerNames.length = 0; + handlerNames.push( + ["handleStatus", (p, id) => handlers.handleStatus(p, id)], + ["handleInit", (p, id) => handlers.handleInit(p, id)], + ["handleChanges", (p, id) => handlers.handleChanges(p, id)], + ["handleStageAll", (p, id) => handlers.handleStageAll(p, id)], + ["handleLog", (p, id) => handlers.handleLog(p, id)], + ["handleBranches", (p, id) => handlers.handleBranches(p, id)], + ["handleDiff", (p, id) => handlers.handleDiff(p, id)], + ["handleEnsureIgnore", (p, id) => handlers.handleEnsureIgnore(p, id)], + ["handleStash", (p, id) => handlers.handleStash(p, id)], + ["handleStashPop", (p, id) => handlers.handleStashPop(p, id)] + ); + }); + + test("sends BAD_REQUEST when dir is missing", async () => { + for (const [name, fn] of handlerNames) { + rpc = createMockRpc(); + gitService = createMockGitService(); + handlers = new GitHandlers(rpc, logger, gitService); + + await fn({}, 1); + expect(rpc.sendError).toHaveBeenCalledWith(1, { + code: "BAD_REQUEST", + message: expect.stringContaining("dir"), + }); + } + }); + + test("accepts dir, path, or cwd as directory param", async () => { + for (const key of ["dir", "path", "cwd"]) { + rpc = createMockRpc(); + gitService = createMockGitService(); + handlers = new GitHandlers(rpc, logger, gitService); + + await handlers.handleStatus({ [key]: "/repo" }, 1); + expect(rpc.sendResponse).toHaveBeenCalled(); + } + }); +}); + +// ========================================== +// handleStatus +// ========================================== + +describe("GitHandlers — handleStatus", () => { + test("returns status data on success", async () => { + await handlers.handleStatus({ dir: "/repo" }, 1); + expect(gitService.getStatus).toHaveBeenCalledWith("/repo"); + expect(rpc.sendResponse).toHaveBeenCalledWith(1, { + ok: true, + data: expect.objectContaining({ isGitRepo: true, branch: "main" }), + }); + }); + + test("returns GIT_ERROR on failure", async () => { + (gitService.getStatus as jest.Mock).mockRejectedValue(new Error("git error")); + await handlers.handleStatus({ dir: "/repo" }, 1); + expect(rpc.sendError).toHaveBeenCalledWith(1, { + code: "GIT_ERROR", + message: "git error", + }); + }); +}); + +// ========================================== +// handleInit +// ========================================== + +describe("GitHandlers — handleInit", () => { + test("initializes repo, sets up gitignore, returns status", async () => { + await handlers.handleInit({ dir: "/repo" }, 1); + expect(gitService.init).toHaveBeenCalledWith("/repo", "main"); + expect(gitService.ensureGitignore).toHaveBeenCalledWith("/repo"); + expect(gitService.getStatus).toHaveBeenCalledWith("/repo"); + expect(rpc.sendResponse).toHaveBeenCalled(); + }); + + test("uses custom default branch", async () => { + await handlers.handleInit({ dir: "/repo", defaultBranch: "develop" }, 1); + 
expect(gitService.init).toHaveBeenCalledWith("/repo", "develop"); + }); +}); + +// ========================================== +// handleChanges +// ========================================== + +describe("GitHandlers — handleChanges", () => { + test("returns changed files array", async () => { + const mockChanges = [{ path: "file.txt", status: "M", staged: false }]; + (gitService.getChangedFiles as jest.Mock).mockResolvedValue(mockChanges); + + await handlers.handleChanges({ dir: "/repo" }, 1); + expect(rpc.sendResponse).toHaveBeenCalledWith(1, { + ok: true, + data: mockChanges, + }); + }); +}); + +// ========================================== +// handleStage +// ========================================== + +describe("GitHandlers — handleStage", () => { + test("stages specified files", async () => { + await handlers.handleStage({ dir: "/repo", files: ["a.txt", "b.txt"] }, 1); + expect(gitService.stageFiles).toHaveBeenCalledWith("/repo", ["a.txt", "b.txt"]); + expect(rpc.sendResponse).toHaveBeenCalledWith(1, { ok: true, data: null }); + }); + + test("returns BAD_REQUEST for missing files", async () => { + await handlers.handleStage({ dir: "/repo" }, 1); + expect(rpc.sendError).toHaveBeenCalledWith(1, { + code: "BAD_REQUEST", + message: expect.stringContaining("files"), + }); + }); + + test("returns BAD_REQUEST for empty files array", async () => { + await handlers.handleStage({ dir: "/repo", files: [] }, 1); + expect(rpc.sendError).toHaveBeenCalledWith(1, { + code: "BAD_REQUEST", + message: expect.stringContaining("files"), + }); + }); +}); + +// ========================================== +// handleStageAll +// ========================================== + +describe("GitHandlers — handleStageAll", () => { + test("stages all files", async () => { + await handlers.handleStageAll({ dir: "/repo" }, 1); + expect(gitService.stageAll).toHaveBeenCalledWith("/repo"); + expect(rpc.sendResponse).toHaveBeenCalledWith(1, { ok: true, data: null }); + }); +}); + +// ========================================== +// handleUnstage +// ========================================== + +describe("GitHandlers — handleUnstage", () => { + test("unstages specified files", async () => { + await handlers.handleUnstage({ dir: "/repo", files: ["a.txt"] }, 1); + expect(gitService.unstageFiles).toHaveBeenCalledWith("/repo", ["a.txt"]); + expect(rpc.sendResponse).toHaveBeenCalledWith(1, { ok: true, data: null }); + }); + + test("returns BAD_REQUEST for missing files", async () => { + await handlers.handleUnstage({ dir: "/repo" }, 1); + expect(rpc.sendError).toHaveBeenCalledWith(1, { + code: "BAD_REQUEST", + message: expect.stringContaining("files"), + }); + }); +}); + +// ========================================== +// handleCommit +// ========================================== + +describe("GitHandlers — handleCommit", () => { + test("commits with message and returns hash", async () => { + await handlers.handleCommit({ dir: "/repo", message: "feat: add X" }, 1); + expect(gitService.commit).toHaveBeenCalledWith("/repo", "feat: add X"); + expect(rpc.sendResponse).toHaveBeenCalledWith(1, { + ok: true, + data: { hash: "abc1234" }, + }); + }); + + test("returns BAD_REQUEST for missing message", async () => { + await handlers.handleCommit({ dir: "/repo" }, 1); + expect(rpc.sendError).toHaveBeenCalledWith(1, { + code: "BAD_REQUEST", + message: expect.stringContaining("message"), + }); + }); +}); + +// ========================================== +// handleLog +// ========================================== + +describe("GitHandlers — 
handleLog", () => { + test("returns log entries with default count", async () => { + const entries = [{ hash: "abc", subject: "test" }]; + (gitService.log as jest.Mock).mockResolvedValue(entries); + + await handlers.handleLog({ dir: "/repo" }, 1); + expect(gitService.log).toHaveBeenCalledWith("/repo", 20); + expect(rpc.sendResponse).toHaveBeenCalledWith(1, { ok: true, data: entries }); + }); + + test("respects custom count", async () => { + await handlers.handleLog({ dir: "/repo", count: 5 }, 1); + expect(gitService.log).toHaveBeenCalledWith("/repo", 5); + }); +}); + +// ========================================== +// handleBranches +// ========================================== + +describe("GitHandlers — handleBranches", () => { + test("returns branch list", async () => { + const branches = [{ name: "main", current: true }]; + (gitService.listBranches as jest.Mock).mockResolvedValue(branches); + + await handlers.handleBranches({ dir: "/repo" }, 1); + expect(rpc.sendResponse).toHaveBeenCalledWith(1, { ok: true, data: branches }); + }); +}); + +// ========================================== +// handleCreateBranch +// ========================================== + +describe("GitHandlers — handleCreateBranch", () => { + test("creates branch and returns name", async () => { + await handlers.handleCreateBranch({ dir: "/repo", name: "feature" }, 1); + expect(gitService.createBranch).toHaveBeenCalledWith("/repo", "feature"); + expect(rpc.sendResponse).toHaveBeenCalledWith(1, { + ok: true, + data: { branch: "feature" }, + }); + }); + + test("returns BAD_REQUEST for missing name", async () => { + await handlers.handleCreateBranch({ dir: "/repo" }, 1); + expect(rpc.sendError).toHaveBeenCalledWith(1, { + code: "BAD_REQUEST", + message: expect.stringContaining("name"), + }); + }); +}); + +// ========================================== +// handleCheckout +// ========================================== + +describe("GitHandlers — handleCheckout", () => { + test("checks out branch", async () => { + await handlers.handleCheckout({ dir: "/repo", name: "develop" }, 1); + expect(gitService.checkoutBranch).toHaveBeenCalledWith("/repo", "develop"); + expect(rpc.sendResponse).toHaveBeenCalledWith(1, { + ok: true, + data: { branch: "develop" }, + }); + }); + + test("returns BAD_REQUEST for missing name", async () => { + await handlers.handleCheckout({ dir: "/repo" }, 1); + expect(rpc.sendError).toHaveBeenCalledWith(1, { + code: "BAD_REQUEST", + message: expect.stringContaining("name"), + }); + }); +}); + +// ========================================== +// handleDiscard +// ========================================== + +describe("GitHandlers — handleDiscard", () => { + test("discards changes to specified files", async () => { + await handlers.handleDiscard({ dir: "/repo", files: ["f.txt"] }, 1); + expect(gitService.discardChanges).toHaveBeenCalledWith("/repo", ["f.txt"]); + expect(rpc.sendResponse).toHaveBeenCalledWith(1, { ok: true, data: null }); + }); + + test("returns BAD_REQUEST for missing files", async () => { + await handlers.handleDiscard({ dir: "/repo" }, 1); + expect(rpc.sendError).toHaveBeenCalledWith(1, { + code: "BAD_REQUEST", + message: expect.stringContaining("files"), + }); + }); +}); + +// ========================================== +// handleStash / handleStashPop +// ========================================== + +describe("GitHandlers — handleStash / handleStashPop", () => { + test("stash saves changes", async () => { + await handlers.handleStash({ dir: "/repo", message: "wip" }, 1); + 
expect(gitService.stash).toHaveBeenCalledWith("/repo", "wip"); + expect(rpc.sendResponse).toHaveBeenCalledWith(1, { ok: true, data: null }); + }); + + test("stashPop restores stash", async () => { + await handlers.handleStashPop({ dir: "/repo" }, 1); + expect(gitService.stashPop).toHaveBeenCalledWith("/repo"); + expect(rpc.sendResponse).toHaveBeenCalledWith(1, { ok: true, data: null }); + }); +}); + +// ========================================== +// handleDiff +// ========================================== + +describe("GitHandlers — handleDiff", () => { + test("returns diff output", async () => { + await handlers.handleDiff({ dir: "/repo", file: "readme.md" }, 1); + expect(gitService.diff).toHaveBeenCalledWith("/repo", "readme.md", false); + expect(rpc.sendResponse).toHaveBeenCalledWith(1, { + ok: true, + data: { diff: "diff output" }, + }); + }); + + test("passes staged flag", async () => { + await handlers.handleDiff({ dir: "/repo", file: "a.ts", staged: true }, 1); + expect(gitService.diff).toHaveBeenCalledWith("/repo", "a.ts", true); + }); + + test("works without file (repo-wide diff)", async () => { + await handlers.handleDiff({ dir: "/repo" }, 1); + expect(gitService.diff).toHaveBeenCalledWith("/repo", undefined, false); + }); +}); + +// ========================================== +// handleEnsureIgnore +// ========================================== + +describe("GitHandlers — handleEnsureIgnore", () => { + test("returns modified flag", async () => { + await handlers.handleEnsureIgnore({ dir: "/repo" }, 1); + expect(gitService.ensureGitignore).toHaveBeenCalledWith("/repo"); + expect(rpc.sendResponse).toHaveBeenCalledWith(1, { + ok: true, + data: { modified: true }, + }); + }); +}); + +// ========================================== +// Error Forwarding +// ========================================== + +describe("GitHandlers — Error Forwarding", () => { + test("all handlers forward errors as GIT_ERROR", async () => { + const errorMsg = "unexpected git failure"; + // Mock all service methods to reject + for (const key of Object.keys(gitService)) { + const val = (gitService as any)[key]; + if (typeof val?.mockRejectedValue === "function") { + val.mockRejectedValue(new Error(errorMsg)); + } + } + + const testCases: [string, () => Promise][] = [ + ["status", () => handlers.handleStatus({ dir: "/r" }, 1)], + ["changes", () => handlers.handleChanges({ dir: "/r" }, 1)], + ["stageAll", () => handlers.handleStageAll({ dir: "/r" }, 1)], + ["log", () => handlers.handleLog({ dir: "/r" }, 1)], + ["branches", () => handlers.handleBranches({ dir: "/r" }, 1)], + ["stashPop", () => handlers.handleStashPop({ dir: "/r" }, 1)], + ["ensureIgnore", () => handlers.handleEnsureIgnore({ dir: "/r" }, 1)], + ]; + + for (const [name, fn] of testCases) { + rpc = createMockRpc(); + handlers = new GitHandlers(rpc, logger, gitService); + await fn(); + expect(rpc.sendError).toHaveBeenCalledWith(1, { + code: "GIT_ERROR", + message: errorMsg, + }); + } + }); +}); diff --git a/bridge/__tests__/gitService.test.ts b/bridge/__tests__/gitService.test.ts new file mode 100644 index 0000000..119d3dd --- /dev/null +++ b/bridge/__tests__/gitService.test.ts @@ -0,0 +1,608 @@ +import { afterAll, beforeEach,beforeAll, describe, expect, test } from "@jest/globals"; +import { GitService } from "../src/services/gitService"; +import fs from "fs/promises"; +import fsSync from "fs"; +import path from "path"; +import os from "os"; +import { execFile } from "child_process"; +import { promisify } from "util"; + +const execFileAsync = 
promisify(execFile); + +// ─── Test Setup ────────────────────────────────────── + +const TEST_ROOT = path.join(os.tmpdir(), "git-service-test-" + Date.now()); +let repoDir: string; +let git: GitService; +let testCounter = 0; + +/** + * Helper: run raw git commands in a directory + */ +async function rawGit(cwd: string, ...args: string[]): Promise { + const { stdout } = await execFileAsync("git", args, { cwd, windowsHide: true }); + return stdout.trimEnd(); +} + +/** + * Helper: create a fresh temp repo for each test + */ +async function createTempRepo(): Promise { + testCounter++; + const dir = path.join(TEST_ROOT, `repo-${testCounter}`); + await fs.mkdir(dir, { recursive: true }); + await rawGit(dir, "init", "-b", "main"); + await rawGit(dir, "config", "user.email", "test@relwave.dev"); + await rawGit(dir, "config", "user.name", "Test User"); + return dir; +} + +/** + * Helper: create a file and commit it + */ +async function commitFile(dir: string, filename: string, content: string, message: string) { + await fs.writeFile(path.join(dir, filename), content, "utf-8"); + await rawGit(dir, "add", filename); + await rawGit(dir, "commit", "-m", message); +} + +beforeAll(async () => { + await fs.mkdir(TEST_ROOT, { recursive: true }); +}); + +afterAll(async () => { + if (fsSync.existsSync(TEST_ROOT)) { + await fs.rm(TEST_ROOT, { recursive: true, force: true }); + } +}); + +beforeEach(async () => { + git = new GitService(); + repoDir = await createTempRepo(); +}); + +// ========================================== +// Basic Repo Operations +// ========================================== + +describe("GitService — Basic Operations", () => { + test("isGitInstalled returns true", async () => { + const installed = await git.isGitInstalled(); + expect(installed).toBe(true); + }); + + test("isRepo returns true for initialized repo", async () => { + expect(await git.isRepo(repoDir)).toBe(true); + }); + + test("isRepo returns false for non-repo directory", async () => { + const plain = path.join(TEST_ROOT, "plain-" + Date.now()); + await fs.mkdir(plain, { recursive: true }); + expect(await git.isRepo(plain)).toBe(false); + }); + + test("init creates a new repository", async () => { + const dir = path.join(TEST_ROOT, "new-init-" + Date.now()); + await fs.mkdir(dir, { recursive: true }); + await git.init(dir, "main"); + expect(await git.isRepo(dir)).toBe(true); + }); + + test("getRepoRoot returns the repository root", async () => { + const root = await git.getRepoRoot(repoDir); + // Normalize path separators for cross-platform comparison + expect(path.normalize(root)).toBe(path.normalize(repoDir)); + }); +}); + +// ========================================== +// Status +// ========================================== + +describe("GitService — Status", () => { + test("returns clean status for empty repo", async () => { + const status = await git.getStatus(repoDir); + expect(status.isGitRepo).toBe(true); + expect(status.isDirty).toBe(false); + expect(status.stagedCount).toBe(0); + expect(status.unstagedCount).toBe(0); + expect(status.untrackedCount).toBe(0); + }); + + test("returns not-a-repo status for plain directory", async () => { + const dir = path.join(TEST_ROOT, "not-a-repo-" + Date.now()); + await fs.mkdir(dir, { recursive: true }); + + const status = await git.getStatus(dir); + expect(status.isGitRepo).toBe(false); + expect(status.branch).toBeNull(); + }); + + test("detects untracked files", async () => { + await fs.writeFile(path.join(repoDir, "newfile.txt"), "hello", "utf-8"); + const status = await 
git.getStatus(repoDir); + expect(status.isDirty).toBe(true); + expect(status.untrackedCount).toBe(1); + }); + + test("detects staged files", async () => { + await fs.writeFile(path.join(repoDir, "staged.txt"), "staged", "utf-8"); + await rawGit(repoDir, "add", "staged.txt"); + + const status = await git.getStatus(repoDir); + expect(status.stagedCount).toBe(1); + expect(status.isDirty).toBe(true); + }); + + test("detects unstaged modifications", async () => { + await commitFile(repoDir, "file.txt", "original", "initial"); + await fs.writeFile(path.join(repoDir, "file.txt"), "modified", "utf-8"); + + const status = await git.getStatus(repoDir); + expect(status.unstagedCount).toBe(1); + expect(status.isDirty).toBe(true); + }); + + test("returns branch name", async () => { + await commitFile(repoDir, "file.txt", "content", "first commit"); + const status = await git.getStatus(repoDir); + expect(status.branch).toBe("main"); + }); + + test("returns headCommit hash", async () => { + await commitFile(repoDir, "file.txt", "content", "first commit"); + const status = await git.getStatus(repoDir); + expect(status.headCommit).toBeDefined(); + expect(status.headCommit!.length).toBe(8); + }); +}); + +// ========================================== +// Changed Files +// ========================================== + +describe("GitService — Changed Files", () => { + test("returns empty for clean repo", async () => { + await commitFile(repoDir, "file.txt", "content", "initial"); + const changes = await git.getChangedFiles(repoDir); + expect(changes).toEqual([]); + }); + + test("detects untracked files", async () => { + await fs.writeFile(path.join(repoDir, "new.txt"), "new", "utf-8"); + const changes = await git.getChangedFiles(repoDir); + expect(changes).toHaveLength(1); + expect(changes[0].status).toBe("?"); + expect(changes[0].staged).toBe(false); + expect(changes[0].path).toBe("new.txt"); + }); + + test("detects staged modifications", async () => { + await commitFile(repoDir, "file.txt", "original", "init"); + await fs.writeFile(path.join(repoDir, "file.txt"), "changed", "utf-8"); + await rawGit(repoDir, "add", "file.txt"); + + const changes = await git.getChangedFiles(repoDir); + const staged = changes.filter((c) => c.staged); + expect(staged.length).toBeGreaterThanOrEqual(1); + expect(staged[0].status).toBe("M"); + }); + + test("detects deleted files", async () => { + await commitFile(repoDir, "file.txt", "content", "init"); + await fs.unlink(path.join(repoDir, "file.txt")); + + const changes = await git.getChangedFiles(repoDir); + const deleted = changes.filter((c) => c.status === "D"); + expect(deleted.length).toBe(1); + }); +}); + +// ========================================== +// Staging & Committing +// ========================================== + +describe("GitService — Stage & Commit", () => { + test("stageFiles stages specific files", async () => { + await fs.writeFile(path.join(repoDir, "a.txt"), "a", "utf-8"); + await fs.writeFile(path.join(repoDir, "b.txt"), "b", "utf-8"); + + await git.stageFiles(repoDir, ["a.txt"]); + + const status = await git.getStatus(repoDir); + expect(status.stagedCount).toBe(1); + expect(status.untrackedCount).toBe(1); + }); + + test("stageFiles is no-op for empty array", async () => { + // Should not throw + await expect(git.stageFiles(repoDir, [])).resolves.not.toThrow(); + }); + + test("stageAll stages everything", async () => { + await fs.writeFile(path.join(repoDir, "a.txt"), "a", "utf-8"); + await fs.writeFile(path.join(repoDir, "b.txt"), "b", "utf-8"); + + await 
git.stageAll(repoDir); + + const status = await git.getStatus(repoDir); + expect(status.stagedCount).toBe(2); + expect(status.untrackedCount).toBe(0); + }); + + test("commit returns a string (may be hash or empty)", async () => { + await fs.writeFile(path.join(repoDir, "file.txt"), "data", "utf-8"); + await git.stageAll(repoDir); + + const hash = await git.commit(repoDir, "test commit"); + expect(typeof hash).toBe("string"); + + // Verify the commit actually happened + const log = await git.log(repoDir, 1); + expect(log).toHaveLength(1); + expect(log[0].subject).toBe("test commit"); + }); + + test("unstageFiles removes files from staging", async () => { + await fs.writeFile(path.join(repoDir, "file.txt"), "data", "utf-8"); + await git.stageAll(repoDir); + expect((await git.getStatus(repoDir)).stagedCount).toBe(1); + + await git.unstageFiles(repoDir, ["file.txt"]); + // After unstaging a new file, it goes back to untracked + const status = await git.getStatus(repoDir); + expect(status.stagedCount).toBe(0); + }); + + test("commitFiles stages and commits in one call", async () => { + await commitFile(repoDir, "base.txt", "base", "initial"); // Need a first commit + await fs.writeFile(path.join(repoDir, "auto.txt"), "auto", "utf-8"); + + const hash = await git.commitFiles(repoDir, ["auto.txt"], "auto commit"); + expect(hash).toBeDefined(); + + const status = await git.getStatus(repoDir); + expect(status.isDirty).toBe(false); + }); +}); + +// ========================================== +// Log & History +// ========================================== + +describe("GitService — Log", () => { + test("returns empty log for fresh repo", async () => { + const entries = await git.log(repoDir); + expect(entries).toEqual([]); + }); + + test("returns commit entries", async () => { + await commitFile(repoDir, "a.txt", "a", "first commit"); + await commitFile(repoDir, "b.txt", "b", "second commit"); + + const entries = await git.log(repoDir); + expect(entries).toHaveLength(2); + expect(entries[0].subject).toBe("second commit"); + expect(entries[1].subject).toBe("first commit"); + }); + + test("entries have correct fields", async () => { + await commitFile(repoDir, "file.txt", "data", "test message"); + + const [entry] = await git.log(repoDir, 1); + expect(entry.hash).toBeDefined(); + expect(entry.fullHash).toBeDefined(); + expect(entry.author).toBe("Test User"); + expect(entry.date).toBeDefined(); + expect(entry.subject).toBe("test message"); + }); + + test("respects count limit", async () => { + for (let i = 0; i < 5; i++) { + await commitFile(repoDir, `f${i}.txt`, `${i}`, `commit ${i}`); + } + + const limited = await git.log(repoDir, 3); + expect(limited).toHaveLength(3); + }); + + test("fileLog returns commits for specific file", async () => { + await commitFile(repoDir, "a.txt", "v1", "commit a"); + await commitFile(repoDir, "b.txt", "v1", "commit b"); + await commitFile(repoDir, "a.txt", "v2", "update a"); + + const aLog = await git.fileLog(repoDir, "a.txt"); + expect(aLog).toHaveLength(2); + expect(aLog.map((e) => e.subject)).toEqual(["update a", "commit a"]); + }); +}); + +// ========================================== +// Branches +// ========================================== + +describe("GitService — Branches", () => { + beforeEach(async () => { + await commitFile(repoDir, "init.txt", "init", "initial commit"); + }); + + test("lists branches with current indicator", async () => { + const branches = await git.listBranches(repoDir); + expect(branches).toHaveLength(1); + 
expect(branches[0].name).toBe("main"); + expect(branches[0].current).toBe(true); + }); + + test("creates and lists new branches", async () => { + await git.createBranch(repoDir, "feature"); + + const branches = await git.listBranches(repoDir); + expect(branches).toHaveLength(2); + + const feature = branches.find((b) => b.name === "feature"); + expect(feature).toBeDefined(); + expect(feature!.current).toBe(true); // createBranch does checkout -b + }); + + test("checkout switches branches", async () => { + await git.createBranch(repoDir, "feature"); + await git.checkoutBranch(repoDir, "main"); + + const branches = await git.listBranches(repoDir); + const main = branches.find((b) => b.name === "main"); + expect(main!.current).toBe(true); + }); + + test("resolveRef returns commit hash", async () => { + const hash = await git.resolveRef(repoDir, "HEAD"); + expect(hash).toBeDefined(); + expect(hash!.length).toBe(40); + }); + + test("resolveRef returns null for invalid ref", async () => { + const hash = await git.resolveRef(repoDir, "nonexistent"); + expect(hash).toBeNull(); + }); +}); + +// ========================================== +// Discard & Stash +// ========================================== + +describe("GitService — Discard & Stash", () => { + beforeEach(async () => { + await commitFile(repoDir, "file.txt", "original", "initial"); + }); + + test("discardChanges restores file content", async () => { + await fs.writeFile(path.join(repoDir, "file.txt"), "modified", "utf-8"); + await git.discardChanges(repoDir, ["file.txt"]); + + const content = await fs.readFile(path.join(repoDir, "file.txt"), "utf-8"); + expect(content).toBe("original"); + }); + + test("stash saves and restores changes", async () => { + await fs.writeFile(path.join(repoDir, "file.txt"), "modified", "utf-8"); + await git.stash(repoDir, "wip changes"); + + // Working tree should be clean after stash + const status = await git.getStatus(repoDir); + expect(status.isDirty).toBe(false); + + // Pop restores + await git.stashPop(repoDir); + const content = await fs.readFile(path.join(repoDir, "file.txt"), "utf-8"); + expect(content).toBe("modified"); + }); +}); + +// ========================================== +// Diff +// ========================================== + +describe("GitService — Diff", () => { + beforeEach(async () => { + await commitFile(repoDir, "file.txt", "line1\nline2\n", "initial"); + }); + + test("diff shows unstaged changes", async () => { + await fs.writeFile(path.join(repoDir, "file.txt"), "line1\nline2\nline3\n", "utf-8"); + + const diff = await git.diff(repoDir, "file.txt"); + expect(diff).toContain("+line3"); + }); + + test("diff shows staged changes with --staged", async () => { + await fs.writeFile(path.join(repoDir, "file.txt"), "changed\n", "utf-8"); + await rawGit(repoDir, "add", "file.txt"); + + const diff = await git.diff(repoDir, "file.txt", true); + expect(diff).toContain("-line1"); + expect(diff).toContain("+changed"); + }); + + test("diff returns empty for no changes", async () => { + const diff = await git.diff(repoDir); + expect(diff).toBe(""); + }); +}); + +// ========================================== +// Gitignore +// ========================================== + +describe("GitService — Gitignore", () => { + test("generateGitignore returns rules containing relwave.local.json", () => { + const content = git.generateGitignore(); + expect(content).toContain("relwave.local.json"); + expect(content).toContain(".credentials"); + expect(content).toContain(".DS_Store"); + }); + + 
test("ensureGitignore creates new file", async () => { + const modified = await git.ensureGitignore(repoDir); + expect(modified).toBe(true); + + const content = await fs.readFile(path.join(repoDir, ".gitignore"), "utf-8"); + expect(content).toContain("relwave.local.json"); + }); + + test("ensureGitignore is idempotent", async () => { + await git.ensureGitignore(repoDir); + const secondCall = await git.ensureGitignore(repoDir); + expect(secondCall).toBe(false); + }); + + test("ensureGitignore appends to existing file", async () => { + await fs.writeFile(path.join(repoDir, ".gitignore"), "node_modules/\n", "utf-8"); + const modified = await git.ensureGitignore(repoDir); + expect(modified).toBe(true); + + const content = await fs.readFile(path.join(repoDir, ".gitignore"), "utf-8"); + expect(content).toContain("node_modules/"); + expect(content).toContain("relwave.local.json"); + }); +}); + +// ========================================== +// Remote Management +// ========================================== + +describe("GitService — Remote Management", () => { + beforeEach(async () => { + await commitFile(repoDir, "init.txt", "init", "initial"); + }); + + test("remoteList returns empty for no remotes", async () => { + const remotes = await git.remoteList(repoDir); + expect(remotes).toEqual([]); + }); + + test("remoteAdd and remoteList", async () => { + await git.remoteAdd(repoDir, "origin", "https://github.com/test/repo.git"); + + const remotes = await git.remoteList(repoDir); + expect(remotes).toHaveLength(1); + expect(remotes[0].name).toBe("origin"); + expect(remotes[0].fetchUrl).toBe("https://github.com/test/repo.git"); + expect(remotes[0].pushUrl).toBe("https://github.com/test/repo.git"); + }); + + test("remoteRemove removes a remote", async () => { + await git.remoteAdd(repoDir, "origin", "https://github.com/test/repo.git"); + await git.remoteRemove(repoDir, "origin"); + + const remotes = await git.remoteList(repoDir); + expect(remotes).toEqual([]); + }); + + test("remoteGetUrl returns URL", async () => { + await git.remoteAdd(repoDir, "origin", "https://github.com/test/repo.git"); + const url = await git.remoteGetUrl(repoDir, "origin"); + expect(url).toBe("https://github.com/test/repo.git"); + }); + + test("remoteGetUrl returns null for non-existent remote", async () => { + const url = await git.remoteGetUrl(repoDir, "nonexistent"); + expect(url).toBeNull(); + }); + + test("remoteSetUrl changes URL", async () => { + await git.remoteAdd(repoDir, "origin", "https://old.url/repo.git"); + await git.remoteSetUrl(repoDir, "origin", "https://new.url/repo.git"); + + const url = await git.remoteGetUrl(repoDir, "origin"); + expect(url).toBe("https://new.url/repo.git"); + }); + + test("multiple remotes", async () => { + await git.remoteAdd(repoDir, "origin", "https://github.com/main.git"); + await git.remoteAdd(repoDir, "upstream", "https://github.com/upstream.git"); + + const remotes = await git.remoteList(repoDir); + expect(remotes).toHaveLength(2); + expect(remotes.map((r) => r.name).sort()).toEqual(["origin", "upstream"]); + }); +}); + +// ========================================== +// Tags +// ========================================== + +describe("GitService — Tags", () => { + beforeEach(async () => { + await commitFile(repoDir, "init.txt", "init", "initial commit"); + }); + + test("createTag creates a lightweight tag", async () => { + await git.createTag(repoDir, "v1.0.0"); + const tags = await git.listTags(repoDir); + expect(tags).toContain("v1.0.0"); + }); + + test("createTag creates an 
annotated tag", async () => { + await git.createTag(repoDir, "v2.0.0", "Release 2.0"); + const tags = await git.listTags(repoDir); + expect(tags).toContain("v2.0.0"); + }); + + test("deleteTag removes a tag", async () => { + await git.createTag(repoDir, "v1.0.0"); + await git.deleteTag(repoDir, "v1.0.0"); + const tags = await git.listTags(repoDir); + expect(tags).not.toContain("v1.0.0"); + }); + + test("listTags returns all tags", async () => { + await git.createTag(repoDir, "v1.0.0"); + await commitFile(repoDir, "b.txt", "b", "second"); + await git.createTag(repoDir, "v2.0.0"); + + const tags = await git.listTags(repoDir); + expect(tags).toHaveLength(2); + expect(tags).toContain("v1.0.0"); + expect(tags).toContain("v2.0.0"); + }); +}); + +// ========================================== +// Revert +// ========================================== + +describe("GitService — Revert", () => { + test("revert creates a revert commit", async () => { + await commitFile(repoDir, "file.txt", "version1", "commit 1"); + await commitFile(repoDir, "file.txt", "version2", "commit 2"); + + const log = await git.log(repoDir, 1); + await git.revert(repoDir, log[0].hash); + + const afterLog = await git.log(repoDir); + expect(afterLog[0].subject).toContain("Revert"); + }); +}); + +// ========================================== +// File at Ref +// ========================================== + +describe("GitService — File at Ref", () => { + test("getFileAtRef returns file content at HEAD", async () => { + await commitFile(repoDir, "file.txt", "hello world", "add file"); + const content = await git.getFileAtRef(repoDir, "file.txt", "HEAD"); + expect(content).toBe("hello world"); + }); + + test("getFileAtRef returns null for missing file", async () => { + await commitFile(repoDir, "file.txt", "data", "init"); + const content = await git.getFileAtRef(repoDir, "nonexistent.txt", "HEAD"); + expect(content).toBeNull(); + }); + + test("show is alias for getFileAtRef", async () => { + await commitFile(repoDir, "file.txt", "show me", "add"); + const content = await git.show(repoDir, "HEAD", "file.txt"); + expect(content).toBe("show me"); + }); +}); diff --git a/bridge/__tests__/projectStore.test.ts b/bridge/__tests__/projectStore.test.ts new file mode 100644 index 0000000..b0a4dbd --- /dev/null +++ b/bridge/__tests__/projectStore.test.ts @@ -0,0 +1,566 @@ +import { afterEach, beforeEach, describe, expect, jest, test } from "@jest/globals"; +import { ProjectStore, ProjectMetadata, SchemaSnapshot } from "../src/services/projectStore"; +import fs from "fs/promises"; +import fsSync from "fs"; +import path from "path"; +import os from "os"; + +// ─── Test Setup ────────────────────────────────────── + +const TEST_ROOT = path.join(os.tmpdir(), "projectstore-test-" + Date.now()); +const PROJECTS_DIR = path.join(TEST_ROOT, "projects"); +const INDEX_FILE = path.join(PROJECTS_DIR, "index.json"); + +/** + * ProjectStore uses `getProjectDir()` from config.ts, which is hardcoded. + * We mock the config module to redirect to our temp folder. 
+ */ +jest.mock("../src/utils/config", () => { + const original = jest.requireActual("../src/utils/config") as any; + const _path = require("path"); + const _os = require("os"); + const testProjects = _path.join(_os.tmpdir(), "projectstore-test-" + Date.now(), "projects"); + return { + ...original, + PROJECTS_FOLDER: testProjects, + PROJECTS_INDEX_FILE: _path.join(testProjects, "index.json"), + getProjectDir: (id: string) => _path.join(testProjects, id), + }; +}); + +/** + * Mock dbStoreInstance.getDB to avoid needing a real database store + */ +jest.mock("../src/services/dbStore", () => ({ + dbStoreInstance: { + getDB: jest.fn<() => Promise>().mockResolvedValue({ + id: "test-db-id", + name: "TestDB", + type: "POSTGRES", + host: "localhost", + port: 5432, + }), + }, + DBMeta: {}, +})); + +// After mocking, get the actual folder being used +import { PROJECTS_FOLDER, PROJECTS_INDEX_FILE, getProjectDir } from "../src/utils/config"; + +describe("ProjectStore", () => { + let store: ProjectStore; + + beforeEach(async () => { + // Clean & create test directory + if (fsSync.existsSync(PROJECTS_FOLDER)) { + await fs.rm(PROJECTS_FOLDER, { recursive: true, force: true }); + } + await fs.mkdir(PROJECTS_FOLDER, { recursive: true }); + + store = new ProjectStore(PROJECTS_FOLDER, PROJECTS_INDEX_FILE); + }); + + afterEach(async () => { + if (fsSync.existsSync(PROJECTS_FOLDER)) { + await fs.rm(PROJECTS_FOLDER, { recursive: true, force: true }); + } + }); + + // ========================================== + // Project CRUD + // ========================================== + + describe("CRUD Operations", () => { + test("should create a new project", async () => { + const project = await store.createProject({ + databaseId: "db-1", + name: "My Project", + description: "A test project", + }); + + expect(project).toBeDefined(); + expect(project.id).toBeDefined(); + expect(project.name).toBe("My Project"); + expect(project.description).toBe("A test project"); + expect(project.databaseId).toBe("db-1"); + expect(project.engine).toBe("POSTGRES"); // from mocked dbStore + expect(project.version).toBe(1); + expect(project.createdAt).toBeDefined(); + expect(project.updatedAt).toBeDefined(); + }); + + test("should create project directories", async () => { + const project = await store.createProject({ + databaseId: "db-1", + name: "DirTest", + }); + + const dir = getProjectDir(project.id); + expect(fsSync.existsSync(dir)).toBe(true); + expect(fsSync.existsSync(path.join(dir, "schema"))).toBe(true); + expect(fsSync.existsSync(path.join(dir, "diagrams"))).toBe(true); + expect(fsSync.existsSync(path.join(dir, "queries"))).toBe(true); + }); + + test("should create initial sub-files", async () => { + const project = await store.createProject({ + databaseId: "db-1", + name: "SubFileTest", + }); + + const dir = getProjectDir(project.id); + expect(fsSync.existsSync(path.join(dir, "relwave.json"))).toBe(true); + expect(fsSync.existsSync(path.join(dir, "relwave.local.json"))).toBe(true); + expect(fsSync.existsSync(path.join(dir, "schema", "schema.json"))).toBe(true); + expect(fsSync.existsSync(path.join(dir, "diagrams", "er.json"))).toBe(true); + expect(fsSync.existsSync(path.join(dir, "queries", "queries.json"))).toBe(true); + expect(fsSync.existsSync(path.join(dir, ".gitignore"))).toBe(true); + }); + + test("should get project by ID", async () => { + const created = await store.createProject({ + databaseId: "db-1", + name: "GetTest", + }); + + const found = await store.getProject(created.id); + expect(found).toBeDefined(); + 
expect(found!.id).toBe(created.id); + expect(found!.name).toBe("GetTest"); + }); + + test("should return null for non-existent project", async () => { + const found = await store.getProject("non-existent-id"); + expect(found).toBeNull(); + }); + + test("should get project by databaseId", async () => { + await store.createProject({ + databaseId: "db-unique", + name: "Linked Project", + }); + + const found = await store.getProjectByDatabaseId("db-unique"); + expect(found).toBeDefined(); + expect(found!.name).toBe("Linked Project"); + }); + + test("should return null for unlinked databaseId", async () => { + const found = await store.getProjectByDatabaseId("no-such-db"); + expect(found).toBeNull(); + }); + + test("should list all projects", async () => { + await store.createProject({ databaseId: "db-1", name: "P1" }); + await store.createProject({ databaseId: "db-2", name: "P2" }); + await store.createProject({ databaseId: "db-3", name: "P3" }); + + const projects = await store.listProjects(); + expect(projects).toHaveLength(3); + expect(projects.map((p) => p.name).sort()).toEqual(["P1", "P2", "P3"]); + }); + + test("should update project metadata", async () => { + const project = await store.createProject({ + databaseId: "db-1", + name: "Original", + description: "Old desc", + }); + + const updated = await store.updateProject(project.id, { + name: "Renamed", + description: "New desc", + }); + + expect(updated).toBeDefined(); + expect(updated!.name).toBe("Renamed"); + expect(updated!.description).toBe("New desc"); + expect(updated!.updatedAt).not.toBe(project.updatedAt); + }); + + test("should only update whitelisted fields", async () => { + const project = await store.createProject({ + databaseId: "db-1", + name: "WhitelistTest", + }); + + const updated = await store.updateProject(project.id, { + name: "NewName", + // These should be ignored / not writable: + ...({ id: "injected-id", databaseId: "injected-db" } as any), + }); + + expect(updated!.name).toBe("NewName"); + expect(updated!.id).toBe(project.id); // unchanged + expect(updated!.databaseId).toBe(project.databaseId); // unchanged + }); + + test("should return null when updating non-existent project", async () => { + const result = await store.updateProject("no-such-id", { name: "x" }); + expect(result).toBeNull(); + }); + + test("should sync index after update", async () => { + const project = await store.createProject({ + databaseId: "db-1", + name: "SyncTest", + }); + + await store.updateProject(project.id, { name: "Updated" }); + + const projects = await store.listProjects(); + expect(projects.find((p) => p.id === project.id)?.name).toBe("Updated"); + }); + + test("should delete a project", async () => { + const project = await store.createProject({ + databaseId: "db-1", + name: "DeleteTest", + }); + + const dir = getProjectDir(project.id); + expect(fsSync.existsSync(dir)).toBe(true); + + await store.deleteProject(project.id); + + expect(fsSync.existsSync(dir)).toBe(false); + const projects = await store.listProjects(); + expect(projects.find((p) => p.id === project.id)).toBeUndefined(); + }); + + test("should handle deleting non-existent project gracefully", async () => { + // Should not throw + await expect(store.deleteProject("no-such-id")).resolves.not.toThrow(); + }); + }); + + // ========================================== + // Schema Operations + // ========================================== + + describe("Schema Operations", () => { + let projectId: string; + + beforeEach(async () => { + const project = await 
store.createProject({ + databaseId: "db-1", + name: "SchemaTest", + }); + projectId = project.id; + }); + + const mockSchemas: SchemaSnapshot[] = [ + { + name: "public", + tables: [ + { + name: "users", + type: "BASE TABLE", + columns: [ + { name: "id", type: "integer", nullable: false, isPrimaryKey: true, isForeignKey: false, defaultValue: null, isUnique: true }, + { name: "email", type: "varchar(255)", nullable: false, isPrimaryKey: false, isForeignKey: false, defaultValue: null, isUnique: true }, + ], + }, + ], + }, + ]; + + test("should get initial empty schema", async () => { + const schema = await store.getSchema(projectId); + expect(schema).toBeDefined(); + expect(schema!.schemas).toEqual([]); + }); + + test("should save and retrieve schema", async () => { + await store.saveSchema(projectId, mockSchemas); + const saved = await store.getSchema(projectId); + + expect(saved).toBeDefined(); + expect(saved!.schemas).toHaveLength(1); + expect(saved!.schemas[0].name).toBe("public"); + expect(saved!.schemas[0].tables[0].name).toBe("users"); + }); + + test("should skip write when schema is identical (cachedAt dedup)", async () => { + const first = await store.saveSchema(projectId, mockSchemas); + const second = await store.saveSchema(projectId, mockSchemas); + + // Same cachedAt means the write was skipped + expect(second.cachedAt).toBe(first.cachedAt); + }); + + test("should write when schema changes", async () => { + const first = await store.saveSchema(projectId, mockSchemas); + + const changedSchemas: SchemaSnapshot[] = [ + { + ...mockSchemas[0], + tables: [ + ...mockSchemas[0].tables, + { + name: "posts", + type: "BASE TABLE", + columns: [ + { name: "id", type: "integer", nullable: false, isPrimaryKey: true, isForeignKey: false, defaultValue: null, isUnique: true }, + ], + }, + ], + }, + ]; + + // Allow time difference + await new Promise((r) => setTimeout(r, 10)); + const second = await store.saveSchema(projectId, changedSchemas); + + expect(second.cachedAt).not.toBe(first.cachedAt); + expect(second.schemas[0].tables).toHaveLength(2); + }); + + test("should throw when saving schema for non-existent project", async () => { + await expect( + store.saveSchema("no-such-project", mockSchemas) + ).rejects.toThrow("Project no-such-project not found"); + }); + }); + + // ========================================== + // ER Diagram Operations + // ========================================== + + describe("ER Diagram Operations", () => { + let projectId: string; + + beforeEach(async () => { + const project = await store.createProject({ + databaseId: "db-1", + name: "ERTest", + }); + projectId = project.id; + }); + + test("should get initial empty diagram", async () => { + const diagram = await store.getERDiagram(projectId); + expect(diagram).toBeDefined(); + expect(diagram!.nodes).toEqual([]); + }); + + test("should save and retrieve diagram", async () => { + const nodes = [ + { tableId: "users", x: 100, y: 200 }, + { tableId: "posts", x: 300, y: 400, collapsed: true }, + ]; + + const saved = await store.saveERDiagram(projectId, { + nodes, + zoom: 1.5, + panX: 50, + panY: 75, + }); + + expect(saved.nodes).toHaveLength(2); + expect(saved.zoom).toBe(1.5); + expect(saved.panX).toBe(50); + + const retrieved = await store.getERDiagram(projectId); + expect(retrieved!.nodes).toHaveLength(2); + expect(retrieved!.nodes[0].tableId).toBe("users"); + }); + }); + + // ========================================== + // Query Operations + // ========================================== + + describe("Query Operations", 
() => { + let projectId: string; + + beforeEach(async () => { + const project = await store.createProject({ + databaseId: "db-1", + name: "QueryTest", + }); + projectId = project.id; + }); + + test("should get initial empty queries", async () => { + const queries = await store.getQueries(projectId); + expect(queries).toBeDefined(); + expect(queries!.queries).toEqual([]); + }); + + test("should add a query", async () => { + const query = await store.addQuery(projectId, { + name: "Get Users", + sql: "SELECT * FROM users", + description: "Fetch all users", + }); + + expect(query).toBeDefined(); + expect(query.id).toBeDefined(); + expect(query.name).toBe("Get Users"); + expect(query.sql).toBe("SELECT * FROM users"); + expect(query.description).toBe("Fetch all users"); + }); + + test("should list queries after adding", async () => { + await store.addQuery(projectId, { name: "Q1", sql: "SELECT 1" }); + await store.addQuery(projectId, { name: "Q2", sql: "SELECT 2" }); + + const queries = await store.getQueries(projectId); + expect(queries!.queries).toHaveLength(2); + }); + + test("should update a query", async () => { + const query = await store.addQuery(projectId, { + name: "Old Name", + sql: "SELECT 1", + }); + + const updated = await store.updateQuery(projectId, query.id, { + name: "New Name", + sql: "SELECT 2", + }); + + expect(updated).toBeDefined(); + expect(updated!.name).toBe("New Name"); + expect(updated!.sql).toBe("SELECT 2"); + expect(updated!.updatedAt).not.toBe(query.updatedAt); + }); + + test("should return null when updating non-existent query", async () => { + const result = await store.updateQuery(projectId, "no-such-query", { + name: "x", + }); + expect(result).toBeNull(); + }); + + test("should delete a query", async () => { + const query = await store.addQuery(projectId, { + name: "To Delete", + sql: "SELECT 1", + }); + + await store.deleteQuery(projectId, query.id); + + const queries = await store.getQueries(projectId); + expect(queries!.queries.find((q) => q.id === query.id)).toBeUndefined(); + }); + }); + + // ========================================== + // Export + // ========================================== + + describe("Export", () => { + test("should export full project bundle", async () => { + const project = await store.createProject({ + databaseId: "db-1", + name: "ExportTest", + }); + + // Add some data + await store.saveSchema(project.id, [ + { name: "public", tables: [] }, + ]); + await store.saveERDiagram(project.id, { nodes: [{ tableId: "t1", x: 0, y: 0 }] }); + await store.addQuery(project.id, { name: "Q1", sql: "SELECT 1" }); + + const bundle = await store.exportProject(project.id); + expect(bundle).toBeDefined(); + expect(bundle!.metadata.name).toBe("ExportTest"); + expect(bundle!.schema).toBeDefined(); + expect(bundle!.erDiagram!.nodes).toHaveLength(1); + expect(bundle!.queries!.queries).toHaveLength(1); + }); + + test("should return null for non-existent project export", async () => { + const bundle = await store.exportProject("no-such-id"); + expect(bundle).toBeNull(); + }); + }); + + // ========================================== + // Local Config (git-ignored) + // ========================================== + + describe("Local Config", () => { + let projectId: string; + + beforeEach(async () => { + const project = await store.createProject({ + databaseId: "db-1", + name: "ConfigTest", + }); + projectId = project.id; + }); + + test("should get initial empty local config", async () => { + const config = await store.getLocalConfig(projectId); + 
expect(config).toBeDefined(); + expect(config).toEqual({}); + }); + + test("should save and retrieve local config", async () => { + const saved = await store.saveLocalConfig(projectId, { + connectionUrl: "postgres://localhost:5432/mydb", + environment: "development", + notes: "My dev setup", + }); + + expect(saved.connectionUrl).toBe("postgres://localhost:5432/mydb"); + + const retrieved = await store.getLocalConfig(projectId); + expect(retrieved!.environment).toBe("development"); + }); + }); + + // ========================================== + // .gitignore Management + // ========================================== + + describe("Gitignore Management", () => { + let projectId: string; + + beforeEach(async () => { + const project = await store.createProject({ + databaseId: "db-1", + name: "GitignoreTest", + }); + projectId = project.id; + }); + + test("should create .gitignore on project creation", async () => { + const dir = getProjectDir(projectId); + const giPath = path.join(dir, ".gitignore"); + expect(fsSync.existsSync(giPath)).toBe(true); + }); + + test("should include relwave.local.json in .gitignore", async () => { + const dir = getProjectDir(projectId); + const content = await fs.readFile(path.join(dir, ".gitignore"), "utf-8"); + expect(content).toContain("relwave.local.json"); + }); + + test("should be idempotent", async () => { + // Already created once during project creation + const result = await store.ensureGitignore(projectId); + // Should return false = already has our rules + expect(result).toBe(false); + }); + + test("should append to existing .gitignore without our rules", async () => { + const dir = getProjectDir(projectId); + const giPath = path.join(dir, ".gitignore"); + + // Overwrite with custom content (without our rules) + await fs.writeFile(giPath, "node_modules/\n*.log\n", "utf-8"); + + const result = await store.ensureGitignore(projectId); + expect(result).toBe(true); + + const content = await fs.readFile(giPath, "utf-8"); + expect(content).toContain("node_modules/"); + expect(content).toContain("relwave.local.json"); + }); + }); +}); diff --git a/bridge/package.json b/bridge/package.json index 61fe8fd..d65891f 100644 --- a/bridge/package.json +++ b/bridge/package.json @@ -1,6 +1,6 @@ { "name": "db-visualizer-bridge", - "version": "0.1.0-beta.5", + "version": "0.2.0-beta.1", "type": "commonjs", "main": "dist/index.cjs", "scripts": { diff --git a/bridge/src/connectors/mariadb.ts b/bridge/src/connectors/mariadb.ts index 5f0c07d..8ce9151 100644 --- a/bridge/src/connectors/mariadb.ts +++ b/bridge/src/connectors/mariadb.ts @@ -7,7 +7,7 @@ import mysql, { import { loadLocalMigrations, writeBaselineMigration } from "../utils/baselineMigration"; import crypto from "crypto"; import fs from "fs"; -import { ensureDir, getMigrationsDir } from "../services/dbStore"; +import { ensureDir, getMigrationsDir } from "../utils/config"; import { CacheEntry, CACHE_TTL, @@ -1765,3 +1765,20 @@ export async function searchTable( await pool.end(); } } + +/** + * listSchemaNames: Retrieves just the names of schemas (databases). + * Lightweight version for schema selector. 
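+ * Maps each row returned by LIST_SCHEMAS to its `name` column, yielding a plain
+ * array of schema names (illustrative values, e.g. ["information_schema", "app_db"]).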
+ */ +export async function listSchemaNames(cfg: MariaDBConfig): Promise { + const pool = mysql.createPool(createPoolConfig(cfg)); + const connection = await pool.getConnection(); + + try { + const [rows] = await connection.query(LIST_SCHEMAS); + return (rows as any[]).map((r: any) => r.name); + } finally { + connection.release(); + await pool.end(); + } +} diff --git a/bridge/src/connectors/mysql.ts b/bridge/src/connectors/mysql.ts index 0bc4116..72acba9 100644 --- a/bridge/src/connectors/mysql.ts +++ b/bridge/src/connectors/mysql.ts @@ -7,7 +7,7 @@ import mysql, { import { loadLocalMigrations, writeBaselineMigration } from "../utils/baselineMigration"; import crypto from "crypto"; import fs from "fs"; -import { ensureDir, getMigrationsDir } from "../services/dbStore"; +import { ensureDir, getMigrationsDir } from "../utils/config"; import { CacheEntry, CACHE_TTL, @@ -1740,3 +1740,20 @@ export async function searchTable( await pool.end(); } } + +/** + * listSchemaNames: Retrieves just the names of schemas (databases). + * Lightweight version for schema selector. + */ +export async function listSchemaNames(cfg: MySQLConfig): Promise { + const pool = mysql.createPool(createPoolConfig(cfg)); + const connection = await pool.getConnection(); + + try { + const [rows] = await connection.query(LIST_SCHEMAS); + return (rows as any[]).map((r: any) => r.name); + } finally { + connection.release(); + await pool.end(); + } +} diff --git a/bridge/src/connectors/postgres.ts b/bridge/src/connectors/postgres.ts index e576be3..19084dc 100644 --- a/bridge/src/connectors/postgres.ts +++ b/bridge/src/connectors/postgres.ts @@ -5,7 +5,7 @@ import { Readable } from "stream"; import { loadLocalMigrations, writeBaselineMigration } from "../utils/baselineMigration"; import crypto from "crypto"; import fs from "fs"; -import { ensureDir, getMigrationsDir } from "../services/dbStore"; +import { ensureDir, getMigrationsDir } from "../utils/config"; import { CacheEntry, CACHE_TTL, @@ -1869,3 +1869,23 @@ export async function searchTable( } catch (_) { } } } + +/** + * listSchemaNames: Retrieves just the names of schemas. + * Lightweight version of listSchemas. 
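+ * Maps each row of PG_LIST_SCHEMAS to its `name` column; the result is a plain
+ * array of schema names (illustrative example: ["public", "audit"]).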
+ */ +export async function listSchemaNames(connection: PGConfig): Promise { + // Check cache first (re-use schemas cache if available, or a new cache if needed) + // For now, simpler to just query as it's very fast + const client = createClient(connection); + + try { + await client.connect(); + const res = await client.query(PG_LIST_SCHEMAS); + return res.rows.map((r: any) => r.name); + } finally { + try { + await client.end(); + } catch (e) { } + } +} diff --git a/bridge/src/handlers/databaseHandlers.ts b/bridge/src/handlers/databaseHandlers.ts index 3b80a01..4a9c622 100644 --- a/bridge/src/handlers/databaseHandlers.ts +++ b/bridge/src/handlers/databaseHandlers.ts @@ -85,7 +85,7 @@ export class DatabaseHandlers { } const { conn, dbType } = await this.dbService.getDatabaseConnection(dbId); - const tables = await this.queryExecutor.listTables(conn, dbType); + const tables = await this.queryExecutor.listTables(conn, dbType, params.schema); this.rpc.sendResponse(id, { ok: true, data: tables }); } catch (e: any) { this.logger?.error({ e }, "db.listTables failed"); @@ -93,6 +93,24 @@ export class DatabaseHandlers { } } + async handleListSchemas(params: any, id: number | string) { + try { + const { id: dbId } = params || {}; + if (!dbId) { + return this.rpc.sendError(id, { + code: "BAD_REQUEST", + message: "Missing id", + }); + } + const { conn, dbType } = await this.dbService.getDatabaseConnection(dbId); + const schemas = await this.queryExecutor.listSchemaNames(conn, dbType); + this.rpc.sendResponse(id, { ok: true, data: schemas }); + } catch (e: any) { + this.logger?.error({ e }, "db.listSchemas failed"); + this.rpc.sendError(id, { code: "IO_ERROR", message: String(e) }); + } + } + async handleGetSchema(params: any, id: number | string) { try { const { id: dbId, schema } = params || {}; diff --git a/bridge/src/handlers/gitAdvancedHandlers.ts b/bridge/src/handlers/gitAdvancedHandlers.ts new file mode 100644 index 0000000..6daae6b --- /dev/null +++ b/bridge/src/handlers/gitAdvancedHandlers.ts @@ -0,0 +1,201 @@ +// ---------------------------- +// handlers/gitAdvancedHandlers.ts +// ---------------------------- +// +// RPC handlers for: Remote management, push/pull/fetch, revert. 
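+//
+// Every handler resolves the project directory from `dir` (aliases: `path`, `cwd`)
+// and replies with { ok: true, data: ... } on success or { code, message } on error.
+// Illustrative params for git.push: { dir: "/path/to/project", remote: "origin", branch: "main" }.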
+ +import { Rpc } from "../types"; +import { GitService, gitServiceInstance } from "../services/gitService"; +import { Logger } from "pino"; + +export class GitAdvancedHandlers { + constructor( + private rpc: Rpc, + private logger?: Logger, + private gitService: GitService = gitServiceInstance + ) { } + + private requireDir(params: any, id: number | string): string | null { + const dir = params?.dir || params?.path || params?.cwd; + if (!dir) { + this.rpc.sendError(id, { + code: "BAD_REQUEST", + message: "Missing 'dir' parameter (project directory path)", + }); + return null; + } + return dir; + } + + // ========================================== + // REMOTE MANAGEMENT + // ========================================== + + async handleRemoteList(params: any, id: number | string) { + const dir = this.requireDir(params, id); + if (!dir) return; + try { + const remotes = await this.gitService.remoteList(dir); + this.rpc.sendResponse(id, { ok: true, data: remotes }); + } catch (e: any) { + this.logger?.error({ e }, "git.remoteList failed"); + this.rpc.sendError(id, { code: "GIT_ERROR", message: String(e.message || e) }); + } + } + + async handleRemoteAdd(params: any, id: number | string) { + const dir = this.requireDir(params, id); + if (!dir) return; + try { + const name = params?.name; + const url = params?.url; + if (!name || !url) { + return this.rpc.sendError(id, { + code: "BAD_REQUEST", + message: "Missing 'name' and/or 'url' parameters", + }); + } + await this.gitService.remoteAdd(dir, name, url); + this.rpc.sendResponse(id, { ok: true, data: null }); + } catch (e: any) { + this.logger?.error({ e }, "git.remoteAdd failed"); + this.rpc.sendError(id, { code: "GIT_ERROR", message: String(e.message || e) }); + } + } + + async handleRemoteRemove(params: any, id: number | string) { + const dir = this.requireDir(params, id); + if (!dir) return; + try { + const name = params?.name; + if (!name) { + return this.rpc.sendError(id, { + code: "BAD_REQUEST", + message: "Missing 'name' parameter", + }); + } + await this.gitService.remoteRemove(dir, name); + this.rpc.sendResponse(id, { ok: true, data: null }); + } catch (e: any) { + this.logger?.error({ e }, "git.remoteRemove failed"); + this.rpc.sendError(id, { code: "GIT_ERROR", message: String(e.message || e) }); + } + } + + async handleRemoteGetUrl(params: any, id: number | string) { + const dir = this.requireDir(params, id); + if (!dir) return; + try { + const url = await this.gitService.remoteGetUrl(dir, params?.name || "origin"); + this.rpc.sendResponse(id, { ok: true, data: { url } }); + } catch (e: any) { + this.logger?.error({ e }, "git.remoteGetUrl failed"); + this.rpc.sendError(id, { code: "GIT_ERROR", message: String(e.message || e) }); + } + } + + async handleRemoteSetUrl(params: any, id: number | string) { + const dir = this.requireDir(params, id); + if (!dir) return; + try { + const name = params?.name; + const url = params?.url; + if (!name || !url) { + return this.rpc.sendError(id, { + code: "BAD_REQUEST", + message: "Missing 'name' and/or 'url' parameters", + }); + } + await this.gitService.remoteSetUrl(dir, name, url); + this.rpc.sendResponse(id, { ok: true, data: null }); + } catch (e: any) { + this.logger?.error({ e }, "git.remoteSetUrl failed"); + this.rpc.sendError(id, { code: "GIT_ERROR", message: String(e.message || e) }); + } + } + + // ========================================== + // PUSH / PULL / FETCH + // ========================================== + + async handlePush(params: any, id: number | string) { + const dir = 
this.requireDir(params, id); + if (!dir) return; + try { + const output = await this.gitService.push( + dir, + params?.remote || "origin", + params?.branch, + { + force: params?.force === true, + setUpstream: params?.setUpstream === true, + } + ); + this.rpc.sendResponse(id, { ok: true, data: { output } }); + } catch (e: any) { + this.logger?.error({ e }, "git.push failed"); + this.rpc.sendError(id, { code: "GIT_ERROR", message: String(e.message || e) }); + } + } + + async handlePull(params: any, id: number | string) { + const dir = this.requireDir(params, id); + if (!dir) return; + try { + const output = await this.gitService.pull( + dir, + params?.remote || "origin", + params?.branch, + { rebase: params?.rebase === true } + ); + this.rpc.sendResponse(id, { ok: true, data: { output } }); + } catch (e: any) { + this.logger?.error({ e }, "git.pull failed"); + this.rpc.sendError(id, { code: "GIT_ERROR", message: String(e.message || e) }); + } + } + + async handleFetch(params: any, id: number | string) { + const dir = this.requireDir(params, id); + if (!dir) return; + try { + const output = await this.gitService.fetch( + dir, + params?.remote, + { + prune: params?.prune === true, + all: params?.all === true, + } + ); + this.rpc.sendResponse(id, { ok: true, data: { output } }); + } catch (e: any) { + this.logger?.error({ e }, "git.fetch failed"); + this.rpc.sendError(id, { code: "GIT_ERROR", message: String(e.message || e) }); + } + } + + // ========================================== + // REVERT (Rollback to Previous Commit) + // ========================================== + + async handleRevert(params: any, id: number | string) { + const dir = this.requireDir(params, id); + if (!dir) return; + try { + const hash = params?.hash || params?.commitHash; + if (!hash) { + return this.rpc.sendError(id, { + code: "BAD_REQUEST", + message: "Missing 'hash' parameter", + }); + } + const output = await this.gitService.revert(dir, hash, { + noCommit: params?.noCommit === true, + }); + this.rpc.sendResponse(id, { ok: true, data: { output } }); + } catch (e: any) { + this.logger?.error({ e }, "git.revert failed"); + this.rpc.sendError(id, { code: "GIT_ERROR", message: String(e.message || e) }); + } + } +} diff --git a/bridge/src/handlers/gitHandlers.ts b/bridge/src/handlers/gitHandlers.ts new file mode 100644 index 0000000..5de207c --- /dev/null +++ b/bridge/src/handlers/gitHandlers.ts @@ -0,0 +1,290 @@ +import { Rpc } from "../types"; +import { Logger } from "pino"; +import { gitServiceInstance, GitService } from "../services/gitService"; + +/** + * RPC handlers for git operations. 
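+ * All handlers resolve the repository directory from a `dir` parameter
+ * (aliases: `path`, `cwd`) and answer with { ok: true, data: ... } on success.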
+ * + * Methods: + * git.status — repo status (branch, dirty, ahead/behind) + * git.init — initialize a new repo + * git.changes — list changed files + * git.stage — stage files + * git.stageAll — stage everything + * git.unstage — unstage files + * git.commit — commit staged changes + * git.log — recent commit history + * git.branches — list branches + * git.createBranch — create + checkout new branch + * git.checkout — switch branch + * git.discard — discard file changes + * git.stash — stash changes + * git.stashPop — pop latest stash + * git.diff — get diff output + * git.ensureIgnore — write/update .gitignore + */ +export class GitHandlers { + constructor( + private rpc: Rpc, + private logger: Logger, + private gitService: GitService = gitServiceInstance + ) { } + + // ---- Helpers ---- + + private requireDir(params: any, id: number | string): string | null { + const dir = params?.dir || params?.path || params?.cwd; + if (!dir) { + this.rpc.sendError(id, { + code: "BAD_REQUEST", + message: "Missing 'dir' parameter (project directory path)", + }); + return null; + } + return dir; + } + + // ---- Handlers ---- + + async handleStatus(params: any, id: number | string) { + const dir = this.requireDir(params, id); + if (!dir) return; + try { + const status = await this.gitService.getStatus(dir); + this.rpc.sendResponse(id, { ok: true, data: status }); + } catch (e: any) { + this.logger?.error({ e }, "git.status failed"); + this.rpc.sendError(id, { code: "GIT_ERROR", message: String(e.message || e) }); + } + } + + async handleInit(params: any, id: number | string) { + const dir = this.requireDir(params, id); + if (!dir) return; + try { + await this.gitService.init(dir, params?.defaultBranch || "main"); + // Also set up .gitignore + await this.gitService.ensureGitignore(dir); + const status = await this.gitService.getStatus(dir); + this.rpc.sendResponse(id, { ok: true, data: status }); + } catch (e: any) { + this.logger?.error({ e }, "git.init failed"); + this.rpc.sendError(id, { code: "GIT_ERROR", message: String(e.message || e) }); + } + } + + async handleChanges(params: any, id: number | string) { + const dir = this.requireDir(params, id); + if (!dir) return; + try { + const changes = await this.gitService.getChangedFiles(dir); + this.rpc.sendResponse(id, { ok: true, data: changes }); + } catch (e: any) { + this.logger?.error({ e }, "git.changes failed"); + this.rpc.sendError(id, { code: "GIT_ERROR", message: String(e.message || e) }); + } + } + + async handleStage(params: any, id: number | string) { + const dir = this.requireDir(params, id); + if (!dir) return; + try { + const files: string[] = params?.files; + if (!files?.length) { + return this.rpc.sendError(id, { + code: "BAD_REQUEST", + message: "Missing 'files' array", + }); + } + await this.gitService.stageFiles(dir, files); + this.rpc.sendResponse(id, { ok: true, data: null }); + } catch (e: any) { + this.logger?.error({ e }, "git.stage failed"); + this.rpc.sendError(id, { code: "GIT_ERROR", message: String(e.message || e) }); + } + } + + async handleStageAll(params: any, id: number | string) { + const dir = this.requireDir(params, id); + if (!dir) return; + try { + await this.gitService.stageAll(dir); + this.rpc.sendResponse(id, { ok: true, data: null }); + } catch (e: any) { + this.logger?.error({ e }, "git.stageAll failed"); + this.rpc.sendError(id, { code: "GIT_ERROR", message: String(e.message || e) }); + } + } + + async handleUnstage(params: any, id: number | string) { + const dir = this.requireDir(params, id); + if (!dir) 
return; + try { + const files: string[] = params?.files; + if (!files?.length) { + return this.rpc.sendError(id, { + code: "BAD_REQUEST", + message: "Missing 'files' array", + }); + } + await this.gitService.unstageFiles(dir, files); + this.rpc.sendResponse(id, { ok: true, data: null }); + } catch (e: any) { + this.logger?.error({ e }, "git.unstage failed"); + this.rpc.sendError(id, { code: "GIT_ERROR", message: String(e.message || e) }); + } + } + + async handleCommit(params: any, id: number | string) { + const dir = this.requireDir(params, id); + if (!dir) return; + try { + const message = params?.message; + if (!message) { + return this.rpc.sendError(id, { + code: "BAD_REQUEST", + message: "Missing 'message' parameter", + }); + } + const hash = await this.gitService.commit(dir, message); + this.rpc.sendResponse(id, { ok: true, data: { hash } }); + } catch (e: any) { + this.logger?.error({ e }, "git.commit failed"); + this.rpc.sendError(id, { code: "GIT_ERROR", message: String(e.message || e) }); + } + } + + async handleLog(params: any, id: number | string) { + const dir = this.requireDir(params, id); + if (!dir) return; + try { + const count = params?.count ?? 20; + const entries = await this.gitService.log(dir, count); + this.rpc.sendResponse(id, { ok: true, data: entries }); + } catch (e: any) { + this.logger?.error({ e }, "git.log failed"); + this.rpc.sendError(id, { code: "GIT_ERROR", message: String(e.message || e) }); + } + } + + async handleBranches(params: any, id: number | string) { + const dir = this.requireDir(params, id); + if (!dir) return; + try { + const branches = await this.gitService.listBranches(dir); + this.rpc.sendResponse(id, { ok: true, data: branches }); + } catch (e: any) { + this.logger?.error({ e }, "git.branches failed"); + this.rpc.sendError(id, { code: "GIT_ERROR", message: String(e.message || e) }); + } + } + + async handleCreateBranch(params: any, id: number | string) { + const dir = this.requireDir(params, id); + if (!dir) return; + try { + const name = params?.name; + if (!name) { + return this.rpc.sendError(id, { + code: "BAD_REQUEST", + message: "Missing 'name' parameter", + }); + } + await this.gitService.createBranch(dir, name); + this.rpc.sendResponse(id, { ok: true, data: { branch: name } }); + } catch (e: any) { + this.logger?.error({ e }, "git.createBranch failed"); + this.rpc.sendError(id, { code: "GIT_ERROR", message: String(e.message || e) }); + } + } + + async handleCheckout(params: any, id: number | string) { + const dir = this.requireDir(params, id); + if (!dir) return; + try { + const name = params?.name; + if (!name) { + return this.rpc.sendError(id, { + code: "BAD_REQUEST", + message: "Missing 'name' parameter", + }); + } + await this.gitService.checkoutBranch(dir, name); + this.rpc.sendResponse(id, { ok: true, data: { branch: name } }); + } catch (e: any) { + this.logger?.error({ e }, "git.checkout failed"); + this.rpc.sendError(id, { code: "GIT_ERROR", message: String(e.message || e) }); + } + } + + async handleDiscard(params: any, id: number | string) { + const dir = this.requireDir(params, id); + if (!dir) return; + try { + const files: string[] = params?.files; + if (!files?.length) { + return this.rpc.sendError(id, { + code: "BAD_REQUEST", + message: "Missing 'files' array", + }); + } + await this.gitService.discardChanges(dir, files); + this.rpc.sendResponse(id, { ok: true, data: null }); + } catch (e: any) { + this.logger?.error({ e }, "git.discard failed"); + this.rpc.sendError(id, { code: "GIT_ERROR", message: String(e.message 
|| e) }); + } + } + + async handleStash(params: any, id: number | string) { + const dir = this.requireDir(params, id); + if (!dir) return; + try { + await this.gitService.stash(dir, params?.message); + this.rpc.sendResponse(id, { ok: true, data: null }); + } catch (e: any) { + this.logger?.error({ e }, "git.stash failed"); + this.rpc.sendError(id, { code: "GIT_ERROR", message: String(e.message || e) }); + } + } + + async handleStashPop(params: any, id: number | string) { + const dir = this.requireDir(params, id); + if (!dir) return; + try { + await this.gitService.stashPop(dir); + this.rpc.sendResponse(id, { ok: true, data: null }); + } catch (e: any) { + this.logger?.error({ e }, "git.stashPop failed"); + this.rpc.sendError(id, { code: "GIT_ERROR", message: String(e.message || e) }); + } + } + + async handleDiff(params: any, id: number | string) { + const dir = this.requireDir(params, id); + if (!dir) return; + try { + const diff = await this.gitService.diff( + dir, + params?.file, + params?.staged === true + ); + this.rpc.sendResponse(id, { ok: true, data: { diff } }); + } catch (e: any) { + this.logger?.error({ e }, "git.diff failed"); + this.rpc.sendError(id, { code: "GIT_ERROR", message: String(e.message || e) }); + } + } + + async handleEnsureIgnore(params: any, id: number | string) { + const dir = this.requireDir(params, id); + if (!dir) return; + try { + const modified = await this.gitService.ensureGitignore(dir); + this.rpc.sendResponse(id, { ok: true, data: { modified } }); + } catch (e: any) { + this.logger?.error({ e }, "git.ensureIgnore failed"); + this.rpc.sendError(id, { code: "GIT_ERROR", message: String(e.message || e) }); + } + } +} diff --git a/bridge/src/handlers/migrationHandlers.ts b/bridge/src/handlers/migrationHandlers.ts index 4f18b58..686740d 100644 --- a/bridge/src/handlers/migrationHandlers.ts +++ b/bridge/src/handlers/migrationHandlers.ts @@ -2,7 +2,7 @@ import { Rpc } from "../types"; import { DatabaseService } from "../services/databaseService"; import { QueryExecutor } from "../services/queryExecutor"; import { Logger } from "pino"; -import { getMigrationsDir } from "../services/dbStore"; +import { getMigrationsDir } from "../utils/config"; import path from "path"; import fs from "fs"; diff --git a/bridge/src/handlers/projectHandlers.ts b/bridge/src/handlers/projectHandlers.ts new file mode 100644 index 0000000..4941e27 --- /dev/null +++ b/bridge/src/handlers/projectHandlers.ts @@ -0,0 +1,398 @@ +import { Rpc } from "../types"; +import { Logger } from "pino"; +import { projectStoreInstance } from "../services/projectStore"; +import { getProjectDir } from "../utils/config"; + +/** + * RPC handlers for project CRUD and sub-resource operations. + * Mirrors the DatabaseHandlers pattern. 
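+ * Illustrative call: project.create with params { databaseId: "db-1", name: "My Project" }
+ * returns { ok: true, data: project }; a missing databaseId or name yields BAD_REQUEST.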
+ */ +export class ProjectHandlers { + constructor( + private rpc: Rpc, + private logger: Logger + ) { } + + + async handleListProjects(_params: any, id: number | string) { + try { + const projects = await projectStoreInstance.listProjects(); + this.rpc.sendResponse(id, { ok: true, data: projects }); + } catch (e: any) { + this.logger?.error({ e }, "project.list failed"); + this.rpc.sendError(id, { code: "IO_ERROR", message: String(e) }); + } + } + + async handleGetProject(params: any, id: number | string) { + try { + const { id: projectId } = params || {}; + if (!projectId) { + return this.rpc.sendError(id, { + code: "BAD_REQUEST", + message: "Missing id", + }); + } + + const project = await projectStoreInstance.getProject(projectId); + if (!project) { + return this.rpc.sendError(id, { + code: "NOT_FOUND", + message: "Project not found", + }); + } + + this.rpc.sendResponse(id, { ok: true, data: project }); + } catch (e: any) { + this.logger?.error({ e }, "project.get failed"); + this.rpc.sendError(id, { code: "IO_ERROR", message: String(e) }); + } + } + + async handleGetProjectByDatabaseId(params: any, id: number | string) { + try { + const { databaseId } = params || {}; + if (!databaseId) { + return this.rpc.sendError(id, { + code: "BAD_REQUEST", + message: "Missing databaseId", + }); + } + + const project = await projectStoreInstance.getProjectByDatabaseId(databaseId); + // Return null (not an error) when no project is linked + this.rpc.sendResponse(id, { ok: true, data: project }); + } catch (e: any) { + this.logger?.error({ e }, "project.getByDatabaseId failed"); + this.rpc.sendError(id, { code: "IO_ERROR", message: String(e) }); + } + } + + async handleCreateProject(params: any, id: number | string) { + try { + const { databaseId, name, description, defaultSchema } = params || {}; + if (!databaseId || !name) { + return this.rpc.sendError(id, { + code: "BAD_REQUEST", + message: "Missing databaseId or name", + }); + } + + const project = await projectStoreInstance.createProject({ + databaseId, + name, + description, + defaultSchema, + }); + + this.rpc.sendResponse(id, { ok: true, data: project }); + } catch (e: any) { + this.logger?.error({ e }, "project.create failed"); + this.rpc.sendError(id, { code: "IO_ERROR", message: String(e) }); + } + } + + async handleUpdateProject(params: any, id: number | string) { + try { + const { id: projectId, ...updates } = params || {}; + if (!projectId) { + return this.rpc.sendError(id, { + code: "BAD_REQUEST", + message: "Missing id", + }); + } + + const project = await projectStoreInstance.updateProject(projectId, updates); + if (!project) { + return this.rpc.sendError(id, { + code: "NOT_FOUND", + message: "Project not found", + }); + } + + this.rpc.sendResponse(id, { ok: true, data: project }); + } catch (e: any) { + this.logger?.error({ e }, "project.update failed"); + this.rpc.sendError(id, { code: "IO_ERROR", message: String(e) }); + } + } + + async handleDeleteProject(params: any, id: number | string) { + try { + const { id: projectId } = params || {}; + if (!projectId) { + return this.rpc.sendError(id, { + code: "BAD_REQUEST", + message: "Missing id", + }); + } + + await projectStoreInstance.deleteProject(projectId); + this.rpc.sendResponse(id, { ok: true }); + } catch (e: any) { + this.logger?.error({ e }, "project.delete failed"); + this.rpc.sendError(id, { code: "IO_ERROR", message: String(e) }); + } + } + + async handleGetSchema(params: any, id: number | string) { + try { + const { projectId } = params || {}; + if (!projectId) { + return 
this.rpc.sendError(id, { + code: "BAD_REQUEST", + message: "Missing projectId", + }); + } + + const schema = await projectStoreInstance.getSchema(projectId); + this.rpc.sendResponse(id, { ok: true, data: schema }); + } catch (e: any) { + this.logger?.error({ e }, "project.getSchema failed"); + this.rpc.sendError(id, { code: "IO_ERROR", message: String(e) }); + } + } + + async handleSaveSchema(params: any, id: number | string) { + try { + const { projectId, schemas } = params || {}; + if (!projectId || !schemas) { + return this.rpc.sendError(id, { + code: "BAD_REQUEST", + message: "Missing projectId or schemas", + }); + } + + const result = await projectStoreInstance.saveSchema(projectId, schemas); + this.rpc.sendResponse(id, { ok: true, data: result }); + } catch (e: any) { + this.logger?.error({ e }, "project.saveSchema failed"); + this.rpc.sendError(id, { code: "IO_ERROR", message: String(e) }); + } + } + + async handleGetERDiagram(params: any, id: number | string) { + try { + const { projectId } = params || {}; + if (!projectId) { + return this.rpc.sendError(id, { + code: "BAD_REQUEST", + message: "Missing projectId", + }); + } + + const diagram = await projectStoreInstance.getERDiagram(projectId); + this.rpc.sendResponse(id, { ok: true, data: diagram }); + } catch (e: any) { + this.logger?.error({ e }, "project.getERDiagram failed"); + this.rpc.sendError(id, { code: "IO_ERROR", message: String(e) }); + } + } + + async handleSaveERDiagram(params: any, id: number | string) { + try { + const { projectId, nodes, zoom, panX, panY } = params || {}; + if (!projectId || !nodes) { + return this.rpc.sendError(id, { + code: "BAD_REQUEST", + message: "Missing projectId or nodes", + }); + } + + const result = await projectStoreInstance.saveERDiagram(projectId, { + nodes, + zoom, + panX, + panY, + }); + this.rpc.sendResponse(id, { ok: true, data: result }); + } catch (e: any) { + this.logger?.error({ e }, "project.saveERDiagram failed"); + this.rpc.sendError(id, { code: "IO_ERROR", message: String(e) }); + } + } + + async handleGetQueries(params: any, id: number | string) { + try { + const { projectId } = params || {}; + if (!projectId) { + return this.rpc.sendError(id, { + code: "BAD_REQUEST", + message: "Missing projectId", + }); + } + + const queries = await projectStoreInstance.getQueries(projectId); + this.rpc.sendResponse(id, { ok: true, data: queries }); + } catch (e: any) { + this.logger?.error({ e }, "project.getQueries failed"); + this.rpc.sendError(id, { code: "IO_ERROR", message: String(e) }); + } + } + + async handleAddQuery(params: any, id: number | string) { + try { + const { projectId, name, sql, description } = params || {}; + if (!projectId || !name || !sql) { + return this.rpc.sendError(id, { + code: "BAD_REQUEST", + message: "Missing projectId, name, or sql", + }); + } + + const query = await projectStoreInstance.addQuery(projectId, { + name, + sql, + description, + }); + this.rpc.sendResponse(id, { ok: true, data: query }); + } catch (e: any) { + this.logger?.error({ e }, "project.addQuery failed"); + this.rpc.sendError(id, { code: "IO_ERROR", message: String(e) }); + } + } + + async handleUpdateQuery(params: any, id: number | string) { + try { + const { projectId, queryId, ...updates } = params || {}; + if (!projectId || !queryId) { + return this.rpc.sendError(id, { + code: "BAD_REQUEST", + message: "Missing projectId or queryId", + }); + } + + const query = await projectStoreInstance.updateQuery( + projectId, + queryId, + updates + ); + if (!query) { + return 
this.rpc.sendError(id, { + code: "NOT_FOUND", + message: "Query not found", + }); + } + + this.rpc.sendResponse(id, { ok: true, data: query }); + } catch (e: any) { + this.logger?.error({ e }, "project.updateQuery failed"); + this.rpc.sendError(id, { code: "IO_ERROR", message: String(e) }); + } + } + + async handleDeleteQuery(params: any, id: number | string) { + try { + const { projectId, queryId } = params || {}; + if (!projectId || !queryId) { + return this.rpc.sendError(id, { + code: "BAD_REQUEST", + message: "Missing projectId or queryId", + }); + } + + await projectStoreInstance.deleteQuery(projectId, queryId); + this.rpc.sendResponse(id, { ok: true }); + } catch (e: any) { + this.logger?.error({ e }, "project.deleteQuery failed"); + this.rpc.sendError(id, { code: "IO_ERROR", message: String(e) }); + } + } + + // ========================================== + // Export (for future git-native support) + // ========================================== + + async handleExportProject(params: any, id: number | string) { + try { + const { projectId } = params || {}; + if (!projectId) { + return this.rpc.sendError(id, { + code: "BAD_REQUEST", + message: "Missing projectId", + }); + } + + const bundle = await projectStoreInstance.exportProject(projectId); + if (!bundle) { + return this.rpc.sendError(id, { + code: "NOT_FOUND", + message: "Project not found", + }); + } + + this.rpc.sendResponse(id, { ok: true, data: bundle }); + } catch (e: any) { + this.logger?.error({ e }, "project.export failed"); + this.rpc.sendError(id, { code: "IO_ERROR", message: String(e) }); + } + } + + async handleGetProjectDir(params: any, id: number | string) { + try { + const { projectId } = params || {}; + if (!projectId) { + return this.rpc.sendError(id, { + code: "BAD_REQUEST", + message: "Missing projectId", + }); + } + const dir = getProjectDir(projectId); + this.rpc.sendResponse(id, { ok: true, data: { dir } }); + } catch (e: any) { + this.logger?.error({ e }, "project.getDir failed"); + this.rpc.sendError(id, { code: "IO_ERROR", message: String(e) }); + } + } + + async handleGetLocalConfig(params: any, id: number | string) { + try { + const { projectId } = params || {}; + if (!projectId) { + return this.rpc.sendError(id, { + code: "BAD_REQUEST", + message: "Missing projectId", + }); + } + const config = await projectStoreInstance.getLocalConfig(projectId); + this.rpc.sendResponse(id, { ok: true, data: config }); + } catch (e: any) { + this.logger?.error({ e }, "project.getLocalConfig failed"); + this.rpc.sendError(id, { code: "IO_ERROR", message: String(e) }); + } + } + + async handleSaveLocalConfig(params: any, id: number | string) { + try { + const { projectId, config } = params || {}; + if (!projectId) { + return this.rpc.sendError(id, { + code: "BAD_REQUEST", + message: "Missing projectId", + }); + } + const saved = await projectStoreInstance.saveLocalConfig(projectId, config || {}); + this.rpc.sendResponse(id, { ok: true, data: saved }); + } catch (e: any) { + this.logger?.error({ e }, "project.saveLocalConfig failed"); + this.rpc.sendError(id, { code: "IO_ERROR", message: String(e) }); + } + } + + async handleEnsureGitignore(params: any, id: number | string) { + try { + const { projectId } = params || {}; + if (!projectId) { + return this.rpc.sendError(id, { + code: "BAD_REQUEST", + message: "Missing projectId", + }); + } + const modified = await projectStoreInstance.ensureGitignore(projectId); + this.rpc.sendResponse(id, { ok: true, data: { modified } }); + } catch (e: any) { + this.logger?.error({ e }, 
"project.ensureGitignore failed"); + this.rpc.sendError(id, { code: "IO_ERROR", message: String(e) }); + } + } +} diff --git a/bridge/src/jsonRpcHandler.ts b/bridge/src/jsonRpcHandler.ts index ba92907..cb11087 100644 --- a/bridge/src/jsonRpcHandler.ts +++ b/bridge/src/jsonRpcHandler.ts @@ -7,6 +7,9 @@ import { DatabaseHandlers } from "./handlers/databaseHandlers"; import { SessionHandlers } from "./handlers/sessionHandlers"; import { StatsHandlers } from "./handlers/statsHandlers"; import { MigrationHandlers } from "./handlers/migrationHandlers"; +import { ProjectHandlers } from "./handlers/projectHandlers"; +import { GitHandlers } from "./handlers/gitHandlers"; +import { GitAdvancedHandlers } from "./handlers/gitAdvancedHandlers"; import { discoveryService } from "./services/discoveryService"; import { Logger } from "pino"; @@ -52,6 +55,9 @@ export function registerDbHandlers( dbService, queryExecutor ); + const projectHandlers = new ProjectHandlers(rpc, logger); + const gitHandlers = new GitHandlers(rpc, logger); + const gitAdvancedHandlers = new GitAdvancedHandlers(rpc, logger); // ========================================== // SESSION MANAGEMENT HANDLERS @@ -131,6 +137,9 @@ export function registerDbHandlers( rpcRegister("db.getSchema", (p, id) => databaseHandlers.handleGetSchema(p, id) ); + rpcRegister("db.listSchemas", (p, id) => + databaseHandlers.handleListSchemas(p, id) + ); // ========================================== // MIGRATION HANDLERS @@ -165,6 +174,106 @@ export function registerDbHandlers( statsHandlers.handleGetTotalStats(p, id) ); + // ========================================== + // PROJECT HANDLERS + // ========================================== + rpcRegister("project.list", (p, id) => + projectHandlers.handleListProjects(p, id) + ); + rpcRegister("project.get", (p, id) => + projectHandlers.handleGetProject(p, id) + ); + rpcRegister("project.getByDatabaseId", (p, id) => + projectHandlers.handleGetProjectByDatabaseId(p, id) + ); + rpcRegister("project.create", (p, id) => + projectHandlers.handleCreateProject(p, id) + ); + rpcRegister("project.update", (p, id) => + projectHandlers.handleUpdateProject(p, id) + ); + rpcRegister("project.delete", (p, id) => + projectHandlers.handleDeleteProject(p, id) + ); + rpcRegister("project.getSchema", (p, id) => + projectHandlers.handleGetSchema(p, id) + ); + rpcRegister("project.saveSchema", (p, id) => + projectHandlers.handleSaveSchema(p, id) + ); + rpcRegister("project.getERDiagram", (p, id) => + projectHandlers.handleGetERDiagram(p, id) + ); + rpcRegister("project.saveERDiagram", (p, id) => + projectHandlers.handleSaveERDiagram(p, id) + ); + rpcRegister("project.getQueries", (p, id) => + projectHandlers.handleGetQueries(p, id) + ); + rpcRegister("project.addQuery", (p, id) => + projectHandlers.handleAddQuery(p, id) + ); + rpcRegister("project.updateQuery", (p, id) => + projectHandlers.handleUpdateQuery(p, id) + ); + rpcRegister("project.deleteQuery", (p, id) => + projectHandlers.handleDeleteQuery(p, id) + ); + rpcRegister("project.export", (p, id) => + projectHandlers.handleExportProject(p, id) + ); + rpcRegister("project.getDir", (p, id) => + projectHandlers.handleGetProjectDir(p, id) + ); + rpcRegister("project.getLocalConfig", (p, id) => + projectHandlers.handleGetLocalConfig(p, id) + ); + rpcRegister("project.saveLocalConfig", (p, id) => + projectHandlers.handleSaveLocalConfig(p, id) + ); + rpcRegister("project.ensureGitignore", (p, id) => + projectHandlers.handleEnsureGitignore(p, id) + ); + + // 
========================================== + // GIT HANDLERS + // ========================================== + rpcRegister("git.status", (p, id) => gitHandlers.handleStatus(p, id)); + rpcRegister("git.init", (p, id) => gitHandlers.handleInit(p, id)); + rpcRegister("git.changes", (p, id) => gitHandlers.handleChanges(p, id)); + rpcRegister("git.stage", (p, id) => gitHandlers.handleStage(p, id)); + rpcRegister("git.stageAll", (p, id) => gitHandlers.handleStageAll(p, id)); + rpcRegister("git.unstage", (p, id) => gitHandlers.handleUnstage(p, id)); + rpcRegister("git.commit", (p, id) => gitHandlers.handleCommit(p, id)); + rpcRegister("git.log", (p, id) => gitHandlers.handleLog(p, id)); + rpcRegister("git.branches", (p, id) => gitHandlers.handleBranches(p, id)); + rpcRegister("git.createBranch", (p, id) => gitHandlers.handleCreateBranch(p, id)); + rpcRegister("git.checkout", (p, id) => gitHandlers.handleCheckout(p, id)); + rpcRegister("git.discard", (p, id) => gitHandlers.handleDiscard(p, id)); + rpcRegister("git.stash", (p, id) => gitHandlers.handleStash(p, id)); + rpcRegister("git.stashPop", (p, id) => gitHandlers.handleStashPop(p, id)); + rpcRegister("git.diff", (p, id) => gitHandlers.handleDiff(p, id)); + rpcRegister("git.ensureIgnore", (p, id) => gitHandlers.handleEnsureIgnore(p, id)); + + // ========================================== + // GIT ADVANCED HANDLERS + // ========================================== + + // Remote management + rpcRegister("git.remoteList", (p, id) => gitAdvancedHandlers.handleRemoteList(p, id)); + rpcRegister("git.remoteAdd", (p, id) => gitAdvancedHandlers.handleRemoteAdd(p, id)); + rpcRegister("git.remoteRemove", (p, id) => gitAdvancedHandlers.handleRemoteRemove(p, id)); + rpcRegister("git.remoteGetUrl", (p, id) => gitAdvancedHandlers.handleRemoteGetUrl(p, id)); + rpcRegister("git.remoteSetUrl", (p, id) => gitAdvancedHandlers.handleRemoteSetUrl(p, id)); + + // Push / Pull / Fetch + rpcRegister("git.push", (p, id) => gitAdvancedHandlers.handlePush(p, id)); + rpcRegister("git.pull", (p, id) => gitAdvancedHandlers.handlePull(p, id)); + rpcRegister("git.fetch", (p, id) => gitAdvancedHandlers.handleFetch(p, id)); + + // Rollback + rpcRegister("git.revert", (p, id) => gitAdvancedHandlers.handleRevert(p, id)); + // ========================================== // DATABASE DISCOVERY HANDLERS // ========================================== diff --git a/bridge/src/services/dbStore.ts b/bridge/src/services/dbStore.ts index b65757d..e452361 100644 --- a/bridge/src/services/dbStore.ts +++ b/bridge/src/services/dbStore.ts @@ -9,34 +9,9 @@ import fsSync from "fs"; import { v4 as uuidv4 } from "uuid"; import { createCipheriv, createDecipheriv, randomBytes, scrypt } from "crypto"; import { promisify } from "util"; - +import { CONFIG_FOLDER, CONFIG_FILE, CREDENTIALS_FILE } from "../utils/config"; const scryptAsync = promisify(scrypt); -export const CONFIG_FOLDER = - process.env.RELWAVE_HOME || - path.join( - os.homedir(), - process.platform === "win32" - ? 
"AppData\\Roaming\\relwave" - : ".relwave" - ); - -export const CONFIG_FILE = path.join(CONFIG_FOLDER, "databases.json"); -export const CREDENTIALS_FILE = path.join(CONFIG_FOLDER, ".credentials"); - -export function getConnectionDir(connectionId: string) { - return path.join(CONFIG_FOLDER, "connections", connectionId); -} - -export function getMigrationsDir(connectionId: string) { - return path.join(CONFIG_FOLDER, "migrations", connectionId); -} - -export function ensureDir(dir: string) { - if (!fsSync.existsSync(dir)) { - fsSync.mkdirSync(dir, { recursive: true }); - } -} // Use machine-specific key for encryption diff --git a/bridge/src/services/gitService.ts b/bridge/src/services/gitService.ts new file mode 100644 index 0000000..3c559da --- /dev/null +++ b/bridge/src/services/gitService.ts @@ -0,0 +1,1038 @@ +// ---------------------------- +// services/gitService.ts +// ---------------------------- +// +// Lightweight git integration that shells out to `git` CLI. +// No npm dependency required — just needs git on PATH. + +import { execFile } from "child_process"; +import { promisify } from "util"; +import path from "path"; +import fsSync from "fs"; + +const execFileAsync = promisify(execFile); + +export interface GitStatus { + /** Whether the directory is inside a git repository */ + isGitRepo: boolean; + + /** Current branch name (e.g. "main", "feature/auth") */ + branch: string | null; + + /** Short commit hash of HEAD */ + headCommit: string | null; + + /** Whether there are uncommitted changes (staged or unstaged) */ + isDirty: boolean; + + /** Number of files with staged changes */ + stagedCount: number; + + /** Number of files with unstaged changes */ + unstagedCount: number; + + /** Number of untracked files */ + untrackedCount: number; + + /** Number of commits ahead of upstream (null if no upstream) */ + ahead: number | null; + + /** Number of commits behind upstream (null if no upstream) */ + behind: number | null; + + /** Remote tracking branch (e.g. "origin/main") */ + upstream: string | null; +} + +export interface GitFileChange { + /** Relative file path */ + path: string; + + /** Git status code: M=modified, A=added, D=deleted, ?=untracked, R=renamed */ + status: string; + + /** Whether this change is staged */ + staged: boolean; +} + +export interface GitLogEntry { + /** Short commit hash */ + hash: string; + + /** Full commit hash */ + fullHash: string; + + /** Author name */ + author: string; + + /** Commit date as ISO string */ + date: string; + + /** First line of commit message */ + subject: string; +} + +export interface GitBranchInfo { + /** Branch name */ + name: string; + + /** Is this the current branch? */ + current: boolean; + + /** Remote tracking branch (null for local-only branches) */ + upstream: string | null; +} + +export class GitService { + /** + * Run a git command in a specific directory. + * Returns stdout. Throws on non-zero exit. + */ + private async git(cwd: string, ...args: string[]): Promise { + try { + const { stdout } = await execFileAsync("git", args, { + cwd, + maxBuffer: 10 * 1024 * 1024, // 10 MB + timeout: 30_000, + windowsHide: true, + }); + return stdout.trimEnd(); + } catch (err: any) { + // Git returns exit code 128 for "not a git repo" etc. 
+ if (err.code === "ENOENT") { + throw new Error("Git is not installed or not on PATH"); + } + throw err; + } + } + + /** + * Check if git is available on this machine + */ + async isGitInstalled(): Promise { + try { + await execFileAsync("git", ["--version"], { + timeout: 5000, + windowsHide: true, + }); + return true; + } catch { + return false; + } + } + + /** + * Check if a directory is inside a git repository + */ + async isRepo(dir: string): Promise { + try { + await this.git(dir, "rev-parse", "--is-inside-work-tree"); + return true; + } catch { + return false; + } + } + + /** + * Get the root directory of the git repository + */ + async getRepoRoot(dir: string): Promise { + return this.git(dir, "rev-parse", "--show-toplevel"); + } + + /** + * Resolve a ref (tag, branch, HEAD~1, etc.) to a full commit hash. + * Returns null if the ref cannot be resolved. + */ + async resolveRef(dir: string, ref: string): Promise { + try { + return await this.git(dir, "rev-list", "-n1", ref); + } catch { + return null; + } + } + + /** + * Initialize a new git repository + */ + async init(dir: string, defaultBranch = "main"): Promise { + await this.git(dir, "init", "-b", defaultBranch); + } + + /** + * Get comprehensive git status for a directory + */ + async getStatus(dir: string): Promise { + const isGitRepo = await this.isRepo(dir); + + if (!isGitRepo) { + return { + isGitRepo: false, + branch: null, + headCommit: null, + isDirty: false, + stagedCount: 0, + unstagedCount: 0, + untrackedCount: 0, + ahead: null, + behind: null, + upstream: null, + }; + } + + // Get branch + upstream + ahead/behind in one call + let branch: string | null = null; + let headCommit: string | null = null; + let upstream: string | null = null; + let ahead: number | null = null; + let behind: number | null = null; + + try { + // --porcelain=v2 --branch gives structured branch info + const branchOutput = await this.git( + dir, + "status", + "--porcelain=v2", + "--branch" + ); + + for (const line of branchOutput.split("\n")) { + if (line.startsWith("# branch.head ")) { + branch = line.slice("# branch.head ".length); + } else if (line.startsWith("# branch.oid ")) { + headCommit = line.slice("# branch.oid ".length).slice(0, 8); + } else if (line.startsWith("# branch.upstream ")) { + upstream = line.slice("# branch.upstream ".length); + } else if (line.startsWith("# branch.ab ")) { + const match = line.match(/\+(\d+) -(\d+)/); + if (match) { + ahead = parseInt(match[1], 10); + behind = parseInt(match[2], 10); + } + } + } + } catch { + // HEAD might be unborn (initial commit) + branch = "(no commits)"; + } + + // Get file-level status + let stagedCount = 0; + let unstagedCount = 0; + let untrackedCount = 0; + + try { + const statusOutput = await this.git( + dir, + "status", + "--porcelain=v1", + "-uall" + ); + + if (statusOutput) { + for (const line of statusOutput.split("\n")) { + if (!line) continue; + const x = line[0]; // staged status + const y = line[1]; // unstaged status + + if (x === "?" 
&& y === "?") { + untrackedCount++; + } else { + if (x !== " " && x !== "?") stagedCount++; + if (y !== " " && y !== "?") unstagedCount++; + } + } + } + } catch { + // Ignore — might be empty repo + } + + return { + isGitRepo: true, + branch, + headCommit, + isDirty: stagedCount > 0 || unstagedCount > 0 || untrackedCount > 0, + stagedCount, + unstagedCount, + untrackedCount, + ahead, + behind, + upstream, + }; + } + + /** + * Get list of changed files with their status + */ + async getChangedFiles(dir: string): Promise { + const output = await this.git(dir, "status", "--porcelain=v1", "-uall"); + if (!output) return []; + + const changes: GitFileChange[] = []; + + for (const line of output.split("\n")) { + if (!line || line.length < 4) continue; + const x = line[0]; // index (staged) + const y = line[1]; // working tree + const filePath = line.slice(3); + + // Staged change + if (x !== " " && x !== "?") { + changes.push({ path: filePath, status: x, staged: true }); + } + // Unstaged change + if (y !== " " && y !== "?") { + changes.push({ path: filePath, status: y, staged: false }); + } + // Untracked + if (x === "?" && y === "?") { + changes.push({ path: filePath, status: "?", staged: false }); + } + } + + return changes; + } + + /** + * Stage files for commit + */ + async stageFiles(dir: string, files: string[]): Promise { + if (files.length === 0) return; + await this.git(dir, "add", "--", ...files); + } + + /** + * Stage all changes + */ + async stageAll(dir: string): Promise { + await this.git(dir, "add", "-A"); + } + + /** + * Unstage files + */ + async unstageFiles(dir: string, files: string[]): Promise { + if (files.length === 0) return; + await this.git(dir, "reset", "HEAD", "--", ...files); + } + + /** + * Commit staged changes + */ + async commit(dir: string, message: string): Promise { + const output = await this.git(dir, "commit", "-m", message); + // Extract short hash from output like "[main abc1234] message" + const match = output.match(/\[[\w/.-]+ ([a-f0-9]+)\]/); + return match?.[1] ?? 
""; + } + + /** + * Get recent commit log + */ + async log(dir: string, count = 20): Promise { + try { + const SEP = "<>"; + const format = ["%h", "%H", "%an", "%aI", "%s"].join(SEP); + const output = await this.git( + dir, + "log", + `--max-count=${count}`, + `--format=${format}` + ); + + if (!output) return []; + + return output.split("\n").map((line) => { + const [hash, fullHash, author, date, subject] = line.split(SEP); + return { hash, fullHash, author, date, subject }; + }); + } catch { + return []; // No commits yet + } + } + + /** + * List branches + */ + async listBranches(dir: string): Promise { + try { + const output = await this.git( + dir, + "for-each-ref", + "--format=%(refname:short)%09%(HEAD)%09%(upstream:short)", + "refs/heads/" + ); + + if (!output) return []; + + return output.split("\n").map((line) => { + const [name, head, upstream] = line.split("\t"); + return { + name, + current: head === "*", + upstream: upstream || null, + }; + }); + } catch { + return []; + } + } + + /** + * Create and checkout a new branch + */ + async createBranch(dir: string, name: string): Promise { + await this.git(dir, "checkout", "-b", name); + } + + /** + * Checkout an existing branch + */ + async checkoutBranch(dir: string, name: string): Promise { + await this.git(dir, "checkout", name); + } + + /** + * Discard unstaged changes in a file + */ + async discardChanges(dir: string, files: string[]): Promise { + if (files.length === 0) return; + await this.git(dir, "checkout", "--", ...files); + } + + /** + * Stash all changes + */ + async stash(dir: string, message?: string): Promise { + const args = ["stash", "push", "-u"]; + if (message) args.push("-m", message); + await this.git(dir, ...args); + } + + /** + * Pop the latest stash + */ + async stashPop(dir: string): Promise { + await this.git(dir, "stash", "pop"); + } + + /** + * Get diff for a specific file (or all files) + */ + async diff(dir: string, file?: string, staged = false): Promise { + const args = ["diff"]; + if (staged) args.push("--staged"); + if (file) args.push("--", file); + return this.git(dir, ...args); + } + + /** + * Read a file's content at a given git ref (HEAD, branch, commit hash). + * Returns null if the file doesn't exist at that ref. 
+ */ + async getFileAtRef(dir: string, filePath: string, ref = "HEAD"): Promise { + try { + return await this.git(dir, "show", `${ref}:${filePath}`); + } catch { + return null; // file doesn't exist at this ref + } + } + + /** + * List commits that touched a specific file + */ + async fileLog(dir: string, filePath: string, count = 20): Promise { + try { + const SEP = "<>"; + const format = ["%h", "%H", "%an", "%aI", "%s"].join(SEP); + const output = await this.git( + dir, + "log", + `--max-count=${count}`, + `--format=${format}`, + "--follow", + "--", + filePath + ); + if (!output) return []; + return output.split("\n").map((line) => { + const [hash, fullHash, author, date, subject] = line.split(SEP); + return { hash, fullHash, author, date, subject }; + }); + } catch { + return []; + } + } + + /** + * Generate a .gitignore file suitable for RelWave projects + */ + generateGitignore(): string { + return [ + "# RelWave - auto-generated", + "# Connection credentials (NEVER commit these)", + "relwave.local.json", + ".credentials", + "", + "# OS files", + ".DS_Store", + "Thumbs.db", + "", + "# Editor", + ".vscode/", + ".idea/", + "*.swp", + "*.swo", + "", + ].join("\n"); + } + + /** + * Write a .gitignore if it doesn't already exist in the repo + */ + async ensureGitignore(dir: string): Promise { + const gi = path.join(dir, ".gitignore"); + if (fsSync.existsSync(gi)) { + // Append our rules if the file exists but doesn't contain them + const existing = fsSync.readFileSync(gi, "utf-8"); + if (!existing.includes("relwave.local.json")) { + fsSync.appendFileSync( + gi, + "\n\n" + this.generateGitignore(), + "utf-8" + ); + return true; // modified + } + return false; // already has our rules + } + fsSync.writeFileSync(gi, this.generateGitignore(), "utf-8"); + return true; // created + } + + // ========================================== + // Tags + // ========================================== + + /** + * Create an annotated tag at the current HEAD (or a given ref) + */ + async createTag(dir: string, tagName: string, message?: string, ref?: string): Promise { + const args = ["tag"]; + if (message) { + args.push("-a", tagName, "-m", message); + } else { + args.push(tagName); + } + if (ref) args.push(ref); + await this.git(dir, ...args); + } + + /** + * Delete a tag + */ + async deleteTag(dir: string, tagName: string): Promise { + await this.git(dir, "tag", "-d", tagName); + } + + /** + * List tags with optional pattern filter. + * Returns tag names sorted by creation date (newest first). + */ + async listTags(dir: string, pattern?: string): Promise { + try { + const args = ["tag", "-l", "--sort=-creatordate"]; + if (pattern) args.push(pattern); + const output = await this.git(dir, ...args); + if (!output) return []; + return output.split("\n").filter(Boolean); + } catch { + return []; + } + } + + /** + * Get the message of an annotated tag + */ + async getTagMessage(dir: string, tagName: string): Promise { + try { + return await this.git(dir, "tag", "-l", "-n99", tagName); + } catch { + return null; + } + } + + // ========================================== + // Merge / Conflict detection + // ========================================== + + /** + * Get the merge-base (common ancestor commit) between two refs. + * Returns full hash, or null if no common ancestor. 
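+   * Example (branch names are illustrative): mergeBase(dir, "main", "feature/auth").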
+ */ + async mergeBase(dir: string, refA: string, refB: string): Promise { + try { + const output = await this.git(dir, "merge-base", refA, refB); + return output || null; + } catch { + return null; + } + } + + /** + * Check if merging `source` into the current branch would produce conflicts, + * without actually modifying the working tree. + * Returns list of conflicting file paths, or empty if clean. + */ + async dryMerge(dir: string, source: string): Promise { + try { + // Try to merge in-memory (index only) + await this.git(dir, "merge-tree", "--write-tree", "--no-messages", "HEAD", source); + return []; // clean merge + } catch (err: any) { + // merge-tree exits non-zero when there are conflicts and lists them + const output: string = err.stdout ?? err.message ?? ""; + const conflicts: string[] = []; + for (const line of output.split("\n")) { + // merge-tree outputs "CONFLICT (content): ..." lines + if (line.startsWith("CONFLICT")) { + const match = line.match(/Merge conflict in (.+)/); + if (match) conflicts.push(match[1].trim()); + } + } + return conflicts.length > 0 ? conflicts : ["(unknown conflict)"]; + } + } + + /** + * Stage-and-commit specific files in one go (for auto-commit workflows). + * Returns the short commit hash. + */ + async commitFiles(dir: string, files: string[], message: string): Promise { + await this.git(dir, "add", "--", ...files); + return this.commit(dir, message); + } + + // ========================================== + // Remote Management (P3) + // ========================================== + + /** + * List all remotes with their fetch/push URLs. + */ + async remoteList(dir: string): Promise<{ name: string; fetchUrl: string; pushUrl: string }[]> { + try { + const output = await this.git(dir, "remote", "-v"); + if (!output) return []; + + const map = new Map(); + for (const line of output.split("\n")) { + const match = line.match(/^(\S+)\s+(\S+)\s+\((fetch|push)\)$/); + if (!match) continue; + const [, name, url, type] = match; + if (!map.has(name)) map.set(name, { fetchUrl: "", pushUrl: "" }); + const entry = map.get(name)!; + if (type === "fetch") entry.fetchUrl = url; + else entry.pushUrl = url; + } + + return Array.from(map.entries()).map(([name, urls]) => ({ name, ...urls })); + } catch { + return []; + } + } + + /** + * Add a named remote + */ + async remoteAdd(dir: string, name: string, url: string): Promise { + await this.git(dir, "remote", "add", name, url); + } + + /** + * Remove a named remote + */ + async remoteRemove(dir: string, name: string): Promise { + await this.git(dir, "remote", "remove", name); + } + + /** + * Get the URL of a remote + */ + async remoteGetUrl(dir: string, name = "origin"): Promise { + try { + return await this.git(dir, "remote", "get-url", name); + } catch { + return null; + } + } + + /** + * Change the URL of an existing remote + */ + async remoteSetUrl(dir: string, name: string, url: string): Promise { + await this.git(dir, "remote", "set-url", name, url); + } + + // ========================================== + // Push / Pull / Fetch (P3) + // ========================================== + + /** + * Push commits to a remote. + * Returns push output text. 
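A sketch of a pre-merge conflict check built on the helpers above (illustrative only, not part of the patch; `git` is an assumed `GitService` instance and the branch name is hypothetical):

```ts
async function checkMergeSafety(git: GitService, dir: string) {
  const source = "feature/add-orders-table";   // hypothetical branch
  const conflicts = await git.dryMerge(dir, source);
  if (conflicts.length > 0) {
    console.warn("Merging would conflict in:", conflicts);
    return false;
  }
  const base = await git.mergeBase(dir, "HEAD", source);
  console.log("Clean merge; common ancestor:", base);
  return true;
}
```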
+ */ + async push( + dir: string, + remote = "origin", + branch?: string, + options?: { force?: boolean; setUpstream?: boolean } + ): Promise { + const args = ["push"]; + if (options?.force) args.push("--force-with-lease"); + if (options?.setUpstream) args.push("--set-upstream"); + args.push(remote); + if (branch) args.push(branch); + return this.git(dir, ...args); + } + + /** + * Pull from a remote. + * Returns pull output text. + */ + async pull( + dir: string, + remote = "origin", + branch?: string, + options?: { rebase?: boolean } + ): Promise { + const args = ["pull"]; + if (options?.rebase) args.push("--rebase"); + args.push(remote); + if (branch) args.push(branch); + return this.git(dir, ...args); + } + + /** + * Fetch from a remote (or all remotes). + */ + async fetch( + dir: string, + remote?: string, + options?: { prune?: boolean; all?: boolean } + ): Promise { + const args = ["fetch"]; + if (options?.prune) args.push("--prune"); + if (options?.all || !remote) { + args.push("--all"); + } else { + args.push(remote); + } + return this.git(dir, ...args); + } + + // ========================================== + // Merge & Rebase (P3) + // ========================================== + + /** + * Merge a branch into the current branch. + * Returns merge output. Throws on conflict. + */ + async merge( + dir: string, + branch: string, + options?: { noFF?: boolean; squash?: boolean; message?: string } + ): Promise { + const args = ["merge"]; + if (options?.noFF) args.push("--no-ff"); + if (options?.squash) args.push("--squash"); + if (options?.message) args.push("-m", options.message); + args.push(branch); + return this.git(dir, ...args); + } + + /** + * Abort an in-progress merge + */ + async abortMerge(dir: string): Promise { + await this.git(dir, "merge", "--abort"); + } + + /** + * Rebase current branch onto target + */ + async rebase(dir: string, onto: string): Promise { + return this.git(dir, "rebase", onto); + } + + /** + * Abort an in-progress rebase + */ + async abortRebase(dir: string): Promise { + await this.git(dir, "rebase", "--abort"); + } + + /** + * Continue a rebase after resolving conflicts + */ + async continueRebase(dir: string): Promise { + return this.git(dir, "rebase", "--continue"); + } + + // ========================================== + // History & Reversal (P3) + // ========================================== + + /** + * Revert a specific commit (creates a new commit that undoes the changes) + */ + async revert(dir: string, commitHash: string, options?: { noCommit?: boolean }): Promise { + const args = ["revert"]; + if (options?.noCommit) args.push("--no-commit"); + args.push(commitHash); + return this.git(dir, ...args); + } + + /** + * Cherry-pick a commit from another branch + */ + async cherryPick(dir: string, commitHash: string, options?: { noCommit?: boolean }): Promise { + const args = ["cherry-pick"]; + if (options?.noCommit) args.push("--no-commit"); + args.push(commitHash); + return this.git(dir, ...args); + } + + /** + * Get line-by-line blame for a file. + * Returns array of blame entries. 
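A sketch showing how the push options above map onto git arguments (illustrative only, not part of the patch; `git` is an assumed `GitService` instance and branch names are hypothetical):

```ts
async function publishAndUpdateBranch(git: GitService, dir: string) {
  // First push of a new branch: runs `git push --set-upstream origin feature/reporting`
  await git.push(dir, "origin", "feature/reporting", { setUpstream: true });

  // After a rebase, force-push safely: `force: true` uses --force-with-lease, not --force
  await git.rebase(dir, "main");
  await git.push(dir, "origin", "feature/reporting", { force: true });
}
```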
+ */ + async blame(dir: string, filePath: string): Promise<{ + hash: string; + author: string; + date: string; + lineNumber: number; + content: string; + }[]> { + try { + const output = await this.git( + dir, + "blame", + "--porcelain", + "--", + filePath + ); + if (!output) return []; + + const entries: { hash: string; author: string; date: string; lineNumber: number; content: string }[] = []; + const lines = output.split("\n"); + let i = 0; + while (i < lines.length) { + const header = lines[i]; + const headerMatch = header.match(/^([0-9a-f]{40})\s+\d+\s+(\d+)/); + if (!headerMatch) { i++; continue; } + const hash = headerMatch[1].slice(0, 8); + const lineNumber = parseInt(headerMatch[2], 10); + let author = ""; + let date = ""; + i++; + // Read header fields until content line starting with \t + while (i < lines.length && !lines[i].startsWith("\t")) { + if (lines[i].startsWith("author ")) author = lines[i].slice(7); + if (lines[i].startsWith("author-time ")) { + const ts = parseInt(lines[i].slice(12), 10); + date = new Date(ts * 1000).toISOString(); + } + i++; + } + const content = i < lines.length ? lines[i].slice(1) : ""; + entries.push({ hash, author, date, lineNumber, content }); + i++; + } + return entries; + } catch { + return []; + } + } + + /** + * Show a file at a specific ref (alias for getFileAtRef for consistency) + */ + async show(dir: string, ref: string, filePath: string): Promise { + return this.getFileAtRef(dir, filePath, ref); + } + + // ========================================== + // Stash Management (P3) + // ========================================== + + /** + * List all stash entries + */ + async stashList(dir: string): Promise<{ index: number; message: string; date: string }[]> { + try { + const SEP = "<>"; + const output = await this.git( + dir, + "stash", + "list", + `--format=%gd${SEP}%s${SEP}%aI` + ); + if (!output) return []; + + return output.split("\n").filter(Boolean).map((line) => { + const [ref, message, date] = line.split(SEP); + const indexMatch = ref.match(/\{(\d+)\}/); + return { + index: indexMatch ? parseInt(indexMatch[1], 10) : 0, + message: message || ref, + date: date || "", + }; + }); + } catch { + return []; + } + } + + /** + * Apply a specific stash entry (without removing it from the stash list) + */ + async stashApply(dir: string, index = 0): Promise { + await this.git(dir, "stash", "apply", `stash@{${index}}`); + } + + /** + * Drop a specific stash entry + */ + async stashDrop(dir: string, index = 0): Promise { + await this.git(dir, "stash", "drop", `stash@{${index}}`); + } + + /** + * Clear all stash entries + */ + async stashClear(dir: string): Promise { + await this.git(dir, "stash", "clear"); + } + + // ========================================== + // Clone (P3) + // ========================================== + + /** + * Clone a repository. Returns the path of the cloned directory. 
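A sketch of the stash helpers above (illustrative only, not part of the patch; `git` is an assumed `GitService` instance):

```ts
async function reapplyLatestStash(git: GitService, dir: string) {
  const stashes = await git.stashList(dir);
  if (stashes.length === 0) return;

  // git stash list is newest-first, so index 0 is the most recent entry
  const latest = stashes[0];
  console.log(`Applying stash@{${latest.index}}: ${latest.message}`);
  await git.stashApply(dir, latest.index);   // apply keeps the entry; pop would drop it
}
```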
+ */ + async clone(url: string, dest: string, branch?: string): Promise { + const args = ["clone"]; + if (branch) args.push("-b", branch); + args.push(url, dest); + // cwd doesn't matter for clone, use dest's parent + const parent = path.dirname(dest); + await this.git(parent, ...args); + return dest; + } + + // ========================================== + // Conflict Resolution Helpers (P3) + // ========================================== + + /** + * Check if there is a merge or rebase in progress + */ + async getMergeState(dir: string): Promise<{ + mergeInProgress: boolean; + rebaseInProgress: boolean; + conflictedFiles: string[]; + }> { + let mergeInProgress = false; + let rebaseInProgress = false; + + try { + const gitDir = await this.git(dir, "rev-parse", "--git-dir"); + const absGitDir = path.resolve(dir, gitDir); + mergeInProgress = fsSync.existsSync(path.join(absGitDir, "MERGE_HEAD")); + rebaseInProgress = + fsSync.existsSync(path.join(absGitDir, "rebase-merge")) || + fsSync.existsSync(path.join(absGitDir, "rebase-apply")); + } catch { + // Not a repo or other error + } + + // Get list of conflicted files + const conflictedFiles: string[] = []; + try { + const output = await this.git(dir, "diff", "--name-only", "--diff-filter=U"); + if (output) { + conflictedFiles.push(...output.split("\n").filter(Boolean)); + } + } catch { + // Ignore + } + + return { mergeInProgress, rebaseInProgress, conflictedFiles }; + } + + /** + * Mark conflicted files as resolved (stage them) + */ + async markResolved(dir: string, files: string[]): Promise { + if (files.length === 0) return; + await this.git(dir, "add", "--", ...files); + } + + // ========================================== + // Protection & Safety (P3) + // ========================================== + + /** + * Get the list of configured protected branch patterns. + * By convention, reads from .relwave-protected-branches in the repo root. + * Returns ["main", "production"] by default if the file doesn't exist. + */ + getProtectedBranches(dir: string): string[] { + try { + const filePath = path.join(dir, ".relwave-protected-branches"); + if (fsSync.existsSync(filePath)) { + return fsSync + .readFileSync(filePath, "utf-8") + .split("\n") + .map((l) => l.trim()) + .filter(Boolean); + } + } catch { + // Ignore + } + return ["main", "production"]; + } + + /** + * Check if a branch name matches any protected pattern + */ + isProtectedBranch(dir: string, branch: string): boolean { + const patterns = this.getProtectedBranches(dir); + return patterns.some((p) => { + if (p.includes("*")) { + const regex = new RegExp("^" + p.replace(/\*/g, ".*") + "$"); + return regex.test(branch); + } + return p === branch; + }); + } + + /** + * Delete a local branch (prevent deletion of the current branch) + */ + async deleteBranch(dir: string, name: string, force = false): Promise { + const flag = force ? 
"-D" : "-d"; + await this.git(dir, "branch", flag, name); + } + + /** + * Rename the current branch + */ + async renameBranch(dir: string, newName: string): Promise { + await this.git(dir, "branch", "-m", newName); + } +} + +export const gitServiceInstance = new GitService(); diff --git a/bridge/src/services/projectStore.ts b/bridge/src/services/projectStore.ts new file mode 100644 index 0000000..f67beeb --- /dev/null +++ b/bridge/src/services/projectStore.ts @@ -0,0 +1,606 @@ +// ---------------------------- +// services/projectStore.ts +// ---------------------------- + +import path from "path"; +import fs from "fs/promises"; +import fsSync from "fs"; +import { v4 as uuidv4 } from "uuid"; +import { + PROJECTS_FOLDER, + PROJECTS_INDEX_FILE, + getProjectDir, + ensureDir, +} from "../utils/config"; +import { dbStoreInstance, DBMeta } from "./dbStore"; + +// ========================================== +// Types +// ========================================== + +export type ProjectMetadata = { + version: number; + id: string; + databaseId: string; + name: string; + description?: string; + engine?: string; + defaultSchema?: string; + createdAt: string; + updatedAt: string; +}; + +export type SavedQuery = { + id: string; + name: string; + sql: string; + description?: string; + createdAt: string; + updatedAt: string; +}; + +export type QueriesFile = { + version: number; + projectId: string; + queries: SavedQuery[]; +}; + +export type ERNode = { + tableId: string; + x: number; + y: number; + width?: number; + height?: number; + collapsed?: boolean; +}; + +export type ERDiagramFile = { + version: number; + projectId: string; + nodes: ERNode[]; + zoom?: number; + panX?: number; + panY?: number; + updatedAt: string; +}; + +export type SchemaFile = { + version: number; + projectId: string; + databaseId: string; + schemas: SchemaSnapshot[]; + cachedAt: string; +}; + +export type SchemaSnapshot = { + name: string; + tables: TableSnapshot[]; +}; + +export type TableSnapshot = { + name: string; + type: string; + columns: ColumnSnapshot[]; +}; + +export type ColumnSnapshot = { + name: string; + type: string; + nullable: boolean; + isPrimaryKey: boolean; + isForeignKey: boolean; + defaultValue: string | null; + isUnique: boolean; +}; + +export type ProjectSummary = Pick< + ProjectMetadata, + "id" | "name" | "description" | "engine" | "databaseId" | "createdAt" | "updatedAt" +>; + +/** + * Local (git-ignored) configuration for a project. + * Contains per-developer settings that should NOT be committed. 
+ */ +export type LocalConfig = { + /** Override connection URL (developer-specific) */ + connectionUrl?: string; + + /** Environment label (dev / staging / prod) */ + environment?: string; + + /** Any developer-specific notes */ + notes?: string; +}; + + + +type ProjectIndex = { + version: number; + projects: ProjectSummary[]; +}; + + +const PROJECT_FILES = { + metadata: "relwave.json", + localConfig: "relwave.local.json", + schema: path.join("schema", "schema.json"), + erDiagram: path.join("diagrams", "er.json"), + queries: path.join("queries", "queries.json"), +} as const; + +export class ProjectStore { + private projectsFolder: string; + private indexFile: string; + + constructor( + projectsFolder: string = PROJECTS_FOLDER, + indexFile: string = PROJECTS_INDEX_FILE + ) { + this.projectsFolder = projectsFolder; + this.indexFile = indexFile; + } + + private projectDir(projectId: string): string { + return getProjectDir(projectId); + } + + private projectFile(projectId: string, file: string): string { + return path.join(this.projectDir(projectId), file); + } + + /** + * Ensure the project directory and sub-folders exist + */ + private async ensureProjectDirs(projectId: string): Promise { + const base = this.projectDir(projectId); + ensureDir(base); + ensureDir(path.join(base, "schema")); + ensureDir(path.join(base, "diagrams")); + ensureDir(path.join(base, "queries")); + } + + /** + * Read and parse a JSON file, returns null if missing + */ + private async readJSON(filePath: string): Promise { + try { + if (!fsSync.existsSync(filePath)) return null; + const raw = await fs.readFile(filePath, "utf-8"); + return JSON.parse(raw) as T; + } catch { + return null; + } + } + + /** + * Write JSON atomically (write to tmp then rename) + */ + private async writeJSON(filePath: string, data: unknown): Promise { + const dir = path.dirname(filePath); + ensureDir(dir); + const tmp = `${filePath}.${process.pid}.${uuidv4()}.tmp`; + await fs.writeFile(tmp, JSON.stringify(data, null, 2), "utf-8"); + await fs.rename(tmp, filePath); + } + + private async loadIndex(): Promise { + const data = await this.readJSON(this.indexFile); + return data ?? { version: 1, projects: [] }; + } + + private async saveIndex(index: ProjectIndex): Promise { + ensureDir(this.projectsFolder); + await this.writeJSON(this.indexFile, index); + } + + + /** + * List all projects (lightweight, from index) + */ + async listProjects(): Promise { + const index = await this.loadIndex(); + return index.projects; + } + + /** + * Get full project metadata + */ + async getProject(projectId: string): Promise { + return this.readJSON( + this.projectFile(projectId, PROJECT_FILES.metadata) + ); + } + + /** + * Find a project linked to a specific database ID. + * Returns the first matching project or null. 
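The `writeJSON` helper above relies on the write-to-temp-then-rename pattern; a standalone sketch of the same idea (illustrative only, not part of the patch):

```ts
import fs from "fs/promises";

// Write to a temp file next to the target, then rename over it.
// rename() is atomic on the same filesystem, so readers never observe a half-written file.
async function atomicWriteJSON(filePath: string, data: unknown): Promise<void> {
  const tmp = `${filePath}.${process.pid}.tmp`;
  await fs.writeFile(tmp, JSON.stringify(data, null, 2), "utf-8");
  await fs.rename(tmp, filePath);
}
```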
+ */ + async getProjectByDatabaseId(databaseId: string): Promise { + const index = await this.loadIndex(); + const entry = index.projects.find((p) => p.databaseId === databaseId); + if (!entry) return null; + return this.getProject(entry.id); + } + + /** + * Create a new project linked to a database connection + */ + async createProject(params: { + databaseId: string; + name: string; + description?: string; + defaultSchema?: string; + }): Promise { + // Resolve engine from the linked database + let engine: string | undefined; + try { + const db: DBMeta | null = await dbStoreInstance.getDB(params.databaseId); + engine = db?.type; + } catch { + // db may not exist yet — that's OK + } + + const id = uuidv4(); + const now = new Date().toISOString(); + + const meta: ProjectMetadata = { + version: 1, + id, + databaseId: params.databaseId, + name: params.name, + description: params.description, + engine, + defaultSchema: params.defaultSchema, + createdAt: now, + updatedAt: now, + }; + + // Create project directory structure + await this.ensureProjectDirs(id); + + // Write metadata + await this.writeJSON( + this.projectFile(id, PROJECT_FILES.metadata), + meta + ); + + // Initialise empty sub-files + const emptySchema: SchemaFile = { + version: 1, + projectId: id, + databaseId: params.databaseId, + schemas: [], + cachedAt: now, + }; + const emptyER: ERDiagramFile = { + version: 1, + projectId: id, + nodes: [], + updatedAt: now, + }; + const emptyQueries: QueriesFile = { + version: 1, + projectId: id, + queries: [], + }; + + await Promise.all([ + this.writeJSON(this.projectFile(id, PROJECT_FILES.schema), emptySchema), + this.writeJSON(this.projectFile(id, PROJECT_FILES.erDiagram), emptyER), + this.writeJSON(this.projectFile(id, PROJECT_FILES.queries), emptyQueries), + ]); + + // Create git-safe scaffolding + await this.ensureGitignore(id); + // Create empty local config (will be gitignored) + const emptyLocal: LocalConfig = {}; + await this.writeJSON(this.projectFile(id, PROJECT_FILES.localConfig), emptyLocal); + + // Update global index + const index = await this.loadIndex(); + index.projects.push({ + id, + name: meta.name, + description: meta.description, + engine, + databaseId: meta.databaseId, + createdAt: now, + updatedAt: now, + }); + await this.saveIndex(index); + + return meta; + } + + /** + * Update project metadata (name, description, defaultSchema) + */ + async updateProject( + projectId: string, + updates: Partial> + ): Promise { + const meta = await this.getProject(projectId); + if (!meta) return null; + + const now = new Date().toISOString(); + + // Whitelist only allowed fields from updates to avoid overwriting + // sensitive metadata (e.g., id, databaseId, version, timestamps). 
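A sketch of what `createProject` above leaves on disk (illustrative only, not part of the patch; `store` is an assumed `ProjectStore` instance and the database id is hypothetical):

```ts
async function scaffoldProject(store: ProjectStore) {
  const project = await store.createProject({
    databaseId: "db-1234",            // hypothetical connection id
    name: "Analytics Warehouse",
  });

  // Resulting layout under <PROJECTS_FOLDER>/<project.id>/ :
  //   relwave.json           - project metadata (committed)
  //   relwave.local.json     - per-developer config (gitignored)
  //   .gitignore             - generated by ensureGitignore()
  //   schema/schema.json     - cached schema snapshot
  //   diagrams/er.json       - ER diagram layout
  //   queries/queries.json   - saved queries
  return project;
}
```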
+ const { name, description, defaultSchema } = updates; + const safeUpdates: Partial> = {}; + if (name !== undefined) { + safeUpdates.name = name; + } + if (description !== undefined) { + safeUpdates.description = description; + } + if (defaultSchema !== undefined) { + safeUpdates.defaultSchema = defaultSchema; + } + + const updated: ProjectMetadata = { + ...meta, + ...safeUpdates, + updatedAt: now, + }; + + await this.writeJSON( + this.projectFile(projectId, PROJECT_FILES.metadata), + updated + ); + + // Sync the index entry + const index = await this.loadIndex(); + const entry = index.projects.find((p) => p.id === projectId); + if (entry) { + if (updates.name !== undefined) entry.name = updates.name; + if (updates.description !== undefined) entry.description = updates.description; + entry.updatedAt = now; + await this.saveIndex(index); + } + + return updated; + } + + /** + * Delete a project and its directory + */ + async deleteProject(projectId: string): Promise { + const dir = this.projectDir(projectId); + if (fsSync.existsSync(dir)) { + await fs.rm(dir, { recursive: true, force: true }); + } + + // Remove from index + const index = await this.loadIndex(); + index.projects = index.projects.filter((p) => p.id !== projectId); + await this.saveIndex(index); + } + + async getSchema(projectId: string): Promise { + return this.readJSON( + this.projectFile(projectId, PROJECT_FILES.schema) + ); + } + + async saveSchema(projectId: string, schemas: SchemaSnapshot[]): Promise { + const meta = await this.getProject(projectId); + if (!meta) throw new Error(`Project ${projectId} not found`); + + // Read existing file and skip write if schema data is identical + // (avoids cachedAt churn that creates phantom git changes) + const existing = await this.getSchema(projectId); + if (existing) { + const oldData = JSON.stringify(existing.schemas); + const newData = JSON.stringify(schemas); + if (oldData === newData) { + return existing; // nothing changed — keep old cachedAt + } + } + + const now = new Date().toISOString(); + const file: SchemaFile = { + version: 1, + projectId, + databaseId: meta.databaseId, + schemas, + cachedAt: now, + }; + + await this.writeJSON( + this.projectFile(projectId, PROJECT_FILES.schema), + file + ); + + return file; + } + + async getERDiagram(projectId: string): Promise { + return this.readJSON( + this.projectFile(projectId, PROJECT_FILES.erDiagram) + ); + } + + async saveERDiagram( + projectId: string, + data: Pick + ): Promise { + const now = new Date().toISOString(); + const file: ERDiagramFile = { + version: 1, + projectId, + nodes: data.nodes, + zoom: data.zoom, + panX: data.panX, + panY: data.panY, + updatedAt: now, + }; + + await this.writeJSON( + this.projectFile(projectId, PROJECT_FILES.erDiagram), + file + ); + + return file; + } + + async getQueries(projectId: string): Promise { + return this.readJSON( + this.projectFile(projectId, PROJECT_FILES.queries) + ); + } + + async addQuery( + projectId: string, + params: { name: string; sql: string; description?: string } + ): Promise { + const file = (await this.getQueries(projectId)) ?? 
{ + version: 1, + projectId, + queries: [], + }; + + const now = new Date().toISOString(); + const query: SavedQuery = { + id: uuidv4(), + name: params.name, + sql: params.sql, + description: params.description, + createdAt: now, + updatedAt: now, + }; + + file.queries.push(query); + + await this.writeJSON( + this.projectFile(projectId, PROJECT_FILES.queries), + file + ); + + return query; + } + + async updateQuery( + projectId: string, + queryId: string, + updates: Partial> + ): Promise { + const file = await this.getQueries(projectId); + if (!file) return null; + + const idx = file.queries.findIndex((q) => q.id === queryId); + if (idx === -1) return null; + + const now = new Date().toISOString(); + file.queries[idx] = { + ...file.queries[idx], + ...updates, + updatedAt: now, + }; + + await this.writeJSON( + this.projectFile(projectId, PROJECT_FILES.queries), + file + ); + + return file.queries[idx]; + } + + async deleteQuery(projectId: string, queryId: string): Promise { + const file = await this.getQueries(projectId); + if (!file) return; + + file.queries = file.queries.filter((q) => q.id !== queryId); + + await this.writeJSON( + this.projectFile(projectId, PROJECT_FILES.queries), + file + ); + } + + /** + * Returns the full project bundle — useful for export / git commit + */ + async exportProject(projectId: string): Promise<{ + metadata: ProjectMetadata; + schema: SchemaFile | null; + erDiagram: ERDiagramFile | null; + queries: QueriesFile | null; + } | null> { + const metadata = await this.getProject(projectId); + if (!metadata) return null; + + const [schema, erDiagram, queries] = await Promise.all([ + this.getSchema(projectId), + this.getERDiagram(projectId), + this.getQueries(projectId), + ]); + + return { metadata, schema, erDiagram, queries }; + } + + // ========================================== + // Local Config (git-ignored) + // ========================================== + + /** + * Read the local (git-ignored) config for a project + */ + async getLocalConfig(projectId: string): Promise { + return this.readJSON( + this.projectFile(projectId, PROJECT_FILES.localConfig) + ); + } + + /** + * Write/update the local config + */ + async saveLocalConfig(projectId: string, config: LocalConfig): Promise { + await this.writeJSON( + this.projectFile(projectId, PROJECT_FILES.localConfig), + config + ); + return config; + } + + // ========================================== + // .gitignore management + // ========================================== + + /** + * Ensure a .gitignore file exists in the project directory + * with rules to exclude local credentials and caches. 
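A sketch of the local-config helpers above, keeping per-developer connection details out of version control (illustrative only, not part of the patch; `store` is an assumed `ProjectStore` instance and the project id and URL are hypothetical):

```ts
async function overrideConnectionLocally(store: ProjectStore) {
  const projectId = "proj-1234";      // hypothetical project id

  await store.saveLocalConfig(projectId, {
    connectionUrl: "postgres://localhost:5432/analytics_dev",
    environment: "dev",
  });

  const local = await store.getLocalConfig(projectId);
  // connectionUrl acts as the developer-specific override; it stays uncommitted
  // because relwave.local.json is excluded by the generated .gitignore.
  return local?.connectionUrl;
}
```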
+ */ + async ensureGitignore(projectId: string): Promise { + const dir = this.projectDir(projectId); + const giPath = path.join(dir, ".gitignore"); + + const rules = [ + "# RelWave — auto-generated", + "# Local config (connection credentials, environment overrides)", + "relwave.local.json", + "", + "# OS / Editor", + ".DS_Store", + "Thumbs.db", + "", + ].join("\n"); + + if (fsSync.existsSync(giPath)) { + const existing = await fs.readFile(giPath, "utf-8"); + if (existing.includes("relwave.local.json")) { + return false; // already has our rules + } + // Append to existing + await fs.writeFile(giPath, existing + "\n\n" + rules, "utf-8"); + return true; + } + + await fs.writeFile(giPath, rules, "utf-8"); + return true; + } +} + +// Singleton instance +export const projectStoreInstance = new ProjectStore(); \ No newline at end of file diff --git a/bridge/src/services/queryExecutor.ts b/bridge/src/services/queryExecutor.ts index 8083453..06baeea 100644 --- a/bridge/src/services/queryExecutor.ts +++ b/bridge/src/services/queryExecutor.ts @@ -320,4 +320,15 @@ export class QueryExecutor { }; } } + + async listSchemaNames(conn: DatabaseConfig, dbType: DBType): Promise { + if (dbType === DBType.POSTGRES) { + return this.postgres.listSchemaNames(conn); + } else if (dbType === DBType.MARIADB) { + return this.mariadb.listSchemaNames(conn); + } else if (dbType === DBType.MYSQL) { + return this.mysql.listSchemaNames(conn); + } + return ["public"]; + } } diff --git a/bridge/src/types/index.ts b/bridge/src/types/index.ts index d9ba93f..4081bd2 100644 --- a/bridge/src/types/index.ts +++ b/bridge/src/types/index.ts @@ -20,7 +20,7 @@ export enum DBType { export type Rpc = { sendResponse: (id: number | string, payload: any) => void; - sendError: (id: number | string, err: { code?: string; message: string }) => void; + sendError: (id: number | string, err: { code?: string; message: string; details?: any }) => void; sendNotification?: (method: string, params?: any) => void; }; diff --git a/bridge/src/utils/config.ts b/bridge/src/utils/config.ts new file mode 100644 index 0000000..c1435ec --- /dev/null +++ b/bridge/src/utils/config.ts @@ -0,0 +1,37 @@ +import path from "path"; +import os from "os"; +import fsSync from "fs"; + +export const CONFIG_FOLDER = + process.env.RELWAVE_HOME || + path.join( + os.homedir(), + process.platform === "win32" + ? 
"AppData\\Roaming\\relwave" + : ".relwave" + ); + +export const CONFIG_FILE = path.join(CONFIG_FOLDER, "databases.json"); +export const CREDENTIALS_FILE = path.join(CONFIG_FOLDER, ".credentials"); + + +export const PROJECTS_FOLDER = path.join(CONFIG_FOLDER, "projects"); +export const PROJECTS_INDEX_FILE = path.join(PROJECTS_FOLDER, "index.json"); + +export function getConnectionDir(connectionId: string) { + return path.join(CONFIG_FOLDER, "connections", connectionId); +} + +export function getMigrationsDir(connectionId: string) { + return path.join(CONFIG_FOLDER, "migrations", connectionId); +} + +export function getProjectDir(projectId: string) { + return path.join(PROJECTS_FOLDER, projectId); +} + +export function ensureDir(dir: string) { + if (!fsSync.existsSync(dir)) { + fsSync.mkdirSync(dir, { recursive: true }); + } +} \ No newline at end of file diff --git a/bridge/src/utils/migrationGenerator.ts b/bridge/src/utils/migrationGenerator.ts index 3b09692..37e1bd2 100644 --- a/bridge/src/utils/migrationGenerator.ts +++ b/bridge/src/utils/migrationGenerator.ts @@ -58,10 +58,9 @@ export function generateCreateTableMigration(params: { const allDefs = [...[columnDefs], ...fkDefs].filter(Boolean).join(",\n"); - // For MySQL/MariaDB, don't use schema prefix (database is the schema) - const tableRef = (dbType === "mysql" || dbType === "mariadb") - ? quoteIdent(tableName, dbType) - : `${quoteIdent(schemaName, dbType)}.${quoteIdent(tableName, dbType)}`; + // For MySQL/MariaDB, use database.table format (schemas are databases) + // For Postgres, use schema.table format + const tableRef = `${quoteIdent(schemaName, dbType)}.${quoteIdent(tableName, dbType)}`; // Generate UP SQL const upSQL = `CREATE TABLE ${tableRef} ( @@ -94,10 +93,8 @@ export function generateAlterTableMigration(params: { const name = `alter_${tableName}_table`; const filename = `${version}_${name}.sql`; - // For MySQL/MariaDB, don't use schema prefix - const fullTableName = (dbType === "mysql" || dbType === "mariadb") - ? quoteIdent(tableName, dbType) - : `${quoteIdent(schemaName, dbType)}.${quoteIdent(tableName, dbType)}`; + // For all database types, use schema/database prefix + const fullTableName = `${quoteIdent(schemaName, dbType)}.${quoteIdent(tableName, dbType)}`; // Build UP SQL const upStatements: string[] = []; @@ -251,10 +248,8 @@ export function generateDropTableMigration(params: { const name = `drop_${tableName}_table`; const filename = `${version}_${name}.sql`; - // For MySQL/MariaDB, don't use schema prefix - const fullTableName = (dbType === "mysql" || dbType === "mariadb") - ? 
quoteIdent(tableName, dbType) - : `${quoteIdent(schemaName, dbType)}.${quoteIdent(tableName, dbType)}`; + // For all database types, use schema/database prefix + const fullTableName = `${quoteIdent(schemaName, dbType)}.${quoteIdent(tableName, dbType)}`; let upSQL = ""; if (mode === "CASCADE") { diff --git a/package.json b/package.json index 666dd72..0a0713a 100644 --- a/package.json +++ b/package.json @@ -1,7 +1,7 @@ { "name": "relwave", "private": true, - "version": "0.1.0-beta.5", + "version": "0.2.0-beta.1", "type": "module", "scripts": { "dev": "vite", @@ -71,6 +71,7 @@ "@types/react": "^19.1.8", "@types/react-dom": "^19.1.6", "@vitejs/plugin-react": "^4.6.0", + "baseline-browser-mapping": "^2.9.19", "tw-animate-css": "^1.4.0", "typescript": "~5.8.3", "vite": "^7.0.4" diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 4bbe8d8..7ed4353 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -147,6 +147,9 @@ importers: '@vitejs/plugin-react': specifier: ^4.6.0 version: 4.7.0(vite@7.2.4(@types/node@24.10.1)(jiti@2.6.1)(lightningcss@1.30.2)) + baseline-browser-mapping: + specifier: ^2.9.19 + version: 2.9.19 tw-animate-css: specifier: ^1.4.0 version: 1.4.0 @@ -1452,8 +1455,8 @@ packages: resolution: {integrity: sha512-ik3ZgC9dY/lYVVM++OISsaYDeg1tb0VtP5uL3ouh1koGOaUMDPpbFIei4JkFimWUFPn90sbMNMXQAIVOlnYKJA==} engines: {node: '>=10'} - baseline-browser-mapping@2.8.29: - resolution: {integrity: sha512-sXdt2elaVnhpDNRDz+1BDx1JQoJRuNk7oVlAlbGiFkLikHCAQiccexF/9e91zVi6RCgqspl04aP+6Cnl9zRLrA==} + baseline-browser-mapping@2.9.19: + resolution: {integrity: sha512-ipDqC8FrAl/76p2SSWKSI+H9tFwm7vYqXQrItCuiVPt26Km0jS+NzSsBWAaBusvSbQcfJG+JitdMm+wZAgTYqg==} hasBin: true browserslist@4.28.0: @@ -3342,11 +3345,11 @@ snapshots: dependencies: tslib: 2.8.1 - baseline-browser-mapping@2.8.29: {} + baseline-browser-mapping@2.9.19: {} browserslist@4.28.0: dependencies: - baseline-browser-mapping: 2.8.29 + baseline-browser-mapping: 2.9.19 caniuse-lite: 1.0.30001756 electron-to-chromium: 1.5.258 node-releases: 2.0.27 diff --git a/src-tauri/tauri.conf.json b/src-tauri/tauri.conf.json index 8e85649..724ea76 100644 --- a/src-tauri/tauri.conf.json +++ b/src-tauri/tauri.conf.json @@ -1,7 +1,7 @@ { "$schema": "https://schema.tauri.app/config/2", "productName": "RelWave", - "version": "0.1.0-beta.5", + "version": "0.2.0-beta.1", "identifier": "com.yashs.RelWave", "build": { "beforeDevCommand": "npm run dev", diff --git a/src/components/common/GitStatusBar.tsx b/src/components/common/GitStatusBar.tsx new file mode 100644 index 0000000..44916ea --- /dev/null +++ b/src/components/common/GitStatusBar.tsx @@ -0,0 +1,516 @@ +import { useState } from "react"; +import { + GitBranch, + GitCommitHorizontal, + ArrowUp, + ArrowDown, + Circle, + Plus, + Check, + ChevronDown, + FolderGit2, + Globe, + CloudUpload, + CloudDownload, + RefreshCw, +} from "lucide-react"; +import { Button } from "@/components/ui/button"; +import { + Tooltip, + TooltipContent, + TooltipTrigger, +} from "@/components/ui/tooltip"; +import { + DropdownMenu, + DropdownMenuContent, + DropdownMenuItem, + DropdownMenuSeparator, + DropdownMenuTrigger, +} from "@/components/ui/dropdown-menu"; +import { + Dialog, + DialogContent, + DialogHeader, + DialogTitle, + DialogFooter, +} from "@/components/ui/dialog"; +import { Input } from "@/components/ui/input"; +import { Spinner } from "@/components/ui/spinner"; + +import { + useGitStatus, + useGitBranches, + useGitInit, + useGitStageAll, + useGitCommit, + useGitCheckout, + useGitCreateBranch, +} from "@/hooks/useGitQueries"; 
+import { + useGitPush, + useGitPull, + useGitFetch, + useGitRemotes, +} from "@/hooks/useGitAdvanced"; +import { toast } from "sonner"; +import type { GitBranchInfo } from "@/types/git"; +import RemoteConfigDialog from "./RemoteConfigDialog"; + +interface GitStatusBarProps { + projectDir: string | null | undefined; +} + +export default function GitStatusBar({ projectDir }: GitStatusBarProps) { + const { data: status, isLoading } = useGitStatus(projectDir); + const { data: branches } = useGitBranches( + status?.isGitRepo ? projectDir : undefined + ); + + const initMutation = useGitInit(projectDir); + const stageAllMutation = useGitStageAll(projectDir); + const commitMutation = useGitCommit(projectDir); + const checkoutMutation = useGitCheckout(projectDir); + const createBranchMutation = useGitCreateBranch(projectDir); + + const [commitDialogOpen, setCommitDialogOpen] = useState(false); + const [commitMessage, setCommitMessage] = useState(""); + const [branchDialogOpen, setBranchDialogOpen] = useState(false); + const [newBranchName, setNewBranchName] = useState(""); + const [remoteDialogOpen, setRemoteDialogOpen] = useState(false); + + const pushMutation = useGitPush(projectDir); + const pullMutation = useGitPull(projectDir); + const fetchMutation = useGitFetch(projectDir); + const { data: remotes } = useGitRemotes( + status?.isGitRepo ? projectDir : undefined + ); + + const hasRemote = remotes && remotes.length > 0; + + if (!projectDir) return null; + + if (isLoading) { + return ( +
+ +
+ ); + } + + if (!status?.isGitRepo) { + return ( + + + + + +

This project is not version-controlled. Click to init a git repo.

+
+
+ ); + } + + const totalChanges = status.stagedCount + status.unstagedCount + status.untrackedCount; + + const handleQuickCommit = async () => { + if (!commitMessage.trim()) return; + try { + await stageAllMutation.mutateAsync(); + const result = await commitMutation.mutateAsync(commitMessage.trim()); + toast.success(`Committed as ${result.hash}`); + setCommitMessage(""); + setCommitDialogOpen(false); + } catch (e: any) { + toast.error("Commit failed: " + e.message); + } + }; + + const handleCreateBranch = async () => { + if (!newBranchName.trim()) return; + try { + await createBranchMutation.mutateAsync(newBranchName.trim()); + toast.success(`Switched to branch ${newBranchName.trim()}`); + setNewBranchName(""); + setBranchDialogOpen(false); + } catch (e: any) { + toast.error("Create branch failed: " + e.message); + } + }; + + const handleCheckout = async (name: string) => { + try { + await checkoutMutation.mutateAsync(name); + toast.success(`Switched to ${name}`); + } catch (e: any) { + toast.error("Checkout failed: " + e.message); + } + }; + + const handlePush = async () => { + try { + const needsUpstream = !status?.upstream; + await pushMutation.mutateAsync({ + setUpstream: needsUpstream, + branch: needsUpstream ? (status?.branch ?? undefined) : undefined, + }); + toast.success("Pushed successfully"); + } catch (e: any) { + toast.error("Push failed: " + e.message); + } + }; + + const handlePull = async () => { + try { + await pullMutation.mutateAsync(); + toast.success("Pulled successfully"); + } catch (e: any) { + toast.error("Pull failed: " + e.message); + } + }; + + const handleFetch = async () => { + try { + await fetchMutation.mutateAsync({ all: true, prune: true }); + toast.success("Fetched from all remotes"); + } catch (e: any) { + toast.error("Fetch failed: " + e.message); + } + }; + + return ( + <> +
+ {/* Branch selector */} + + + + + + {branches?.map((b: GitBranchInfo) => ( + { + if (!b.current) handleCheckout(b.name); + }} + className="font-mono text-xs" + > +
+ {b.current ? ( + + ) : ( +
+ )} + {b.name} + {b.upstream && ( + + {b.upstream} + + )} +
+ + ))} + + setBranchDialogOpen(true)}> + + New Branch... + + {hasRemote && ( + <> + + + + Push + + + + Pull + + + + Fetch All + + + )} + + setRemoteDialogOpen(true)}> + + Manage Remotes... + + + + + {/* Ahead indicator — click to push */} + {hasRemote && status.ahead != null && status.ahead > 0 && ( + + + + + +

Push {status.ahead} commit{status.ahead > 1 ? "s" : ""} to {status.upstream}

+
+
+ )} + + {/* Behind indicator — click to pull */} + {hasRemote && status.behind != null && status.behind > 0 && ( + + + + + +

Pull {status.behind} commit{status.behind > 1 ? "s" : ""} from {status.upstream}

+
+
+ )} + + {/* Sync button when up to date */} + {hasRemote && (status.ahead === 0 || status.ahead == null) && (status.behind === 0 || status.behind == null) && ( + + + + + +

Fetch from all remotes

+
+
+ )} + + {/* No remote — show add remote button */} + {!hasRemote && ( + + + + + +

Add a remote to enable push/pull

+
+
+ )} + + {/* Dirty indicator + quick commit */} + {status.isDirty && ( + + + + + +
+ {status.stagedCount > 0 &&

{status.stagedCount} staged

} + {status.unstagedCount > 0 &&

{status.unstagedCount} modified

} + {status.untrackedCount > 0 &&

{status.untrackedCount} untracked

} +

Click to commit

+
+
+
+ )} + + {/* Clean indicator */} + {!status.isDirty && status.headCommit && ( + + + + + {status.headCommit} + + + +

Working tree clean — HEAD at {status.headCommit}

+
+
+ )} +
+ + {/* Quick Commit Dialog */} + + + + + + Commit Changes + + +
+
+ {status.stagedCount + status.unstagedCount + status.untrackedCount} file(s) will be staged and committed. +
+ setCommitMessage(e.target.value)} + onKeyDown={(e) => { + if (e.key === "Enter" && !e.shiftKey) { + e.preventDefault(); + handleQuickCommit(); + } + }} + autoFocus + /> +
+ + + + +
+
+ + {/* New Branch Dialog */} + + + + + + Create Branch + + +
+
+ New branch will be created from current HEAD ({status.headCommit || "initial"}). +
+ setNewBranchName(e.target.value)} + onKeyDown={(e) => { + if (e.key === "Enter" && !e.shiftKey) { + e.preventDefault(); + handleCreateBranch(); + } + }} + autoFocus + /> +
+ + + + +
+
+ + {/* Remote Config Dialog */} + + + ); +} diff --git a/src/components/common/RemoteConfigDialog.tsx b/src/components/common/RemoteConfigDialog.tsx new file mode 100644 index 0000000..11da2f7 --- /dev/null +++ b/src/components/common/RemoteConfigDialog.tsx @@ -0,0 +1,266 @@ +import { useState } from "react"; +import { + Globe, + Plus, + Trash2, + Pencil, + Check, + X, + Copy, +} from "lucide-react"; +import { Button } from "@/components/ui/button"; +import { + Dialog, + DialogContent, + DialogHeader, + DialogTitle, + DialogFooter, +} from "@/components/ui/dialog"; +import { Input } from "@/components/ui/input"; +import { Spinner } from "@/components/ui/spinner"; +import { + useGitRemotes, + useGitRemoteAdd, + useGitRemoteRemove, + useGitRemoteSetUrl, +} from "@/hooks/useGitAdvanced"; +import { toast } from "sonner"; +import type { GitRemoteInfo } from "@/types/git"; + +interface RemoteConfigDialogProps { + open: boolean; + onOpenChange: (open: boolean) => void; + projectDir: string | null | undefined; +} + +export default function RemoteConfigDialog({ + open, + onOpenChange, + projectDir, +}: RemoteConfigDialogProps) { + const { data: remotes, isLoading } = useGitRemotes(open ? projectDir : undefined); + const addMutation = useGitRemoteAdd(projectDir); + const removeMutation = useGitRemoteRemove(projectDir); + const setUrlMutation = useGitRemoteSetUrl(projectDir); + + const [addMode, setAddMode] = useState(false); + const [newName, setNewName] = useState("origin"); + const [newUrl, setNewUrl] = useState(""); + + // Editing state + const [editingRemote, setEditingRemote] = useState(null); + const [editUrl, setEditUrl] = useState(""); + + const handleAdd = async () => { + if (!newName.trim() || !newUrl.trim()) return; + try { + await addMutation.mutateAsync({ name: newName.trim(), url: newUrl.trim() }); + toast.success(`Remote '${newName.trim()}' added`); + setNewName("origin"); + setNewUrl(""); + setAddMode(false); + } catch (e: any) { + toast.error("Failed to add remote: " + e.message); + } + }; + + const handleRemove = async (name: string) => { + try { + await removeMutation.mutateAsync(name); + toast.success(`Remote '${name}' removed`); + } catch (e: any) { + toast.error("Failed to remove remote: " + e.message); + } + }; + + const handleUpdateUrl = async (name: string) => { + if (!editUrl.trim()) return; + try { + await setUrlMutation.mutateAsync({ name, url: editUrl.trim() }); + toast.success(`Remote '${name}' URL updated`); + setEditingRemote(null); + setEditUrl(""); + } catch (e: any) { + toast.error("Failed to update URL: " + e.message); + } + }; + + return ( + + + + + + Remote Repositories + + + +
+ {isLoading && ( +
+ +
+ )} + + {!isLoading && (!remotes || remotes.length === 0) && !addMode && ( +
+ +

No remotes configured.

+

+ Add a remote to push and pull changes. +

+
+ )} + + {remotes?.map((r: GitRemoteInfo) => ( +
+
+ + {r.name} + +
+ + + +
+
+ + {editingRemote === r.name ? ( +
+ setEditUrl(e.target.value)} + placeholder="https://github.com/user/repo.git" + className="h-7 text-xs font-mono" + onKeyDown={(e) => { + if (e.key === "Enter") handleUpdateUrl(r.name); + if (e.key === "Escape") setEditingRemote(null); + }} + autoFocus + /> + + +
+ ) : ( +

+ {r.pushUrl || r.fetchUrl} +

+ )} +
+ ))} + + {/* Add new remote form */} + {addMode && ( +
+ setNewName(e.target.value)} + placeholder="Remote name (e.g. origin)" + className="h-7 text-xs font-mono" + autoFocus + /> + setNewUrl(e.target.value)} + placeholder="https://github.com/user/repo.git" + className="h-7 text-xs font-mono" + onKeyDown={(e) => { + if (e.key === "Enter") handleAdd(); + if (e.key === "Escape") setAddMode(false); + }} + /> +
+ + +
+
+ )} +
+ + + {!addMode && ( + + )} + + +
+
+ ); +} diff --git a/src/components/common/VerticalIconBar.tsx b/src/components/common/VerticalIconBar.tsx index b77bc86..3441ea8 100644 --- a/src/components/common/VerticalIconBar.tsx +++ b/src/components/common/VerticalIconBar.tsx @@ -1,4 +1,4 @@ -import { Home, Database, Search, GitBranch, Settings, Layers, Terminal } from 'lucide-react'; +import { Home, Database, Search, GitBranch, GitCommitHorizontal, Settings, Layers, Terminal, FolderOpen } from 'lucide-react'; import { Link, useLocation } from 'react-router-dom'; import { Button } from '@/components/ui/button'; import { @@ -7,7 +7,7 @@ import { TooltipTrigger, } from '@/components/ui/tooltip'; -export type PanelType = 'data' | 'sql-workspace' | 'query-builder' | 'schema-explorer' | 'er-diagram'; +export type PanelType = 'data' | 'sql-workspace' | 'query-builder' | 'schema-explorer' | 'er-diagram' | 'git-status'; interface VerticalIconBarProps { dbId?: string; @@ -17,6 +17,7 @@ interface VerticalIconBarProps { const globalNavigationItems = [ { icon: Home, label: 'Dashboard', path: '/' }, + { icon: FolderOpen, label: 'Projects', path: '/projects' }, { icon: Settings, label: 'Settings', path: '/settings' }, ]; @@ -37,6 +38,7 @@ export default function VerticalIconBar({ dbId, activePanel, onPanelChange }: Ve { icon: Search, label: 'Query Builder', panel: 'query-builder' }, { icon: GitBranch, label: 'Schema Explorer', panel: 'schema-explorer' }, { icon: Database, label: 'ER Diagram', panel: 'er-diagram' }, + { icon: GitCommitHorizontal, label: 'Git Status', panel: 'git-status' }, ] : []; return ( diff --git a/src/components/database/MigrationsPanel.tsx b/src/components/database/MigrationsPanel.tsx index ba0b408..d994b7a 100644 --- a/src/components/database/MigrationsPanel.tsx +++ b/src/components/database/MigrationsPanel.tsx @@ -1,5 +1,5 @@ import { useState } from "react"; -import { CheckCircle2, Clock, AlertCircle, Database, Play, Undo2, Trash2, Eye } from "lucide-react"; +import { CheckCircle2, Clock, AlertCircle, Database, Play, Undo2, Trash2, Eye, RefreshCw } from "lucide-react"; import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card"; import { Badge } from "@/components/ui/badge"; import { Button } from "@/components/ui/button"; @@ -22,6 +22,22 @@ export default function MigrationsPanel({ migrations, baselined, dbId }: Migrati const [selectedMigration, setSelectedMigration] = useState<{ version: string; name: string } | null>(null); const [showSQLDialog, setShowSQLDialog] = useState(false); const [sqlContent, setSqlContent] = useState<{ up: string; down: string } | null>(null); + const [isRefreshing, setIsRefreshing] = useState(false); + + const handleRefresh = async () => { + setIsRefreshing(true); + try { + await Promise.all([ + queryClient.invalidateQueries({ queryKey: ["migrations", dbId] }), + queryClient.invalidateQueries({ queryKey: ["tables", dbId] }), + queryClient.invalidateQueries({ queryKey: ["schema", dbId] }), + queryClient.invalidateQueries({ queryKey: ["schemaNames", dbId] }), + ]); + toast.success("Refreshed successfully"); + } finally { + setIsRefreshing(false); + } + }; // Merge and sort migrations const appliedVersions = new Set(applied.map((m) => m.version)); @@ -115,6 +131,15 @@ export default function MigrationsPanel({ migrations, baselined, dbId }: Migrati {baselined ? "Baselined" : "Not Baselined"} +

Schema version control and migration status diff --git a/src/components/database/TablesExplorerPanel.tsx b/src/components/database/TablesExplorerPanel.tsx index 0faa00f..4b81067 100644 --- a/src/components/database/TablesExplorerPanel.tsx +++ b/src/components/database/TablesExplorerPanel.tsx @@ -11,6 +11,7 @@ interface TablesExplorerPanelProps { dbId: string; tables: TableInfo[]; selectedTable: SelectedTable | null; + selectedSchema: string; onSelectTable: (tableName: string, schemaName: string) => void; loading?: boolean; } @@ -19,6 +20,7 @@ export default function TablesExplorerPanel({ dbId, tables, selectedTable, + selectedSchema, onSelectTable, loading = false, }: TablesExplorerPanelProps) { @@ -160,7 +162,7 @@ export default function TablesExplorerPanel({ dbId={dbId} open={createTableOpen} onOpenChange={setCreateTableOpen} - schemaName={selectedTable?.schema || ''} + schemaName={selectedSchema} /> diff --git a/src/components/er-diagram/ERDiagramContent.tsx b/src/components/er-diagram/ERDiagramContent.tsx index 342481f..e1adab1 100644 --- a/src/components/er-diagram/ERDiagramContent.tsx +++ b/src/components/er-diagram/ERDiagramContent.tsx @@ -1,6 +1,6 @@ import { toPng, toSvg } from "html-to-image"; -import { ArrowLeft, Database, Download, Filter, LayoutGrid, Search, X } from "lucide-react"; -import { useCallback, useEffect, useMemo, useState } from "react"; +import { ChevronDown, Cloud, Database, Download, Filter, HardDrive, LayoutGrid, Layers, RefreshCw, Search, WifiOff, X } from "lucide-react"; +import { useCallback, useEffect, useMemo, useRef, useState } from "react"; import { Link, useParams } from "react-router-dom"; import { Background, @@ -18,14 +18,23 @@ import { import { toast } from "sonner"; import { transformSchemaToER } from "@/lib/schemaTransformer"; import { Spinner } from "@/components/ui/spinner"; -import { useFullSchema } from "@/hooks/useDbQueries"; +import { useERDiagramData } from "@/hooks/useERDiagramData"; +import { bridgeApi } from "@/services/bridgeApi"; import { ColumnDetails, DatabaseSchemaDetails, ForeignKeyInfo, TableSchemaDetails } from "@/types/database"; +import type { ERNode } from "@/types/project"; import { Tooltip, TooltipContent, TooltipProvider, TooltipTrigger, } from "@/components/ui/tooltip"; +import { + DropdownMenu, + DropdownMenuContent, + DropdownMenuItem, + DropdownMenuTrigger, +} from "@/components/ui/dropdown-menu"; +import { Button } from "@/components/ui/button"; interface Column extends ColumnDetails { fkRef?: string; // e.g., "public.roles.id" @@ -48,9 +57,12 @@ interface ERDiagramContentProps { nodeTypes: { table: React.FC<{ data: TableNodeData }>; }; + projectId?: string | null; } -const ERDiagramContent: React.FC = ({ nodeTypes }) => { +const ER_SAVE_DEBOUNCE_MS = 2000; + +const ERDiagramContent: React.FC = ({ nodeTypes, projectId }) => { const { id: dbId } = useParams<{ id: string }>(); const reactFlowInstance = useReactFlow(); @@ -61,28 +73,122 @@ const ERDiagramContent: React.FC = ({ nodeTypes }) => { const [searchQuery, setSearchQuery] = useState(""); const [selectedNodeId, setSelectedNodeId] = useState(null); const [hoveredEdge, setHoveredEdge] = useState(null); + const [selectedSchema, setSelectedSchema] = useState("__all__"); + const [isSyncing, setIsSyncing] = useState(false); - // Use React Query for schema data (cached!) 
+ // Use the smart data source hook (offline-first + live fallback) const { - data: schemaData, + schemaData, + savedLayout, isLoading, - error: queryError - } = useFullSchema(dbId); - - - const error = queryError ? (queryError as Error).message : - (schemaData && !schemaData.schemas?.some(s => s.tables?.length)) + dataSource, + hasLiveSchema, + syncFromDatabase, + } = useERDiagramData(dbId, projectId); + + const error = !isLoading && !schemaData + ? "No schema data available. Connect to a database or open a project." + : (schemaData && !schemaData.schemas?.some(s => s.tables?.length)) ? "Schema data found, but no tables to render." : null; - // Transform schema to ER nodes/edges when data changes + // Get available schema names for the dropdown + const availableSchemas = useMemo(() => { + if (!schemaData?.schemas) return []; + return schemaData.schemas + .filter(s => s.tables?.length > 0) + .map(s => s.name); + }, [schemaData]); + + // Filter schema data based on selected schema + const filteredSchemaData = useMemo((): DatabaseSchemaDetails | null => { + if (!schemaData) return null; + if (selectedSchema === "__all__") return schemaData; + + return { + ...schemaData, + schemas: schemaData.schemas.filter(s => s.name === selectedSchema) + }; + }, [schemaData, selectedSchema]); + + // Transform schema to ER nodes/edges when data or filter changes + // Merges saved layout positions with schema data: + // - Table in schema + in saved layout → use saved position + // - Table in schema but NOT in saved layout → auto-place (new table) + // - Table in saved layout but NOT in schema → ignored (removed table) useEffect(() => { - if (schemaData && schemaData.schemas?.some(s => s.tables?.length)) { - const { nodes: newNodes, edges: newEdges } = transformSchemaToER(schemaData); + if (filteredSchemaData && filteredSchemaData.schemas?.some(s => s.tables?.length)) { + const layoutNodes = savedLayout?.nodes ?? 
null; + const { nodes: newNodes, edges: newEdges } = transformSchemaToER( + filteredSchemaData, + true, + layoutNodes + ); setNodes(newNodes as typeof nodes); setEdges(newEdges); + // Fit view after layout change + setTimeout(() => { + if (savedLayout?.zoom != null && savedLayout?.panX != null && savedLayout?.panY != null) { + reactFlowInstance?.setViewport({ + zoom: savedLayout.zoom, + x: savedLayout.panX, + y: savedLayout.panY, + }); + } else { + reactFlowInstance?.fitView({ padding: 0.2, duration: 300 }); + } + }, 100); + } else { + setNodes([]); + setEdges([]); } - }, [schemaData, setNodes, setEdges]); + }, [filteredSchemaData, savedLayout, setNodes, setEdges, reactFlowInstance]); + + // ----------------------------------------- + // Auto-save ER node positions to project + // Debounced: only fires ER_SAVE_DEBOUNCE_MS after last node movement + // ----------------------------------------- + const erSaveTimerRef = useRef | null>(null); + const initialLayoutDoneRef = useRef(false); + + // Mark that the initial layout just happened so we skip saving it + useEffect(() => { + initialLayoutDoneRef.current = false; + const id = setTimeout(() => { initialLayoutDoneRef.current = true; }, 800); + return () => clearTimeout(id); + }, [filteredSchemaData, savedLayout]); + + useEffect(() => { + // Don't save during initial layout or if no project linked + if (!projectId || !initialLayoutDoneRef.current || nodes.length === 0) return; + + if (erSaveTimerRef.current) clearTimeout(erSaveTimerRef.current); + + erSaveTimerRef.current = setTimeout(() => { + const viewport = reactFlowInstance?.getViewport(); + const erNodes: ERNode[] = nodes.map((n) => ({ + tableId: n.id, + x: n.position.x, + y: n.position.y, + width: n.width ?? undefined, + height: n.height ?? undefined, + })); + + bridgeApi + .saveProjectERDiagram(projectId, { + nodes: erNodes, + zoom: viewport?.zoom, + panX: viewport?.x, + panY: viewport?.y, + }) + .then(() => console.debug("[ProjectSync] ER diagram saved")) + .catch((err) => console.warn("[ProjectSync] ER diagram save failed:", err.message)); + }, ER_SAVE_DEBOUNCE_MS); + + return () => { + if (erSaveTimerRef.current) clearTimeout(erSaveTimerRef.current); + }; + }, [nodes, projectId, reactFlowInstance]); // Filter nodes based on search query const filteredNodes = useMemo(() => { @@ -178,15 +284,15 @@ const ERDiagramContent: React.FC = ({ nodeTypes }) => { } }, [filteredNodes, reactFlowInstance]); - // Re-layout with dagre + // Re-layout with dagre (ignores saved layout to generate fresh positions) const reLayout = useCallback(() => { - if (schemaData) { - const { nodes: newNodes, edges: newEdges } = transformSchemaToER(schemaData, true); + if (filteredSchemaData) { + const { nodes: newNodes, edges: newEdges } = transformSchemaToER(filteredSchemaData, true, null); setNodes(newNodes as typeof nodes); setEdges(newEdges); setTimeout(() => reactFlowInstance?.fitView({ padding: 0.2, duration: 500 }), 100); } - }, [schemaData, setNodes, setEdges, reactFlowInstance]); + }, [filteredSchemaData, setNodes, setEdges, reactFlowInstance]); // Edge hover handlers for tooltip const onEdgeMouseEnter: EdgeMouseHandler = useCallback((_event, edge) => { @@ -252,15 +358,35 @@ const ERDiagramContent: React.FC = ({ nodeTypes }) => { if (error || !schemaData || nodes.length === 0) { return (

-
+

Diagram Unavailable

{error || "No tables found."}

- - - +
+ {hasLiveSchema && projectId && ( + + )} + + + +
); @@ -276,8 +402,59 @@ const ERDiagramContent: React.FC = ({ nodeTypes }) => { {schemaData.name || 'Database'} ER Diagram + {/* Data source badge */} + + + + {dataSource === "live" ? ( + + ) : ( + + )} + {dataSource === "live" ? "Live" : "Offline"} + + + + {dataSource === "live" + ? "Schema loaded from live database connection" + : "Schema loaded from saved project files (offline)"} + +
+ {/* Schema filter dropdown */} + {availableSchemas.length > 0 && ( + + + + + + setSelectedSchema("__all__")}> + + All Schemas + + {schemaData?.schemas.reduce((acc, s) => acc + (s.tables?.length || 0), 0)} tables + + + {availableSchemas.map(schema => ( + setSelectedSchema(schema)}> + {schema} + + {schemaData?.schemas.find(s => s.name === schema)?.tables?.length || 0} tables + + + ))} + + + )} + {/* Search bar */}
@@ -323,6 +500,34 @@ const ERDiagramContent: React.FC = ({ nodeTypes }) => { Clear )} + {/* Sync from Database button */} + {projectId && ( + + + + + + Pull fresh schema from database (keeps your layout) + + + )} - Re-layout diagram + Re-layout diagram {["png", "svg"].map((format) => (
diff --git a/src/components/er-diagram/ERDiagramPanel.tsx b/src/components/er-diagram/ERDiagramPanel.tsx
index 5a17f9c..0946a7d 100644
--- a/src/components/er-diagram/ERDiagramPanel.tsx
+++ b/src/components/er-diagram/ERDiagramPanel.tsx
@@ -7,11 +7,15 @@ const nodeTypes = {
     table: TableNode,
 } as const;
-export default function ERDiagramPanel() {
+interface ERDiagramPanelProps {
+    projectId?: string | null;
+}
+
+export default function ERDiagramPanel({ projectId }: ERDiagramPanelProps) {
     return (
- +
); diff --git a/src/components/git/GitStatusPanel.tsx b/src/components/git/GitStatusPanel.tsx new file mode 100644 index 0000000..019858b --- /dev/null +++ b/src/components/git/GitStatusPanel.tsx @@ -0,0 +1,513 @@ +import { useState } from "react"; +import { + GitBranch, + GitCommitHorizontal, + FileEdit, + FilePlus2, + FileX2, + FileQuestion, + FileDiff, + Clock, + ArrowUp, + ArrowDown, + ChevronRight, + Eye, + RotateCcw, + User, +} from "lucide-react"; +import { Badge } from "@/components/ui/badge"; +import { Button } from "@/components/ui/button"; +import { ScrollArea } from "@/components/ui/scroll-area"; +import { Tabs, TabsContent, TabsList, TabsTrigger } from "@/components/ui/tabs"; +import { + Tooltip, + TooltipContent, + TooltipTrigger, +} from "@/components/ui/tooltip"; +import { + Dialog, + DialogContent, + DialogHeader, + DialogTitle, +} from "@/components/ui/dialog"; +import { + useGitStatus, + useGitChanges, + useGitLog, + useGitBranches, +} from "@/hooks/useGitQueries"; +import { useGitRevert } from "@/hooks/useGitAdvanced"; +import { bridgeApi } from "@/services/bridgeApi"; +import { Spinner } from "@/components/ui/spinner"; +import { toast } from "sonner"; +import type { GitFileChange, GitLogEntry } from "@/types/git"; + +// ─── Helpers ────────────────────────────────────────── + +function statusIcon(status: string, staged: boolean) { + const color = staged ? "text-green-500" : "text-yellow-500"; + switch (status) { + case "M": + return ; + case "A": + return ; + case "D": + return ; + case "R": + return ; + case "?": + return ; + default: + return ; + } +} + +function statusLabel(status: string) { + switch (status) { + case "M": + return "Modified"; + case "A": + return "Added"; + case "D": + return "Deleted"; + case "R": + return "Renamed"; + case "?": + return "Untracked"; + case "C": + return "Copied"; + case "U": + return "Unmerged"; + default: + return status; + } +} + +function timeAgo(dateStr: string): string { + const diff = Date.now() - new Date(dateStr).getTime(); + const mins = Math.floor(diff / 60000); + if (mins < 1) return "just now"; + if (mins < 60) return `${mins}m ago`; + const hrs = Math.floor(mins / 60); + if (hrs < 24) return `${hrs}h ago`; + const days = Math.floor(hrs / 24); + if (days < 30) return `${days}d ago`; + return new Date(dateStr).toLocaleDateString(); +} + +// ─── Component ──────────────────────────────────────── + +interface GitStatusPanelProps { + projectDir: string | null | undefined; +} + +export default function GitStatusPanel({ projectDir }: GitStatusPanelProps) { + const { data: status, isLoading: statusLoading } = useGitStatus(projectDir); + const { data: changes } = useGitChanges( + status?.isGitRepo ? projectDir : undefined + ); + const { data: log } = useGitLog( + status?.isGitRepo ? projectDir : undefined, + 50 + ); + const { data: branches } = useGitBranches( + status?.isGitRepo ? projectDir : undefined + ); + const revertMutation = useGitRevert(projectDir); + + const [diffDialogOpen, setDiffDialogOpen] = useState(false); + const [diffContent, setDiffContent] = useState(""); + const [diffFile, setDiffFile] = useState(""); + const [diffLoading, setDiffLoading] = useState(false); + + if (!projectDir) { + return ( +
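// Usage sketch (illustrative) for the helpers defined above: statusLabel maps git
// porcelain status letters to display text, and timeAgo buckets a timestamp into
// minutes / hours / days before falling back to a locale date string.
statusLabel("M"); // "Modified"
statusLabel("?"); // "Untracked"
timeAgo(new Date(Date.now() - 90 * 60_000).toISOString()); // "1h ago"
timeAgo(new Date(Date.now() - 3 * 86_400_000).toISOString()); // "3d ago"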
+ No project directory available. +
+        );
+    }
+
+    if (statusLoading) {
+        return (
+ + Loading git status… +
+        );
+    }
+
+    if (!status?.isGitRepo) {
+        return (
+ + Not a git repository. Initialize git from the status bar below. +
+        );
+    }
+
+    // Split changes into staged / unstaged
+    const staged = (changes ?? []).filter((c) => c.staged);
+    const unstaged = (changes ?? []).filter((c) => !c.staged);
+
+    const viewDiff = async (file: string, isStaged: boolean) => {
+        setDiffFile(file);
+        setDiffLoading(true);
+        setDiffDialogOpen(true);
+        try {
+            const diff = await bridgeApi.gitDiff(projectDir!, file, isStaged);
+            setDiffContent(diff || "(no diff available)");
+        } catch {
+            setDiffContent("Failed to load diff.");
+        } finally {
+            setDiffLoading(false);
+        }
+    };
+
+    const handleRevert = (hash: string, subject: string) => {
+        revertMutation.mutate(
+            { hash },
+            {
+                onSuccess: () => toast.success(`Reverted: ${subject}`),
+                onError: (err: any) =>
+                    toast.error(`Revert failed: ${err?.message ?? "Unknown error"}`),
+            }
+        );
+    };
+
+    return (
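// Shape sketch (assumption, inferred from usage in this file rather than the actual
// type definition): the staged/unstaged split and FileRow only rely on these fields
// of GitFileChange.
interface GitFileChangeSketch {
    path: string;    // e.g. "src/components/git/GitStatusPanel.tsx"
    status: string;  // porcelain letter: "M", "A", "D", "R", "?", ...
    staged: boolean; // true → "Staged Changes" section, false → "Unstaged Changes"
}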
+ {/* Header */} +
+
+
+ +

Git Status

+ + {status.branch ?? "HEAD"} + + {status.headCommit && ( + + {status.headCommit} + + )} +
+
+ {status.ahead != null && status.ahead > 0 && ( + + + {status.ahead} + + )} + {status.behind != null && status.behind > 0 && ( + + + {status.behind} + + )} + {status.upstream && ( + {status.upstream} + )} +
+
+
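// Partial shape sketch (assumption, inferred from the header rendering above; the
// real GitStatus type returned by useGitStatus likely carries more fields).
interface GitStatusSketch {
    isGitRepo: boolean;
    branch?: string | null; // falls back to "HEAD" when unavailable
    headCommit?: string;    // short hash badge
    ahead?: number | null;  // ArrowUp badge when > 0
    behind?: number | null; // ArrowDown badge when > 0
    upstream?: string;      // e.g. "origin/main"
}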
+ + {/* Tabs */} + + + + + Changes + {(changes?.length ?? 0) > 0 && ( + + {changes!.length} + + )} + + + + History + + + + Branches + {branches && ( + + {branches.length} + + )} + + + + {/* ── Changes Tab ─────────────────────────── */} + + + {(!changes || changes.length === 0) ? ( +
+ + Working tree clean +
+ ) : ( +
+ {/* Staged */} + {staged.length > 0 && ( +
+

+ Staged Changes + + {staged.length} + +

+
+ {staged.map((f) => ( + viewDiff(f.path, true)} + /> + ))} +
+
+ )} + + {/* Unstaged */} + {unstaged.length > 0 && ( +
+

+ Unstaged Changes + + {unstaged.length} + +

+
+ {unstaged.map((f) => ( + viewDiff(f.path, false)} + /> + ))} +
+
+ )} +
+ )} +
+
+ + {/* ── History Tab ─────────────────────────── */} + + + {(!log || log.length === 0) ? ( +
+ + No commits yet +
+ ) : ( +
+ {log.map((entry, idx) => ( + handleRevert(entry.hash, entry.subject)} + reverting={revertMutation.isPending} + /> + ))} +
+ )} +
+
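// Shape sketch (assumption, inferred from CommitRow below rather than the actual
// type definition): the fields of GitLogEntry the history tab consumes; `date` is
// passed to timeAgo, so it must parse with new Date().
interface GitLogEntrySketch {
    hash: string;
    subject: string;
    author: string;
    date: string; // ISO-8601 or another Date-parseable timestamp
}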
+ + {/* ── Branches Tab ────────────────────────── */} + + + {(!branches || branches.length === 0) ? ( +
+ + No branches +
+ ) : ( +
+ {branches.map((b) => ( +
+ + {b.name} + {b.current && ( + + current + + )} + {b.upstream && ( + + → {b.upstream} + + )} +
+ ))} +
+ )} +
+
+
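// Shape sketch (assumption, inferred from the branch list rendering above): the
// fields each entry from useGitBranches needs.
interface GitBranchSketch {
    name: string;
    current?: boolean; // renders the "current" badge
    upstream?: string; // renders "→ <upstream>"
}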
+ + {/* Diff Dialog */} + + + + + + {diffFile} + + + + {diffLoading ? ( +
+ +
+ ) : ( +
+                                {diffContent.split("\n").map((line, i) => {
+                                    let color = "text-foreground/80";
+                                    if (line.startsWith("+") && !line.startsWith("+++"))
+                                        color = "text-green-500";
+                                    else if (line.startsWith("-") && !line.startsWith("---"))
+                                        color = "text-red-500";
+                                    else if (line.startsWith("@@"))
+                                        color = "text-blue-400";
+                                    return (
+                                        
+ {line} +
+ ); + })} +
+ )} +
+
+
+
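// Worked example (illustrative) of the diff-line coloring above: file headers
// ("+++", "---") keep the default color, hunk headers go blue, and added/removed
// lines go green/red.
//   "+++ b/src/App.tsx" -> "text-foreground/80" (starts with "+++", so skipped)
//   "+const x = 1;"     -> "text-green-500"
//   "-const x = 0;"     -> "text-red-500"
//   "@@ -1,4 +1,5 @@"   -> "text-blue-400"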
+ ); +} + +// ─── File Row Sub-Component ─────────────────────────── + +function FileRow({ + file, + onViewDiff, +}: { + file: GitFileChange; + onViewDiff: () => void; +}) { + // Extract filename from path + const parts = file.path.split("/"); + const fileName = parts.pop() ?? file.path; + const dir = parts.length > 0 ? parts.join("/") + "/" : ""; + + return ( +
+ {statusIcon(file.status, file.staged)} +
+ {fileName} + {dir && ( + + {dir} + + )} +
+ + {statusLabel(file.status)} + + + + + + View Diff + +
+ ); +} + +// ─── Commit Row Sub-Component ───────────────────────── + +function CommitRow({ + entry, + isLatest, + onRevert, + reverting, +}: { + entry: GitLogEntry; + isLatest: boolean; + onRevert: () => void; + reverting: boolean; +}) { + return ( +
+ {/* Timeline dot */} +
+
+
+ +
+
+ + {entry.subject} + + + + + + + Revert this commit + + +
+
+ {entry.hash} + + + {entry.author} + + + + {timeAgo(entry.date)} + +
+
+
+ ); +} diff --git a/src/components/project/CreateProjectDialog.tsx b/src/components/project/CreateProjectDialog.tsx new file mode 100644 index 0000000..c23b1b2 --- /dev/null +++ b/src/components/project/CreateProjectDialog.tsx @@ -0,0 +1,172 @@ +import { useState } from "react"; +import { Database, Link as LinkIcon } from "lucide-react"; +import { Button } from "@/components/ui/button"; +import { + Dialog, + DialogContent, + DialogDescription, + DialogHeader, + DialogTitle, +} from "@/components/ui/dialog"; +import { Input } from "@/components/ui/input"; +import { Label } from "@/components/ui/label"; +import { + Select, + SelectContent, + SelectItem, + SelectTrigger, + SelectValue, +} from "@/components/ui/select"; +import { Textarea } from "@/components/ui/textarea"; +import { DatabaseConnection } from "@/types/database"; + +interface CreateProjectDialogProps { + open: boolean; + onOpenChange: (open: boolean) => void; + onSubmit: (data: { + databaseId: string; + name: string; + description?: string; + defaultSchema?: string; + }) => void; + isLoading?: boolean; + databases: DatabaseConnection[]; +} + +export function CreateProjectDialog({ + open, + onOpenChange, + onSubmit, + isLoading, + databases, +}: CreateProjectDialogProps) { + const [name, setName] = useState(""); + const [description, setDescription] = useState(""); + const [databaseId, setDatabaseId] = useState(""); + const [defaultSchema, setDefaultSchema] = useState(""); + + const resetForm = () => { + setName(""); + setDescription(""); + setDatabaseId(""); + setDefaultSchema(""); + }; + + const handleSubmit = (e: React.FormEvent) => { + e.preventDefault(); + if (!name.trim() || !databaseId) return; + + onSubmit({ + databaseId, + name: name.trim(), + description: description.trim() || undefined, + defaultSchema: defaultSchema.trim() || undefined, + }); + + resetForm(); + }; + + return ( + { + onOpenChange(isOpen); + if (!isOpen) resetForm(); + }} + > + + + + + Create Project + + + Create a project to save schema, ER diagrams, and queries offline. + + + +
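// Usage sketch (assumption: the parent wiring is not part of this diff): the dialog
// calls onSubmit with trimmed values and omits empty optional fields, so a parent
// can forward the payload directly to whatever create-project call it owns.
type CreateProjectPayload = {
    databaseId: string;
    name: string;
    description?: string;
    defaultSchema?: string;
};

const handleCreate = (data: CreateProjectPayload) => {
    // e.g. hand off to a mutation or bridge call owned by the parent component
    console.debug("[CreateProject] submit", data);
};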
+ {/* Project Name */} +
+ + setName(e.target.value)} + autoFocus + /> +
+ + {/* Linked Database */} +
+ + +
+ + {/* Default Schema */} +
+ + setDefaultSchema(e.target.value)} + /> +
+ + {/* Description */} +
+ +