diff --git a/bridge/src/connectors/mariadb.ts b/bridge/src/connectors/mariadb.ts index 896de96..8ce9151 100644 --- a/bridge/src/connectors/mariadb.ts +++ b/bridge/src/connectors/mariadb.ts @@ -7,7 +7,7 @@ import mysql, { import { loadLocalMigrations, writeBaselineMigration } from "../utils/baselineMigration"; import crypto from "crypto"; import fs from "fs"; -import { ensureDir, getMigrationsDir } from "../services/dbStore"; +import { ensureDir, getMigrationsDir } from "../utils/config"; import { CacheEntry, CACHE_TTL, diff --git a/bridge/src/connectors/mysql.ts b/bridge/src/connectors/mysql.ts index 3500830..72acba9 100644 --- a/bridge/src/connectors/mysql.ts +++ b/bridge/src/connectors/mysql.ts @@ -7,7 +7,7 @@ import mysql, { import { loadLocalMigrations, writeBaselineMigration } from "../utils/baselineMigration"; import crypto from "crypto"; import fs from "fs"; -import { ensureDir, getMigrationsDir } from "../services/dbStore"; +import { ensureDir, getMigrationsDir } from "../utils/config"; import { CacheEntry, CACHE_TTL, diff --git a/bridge/src/connectors/postgres.ts b/bridge/src/connectors/postgres.ts index 58654c1..19084dc 100644 --- a/bridge/src/connectors/postgres.ts +++ b/bridge/src/connectors/postgres.ts @@ -5,7 +5,7 @@ import { Readable } from "stream"; import { loadLocalMigrations, writeBaselineMigration } from "../utils/baselineMigration"; import crypto from "crypto"; import fs from "fs"; -import { ensureDir, getMigrationsDir } from "../services/dbStore"; +import { ensureDir, getMigrationsDir } from "../utils/config"; import { CacheEntry, CACHE_TTL, diff --git a/bridge/src/handlers/migrationHandlers.ts b/bridge/src/handlers/migrationHandlers.ts index 4f18b58..686740d 100644 --- a/bridge/src/handlers/migrationHandlers.ts +++ b/bridge/src/handlers/migrationHandlers.ts @@ -2,7 +2,7 @@ import { Rpc } from "../types"; import { DatabaseService } from "../services/databaseService"; import { QueryExecutor } from "../services/queryExecutor"; import { Logger } from "pino"; -import { getMigrationsDir } from "../services/dbStore"; +import { getMigrationsDir } from "../utils/config"; import path from "path"; import fs from "fs"; diff --git a/bridge/src/handlers/projectHandlers.ts b/bridge/src/handlers/projectHandlers.ts new file mode 100644 index 0000000..0858c05 --- /dev/null +++ b/bridge/src/handlers/projectHandlers.ts @@ -0,0 +1,329 @@ +import { Rpc } from "../types"; +import { Logger } from "pino"; +import { projectStoreInstance } from "../services/projectStore"; + +/** + * RPC handlers for project CRUD and sub-resource operations. + * Mirrors the DatabaseHandlers pattern. 
+ */ +export class ProjectHandlers { + constructor( + private rpc: Rpc, + private logger: Logger + ) { } + + + async handleListProjects(_params: any, id: number | string) { + try { + const projects = await projectStoreInstance.listProjects(); + this.rpc.sendResponse(id, { ok: true, data: projects }); + } catch (e: any) { + this.logger?.error({ e }, "project.list failed"); + this.rpc.sendError(id, { code: "IO_ERROR", message: String(e) }); + } + } + + async handleGetProject(params: any, id: number | string) { + try { + const { id: projectId } = params || {}; + if (!projectId) { + return this.rpc.sendError(id, { + code: "BAD_REQUEST", + message: "Missing id", + }); + } + + const project = await projectStoreInstance.getProject(projectId); + if (!project) { + return this.rpc.sendError(id, { + code: "NOT_FOUND", + message: "Project not found", + }); + } + + this.rpc.sendResponse(id, { ok: true, data: project }); + } catch (e: any) { + this.logger?.error({ e }, "project.get failed"); + this.rpc.sendError(id, { code: "IO_ERROR", message: String(e) }); + } + } + + async handleGetProjectByDatabaseId(params: any, id: number | string) { + try { + const { databaseId } = params || {}; + if (!databaseId) { + return this.rpc.sendError(id, { + code: "BAD_REQUEST", + message: "Missing databaseId", + }); + } + + const project = await projectStoreInstance.getProjectByDatabaseId(databaseId); + // Return null (not an error) when no project is linked + this.rpc.sendResponse(id, { ok: true, data: project }); + } catch (e: any) { + this.logger?.error({ e }, "project.getByDatabaseId failed"); + this.rpc.sendError(id, { code: "IO_ERROR", message: String(e) }); + } + } + + async handleCreateProject(params: any, id: number | string) { + try { + const { databaseId, name, description, defaultSchema } = params || {}; + if (!databaseId || !name) { + return this.rpc.sendError(id, { + code: "BAD_REQUEST", + message: "Missing databaseId or name", + }); + } + + const project = await projectStoreInstance.createProject({ + databaseId, + name, + description, + defaultSchema, + }); + + this.rpc.sendResponse(id, { ok: true, data: project }); + } catch (e: any) { + this.logger?.error({ e }, "project.create failed"); + this.rpc.sendError(id, { code: "IO_ERROR", message: String(e) }); + } + } + + async handleUpdateProject(params: any, id: number | string) { + try { + const { id: projectId, ...updates } = params || {}; + if (!projectId) { + return this.rpc.sendError(id, { + code: "BAD_REQUEST", + message: "Missing id", + }); + } + + const project = await projectStoreInstance.updateProject(projectId, updates); + if (!project) { + return this.rpc.sendError(id, { + code: "NOT_FOUND", + message: "Project not found", + }); + } + + this.rpc.sendResponse(id, { ok: true, data: project }); + } catch (e: any) { + this.logger?.error({ e }, "project.update failed"); + this.rpc.sendError(id, { code: "IO_ERROR", message: String(e) }); + } + } + + async handleDeleteProject(params: any, id: number | string) { + try { + const { id: projectId } = params || {}; + if (!projectId) { + return this.rpc.sendError(id, { + code: "BAD_REQUEST", + message: "Missing id", + }); + } + + await projectStoreInstance.deleteProject(projectId); + this.rpc.sendResponse(id, { ok: true }); + } catch (e: any) { + this.logger?.error({ e }, "project.delete failed"); + this.rpc.sendError(id, { code: "IO_ERROR", message: String(e) }); + } + } + + async handleGetSchema(params: any, id: number | string) { + try { + const { projectId } = params || {}; + if (!projectId) { + return 
this.rpc.sendError(id, { + code: "BAD_REQUEST", + message: "Missing projectId", + }); + } + + const schema = await projectStoreInstance.getSchema(projectId); + this.rpc.sendResponse(id, { ok: true, data: schema }); + } catch (e: any) { + this.logger?.error({ e }, "project.getSchema failed"); + this.rpc.sendError(id, { code: "IO_ERROR", message: String(e) }); + } + } + + async handleSaveSchema(params: any, id: number | string) { + try { + const { projectId, schemas } = params || {}; + if (!projectId || !schemas) { + return this.rpc.sendError(id, { + code: "BAD_REQUEST", + message: "Missing projectId or schemas", + }); + } + + const result = await projectStoreInstance.saveSchema(projectId, schemas); + this.rpc.sendResponse(id, { ok: true, data: result }); + } catch (e: any) { + this.logger?.error({ e }, "project.saveSchema failed"); + this.rpc.sendError(id, { code: "IO_ERROR", message: String(e) }); + } + } + + async handleGetERDiagram(params: any, id: number | string) { + try { + const { projectId } = params || {}; + if (!projectId) { + return this.rpc.sendError(id, { + code: "BAD_REQUEST", + message: "Missing projectId", + }); + } + + const diagram = await projectStoreInstance.getERDiagram(projectId); + this.rpc.sendResponse(id, { ok: true, data: diagram }); + } catch (e: any) { + this.logger?.error({ e }, "project.getERDiagram failed"); + this.rpc.sendError(id, { code: "IO_ERROR", message: String(e) }); + } + } + + async handleSaveERDiagram(params: any, id: number | string) { + try { + const { projectId, nodes, zoom, panX, panY } = params || {}; + if (!projectId || !nodes) { + return this.rpc.sendError(id, { + code: "BAD_REQUEST", + message: "Missing projectId or nodes", + }); + } + + const result = await projectStoreInstance.saveERDiagram(projectId, { + nodes, + zoom, + panX, + panY, + }); + this.rpc.sendResponse(id, { ok: true, data: result }); + } catch (e: any) { + this.logger?.error({ e }, "project.saveERDiagram failed"); + this.rpc.sendError(id, { code: "IO_ERROR", message: String(e) }); + } + } + + async handleGetQueries(params: any, id: number | string) { + try { + const { projectId } = params || {}; + if (!projectId) { + return this.rpc.sendError(id, { + code: "BAD_REQUEST", + message: "Missing projectId", + }); + } + + const queries = await projectStoreInstance.getQueries(projectId); + this.rpc.sendResponse(id, { ok: true, data: queries }); + } catch (e: any) { + this.logger?.error({ e }, "project.getQueries failed"); + this.rpc.sendError(id, { code: "IO_ERROR", message: String(e) }); + } + } + + async handleAddQuery(params: any, id: number | string) { + try { + const { projectId, name, sql, description } = params || {}; + if (!projectId || !name || !sql) { + return this.rpc.sendError(id, { + code: "BAD_REQUEST", + message: "Missing projectId, name, or sql", + }); + } + + const query = await projectStoreInstance.addQuery(projectId, { + name, + sql, + description, + }); + this.rpc.sendResponse(id, { ok: true, data: query }); + } catch (e: any) { + this.logger?.error({ e }, "project.addQuery failed"); + this.rpc.sendError(id, { code: "IO_ERROR", message: String(e) }); + } + } + + async handleUpdateQuery(params: any, id: number | string) { + try { + const { projectId, queryId, ...updates } = params || {}; + if (!projectId || !queryId) { + return this.rpc.sendError(id, { + code: "BAD_REQUEST", + message: "Missing projectId or queryId", + }); + } + + const query = await projectStoreInstance.updateQuery( + projectId, + queryId, + updates + ); + if (!query) { + return 
this.rpc.sendError(id, { + code: "NOT_FOUND", + message: "Query not found", + }); + } + + this.rpc.sendResponse(id, { ok: true, data: query }); + } catch (e: any) { + this.logger?.error({ e }, "project.updateQuery failed"); + this.rpc.sendError(id, { code: "IO_ERROR", message: String(e) }); + } + } + + async handleDeleteQuery(params: any, id: number | string) { + try { + const { projectId, queryId } = params || {}; + if (!projectId || !queryId) { + return this.rpc.sendError(id, { + code: "BAD_REQUEST", + message: "Missing projectId or queryId", + }); + } + + await projectStoreInstance.deleteQuery(projectId, queryId); + this.rpc.sendResponse(id, { ok: true }); + } catch (e: any) { + this.logger?.error({ e }, "project.deleteQuery failed"); + this.rpc.sendError(id, { code: "IO_ERROR", message: String(e) }); + } + } + + // ========================================== + // Export (for future git-native support) + // ========================================== + + async handleExportProject(params: any, id: number | string) { + try { + const { projectId } = params || {}; + if (!projectId) { + return this.rpc.sendError(id, { + code: "BAD_REQUEST", + message: "Missing projectId", + }); + } + + const bundle = await projectStoreInstance.exportProject(projectId); + if (!bundle) { + return this.rpc.sendError(id, { + code: "NOT_FOUND", + message: "Project not found", + }); + } + + this.rpc.sendResponse(id, { ok: true, data: bundle }); + } catch (e: any) { + this.logger?.error({ e }, "project.export failed"); + this.rpc.sendError(id, { code: "IO_ERROR", message: String(e) }); + } + } +} diff --git a/bridge/src/jsonRpcHandler.ts b/bridge/src/jsonRpcHandler.ts index 5c45512..fb33bcc 100644 --- a/bridge/src/jsonRpcHandler.ts +++ b/bridge/src/jsonRpcHandler.ts @@ -7,6 +7,7 @@ import { DatabaseHandlers } from "./handlers/databaseHandlers"; import { SessionHandlers } from "./handlers/sessionHandlers"; import { StatsHandlers } from "./handlers/statsHandlers"; import { MigrationHandlers } from "./handlers/migrationHandlers"; +import { ProjectHandlers } from "./handlers/projectHandlers"; import { discoveryService } from "./services/discoveryService"; import { Logger } from "pino"; @@ -52,6 +53,7 @@ export function registerDbHandlers( dbService, queryExecutor ); + const projectHandlers = new ProjectHandlers(rpc, logger); // ========================================== // SESSION MANAGEMENT HANDLERS @@ -168,6 +170,55 @@ export function registerDbHandlers( statsHandlers.handleGetTotalStats(p, id) ); + // ========================================== + // PROJECT HANDLERS + // ========================================== + rpcRegister("project.list", (p, id) => + projectHandlers.handleListProjects(p, id) + ); + rpcRegister("project.get", (p, id) => + projectHandlers.handleGetProject(p, id) + ); + rpcRegister("project.getByDatabaseId", (p, id) => + projectHandlers.handleGetProjectByDatabaseId(p, id) + ); + rpcRegister("project.create", (p, id) => + projectHandlers.handleCreateProject(p, id) + ); + rpcRegister("project.update", (p, id) => + projectHandlers.handleUpdateProject(p, id) + ); + rpcRegister("project.delete", (p, id) => + projectHandlers.handleDeleteProject(p, id) + ); + rpcRegister("project.getSchema", (p, id) => + projectHandlers.handleGetSchema(p, id) + ); + rpcRegister("project.saveSchema", (p, id) => + projectHandlers.handleSaveSchema(p, id) + ); + rpcRegister("project.getERDiagram", (p, id) => + projectHandlers.handleGetERDiagram(p, id) + ); + rpcRegister("project.saveERDiagram", (p, id) => + 
projectHandlers.handleSaveERDiagram(p, id) + ); + rpcRegister("project.getQueries", (p, id) => + projectHandlers.handleGetQueries(p, id) + ); + rpcRegister("project.addQuery", (p, id) => + projectHandlers.handleAddQuery(p, id) + ); + rpcRegister("project.updateQuery", (p, id) => + projectHandlers.handleUpdateQuery(p, id) + ); + rpcRegister("project.deleteQuery", (p, id) => + projectHandlers.handleDeleteQuery(p, id) + ); + rpcRegister("project.export", (p, id) => + projectHandlers.handleExportProject(p, id) + ); + // ========================================== // DATABASE DISCOVERY HANDLERS // ========================================== diff --git a/bridge/src/services/dbStore.ts b/bridge/src/services/dbStore.ts index b65757d..e452361 100644 --- a/bridge/src/services/dbStore.ts +++ b/bridge/src/services/dbStore.ts @@ -9,34 +9,9 @@ import fsSync from "fs"; import { v4 as uuidv4 } from "uuid"; import { createCipheriv, createDecipheriv, randomBytes, scrypt } from "crypto"; import { promisify } from "util"; - +import { CONFIG_FOLDER, CONFIG_FILE, CREDENTIALS_FILE } from "../utils/config"; const scryptAsync = promisify(scrypt); -export const CONFIG_FOLDER = - process.env.RELWAVE_HOME || - path.join( - os.homedir(), - process.platform === "win32" - ? "AppData\\Roaming\\relwave" - : ".relwave" - ); - -export const CONFIG_FILE = path.join(CONFIG_FOLDER, "databases.json"); -export const CREDENTIALS_FILE = path.join(CONFIG_FOLDER, ".credentials"); - -export function getConnectionDir(connectionId: string) { - return path.join(CONFIG_FOLDER, "connections", connectionId); -} - -export function getMigrationsDir(connectionId: string) { - return path.join(CONFIG_FOLDER, "migrations", connectionId); -} - -export function ensureDir(dir: string) { - if (!fsSync.existsSync(dir)) { - fsSync.mkdirSync(dir, { recursive: true }); - } -} // Use machine-specific key for encryption diff --git a/bridge/src/services/projectStore.ts b/bridge/src/services/projectStore.ts new file mode 100644 index 0000000..115091a --- /dev/null +++ b/bridge/src/services/projectStore.ts @@ -0,0 +1,512 @@ +// ---------------------------- +// services/projectStore.ts +// ---------------------------- + +import path from "path"; +import fs from "fs/promises"; +import fsSync from "fs"; +import { v4 as uuidv4 } from "uuid"; +import { + PROJECTS_FOLDER, + PROJECTS_INDEX_FILE, + getProjectDir, + ensureDir, +} from "../utils/config"; +import { dbStoreInstance, DBMeta } from "./dbStore"; + +// ========================================== +// Types +// ========================================== + +export type ProjectMetadata = { + version: number; + id: string; + databaseId: string; + name: string; + description?: string; + engine?: string; + defaultSchema?: string; + createdAt: string; + updatedAt: string; +}; + +export type SavedQuery = { + id: string; + name: string; + sql: string; + description?: string; + createdAt: string; + updatedAt: string; +}; + +export type QueriesFile = { + version: number; + projectId: string; + queries: SavedQuery[]; +}; + +export type ERNode = { + tableId: string; + x: number; + y: number; + width?: number; + height?: number; + collapsed?: boolean; +}; + +export type ERDiagramFile = { + version: number; + projectId: string; + nodes: ERNode[]; + zoom?: number; + panX?: number; + panY?: number; + updatedAt: string; +}; + +export type SchemaFile = { + version: number; + projectId: string; + databaseId: string; + schemas: SchemaSnapshot[]; + cachedAt: string; +}; + +export type SchemaSnapshot = { + name: string; + tables: 
TableSnapshot[];
+};
+
+export type TableSnapshot = {
+  name: string;
+  type: string;
+  columns: ColumnSnapshot[];
+};
+
+export type ColumnSnapshot = {
+  name: string;
+  type: string;
+  nullable: boolean;
+  isPrimaryKey: boolean;
+  isForeignKey: boolean;
+  defaultValue: string | null;
+  isUnique: boolean;
+};
+
+export type ProjectSummary = Pick<
+  ProjectMetadata,
+  "id" | "name" | "description" | "engine" | "databaseId" | "createdAt" | "updatedAt"
+>;
+
+type ProjectIndex = {
+  version: number;
+  projects: ProjectSummary[];
+};
+
+const PROJECT_FILES = {
+  metadata: "relwave.json",
+  schema: path.join("schema", "schema.json"),
+  erDiagram: path.join("diagrams", "er.json"),
+  queries: path.join("queries", "queries.json"),
+} as const;
+
+export class ProjectStore {
+  private projectsFolder: string;
+  private indexFile: string;
+
+  constructor(
+    projectsFolder: string = PROJECTS_FOLDER,
+    indexFile: string = PROJECTS_INDEX_FILE
+  ) {
+    this.projectsFolder = projectsFolder;
+    this.indexFile = indexFile;
+  }
+
+  private projectDir(projectId: string): string {
+    return getProjectDir(projectId);
+  }
+
+  private projectFile(projectId: string, file: string): string {
+    return path.join(this.projectDir(projectId), file);
+  }
+
+  /**
+   * Ensure the project directory and sub-folders exist
+   */
+  private async ensureProjectDirs(projectId: string): Promise<void> {
+    const base = this.projectDir(projectId);
+    ensureDir(base);
+    ensureDir(path.join(base, "schema"));
+    ensureDir(path.join(base, "diagrams"));
+    ensureDir(path.join(base, "queries"));
+  }
+
+  /**
+   * Read and parse a JSON file, returns null if missing
+   */
+  private async readJSON<T>(filePath: string): Promise<T | null> {
+    try {
+      if (!fsSync.existsSync(filePath)) return null;
+      const raw = await fs.readFile(filePath, "utf-8");
+      return JSON.parse(raw) as T;
+    } catch {
+      return null;
+    }
+  }
+
+  /**
+   * Write JSON atomically (write to tmp then rename)
+   */
+  private async writeJSON(filePath: string, data: unknown): Promise<void> {
+    const dir = path.dirname(filePath);
+    ensureDir(dir);
+    const tmp = `${filePath}.${process.pid}.${uuidv4()}.tmp`;
+    await fs.writeFile(tmp, JSON.stringify(data, null, 2), "utf-8");
+    await fs.rename(tmp, filePath);
+  }
+
+  private async loadIndex(): Promise<ProjectIndex> {
+    const data = await this.readJSON<ProjectIndex>(this.indexFile);
+    return data ?? { version: 1, projects: [] };
+  }
+
+  private async saveIndex(index: ProjectIndex): Promise<void> {
+    ensureDir(this.projectsFolder);
+    await this.writeJSON(this.indexFile, index);
+  }
+
+  /**
+   * List all projects (lightweight, from index)
+   */
+  async listProjects(): Promise<ProjectSummary[]> {
+    const index = await this.loadIndex();
+    return index.projects;
+  }
+
+  /**
+   * Get full project metadata
+   */
+  async getProject(projectId: string): Promise<ProjectMetadata | null> {
+    return this.readJSON<ProjectMetadata>(
+      this.projectFile(projectId, PROJECT_FILES.metadata)
+    );
+  }
+
+  /**
+   * Find a project linked to a specific database ID.
+   * Returns the first matching project or null.
+   */
+  async getProjectByDatabaseId(databaseId: string): Promise<ProjectMetadata | null> {
+    const index = await this.loadIndex();
+    const entry = index.projects.find((p) => p.databaseId === databaseId);
+    if (!entry) return null;
+    return this.getProject(entry.id);
+  }
+
+  /**
+   * Create a new project linked to a database connection
+   */
+  async createProject(params: {
+    databaseId: string;
+    name: string;
+    description?: string;
+    defaultSchema?: string;
+  }): Promise<ProjectMetadata> {
+    // Resolve engine from the linked database
+    let engine: string | undefined;
+    try {
+      const db: DBMeta | null = await dbStoreInstance.getDB(params.databaseId);
+      engine = db?.type;
+    } catch {
+      // db may not exist yet — that's OK
+    }
+
+    const id = uuidv4();
+    const now = new Date().toISOString();
+
+    const meta: ProjectMetadata = {
+      version: 1,
+      id,
+      databaseId: params.databaseId,
+      name: params.name,
+      description: params.description,
+      engine,
+      defaultSchema: params.defaultSchema,
+      createdAt: now,
+      updatedAt: now,
+    };
+
+    // Create project directory structure
+    await this.ensureProjectDirs(id);
+
+    // Write metadata
+    await this.writeJSON(
+      this.projectFile(id, PROJECT_FILES.metadata),
+      meta
+    );
+
+    // Initialise empty sub-files
+    const emptySchema: SchemaFile = {
+      version: 1,
+      projectId: id,
+      databaseId: params.databaseId,
+      schemas: [],
+      cachedAt: now,
+    };
+    const emptyER: ERDiagramFile = {
+      version: 1,
+      projectId: id,
+      nodes: [],
+      updatedAt: now,
+    };
+    const emptyQueries: QueriesFile = {
+      version: 1,
+      projectId: id,
+      queries: [],
+    };
+
+    await Promise.all([
+      this.writeJSON(this.projectFile(id, PROJECT_FILES.schema), emptySchema),
+      this.writeJSON(this.projectFile(id, PROJECT_FILES.erDiagram), emptyER),
+      this.writeJSON(this.projectFile(id, PROJECT_FILES.queries), emptyQueries),
+    ]);
+
+    // Update global index
+    const index = await this.loadIndex();
+    index.projects.push({
+      id,
+      name: meta.name,
+      description: meta.description,
+      engine,
+      databaseId: meta.databaseId,
+      createdAt: now,
+      updatedAt: now,
+    });
+    await this.saveIndex(index);
+
+    return meta;
+  }
+
+  /**
+   * Update project metadata (name, description, defaultSchema)
+   */
+  async updateProject(
+    projectId: string,
+    updates: Partial<Pick<ProjectMetadata, "name" | "description" | "defaultSchema">>
+  ): Promise<ProjectMetadata | null> {
+    const meta = await this.getProject(projectId);
+    if (!meta) return null;
+
+    const now = new Date().toISOString();
+
+    // Whitelist only allowed fields from updates to avoid overwriting
+    // sensitive metadata (e.g., id, databaseId, version, timestamps).
+    const { name, description, defaultSchema } = updates;
+    const safeUpdates: Partial<Pick<ProjectMetadata, "name" | "description" | "defaultSchema">> = {};
+    if (name !== undefined) {
+      safeUpdates.name = name;
+    }
+    if (description !== undefined) {
+      safeUpdates.description = description;
+    }
+    if (defaultSchema !== undefined) {
+      safeUpdates.defaultSchema = defaultSchema;
+    }
+
+    const updated: ProjectMetadata = {
+      ...meta,
+      ...safeUpdates,
+      updatedAt: now,
+    };
+
+    await this.writeJSON(
+      this.projectFile(projectId, PROJECT_FILES.metadata),
+      updated
+    );
+
+    // Sync the index entry
+    const index = await this.loadIndex();
+    const entry = index.projects.find((p) => p.id === projectId);
+    if (entry) {
+      if (updates.name !== undefined) entry.name = updates.name;
+      if (updates.description !== undefined) entry.description = updates.description;
+      entry.updatedAt = now;
+      await this.saveIndex(index);
+    }
+
+    return updated;
+  }
+
+  /**
+   * Delete a project and its directory
+   */
+  async deleteProject(projectId: string): Promise<void> {
+    const dir = this.projectDir(projectId);
+    if (fsSync.existsSync(dir)) {
+      await fs.rm(dir, { recursive: true, force: true });
+    }
+
+    // Remove from index
+    const index = await this.loadIndex();
+    index.projects = index.projects.filter((p) => p.id !== projectId);
+    await this.saveIndex(index);
+  }
+
+  async getSchema(projectId: string): Promise<SchemaFile | null> {
+    return this.readJSON<SchemaFile>(
+      this.projectFile(projectId, PROJECT_FILES.schema)
+    );
+  }
+
+  async saveSchema(projectId: string, schemas: SchemaSnapshot[]): Promise<SchemaFile> {
+    const meta = await this.getProject(projectId);
+    if (!meta) throw new Error(`Project ${projectId} not found`);
+
+    const now = new Date().toISOString();
+    const file: SchemaFile = {
+      version: 1,
+      projectId,
+      databaseId: meta.databaseId,
+      schemas,
+      cachedAt: now,
+    };
+
+    await this.writeJSON(
+      this.projectFile(projectId, PROJECT_FILES.schema),
+      file
+    );
+
+    return file;
+  }
+
+  async getERDiagram(projectId: string): Promise<ERDiagramFile | null> {
+    return this.readJSON<ERDiagramFile>(
+      this.projectFile(projectId, PROJECT_FILES.erDiagram)
+    );
+  }
+
+  async saveERDiagram(
+    projectId: string,
+    data: Pick<ERDiagramFile, "nodes" | "zoom" | "panX" | "panY">
+  ): Promise<ERDiagramFile> {
+    const now = new Date().toISOString();
+    const file: ERDiagramFile = {
+      version: 1,
+      projectId,
+      nodes: data.nodes,
+      zoom: data.zoom,
+      panX: data.panX,
+      panY: data.panY,
+      updatedAt: now,
+    };
+
+    await this.writeJSON(
+      this.projectFile(projectId, PROJECT_FILES.erDiagram),
+      file
+    );
+
+    return file;
+  }
+
+  async getQueries(projectId: string): Promise<QueriesFile | null> {
+    return this.readJSON<QueriesFile>(
+      this.projectFile(projectId, PROJECT_FILES.queries)
+    );
+  }
+
+  async addQuery(
+    projectId: string,
+    params: { name: string; sql: string; description?: string }
+  ): Promise<SavedQuery> {
+    const file = (await this.getQueries(projectId)) ?? {
+      version: 1,
+      projectId,
+      queries: [],
+    };
+
+    const now = new Date().toISOString();
+    const query: SavedQuery = {
+      id: uuidv4(),
+      name: params.name,
+      sql: params.sql,
+      description: params.description,
+      createdAt: now,
+      updatedAt: now,
+    };
+
+    file.queries.push(query);
+
+    await this.writeJSON(
+      this.projectFile(projectId, PROJECT_FILES.queries),
+      file
+    );
+
+    return query;
+  }
+
+  async updateQuery(
+    projectId: string,
+    queryId: string,
+    updates: Partial<Pick<SavedQuery, "name" | "sql" | "description">>
+  ): Promise<SavedQuery | null> {
+    const file = await this.getQueries(projectId);
+    if (!file) return null;
+
+    const idx = file.queries.findIndex((q) => q.id === queryId);
+    if (idx === -1) return null;
+
+    const now = new Date().toISOString();
+    file.queries[idx] = {
+      ...file.queries[idx],
+      ...updates,
+      updatedAt: now,
+    };
+
+    await this.writeJSON(
+      this.projectFile(projectId, PROJECT_FILES.queries),
+      file
+    );
+
+    return file.queries[idx];
+  }
+
+  async deleteQuery(projectId: string, queryId: string): Promise<void> {
+    const file = await this.getQueries(projectId);
+    if (!file) return;
+
+    file.queries = file.queries.filter((q) => q.id !== queryId);
+
+    await this.writeJSON(
+      this.projectFile(projectId, PROJECT_FILES.queries),
+      file
+    );
+  }
+
+  /**
+   * Returns the full project bundle — useful for export / git commit
+   */
+  async exportProject(projectId: string): Promise<{
+    metadata: ProjectMetadata;
+    schema: SchemaFile | null;
+    erDiagram: ERDiagramFile | null;
+    queries: QueriesFile | null;
+  } | null> {
+    const metadata = await this.getProject(projectId);
+    if (!metadata) return null;
+
+    const [schema, erDiagram, queries] = await Promise.all([
+      this.getSchema(projectId),
+      this.getERDiagram(projectId),
+      this.getQueries(projectId),
+    ]);
+
+    return { metadata, schema, erDiagram, queries };
+  }
+}
+
+// Singleton instance
+export const projectStoreInstance = new ProjectStore();
\ No newline at end of file
diff --git a/bridge/src/utils/config.ts b/bridge/src/utils/config.ts
new file mode 100644
index 0000000..c1435ec
--- /dev/null
+++ b/bridge/src/utils/config.ts
@@ -0,0 +1,37 @@
+import path from "path";
+import os from "os";
+import fsSync from "fs";
+
+export const CONFIG_FOLDER =
+  process.env.RELWAVE_HOME ||
+  path.join(
+    os.homedir(),
+    process.platform === "win32"
+      ?
"AppData\\Roaming\\relwave" + : ".relwave" + ); + +export const CONFIG_FILE = path.join(CONFIG_FOLDER, "databases.json"); +export const CREDENTIALS_FILE = path.join(CONFIG_FOLDER, ".credentials"); + + +export const PROJECTS_FOLDER = path.join(CONFIG_FOLDER, "projects"); +export const PROJECTS_INDEX_FILE = path.join(PROJECTS_FOLDER, "index.json"); + +export function getConnectionDir(connectionId: string) { + return path.join(CONFIG_FOLDER, "connections", connectionId); +} + +export function getMigrationsDir(connectionId: string) { + return path.join(CONFIG_FOLDER, "migrations", connectionId); +} + +export function getProjectDir(projectId: string) { + return path.join(PROJECTS_FOLDER, projectId); +} + +export function ensureDir(dir: string) { + if (!fsSync.existsSync(dir)) { + fsSync.mkdirSync(dir, { recursive: true }); + } +} \ No newline at end of file diff --git a/src/components/common/VerticalIconBar.tsx b/src/components/common/VerticalIconBar.tsx index b77bc86..bd11cfe 100644 --- a/src/components/common/VerticalIconBar.tsx +++ b/src/components/common/VerticalIconBar.tsx @@ -1,4 +1,4 @@ -import { Home, Database, Search, GitBranch, Settings, Layers, Terminal } from 'lucide-react'; +import { Home, Database, Search, GitBranch, Settings, Layers, Terminal, FolderOpen } from 'lucide-react'; import { Link, useLocation } from 'react-router-dom'; import { Button } from '@/components/ui/button'; import { @@ -17,6 +17,7 @@ interface VerticalIconBarProps { const globalNavigationItems = [ { icon: Home, label: 'Dashboard', path: '/' }, + { icon: FolderOpen, label: 'Projects', path: '/projects' }, { icon: Settings, label: 'Settings', path: '/settings' }, ]; diff --git a/src/components/er-diagram/ERDiagramContent.tsx b/src/components/er-diagram/ERDiagramContent.tsx index 5261db7..e1adab1 100644 --- a/src/components/er-diagram/ERDiagramContent.tsx +++ b/src/components/er-diagram/ERDiagramContent.tsx @@ -1,6 +1,6 @@ import { toPng, toSvg } from "html-to-image"; -import { ArrowLeft, ChevronDown, Database, Download, Filter, LayoutGrid, Layers, Search, X } from "lucide-react"; -import { useCallback, useEffect, useMemo, useState } from "react"; +import { ChevronDown, Cloud, Database, Download, Filter, HardDrive, LayoutGrid, Layers, RefreshCw, Search, WifiOff, X } from "lucide-react"; +import { useCallback, useEffect, useMemo, useRef, useState } from "react"; import { Link, useParams } from "react-router-dom"; import { Background, @@ -18,8 +18,10 @@ import { import { toast } from "sonner"; import { transformSchemaToER } from "@/lib/schemaTransformer"; import { Spinner } from "@/components/ui/spinner"; -import { useFullSchema } from "@/hooks/useDbQueries"; +import { useERDiagramData } from "@/hooks/useERDiagramData"; +import { bridgeApi } from "@/services/bridgeApi"; import { ColumnDetails, DatabaseSchemaDetails, ForeignKeyInfo, TableSchemaDetails } from "@/types/database"; +import type { ERNode } from "@/types/project"; import { Tooltip, TooltipContent, @@ -55,9 +57,12 @@ interface ERDiagramContentProps { nodeTypes: { table: React.FC<{ data: TableNodeData }>; }; + projectId?: string | null; } -const ERDiagramContent: React.FC = ({ nodeTypes }) => { +const ER_SAVE_DEBOUNCE_MS = 2000; + +const ERDiagramContent: React.FC = ({ nodeTypes, projectId }) => { const { id: dbId } = useParams<{ id: string }>(); const reactFlowInstance = useReactFlow(); @@ -69,17 +74,21 @@ const ERDiagramContent: React.FC = ({ nodeTypes }) => { const [selectedNodeId, setSelectedNodeId] = useState(null); const [hoveredEdge, setHoveredEdge] 
= useState(null); const [selectedSchema, setSelectedSchema] = useState("__all__"); + const [isSyncing, setIsSyncing] = useState(false); - // Use React Query for schema data (cached!) + // Use the smart data source hook (offline-first + live fallback) const { - data: schemaData, + schemaData, + savedLayout, isLoading, - error: queryError - } = useFullSchema(dbId); - - - const error = queryError ? (queryError as Error).message : - (schemaData && !schemaData.schemas?.some(s => s.tables?.length)) + dataSource, + hasLiveSchema, + syncFromDatabase, + } = useERDiagramData(dbId, projectId); + + const error = !isLoading && !schemaData + ? "No schema data available. Connect to a database or open a project." + : (schemaData && !schemaData.schemas?.some(s => s.tables?.length)) ? "Schema data found, but no tables to render." : null; @@ -95,7 +104,7 @@ const ERDiagramContent: React.FC = ({ nodeTypes }) => { const filteredSchemaData = useMemo((): DatabaseSchemaDetails | null => { if (!schemaData) return null; if (selectedSchema === "__all__") return schemaData; - + return { ...schemaData, schemas: schemaData.schemas.filter(s => s.name === selectedSchema) @@ -103,18 +112,83 @@ const ERDiagramContent: React.FC = ({ nodeTypes }) => { }, [schemaData, selectedSchema]); // Transform schema to ER nodes/edges when data or filter changes + // Merges saved layout positions with schema data: + // - Table in schema + in saved layout → use saved position + // - Table in schema but NOT in saved layout → auto-place (new table) + // - Table in saved layout but NOT in schema → ignored (removed table) useEffect(() => { if (filteredSchemaData && filteredSchemaData.schemas?.some(s => s.tables?.length)) { - const { nodes: newNodes, edges: newEdges } = transformSchemaToER(filteredSchemaData); + const layoutNodes = savedLayout?.nodes ?? 
null;
+      const { nodes: newNodes, edges: newEdges } = transformSchemaToER(
+        filteredSchemaData,
+        true,
+        layoutNodes
+      );
       setNodes(newNodes as typeof nodes);
       setEdges(newEdges);
       // Fit view after layout change
-      setTimeout(() => reactFlowInstance?.fitView({ padding: 0.2, duration: 300 }), 100);
+      setTimeout(() => {
+        if (savedLayout?.zoom != null && savedLayout?.panX != null && savedLayout?.panY != null) {
+          reactFlowInstance?.setViewport({
+            zoom: savedLayout.zoom,
+            x: savedLayout.panX,
+            y: savedLayout.panY,
+          });
+        } else {
+          reactFlowInstance?.fitView({ padding: 0.2, duration: 300 });
+        }
+      }, 100);
     } else {
       setNodes([]);
       setEdges([]);
     }
-  }, [filteredSchemaData, setNodes, setEdges, reactFlowInstance]);
+  }, [filteredSchemaData, savedLayout, setNodes, setEdges, reactFlowInstance]);
+
+  // -----------------------------------------
+  // Auto-save ER node positions to project
+  // Debounced: only fires ER_SAVE_DEBOUNCE_MS after last node movement
+  // -----------------------------------------
+  const erSaveTimerRef = useRef<ReturnType<typeof setTimeout> | null>(null);
+  const initialLayoutDoneRef = useRef(false);
+
+  // Mark that the initial layout just happened so we skip saving it
+  useEffect(() => {
+    initialLayoutDoneRef.current = false;
+    const id = setTimeout(() => { initialLayoutDoneRef.current = true; }, 800);
+    return () => clearTimeout(id);
+  }, [filteredSchemaData, savedLayout]);
+
+  useEffect(() => {
+    // Don't save during initial layout or if no project linked
+    if (!projectId || !initialLayoutDoneRef.current || nodes.length === 0) return;
+
+    if (erSaveTimerRef.current) clearTimeout(erSaveTimerRef.current);
+
+    erSaveTimerRef.current = setTimeout(() => {
+      const viewport = reactFlowInstance?.getViewport();
+      const erNodes: ERNode[] = nodes.map((n) => ({
+        tableId: n.id,
+        x: n.position.x,
+        y: n.position.y,
+        width: n.width ?? undefined,
+        height: n.height ?? undefined,
+      }));
+
+      bridgeApi
+        .saveProjectERDiagram(projectId, {
+          nodes: erNodes,
+          zoom: viewport?.zoom,
+          panX: viewport?.x,
+          panY: viewport?.y,
+        })
+        .then(() => console.debug("[ProjectSync] ER diagram saved"))
+        .catch((err) => console.warn("[ProjectSync] ER diagram save failed:", err.message));
+    }, ER_SAVE_DEBOUNCE_MS);
+
+    return () => {
+      if (erSaveTimerRef.current) clearTimeout(erSaveTimerRef.current);
+    };
+  }, [nodes, projectId, reactFlowInstance]);
 
   // Filter nodes based on search query
   const filteredNodes = useMemo(() => {
@@ -210,10 +284,10 @@ const ERDiagramContent: React.FC = ({ nodeTypes }) => {
     }
   }, [filteredNodes, reactFlowInstance]);
 
-  // Re-layout with dagre
+  // Re-layout with dagre (ignores saved layout to generate fresh positions)
   const reLayout = useCallback(() => {
     if (filteredSchemaData) {
-      const { nodes: newNodes, edges: newEdges } = transformSchemaToER(filteredSchemaData, true);
+      const { nodes: newNodes, edges: newEdges } = transformSchemaToER(filteredSchemaData, true, null);
       setNodes(newNodes as typeof nodes);
       setEdges(newEdges);
       setTimeout(() => reactFlowInstance?.fitView({ padding: 0.2, duration: 500 }), 100);
@@ -284,15 +358,35 @@ const ERDiagramContent: React.FC = ({ nodeTypes }) => {
   if (error || !schemaData || nodes.length === 0) {
     return (
      <div 
-
+

Diagram Unavailable

{error || "No tables found."}

- - - +
+ {hasLiveSchema && projectId && ( + + )} + + + +
); @@ -308,6 +402,27 @@ const ERDiagramContent: React.FC = ({ nodeTypes }) => { {schemaData.name || 'Database'} ER Diagram + {/* Data source badge */} + + + + {dataSource === "live" ? ( + + ) : ( + + )} + {dataSource === "live" ? "Live" : "Offline"} + + + + {dataSource === "live" + ? "Schema loaded from live database connection" + : "Schema loaded from saved project files (offline)"} + +
{/* Schema filter dropdown */} @@ -385,6 +500,34 @@ const ERDiagramContent: React.FC = ({ nodeTypes }) => { Clear )} + {/* Sync from Database button */} + {projectId && ( + + + + + + Pull fresh schema from database (keeps your layout) + + + )} - Re-layout diagram + Re-layout diagram {["png", "svg"].map((format) => (
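
Usage note (not part of the patch): a minimal sketch of how a frontend or test client might drive the new project.* RPC methods registered above. The method names, parameter shapes, and the { ok, data } response envelope come from this diff; the sendRpc transport helper and the literal values below are assumptions for illustration only.

// Hypothetical transport helper; in the real app this is whatever bridgeApi wraps.
type RpcEnvelope<T> = { ok: boolean; data?: T };
declare function sendRpc<T>(method: string, params?: unknown): Promise<RpcEnvelope<T>>;

async function demoProjectFlow(databaseId: string): Promise<void> {
  // Create a project linked to an existing database connection.
  const created = await sendRpc<{ id: string }>("project.create", {
    databaseId,
    name: "Inventory",
    defaultSchema: "public",
  });
  if (!created.ok || !created.data) return;
  const projectId = created.data.id;

  // Cache a schema snapshot and save a named query against the project.
  await sendRpc("project.saveSchema", { projectId, schemas: [] });
  await sendRpc("project.addQuery", {
    projectId,
    name: "All tables",
    sql: "SELECT * FROM information_schema.tables;",
  });

  // Export the whole bundle (metadata, schema, ER diagram, queries).
  const bundle = await sendRpc("project.export", { projectId });
  console.log(bundle.data);
}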