From 582ef594b194a06a9e7048463ba4b9bc277cd4e3 Mon Sep 17 00:00:00 2001 From: Yash Date: Fri, 30 Jan 2026 22:52:16 +0530 Subject: [PATCH 01/19] feat: Implemented schema based fetching --- bridge/src/connectors/mariadb.ts | 17 ++++++++++++++++ bridge/src/connectors/mysql.ts | 17 ++++++++++++++++ bridge/src/connectors/postgres.ts | 20 ++++++++++++++++++ bridge/src/handlers/databaseHandlers.ts | 20 +++++++++++++++++- bridge/src/jsonRpcHandler.ts | 3 +++ bridge/src/services/queryExecutor.ts | 11 ++++++++++ src/hooks/useDatabaseDetails.ts | 27 ++++++++++++++++++++++--- src/hooks/useDbQueries.ts | 20 ++++++++++++++---- src/pages/DatabaseDetails.tsx | 26 ++++++++++++++++++++++-- src/services/bridgeApi.ts | 18 +++++++++++++++-- 10 files changed, 167 insertions(+), 12 deletions(-) diff --git a/bridge/src/connectors/mariadb.ts b/bridge/src/connectors/mariadb.ts index 5f0c07d..896de96 100644 --- a/bridge/src/connectors/mariadb.ts +++ b/bridge/src/connectors/mariadb.ts @@ -1765,3 +1765,20 @@ export async function searchTable( await pool.end(); } } + +/** + * listSchemaNames: Retrieves just the names of schemas (databases). + * Lightweight version for schema selector. + */ +export async function listSchemaNames(cfg: MariaDBConfig): Promise { + const pool = mysql.createPool(createPoolConfig(cfg)); + const connection = await pool.getConnection(); + + try { + const [rows] = await connection.query(LIST_SCHEMAS); + return (rows as any[]).map((r: any) => r.name); + } finally { + connection.release(); + await pool.end(); + } +} diff --git a/bridge/src/connectors/mysql.ts b/bridge/src/connectors/mysql.ts index 0bc4116..3500830 100644 --- a/bridge/src/connectors/mysql.ts +++ b/bridge/src/connectors/mysql.ts @@ -1740,3 +1740,20 @@ export async function searchTable( await pool.end(); } } + +/** + * listSchemaNames: Retrieves just the names of schemas (databases). + * Lightweight version for schema selector. + */ +export async function listSchemaNames(cfg: MySQLConfig): Promise { + const pool = mysql.createPool(createPoolConfig(cfg)); + const connection = await pool.getConnection(); + + try { + const [rows] = await connection.query(LIST_SCHEMAS); + return (rows as any[]).map((r: any) => r.name); + } finally { + connection.release(); + await pool.end(); + } +} diff --git a/bridge/src/connectors/postgres.ts b/bridge/src/connectors/postgres.ts index e576be3..58654c1 100644 --- a/bridge/src/connectors/postgres.ts +++ b/bridge/src/connectors/postgres.ts @@ -1869,3 +1869,23 @@ export async function searchTable( } catch (_) { } } } + +/** + * listSchemaNames: Retrieves just the names of schemas. + * Lightweight version of listSchemas. 
+ */ +export async function listSchemaNames(connection: PGConfig): Promise { + // Check cache first (re-use schemas cache if available, or a new cache if needed) + // For now, simpler to just query as it's very fast + const client = createClient(connection); + + try { + await client.connect(); + const res = await client.query(PG_LIST_SCHEMAS); + return res.rows.map((r: any) => r.name); + } finally { + try { + await client.end(); + } catch (e) { } + } +} diff --git a/bridge/src/handlers/databaseHandlers.ts b/bridge/src/handlers/databaseHandlers.ts index 3b80a01..4a9c622 100644 --- a/bridge/src/handlers/databaseHandlers.ts +++ b/bridge/src/handlers/databaseHandlers.ts @@ -85,7 +85,7 @@ export class DatabaseHandlers { } const { conn, dbType } = await this.dbService.getDatabaseConnection(dbId); - const tables = await this.queryExecutor.listTables(conn, dbType); + const tables = await this.queryExecutor.listTables(conn, dbType, params.schema); this.rpc.sendResponse(id, { ok: true, data: tables }); } catch (e: any) { this.logger?.error({ e }, "db.listTables failed"); @@ -93,6 +93,24 @@ export class DatabaseHandlers { } } + async handleListSchemas(params: any, id: number | string) { + try { + const { id: dbId } = params || {}; + if (!dbId) { + return this.rpc.sendError(id, { + code: "BAD_REQUEST", + message: "Missing id", + }); + } + const { conn, dbType } = await this.dbService.getDatabaseConnection(dbId); + const schemas = await this.queryExecutor.listSchemaNames(conn, dbType); + this.rpc.sendResponse(id, { ok: true, data: schemas }); + } catch (e: any) { + this.logger?.error({ e }, "db.listSchemas failed"); + this.rpc.sendError(id, { code: "IO_ERROR", message: String(e) }); + } + } + async handleGetSchema(params: any, id: number | string) { try { const { id: dbId, schema } = params || {}; diff --git a/bridge/src/jsonRpcHandler.ts b/bridge/src/jsonRpcHandler.ts index ba92907..5c45512 100644 --- a/bridge/src/jsonRpcHandler.ts +++ b/bridge/src/jsonRpcHandler.ts @@ -131,6 +131,9 @@ export function registerDbHandlers( rpcRegister("db.getSchema", (p, id) => databaseHandlers.handleGetSchema(p, id) ); + rpcRegister("db.listSchemas", (p, id) => + databaseHandlers.handleListSchemas(p, id) + ); // ========================================== // MIGRATION HANDLERS diff --git a/bridge/src/services/queryExecutor.ts b/bridge/src/services/queryExecutor.ts index 8083453..06baeea 100644 --- a/bridge/src/services/queryExecutor.ts +++ b/bridge/src/services/queryExecutor.ts @@ -320,4 +320,15 @@ export class QueryExecutor { }; } } + + async listSchemaNames(conn: DatabaseConfig, dbType: DBType): Promise { + if (dbType === DBType.POSTGRES) { + return this.postgres.listSchemaNames(conn); + } else if (dbType === DBType.MARIADB) { + return this.mariadb.listSchemaNames(conn); + } else if (dbType === DBType.MYSQL) { + return this.mysql.listSchemaNames(conn); + } + return ["public"]; + } } diff --git a/src/hooks/useDatabaseDetails.ts b/src/hooks/useDatabaseDetails.ts index 1f2c277..bf4ac88 100644 --- a/src/hooks/useDatabaseDetails.ts +++ b/src/hooks/useDatabaseDetails.ts @@ -1,7 +1,7 @@ import { useCallback, useEffect, useState } from "react"; import { toast } from "sonner"; import { bridgeApi } from "@/services/bridgeApi"; -import { useDatabase, useTables, useTableData, usePrefetch, useInvalidateCache } from "@/hooks/useDbQueries"; +import { useDatabase, useTables, useTableData, usePrefetch, useInvalidateCache, useSchemaNames } from "@/hooks/useDbQueries"; import { QueryProgress, SelectedTable, TableInfo, TableRow } from 
"@/types/database"; interface UseDatabaseDetailsOptions { @@ -34,6 +34,9 @@ interface UseDatabaseDetailsReturn { handlePageChange: (page: number) => Promise; handlePageSizeChange: (size: number) => Promise; refetchTableData: () => void; + schemas: string[]; + selectedSchema: string; + setSelectedSchema: (schema: string) => void; } export function useDatabaseDetails({ @@ -43,16 +46,31 @@ export function useDatabaseDetails({ const { data: dbDetails } = useDatabase(dbId); const databaseName = dbDetails?.name || "Database"; + const { data: schemas = [] } = useSchemaNames(dbId); + const [selectedSchema, setSelectedSchema] = useState("public"); + + // Auto-select first schema if current one is invalid + useEffect(() => { + if (schemas.length > 0 && !schemas.includes(selectedSchema)) { + // Prefer public if available, otherwise first schema + if (schemas.includes("public")) { + setSelectedSchema("public"); + } else { + setSelectedSchema(schemas[0]); + } + } + }, [schemas, selectedSchema]); + const { data: tablesData = [], isLoading: loadingTables, refetch: refetchTables, isRefetching: isRefetchingTables - } = useTables(dbId); + } = useTables(dbId, selectedSchema); // Transform tables data const tables: TableInfo[] = tablesData.map((item: any) => ({ - schema: item.schema || "public", + schema: item.schema || selectedSchema || "public", name: item.name || "unknown", type: item.type || "table", })); @@ -311,5 +329,8 @@ export function useDatabaseDetails({ handlePageChange, handlePageSizeChange, refetchTableData, + schemas, + selectedSchema, + setSelectedSchema }; } diff --git a/src/hooks/useDbQueries.ts b/src/hooks/useDbQueries.ts index 501c5b9..dc597ab 100644 --- a/src/hooks/useDbQueries.ts +++ b/src/hooks/useDbQueries.ts @@ -12,7 +12,7 @@ export const queryKeys = { database: (id: string) => ["databases", id] as const, // Tables - tables: (dbId: string) => ["tables", dbId] as const, + tables: (dbId: string, schema?: string) => ["tables", dbId, schema || "all"] as const, tableData: (dbId: string, schema: string, table: string, page: number, pageSize: number) => ["tableData", dbId, schema, table, page, pageSize] as const, @@ -93,11 +93,11 @@ export function useMigrations(dbId: string | undefined) { * - Returns cached data instantly if available * - Background refetch if stale */ -export function useTables(dbId: string | undefined) { +export function useTables(dbId: string | undefined, schema?: string) { return useQuery({ - queryKey: queryKeys.tables(dbId!), + queryKey: queryKeys.tables(dbId!, schema), queryFn: async () => { - const result = await bridgeApi.listTables(dbId!); + const result = await bridgeApi.listTables(dbId!, schema); return result.map((item: any): TableInfo => ({ schema: item.schema || "public", name: item.name || "unknown", @@ -110,6 +110,18 @@ export function useTables(dbId: string | undefined) { }); } +/** + * Fetch schema names for a database + */ +export function useSchemaNames(dbId: string | undefined) { + return useQuery({ + queryKey: ["schemaNames", dbId] as const, + queryFn: () => bridgeApi.listSchemas(dbId!), + enabled: !!dbId, + staleTime: STALE_TIMES.schemas, + }); +} + /** * Fetch paginated table data * - Each page is cached separately diff --git a/src/pages/DatabaseDetails.tsx b/src/pages/DatabaseDetails.tsx index 084deb8..a17c215 100644 --- a/src/pages/DatabaseDetails.tsx +++ b/src/pages/DatabaseDetails.tsx @@ -70,6 +70,9 @@ const DatabaseDetail = () => { handlePageChange, handlePageSizeChange, refetchTableData, + schemas, + selectedSchema, + setSelectedSchema, } = 
useDatabaseDetails({ dbId, bridgeReady: bridgeReady ?? false, @@ -159,9 +162,28 @@ const DatabaseDetail = () => { )}
-

{databaseName || 'Database'}

+
+

{databaseName || 'Database'}

+ {schemas.length > 0 && ( + + + + + + {schemas.map(s => ( + setSelectedSchema(s)}> + {s} + + ))} + + + )} +

- {tables.length} tables
+ {tables.length} tables in {selectedSchema}

diff --git a/src/services/bridgeApi.ts b/src/services/bridgeApi.ts index d01cb9a..a0eefe1 100644 --- a/src/services/bridgeApi.ts +++ b/src/services/bridgeApi.ts @@ -289,14 +289,14 @@ class BridgeApiService { /** * List all tables in a database */ - async listTables(id: string): Promise { + async listTables(id: string, schema?: string): Promise { // Changed return type to any[] to match typical result shape [{schema, name, type}] try { if (!id) { throw new Error("Database ID is required"); } - const result = await bridgeRequest("db.listTables", { id }); + const result = await bridgeRequest("db.listTables", { id, schema }); return result?.data || []; } catch (error: any) { console.error("Failed to list tables:", error); @@ -304,6 +304,20 @@ class BridgeApiService { } } + async listSchemas(id: string): Promise { + try { + if (!id) { + throw new Error("Database ID is required"); + } + + const result = await bridgeRequest("db.listSchemas", { id }); + return result?.data || []; + } catch (error: any) { + console.error("Failed to list schemas:", error); + throw new Error(`Failed to list schemas: ${error.message}`); + } + } + /** * Alias for getDatabaseStats - used by useDbQueries hook */ From 4ac13949310e6c866898e788855fef9e786c0d52 Mon Sep 17 00:00:00 2001 From: Yash Date: Sat, 31 Jan 2026 00:03:28 +0530 Subject: [PATCH 02/19] feat: implemented the table creation based on schemas --- bridge/src/utils/migrationGenerator.ts | 19 +++++-------- package.json | 1 + pnpm-lock.yaml | 11 +++++--- src/components/database/MigrationsPanel.tsx | 27 ++++++++++++++++++- .../database/TablesExplorerPanel.tsx | 4 ++- src/pages/DatabaseDetails.tsx | 1 + 6 files changed, 45 insertions(+), 18 deletions(-) diff --git a/bridge/src/utils/migrationGenerator.ts b/bridge/src/utils/migrationGenerator.ts index 3b09692..37e1bd2 100644 --- a/bridge/src/utils/migrationGenerator.ts +++ b/bridge/src/utils/migrationGenerator.ts @@ -58,10 +58,9 @@ export function generateCreateTableMigration(params: { const allDefs = [...[columnDefs], ...fkDefs].filter(Boolean).join(",\n"); - // For MySQL/MariaDB, don't use schema prefix (database is the schema) - const tableRef = (dbType === "mysql" || dbType === "mariadb") - ? quoteIdent(tableName, dbType) - : `${quoteIdent(schemaName, dbType)}.${quoteIdent(tableName, dbType)}`; + // For MySQL/MariaDB, use database.table format (schemas are databases) + // For Postgres, use schema.table format + const tableRef = `${quoteIdent(schemaName, dbType)}.${quoteIdent(tableName, dbType)}`; // Generate UP SQL const upSQL = `CREATE TABLE ${tableRef} ( @@ -94,10 +93,8 @@ export function generateAlterTableMigration(params: { const name = `alter_${tableName}_table`; const filename = `${version}_${name}.sql`; - // For MySQL/MariaDB, don't use schema prefix - const fullTableName = (dbType === "mysql" || dbType === "mariadb") - ? quoteIdent(tableName, dbType) - : `${quoteIdent(schemaName, dbType)}.${quoteIdent(tableName, dbType)}`; + // For all database types, use schema/database prefix + const fullTableName = `${quoteIdent(schemaName, dbType)}.${quoteIdent(tableName, dbType)}`; // Build UP SQL const upStatements: string[] = []; @@ -251,10 +248,8 @@ export function generateDropTableMigration(params: { const name = `drop_${tableName}_table`; const filename = `${version}_${name}.sql`; - // For MySQL/MariaDB, don't use schema prefix - const fullTableName = (dbType === "mysql" || dbType === "mariadb") - ? 
quoteIdent(tableName, dbType) - : `${quoteIdent(schemaName, dbType)}.${quoteIdent(tableName, dbType)}`; + // For all database types, use schema/database prefix + const fullTableName = `${quoteIdent(schemaName, dbType)}.${quoteIdent(tableName, dbType)}`; let upSQL = ""; if (mode === "CASCADE") { diff --git a/package.json b/package.json index 666dd72..8315075 100644 --- a/package.json +++ b/package.json @@ -71,6 +71,7 @@ "@types/react": "^19.1.8", "@types/react-dom": "^19.1.6", "@vitejs/plugin-react": "^4.6.0", + "baseline-browser-mapping": "^2.9.19", "tw-animate-css": "^1.4.0", "typescript": "~5.8.3", "vite": "^7.0.4" diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 4bbe8d8..7ed4353 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -147,6 +147,9 @@ importers: '@vitejs/plugin-react': specifier: ^4.6.0 version: 4.7.0(vite@7.2.4(@types/node@24.10.1)(jiti@2.6.1)(lightningcss@1.30.2)) + baseline-browser-mapping: + specifier: ^2.9.19 + version: 2.9.19 tw-animate-css: specifier: ^1.4.0 version: 1.4.0 @@ -1452,8 +1455,8 @@ packages: resolution: {integrity: sha512-ik3ZgC9dY/lYVVM++OISsaYDeg1tb0VtP5uL3ouh1koGOaUMDPpbFIei4JkFimWUFPn90sbMNMXQAIVOlnYKJA==} engines: {node: '>=10'} - baseline-browser-mapping@2.8.29: - resolution: {integrity: sha512-sXdt2elaVnhpDNRDz+1BDx1JQoJRuNk7oVlAlbGiFkLikHCAQiccexF/9e91zVi6RCgqspl04aP+6Cnl9zRLrA==} + baseline-browser-mapping@2.9.19: + resolution: {integrity: sha512-ipDqC8FrAl/76p2SSWKSI+H9tFwm7vYqXQrItCuiVPt26Km0jS+NzSsBWAaBusvSbQcfJG+JitdMm+wZAgTYqg==} hasBin: true browserslist@4.28.0: @@ -3342,11 +3345,11 @@ snapshots: dependencies: tslib: 2.8.1 - baseline-browser-mapping@2.8.29: {} + baseline-browser-mapping@2.9.19: {} browserslist@4.28.0: dependencies: - baseline-browser-mapping: 2.8.29 + baseline-browser-mapping: 2.9.19 caniuse-lite: 1.0.30001756 electron-to-chromium: 1.5.258 node-releases: 2.0.27 diff --git a/src/components/database/MigrationsPanel.tsx b/src/components/database/MigrationsPanel.tsx index ba0b408..d994b7a 100644 --- a/src/components/database/MigrationsPanel.tsx +++ b/src/components/database/MigrationsPanel.tsx @@ -1,5 +1,5 @@ import { useState } from "react"; -import { CheckCircle2, Clock, AlertCircle, Database, Play, Undo2, Trash2, Eye } from "lucide-react"; +import { CheckCircle2, Clock, AlertCircle, Database, Play, Undo2, Trash2, Eye, RefreshCw } from "lucide-react"; import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card"; import { Badge } from "@/components/ui/badge"; import { Button } from "@/components/ui/button"; @@ -22,6 +22,22 @@ export default function MigrationsPanel({ migrations, baselined, dbId }: Migrati const [selectedMigration, setSelectedMigration] = useState<{ version: string; name: string } | null>(null); const [showSQLDialog, setShowSQLDialog] = useState(false); const [sqlContent, setSqlContent] = useState<{ up: string; down: string } | null>(null); + const [isRefreshing, setIsRefreshing] = useState(false); + + const handleRefresh = async () => { + setIsRefreshing(true); + try { + await Promise.all([ + queryClient.invalidateQueries({ queryKey: ["migrations", dbId] }), + queryClient.invalidateQueries({ queryKey: ["tables", dbId] }), + queryClient.invalidateQueries({ queryKey: ["schema", dbId] }), + queryClient.invalidateQueries({ queryKey: ["schemaNames", dbId] }), + ]); + toast.success("Refreshed successfully"); + } finally { + setIsRefreshing(false); + } + }; // Merge and sort migrations const appliedVersions = new Set(applied.map((m) => m.version)); @@ -115,6 +131,15 @@ export default function 
MigrationsPanel({ migrations, baselined, dbId }: Migrati {baselined ? "Baselined" : "Not Baselined"} +

Schema version control and migration status diff --git a/src/components/database/TablesExplorerPanel.tsx b/src/components/database/TablesExplorerPanel.tsx index 0faa00f..4b81067 100644 --- a/src/components/database/TablesExplorerPanel.tsx +++ b/src/components/database/TablesExplorerPanel.tsx @@ -11,6 +11,7 @@ interface TablesExplorerPanelProps { dbId: string; tables: TableInfo[]; selectedTable: SelectedTable | null; + selectedSchema: string; onSelectTable: (tableName: string, schemaName: string) => void; loading?: boolean; } @@ -19,6 +20,7 @@ export default function TablesExplorerPanel({ dbId, tables, selectedTable, + selectedSchema, onSelectTable, loading = false, }: TablesExplorerPanelProps) { @@ -160,7 +162,7 @@ export default function TablesExplorerPanel({ dbId={dbId} open={createTableOpen} onOpenChange={setCreateTableOpen} - schemaName={selectedTable?.schema || ''} + schemaName={selectedSchema} /> diff --git a/src/pages/DatabaseDetails.tsx b/src/pages/DatabaseDetails.tsx index a17c215..429be47 100644 --- a/src/pages/DatabaseDetails.tsx +++ b/src/pages/DatabaseDetails.tsx @@ -254,6 +254,7 @@ const DatabaseDetail = () => { dbId={dbId || ''} tables={tables} selectedTable={selectedTable} + selectedSchema={selectedSchema} onSelectTable={handleTableSelect} loading={loadingTables} /> From 64795373ebeb2b906a0edfc8a99ce834394b422b Mon Sep 17 00:00:00 2001 From: Yash Date: Tue, 3 Feb 2026 18:20:00 +0530 Subject: [PATCH 03/19] feat: add schema filtering dropdown to ER diagram component --- .../er-diagram/ERDiagramContent.tsx | 79 +++++++++++++++++-- 1 file changed, 71 insertions(+), 8 deletions(-) diff --git a/src/components/er-diagram/ERDiagramContent.tsx b/src/components/er-diagram/ERDiagramContent.tsx index 342481f..5261db7 100644 --- a/src/components/er-diagram/ERDiagramContent.tsx +++ b/src/components/er-diagram/ERDiagramContent.tsx @@ -1,5 +1,5 @@ import { toPng, toSvg } from "html-to-image"; -import { ArrowLeft, Database, Download, Filter, LayoutGrid, Search, X } from "lucide-react"; +import { ArrowLeft, ChevronDown, Database, Download, Filter, LayoutGrid, Layers, Search, X } from "lucide-react"; import { useCallback, useEffect, useMemo, useState } from "react"; import { Link, useParams } from "react-router-dom"; import { @@ -26,6 +26,13 @@ import { TooltipProvider, TooltipTrigger, } from "@/components/ui/tooltip"; +import { + DropdownMenu, + DropdownMenuContent, + DropdownMenuItem, + DropdownMenuTrigger, +} from "@/components/ui/dropdown-menu"; +import { Button } from "@/components/ui/button"; interface Column extends ColumnDetails { fkRef?: string; // e.g., "public.roles.id" @@ -61,6 +68,7 @@ const ERDiagramContent: React.FC = ({ nodeTypes }) => { const [searchQuery, setSearchQuery] = useState(""); const [selectedNodeId, setSelectedNodeId] = useState(null); const [hoveredEdge, setHoveredEdge] = useState(null); + const [selectedSchema, setSelectedSchema] = useState("__all__"); // Use React Query for schema data (cached!) const { @@ -75,14 +83,38 @@ const ERDiagramContent: React.FC = ({ nodeTypes }) => { ? "Schema data found, but no tables to render." 
: null; - // Transform schema to ER nodes/edges when data changes + // Get available schema names for the dropdown + const availableSchemas = useMemo(() => { + if (!schemaData?.schemas) return []; + return schemaData.schemas + .filter(s => s.tables?.length > 0) + .map(s => s.name); + }, [schemaData]); + + // Filter schema data based on selected schema + const filteredSchemaData = useMemo((): DatabaseSchemaDetails | null => { + if (!schemaData) return null; + if (selectedSchema === "__all__") return schemaData; + + return { + ...schemaData, + schemas: schemaData.schemas.filter(s => s.name === selectedSchema) + }; + }, [schemaData, selectedSchema]); + + // Transform schema to ER nodes/edges when data or filter changes useEffect(() => { - if (schemaData && schemaData.schemas?.some(s => s.tables?.length)) { - const { nodes: newNodes, edges: newEdges } = transformSchemaToER(schemaData); + if (filteredSchemaData && filteredSchemaData.schemas?.some(s => s.tables?.length)) { + const { nodes: newNodes, edges: newEdges } = transformSchemaToER(filteredSchemaData); setNodes(newNodes as typeof nodes); setEdges(newEdges); + // Fit view after layout change + setTimeout(() => reactFlowInstance?.fitView({ padding: 0.2, duration: 300 }), 100); + } else { + setNodes([]); + setEdges([]); } - }, [schemaData, setNodes, setEdges]); + }, [filteredSchemaData, setNodes, setEdges, reactFlowInstance]); // Filter nodes based on search query const filteredNodes = useMemo(() => { @@ -180,13 +212,13 @@ const ERDiagramContent: React.FC = ({ nodeTypes }) => { // Re-layout with dagre const reLayout = useCallback(() => { - if (schemaData) { - const { nodes: newNodes, edges: newEdges } = transformSchemaToER(schemaData, true); + if (filteredSchemaData) { + const { nodes: newNodes, edges: newEdges } = transformSchemaToER(filteredSchemaData, true); setNodes(newNodes as typeof nodes); setEdges(newEdges); setTimeout(() => reactFlowInstance?.fitView({ padding: 0.2, duration: 500 }), 100); } - }, [schemaData, setNodes, setEdges, reactFlowInstance]); + }, [filteredSchemaData, setNodes, setEdges, reactFlowInstance]); // Edge hover handlers for tooltip const onEdgeMouseEnter: EdgeMouseHandler = useCallback((_event, edge) => { @@ -278,6 +310,36 @@ const ERDiagramContent: React.FC = ({ nodeTypes }) => { ER Diagram + {/* Schema filter dropdown */} + {availableSchemas.length > 0 && ( + + + + + + setSelectedSchema("__all__")}> + + All Schemas + + {schemaData?.schemas.reduce((acc, s) => acc + (s.tables?.length || 0), 0)} tables + + + {availableSchemas.map(schema => ( + setSelectedSchema(schema)}> + {schema} + + {schemaData?.schemas.find(s => s.name === schema)?.tables?.length || 0} tables + + + ))} + + + )} + {/* Search bar */}

@@ -417,6 +479,7 @@ const ERDiagramContent: React.FC = ({ nodeTypes }) => {
{nodes.length} Tables • {edges.length} Relations + {selectedSchema !== "__all__" && ` • Schema: ${selectedSchema}`} {selectedNodeId && ` • Selected: ${selectedNodeId.split('.')[1]}`} Click table to highlight • Drag to pan • Scroll to zoom From bf59263d548553ba13b81bafa723b8a0c3ce98d7 Mon Sep 17 00:00:00 2001 From: Yash Date: Tue, 3 Feb 2026 18:54:04 +0530 Subject: [PATCH 04/19] feat: add schema filtering functionality to query builder --- .../query-builder/BuilderSidebar.tsx | 32 ++++++++++++++++++- .../query-builder/QueryBuilderPanel.tsx | 24 ++++++++++++-- src/components/query-builder/types.ts | 4 +++ 3 files changed, 56 insertions(+), 4 deletions(-) diff --git a/src/components/query-builder/BuilderSidebar.tsx b/src/components/query-builder/BuilderSidebar.tsx index 7c5e094..acd1f0e 100644 --- a/src/components/query-builder/BuilderSidebar.tsx +++ b/src/components/query-builder/BuilderSidebar.tsx @@ -8,6 +8,7 @@ import { PanelLeft, History, Columns3, + Layers, } from "lucide-react"; import { Button } from "@/components/ui/button"; import { Input } from "@/components/ui/input"; @@ -36,6 +37,9 @@ export function BuilderSidebar({ nodes, history, availableColumns, + availableSchemas, + selectedSchema, + onSchemaChange, filters, selectedColumns, sortBy, @@ -121,7 +125,33 @@ export function BuilderSidebar({ {tables.length} - + + {/* Schema Filter */} + {availableSchemas.length > 0 && ( +
+ +
+ )}
{tables.map((table) => { const isAdded = nodes.some( diff --git a/src/components/query-builder/QueryBuilderPanel.tsx b/src/components/query-builder/QueryBuilderPanel.tsx index 6237fa9..180542f 100644 --- a/src/components/query-builder/QueryBuilderPanel.tsx +++ b/src/components/query-builder/QueryBuilderPanel.tsx @@ -70,11 +70,26 @@ const QueryBuilderPanel = ({ dbId }: QueryBuilderPanelProps) => { const databaseName = dbDetails?.name || dbId; - // Get all tables from schema + // Schema filter state + const [selectedSchema, setSelectedSchema] = useState("__all__"); + + // Get available schema names + const availableSchemas = useMemo(() => { + if (!schemaData?.schemas) return []; + return schemaData.schemas + .filter(s => s.tables?.length > 0) + .map(s => s.name); + }, [schemaData]); + + // Get tables filtered by selected schema const allTables = useMemo(() => { if (!schemaData) return []; - return schemaData.schemas.flatMap((schema) => schema.tables); - }, [schemaData]); + if (selectedSchema === "__all__") { + return schemaData.schemas.flatMap((schema) => schema.tables); + } + const schema = schemaData.schemas.find(s => s.name === selectedSchema); + return schema?.tables || []; + }, [schemaData, selectedSchema]); // Get available columns from added nodes const availableColumns: ColumnOption[] = useMemo(() => { @@ -365,6 +380,9 @@ const QueryBuilderPanel = ({ dbId }: QueryBuilderPanelProps) => { nodes={nodes} history={history} availableColumns={availableColumns} + availableSchemas={availableSchemas} + selectedSchema={selectedSchema} + onSchemaChange={setSelectedSchema} filters={filters} selectedColumns={selectedColumns} sortBy={sortBy} diff --git a/src/components/query-builder/types.ts b/src/components/query-builder/types.ts index 83e740b..6518df0 100644 --- a/src/components/query-builder/types.ts +++ b/src/components/query-builder/types.ts @@ -52,6 +52,10 @@ export interface BuilderSidebarProps { nodes: Node[]; history: QueryHistoryItem[]; availableColumns: ColumnOption[]; + // Schema filtering + availableSchemas: string[]; + selectedSchema: string; + onSchemaChange: (schema: string) => void; filters: QueryFilter[]; selectedColumns: string[]; sortBy: string; From a3d10436e1b4f796f36fac320b8048a2d6f1c36d Mon Sep 17 00:00:00 2001 From: Yash Date: Mon, 9 Feb 2026 17:19:13 +0530 Subject: [PATCH 05/19] feat: implement project management handlers and refactor configuration utilities --- bridge/src/connectors/mariadb.ts | 2 +- bridge/src/connectors/mysql.ts | 2 +- bridge/src/connectors/postgres.ts | 2 +- bridge/src/handlers/migrationHandlers.ts | 2 +- bridge/src/handlers/projectHandlers.ts | 310 +++++++++++++++ bridge/src/jsonRpcHandler.ts | 48 +++ bridge/src/services/dbStore.ts | 27 +- bridge/src/services/projectStore.ts | 486 +++++++++++++++++++++++ bridge/src/utils/config.ts | 37 ++ 9 files changed, 886 insertions(+), 30 deletions(-) create mode 100644 bridge/src/handlers/projectHandlers.ts create mode 100644 bridge/src/services/projectStore.ts create mode 100644 bridge/src/utils/config.ts diff --git a/bridge/src/connectors/mariadb.ts b/bridge/src/connectors/mariadb.ts index 896de96..8ce9151 100644 --- a/bridge/src/connectors/mariadb.ts +++ b/bridge/src/connectors/mariadb.ts @@ -7,7 +7,7 @@ import mysql, { import { loadLocalMigrations, writeBaselineMigration } from "../utils/baselineMigration"; import crypto from "crypto"; import fs from "fs"; -import { ensureDir, getMigrationsDir } from "../services/dbStore"; +import { ensureDir, getMigrationsDir } from "../utils/config"; import { CacheEntry, 
CACHE_TTL, diff --git a/bridge/src/connectors/mysql.ts b/bridge/src/connectors/mysql.ts index 3500830..72acba9 100644 --- a/bridge/src/connectors/mysql.ts +++ b/bridge/src/connectors/mysql.ts @@ -7,7 +7,7 @@ import mysql, { import { loadLocalMigrations, writeBaselineMigration } from "../utils/baselineMigration"; import crypto from "crypto"; import fs from "fs"; -import { ensureDir, getMigrationsDir } from "../services/dbStore"; +import { ensureDir, getMigrationsDir } from "../utils/config"; import { CacheEntry, CACHE_TTL, diff --git a/bridge/src/connectors/postgres.ts b/bridge/src/connectors/postgres.ts index 58654c1..19084dc 100644 --- a/bridge/src/connectors/postgres.ts +++ b/bridge/src/connectors/postgres.ts @@ -5,7 +5,7 @@ import { Readable } from "stream"; import { loadLocalMigrations, writeBaselineMigration } from "../utils/baselineMigration"; import crypto from "crypto"; import fs from "fs"; -import { ensureDir, getMigrationsDir } from "../services/dbStore"; +import { ensureDir, getMigrationsDir } from "../utils/config"; import { CacheEntry, CACHE_TTL, diff --git a/bridge/src/handlers/migrationHandlers.ts b/bridge/src/handlers/migrationHandlers.ts index 4f18b58..686740d 100644 --- a/bridge/src/handlers/migrationHandlers.ts +++ b/bridge/src/handlers/migrationHandlers.ts @@ -2,7 +2,7 @@ import { Rpc } from "../types"; import { DatabaseService } from "../services/databaseService"; import { QueryExecutor } from "../services/queryExecutor"; import { Logger } from "pino"; -import { getMigrationsDir } from "../services/dbStore"; +import { getMigrationsDir } from "../utils/config"; import path from "path"; import fs from "fs"; diff --git a/bridge/src/handlers/projectHandlers.ts b/bridge/src/handlers/projectHandlers.ts new file mode 100644 index 0000000..98a0a35 --- /dev/null +++ b/bridge/src/handlers/projectHandlers.ts @@ -0,0 +1,310 @@ +import { Rpc } from "../types"; +import { Logger } from "pino"; +import { projectStoreInstance } from "../services/projectStore"; + +/** + * RPC handlers for project CRUD and sub-resource operations. + * Mirrors the DatabaseHandlers pattern. 
+ */ +export class ProjectHandlers { + constructor( + private rpc: Rpc, + private logger: Logger + ) { } + + + async handleListProjects(_params: any, id: number | string) { + try { + const projects = await projectStoreInstance.listProjects(); + this.rpc.sendResponse(id, { ok: true, data: projects }); + } catch (e: any) { + this.logger?.error({ e }, "project.list failed"); + this.rpc.sendError(id, { code: "IO_ERROR", message: String(e) }); + } + } + + async handleGetProject(params: any, id: number | string) { + try { + const { id: projectId } = params || {}; + if (!projectId) { + return this.rpc.sendError(id, { + code: "BAD_REQUEST", + message: "Missing id", + }); + } + + const project = await projectStoreInstance.getProject(projectId); + if (!project) { + return this.rpc.sendError(id, { + code: "NOT_FOUND", + message: "Project not found", + }); + } + + this.rpc.sendResponse(id, { ok: true, data: project }); + } catch (e: any) { + this.logger?.error({ e }, "project.get failed"); + this.rpc.sendError(id, { code: "IO_ERROR", message: String(e) }); + } + } + + async handleCreateProject(params: any, id: number | string) { + try { + const { databaseId, name, description, defaultSchema } = params || {}; + if (!databaseId || !name) { + return this.rpc.sendError(id, { + code: "BAD_REQUEST", + message: "Missing databaseId or name", + }); + } + + const project = await projectStoreInstance.createProject({ + databaseId, + name, + description, + defaultSchema, + }); + + this.rpc.sendResponse(id, { ok: true, data: project }); + } catch (e: any) { + this.logger?.error({ e }, "project.create failed"); + this.rpc.sendError(id, { code: "IO_ERROR", message: String(e) }); + } + } + + async handleUpdateProject(params: any, id: number | string) { + try { + const { id: projectId, ...updates } = params || {}; + if (!projectId) { + return this.rpc.sendError(id, { + code: "BAD_REQUEST", + message: "Missing id", + }); + } + + const project = await projectStoreInstance.updateProject(projectId, updates); + if (!project) { + return this.rpc.sendError(id, { + code: "NOT_FOUND", + message: "Project not found", + }); + } + + this.rpc.sendResponse(id, { ok: true, data: project }); + } catch (e: any) { + this.logger?.error({ e }, "project.update failed"); + this.rpc.sendError(id, { code: "IO_ERROR", message: String(e) }); + } + } + + async handleDeleteProject(params: any, id: number | string) { + try { + const { id: projectId } = params || {}; + if (!projectId) { + return this.rpc.sendError(id, { + code: "BAD_REQUEST", + message: "Missing id", + }); + } + + await projectStoreInstance.deleteProject(projectId); + this.rpc.sendResponse(id, { ok: true }); + } catch (e: any) { + this.logger?.error({ e }, "project.delete failed"); + this.rpc.sendError(id, { code: "IO_ERROR", message: String(e) }); + } + } + + async handleGetSchema(params: any, id: number | string) { + try { + const { projectId } = params || {}; + if (!projectId) { + return this.rpc.sendError(id, { + code: "BAD_REQUEST", + message: "Missing projectId", + }); + } + + const schema = await projectStoreInstance.getSchema(projectId); + this.rpc.sendResponse(id, { ok: true, data: schema }); + } catch (e: any) { + this.logger?.error({ e }, "project.getSchema failed"); + this.rpc.sendError(id, { code: "IO_ERROR", message: String(e) }); + } + } + + async handleSaveSchema(params: any, id: number | string) { + try { + const { projectId, schemas } = params || {}; + if (!projectId || !schemas) { + return this.rpc.sendError(id, { + code: "BAD_REQUEST", + message: "Missing 
projectId or schemas", + }); + } + + const result = await projectStoreInstance.saveSchema(projectId, schemas); + this.rpc.sendResponse(id, { ok: true, data: result }); + } catch (e: any) { + this.logger?.error({ e }, "project.saveSchema failed"); + this.rpc.sendError(id, { code: "IO_ERROR", message: String(e) }); + } + } + + async handleGetERDiagram(params: any, id: number | string) { + try { + const { projectId } = params || {}; + if (!projectId) { + return this.rpc.sendError(id, { + code: "BAD_REQUEST", + message: "Missing projectId", + }); + } + + const diagram = await projectStoreInstance.getERDiagram(projectId); + this.rpc.sendResponse(id, { ok: true, data: diagram }); + } catch (e: any) { + this.logger?.error({ e }, "project.getERDiagram failed"); + this.rpc.sendError(id, { code: "IO_ERROR", message: String(e) }); + } + } + + async handleSaveERDiagram(params: any, id: number | string) { + try { + const { projectId, nodes, zoom, panX, panY } = params || {}; + if (!projectId || !nodes) { + return this.rpc.sendError(id, { + code: "BAD_REQUEST", + message: "Missing projectId or nodes", + }); + } + + const result = await projectStoreInstance.saveERDiagram(projectId, { + nodes, + zoom, + panX, + panY, + }); + this.rpc.sendResponse(id, { ok: true, data: result }); + } catch (e: any) { + this.logger?.error({ e }, "project.saveERDiagram failed"); + this.rpc.sendError(id, { code: "IO_ERROR", message: String(e) }); + } + } + + async handleGetQueries(params: any, id: number | string) { + try { + const { projectId } = params || {}; + if (!projectId) { + return this.rpc.sendError(id, { + code: "BAD_REQUEST", + message: "Missing projectId", + }); + } + + const queries = await projectStoreInstance.getQueries(projectId); + this.rpc.sendResponse(id, { ok: true, data: queries }); + } catch (e: any) { + this.logger?.error({ e }, "project.getQueries failed"); + this.rpc.sendError(id, { code: "IO_ERROR", message: String(e) }); + } + } + + async handleAddQuery(params: any, id: number | string) { + try { + const { projectId, name, sql, description } = params || {}; + if (!projectId || !name || !sql) { + return this.rpc.sendError(id, { + code: "BAD_REQUEST", + message: "Missing projectId, name, or sql", + }); + } + + const query = await projectStoreInstance.addQuery(projectId, { + name, + sql, + description, + }); + this.rpc.sendResponse(id, { ok: true, data: query }); + } catch (e: any) { + this.logger?.error({ e }, "project.addQuery failed"); + this.rpc.sendError(id, { code: "IO_ERROR", message: String(e) }); + } + } + + async handleUpdateQuery(params: any, id: number | string) { + try { + const { projectId, queryId, ...updates } = params || {}; + if (!projectId || !queryId) { + return this.rpc.sendError(id, { + code: "BAD_REQUEST", + message: "Missing projectId or queryId", + }); + } + + const query = await projectStoreInstance.updateQuery( + projectId, + queryId, + updates + ); + if (!query) { + return this.rpc.sendError(id, { + code: "NOT_FOUND", + message: "Query not found", + }); + } + + this.rpc.sendResponse(id, { ok: true, data: query }); + } catch (e: any) { + this.logger?.error({ e }, "project.updateQuery failed"); + this.rpc.sendError(id, { code: "IO_ERROR", message: String(e) }); + } + } + + async handleDeleteQuery(params: any, id: number | string) { + try { + const { projectId, queryId } = params || {}; + if (!projectId || !queryId) { + return this.rpc.sendError(id, { + code: "BAD_REQUEST", + message: "Missing projectId or queryId", + }); + } + + await 
projectStoreInstance.deleteQuery(projectId, queryId); + this.rpc.sendResponse(id, { ok: true }); + } catch (e: any) { + this.logger?.error({ e }, "project.deleteQuery failed"); + this.rpc.sendError(id, { code: "IO_ERROR", message: String(e) }); + } + } + + // ========================================== + // Export (for future git-native support) + // ========================================== + + async handleExportProject(params: any, id: number | string) { + try { + const { projectId } = params || {}; + if (!projectId) { + return this.rpc.sendError(id, { + code: "BAD_REQUEST", + message: "Missing projectId", + }); + } + + const bundle = await projectStoreInstance.exportProject(projectId); + if (!bundle) { + return this.rpc.sendError(id, { + code: "NOT_FOUND", + message: "Project not found", + }); + } + + this.rpc.sendResponse(id, { ok: true, data: bundle }); + } catch (e: any) { + this.logger?.error({ e }, "project.export failed"); + this.rpc.sendError(id, { code: "IO_ERROR", message: String(e) }); + } + } +} diff --git a/bridge/src/jsonRpcHandler.ts b/bridge/src/jsonRpcHandler.ts index 5c45512..824d823 100644 --- a/bridge/src/jsonRpcHandler.ts +++ b/bridge/src/jsonRpcHandler.ts @@ -7,6 +7,7 @@ import { DatabaseHandlers } from "./handlers/databaseHandlers"; import { SessionHandlers } from "./handlers/sessionHandlers"; import { StatsHandlers } from "./handlers/statsHandlers"; import { MigrationHandlers } from "./handlers/migrationHandlers"; +import { ProjectHandlers } from "./handlers/projectHandlers"; import { discoveryService } from "./services/discoveryService"; import { Logger } from "pino"; @@ -52,6 +53,7 @@ export function registerDbHandlers( dbService, queryExecutor ); + const projectHandlers = new ProjectHandlers(rpc, logger); // ========================================== // SESSION MANAGEMENT HANDLERS @@ -168,6 +170,52 @@ export function registerDbHandlers( statsHandlers.handleGetTotalStats(p, id) ); + // ========================================== + // PROJECT HANDLERS + // ========================================== + rpcRegister("project.list", (p, id) => + projectHandlers.handleListProjects(p, id) + ); + rpcRegister("project.get", (p, id) => + projectHandlers.handleGetProject(p, id) + ); + rpcRegister("project.create", (p, id) => + projectHandlers.handleCreateProject(p, id) + ); + rpcRegister("project.update", (p, id) => + projectHandlers.handleUpdateProject(p, id) + ); + rpcRegister("project.delete", (p, id) => + projectHandlers.handleDeleteProject(p, id) + ); + rpcRegister("project.getSchema", (p, id) => + projectHandlers.handleGetSchema(p, id) + ); + rpcRegister("project.saveSchema", (p, id) => + projectHandlers.handleSaveSchema(p, id) + ); + rpcRegister("project.getERDiagram", (p, id) => + projectHandlers.handleGetERDiagram(p, id) + ); + rpcRegister("project.saveERDiagram", (p, id) => + projectHandlers.handleSaveERDiagram(p, id) + ); + rpcRegister("project.getQueries", (p, id) => + projectHandlers.handleGetQueries(p, id) + ); + rpcRegister("project.addQuery", (p, id) => + projectHandlers.handleAddQuery(p, id) + ); + rpcRegister("project.updateQuery", (p, id) => + projectHandlers.handleUpdateQuery(p, id) + ); + rpcRegister("project.deleteQuery", (p, id) => + projectHandlers.handleDeleteQuery(p, id) + ); + rpcRegister("project.export", (p, id) => + projectHandlers.handleExportProject(p, id) + ); + // ========================================== // DATABASE DISCOVERY HANDLERS // ========================================== diff --git a/bridge/src/services/dbStore.ts 
b/bridge/src/services/dbStore.ts index b65757d..e452361 100644 --- a/bridge/src/services/dbStore.ts +++ b/bridge/src/services/dbStore.ts @@ -9,34 +9,9 @@ import fsSync from "fs"; import { v4 as uuidv4 } from "uuid"; import { createCipheriv, createDecipheriv, randomBytes, scrypt } from "crypto"; import { promisify } from "util"; - +import { CONFIG_FOLDER, CONFIG_FILE, CREDENTIALS_FILE } from "../utils/config"; const scryptAsync = promisify(scrypt); -export const CONFIG_FOLDER = - process.env.RELWAVE_HOME || - path.join( - os.homedir(), - process.platform === "win32" - ? "AppData\\Roaming\\relwave" - : ".relwave" - ); - -export const CONFIG_FILE = path.join(CONFIG_FOLDER, "databases.json"); -export const CREDENTIALS_FILE = path.join(CONFIG_FOLDER, ".credentials"); - -export function getConnectionDir(connectionId: string) { - return path.join(CONFIG_FOLDER, "connections", connectionId); -} - -export function getMigrationsDir(connectionId: string) { - return path.join(CONFIG_FOLDER, "migrations", connectionId); -} - -export function ensureDir(dir: string) { - if (!fsSync.existsSync(dir)) { - fsSync.mkdirSync(dir, { recursive: true }); - } -} // Use machine-specific key for encryption diff --git a/bridge/src/services/projectStore.ts b/bridge/src/services/projectStore.ts new file mode 100644 index 0000000..43ece1c --- /dev/null +++ b/bridge/src/services/projectStore.ts @@ -0,0 +1,486 @@ +// ---------------------------- +// services/projectStore.ts +// ---------------------------- + +import path from "path"; +import fs from "fs/promises"; +import fsSync from "fs"; +import { v4 as uuidv4 } from "uuid"; +import { + PROJECTS_FOLDER, + PROJECTS_INDEX_FILE, + getProjectDir, + ensureDir, +} from "../utils/config"; +import { dbStoreInstance, DBMeta } from "./dbStore"; + +// ========================================== +// Types +// ========================================== + +export type ProjectMetadata = { + version: number; + id: string; + databaseId: string; + name: string; + description?: string; + engine?: string; + defaultSchema?: string; + createdAt: string; + updatedAt: string; +}; + +export type SavedQuery = { + id: string; + name: string; + sql: string; + description?: string; + createdAt: string; + updatedAt: string; +}; + +export type QueriesFile = { + version: number; + projectId: string; + queries: SavedQuery[]; +}; + +export type ERNode = { + tableId: string; + x: number; + y: number; + width?: number; + height?: number; + collapsed?: boolean; +}; + +export type ERDiagramFile = { + version: number; + projectId: string; + nodes: ERNode[]; + zoom?: number; + panX?: number; + panY?: number; + updatedAt: string; +}; + +export type SchemaFile = { + version: number; + projectId: string; + databaseId: string; + schemas: SchemaSnapshot[]; + cachedAt: string; +}; + +export type SchemaSnapshot = { + name: string; + tables: TableSnapshot[]; +}; + +export type TableSnapshot = { + name: string; + type: string; + columns: ColumnSnapshot[]; +}; + +export type ColumnSnapshot = { + name: string; + type: string; + nullable: boolean; + isPrimaryKey: boolean; + isForeignKey: boolean; + defaultValue: string | null; + isUnique: boolean; +}; + +export type ProjectSummary = Pick< + ProjectMetadata, + "id" | "name" | "description" | "engine" | "databaseId" | "createdAt" | "updatedAt" +>; + + + +type ProjectIndex = { + version: number; + projects: ProjectSummary[]; +}; + + +const PROJECT_FILES = { + metadata: "relwave.json", + schema: path.join("schema", "schema.json"), + erDiagram: path.join("diagrams", 
"er.json"), + queries: path.join("queries", "queries.json"), +} as const; + +export class ProjectStore { + private projectsFolder: string; + private indexFile: string; + + constructor( + projectsFolder: string = PROJECTS_FOLDER, + indexFile: string = PROJECTS_INDEX_FILE + ) { + this.projectsFolder = projectsFolder; + this.indexFile = indexFile; + } + + private projectDir(projectId: string): string { + return getProjectDir(projectId); + } + + private projectFile(projectId: string, file: string): string { + return path.join(this.projectDir(projectId), file); + } + + /** + * Ensure the project directory and sub-folders exist + */ + private async ensureProjectDirs(projectId: string): Promise { + const base = this.projectDir(projectId); + ensureDir(base); + ensureDir(path.join(base, "schema")); + ensureDir(path.join(base, "diagrams")); + ensureDir(path.join(base, "queries")); + } + + /** + * Read and parse a JSON file, returns null if missing + */ + private async readJSON(filePath: string): Promise { + try { + if (!fsSync.existsSync(filePath)) return null; + const raw = await fs.readFile(filePath, "utf-8"); + return JSON.parse(raw) as T; + } catch { + return null; + } + } + + /** + * Write JSON atomically (write to tmp then rename) + */ + private async writeJSON(filePath: string, data: unknown): Promise { + const dir = path.dirname(filePath); + ensureDir(dir); + const tmp = filePath + ".tmp"; + await fs.writeFile(tmp, JSON.stringify(data, null, 2), "utf-8"); + await fs.rename(tmp, filePath); + } + + private async loadIndex(): Promise { + const data = await this.readJSON(this.indexFile); + return data ?? { version: 1, projects: [] }; + } + + private async saveIndex(index: ProjectIndex): Promise { + ensureDir(this.projectsFolder); + await this.writeJSON(this.indexFile, index); + } + + + /** + * List all projects (lightweight, from index) + */ + async listProjects(): Promise { + const index = await this.loadIndex(); + return index.projects; + } + + /** + * Get full project metadata + */ + async getProject(projectId: string): Promise { + return this.readJSON( + this.projectFile(projectId, PROJECT_FILES.metadata) + ); + } + + /** + * Create a new project linked to a database connection + */ + async createProject(params: { + databaseId: string; + name: string; + description?: string; + defaultSchema?: string; + }): Promise { + // Resolve engine from the linked database + let engine: string | undefined; + try { + const db: DBMeta | null = await dbStoreInstance.getDB(params.databaseId); + engine = db?.type; + } catch { + // db may not exist yet — that's OK + } + + const id = uuidv4(); + const now = new Date().toISOString(); + + const meta: ProjectMetadata = { + version: 1, + id, + databaseId: params.databaseId, + name: params.name, + description: params.description, + engine, + defaultSchema: params.defaultSchema, + createdAt: now, + updatedAt: now, + }; + + // Create project directory structure + await this.ensureProjectDirs(id); + + // Write metadata + await this.writeJSON( + this.projectFile(id, PROJECT_FILES.metadata), + meta + ); + + // Initialise empty sub-files + const emptySchema: SchemaFile = { + version: 1, + projectId: id, + databaseId: params.databaseId, + schemas: [], + cachedAt: now, + }; + const emptyER: ERDiagramFile = { + version: 1, + projectId: id, + nodes: [], + updatedAt: now, + }; + const emptyQueries: QueriesFile = { + version: 1, + projectId: id, + queries: [], + }; + + await Promise.all([ + this.writeJSON(this.projectFile(id, PROJECT_FILES.schema), emptySchema), + 
this.writeJSON(this.projectFile(id, PROJECT_FILES.erDiagram), emptyER), + this.writeJSON(this.projectFile(id, PROJECT_FILES.queries), emptyQueries), + ]); + + // Update global index + const index = await this.loadIndex(); + index.projects.push({ + id, + name: meta.name, + description: meta.description, + engine, + databaseId: meta.databaseId, + createdAt: now, + updatedAt: now, + }); + await this.saveIndex(index); + + return meta; + } + + /** + * Update project metadata (name, description, defaultSchema) + */ + async updateProject( + projectId: string, + updates: Partial> + ): Promise { + const meta = await this.getProject(projectId); + if (!meta) return null; + + const now = new Date().toISOString(); + const updated: ProjectMetadata = { + ...meta, + ...updates, + updatedAt: now, + }; + + await this.writeJSON( + this.projectFile(projectId, PROJECT_FILES.metadata), + updated + ); + + // Sync the index entry + const index = await this.loadIndex(); + const entry = index.projects.find((p) => p.id === projectId); + if (entry) { + if (updates.name !== undefined) entry.name = updates.name; + if (updates.description !== undefined) entry.description = updates.description; + entry.updatedAt = now; + await this.saveIndex(index); + } + + return updated; + } + + /** + * Delete a project and its directory + */ + async deleteProject(projectId: string): Promise { + const dir = this.projectDir(projectId); + if (fsSync.existsSync(dir)) { + await fs.rm(dir, { recursive: true, force: true }); + } + + // Remove from index + const index = await this.loadIndex(); + index.projects = index.projects.filter((p) => p.id !== projectId); + await this.saveIndex(index); + } + + async getSchema(projectId: string): Promise { + return this.readJSON( + this.projectFile(projectId, PROJECT_FILES.schema) + ); + } + + async saveSchema(projectId: string, schemas: SchemaSnapshot[]): Promise { + const meta = await this.getProject(projectId); + if (!meta) throw new Error(`Project ${projectId} not found`); + + const now = new Date().toISOString(); + const file: SchemaFile = { + version: 1, + projectId, + databaseId: meta.databaseId, + schemas, + cachedAt: now, + }; + + await this.writeJSON( + this.projectFile(projectId, PROJECT_FILES.schema), + file + ); + + return file; + } + + async getERDiagram(projectId: string): Promise { + return this.readJSON( + this.projectFile(projectId, PROJECT_FILES.erDiagram) + ); + } + + async saveERDiagram( + projectId: string, + data: Pick + ): Promise { + const now = new Date().toISOString(); + const file: ERDiagramFile = { + version: 1, + projectId, + nodes: data.nodes, + zoom: data.zoom, + panX: data.panX, + panY: data.panY, + updatedAt: now, + }; + + await this.writeJSON( + this.projectFile(projectId, PROJECT_FILES.erDiagram), + file + ); + + return file; + } + + async getQueries(projectId: string): Promise { + return this.readJSON( + this.projectFile(projectId, PROJECT_FILES.queries) + ); + } + + async addQuery( + projectId: string, + params: { name: string; sql: string; description?: string } + ): Promise { + const file = (await this.getQueries(projectId)) ?? 
{ + version: 1, + projectId, + queries: [], + }; + + const now = new Date().toISOString(); + const query: SavedQuery = { + id: uuidv4(), + name: params.name, + sql: params.sql, + description: params.description, + createdAt: now, + updatedAt: now, + }; + + file.queries.push(query); + + await this.writeJSON( + this.projectFile(projectId, PROJECT_FILES.queries), + file + ); + + return query; + } + + async updateQuery( + projectId: string, + queryId: string, + updates: Partial> + ): Promise { + const file = await this.getQueries(projectId); + if (!file) return null; + + const idx = file.queries.findIndex((q) => q.id === queryId); + if (idx === -1) return null; + + const now = new Date().toISOString(); + file.queries[idx] = { + ...file.queries[idx], + ...updates, + updatedAt: now, + }; + + await this.writeJSON( + this.projectFile(projectId, PROJECT_FILES.queries), + file + ); + + return file.queries[idx]; + } + + async deleteQuery(projectId: string, queryId: string): Promise { + const file = await this.getQueries(projectId); + if (!file) return; + + file.queries = file.queries.filter((q) => q.id !== queryId); + + await this.writeJSON( + this.projectFile(projectId, PROJECT_FILES.queries), + file + ); + } + + /** + * Returns the full project bundle — useful for export / git commit + */ + async exportProject(projectId: string): Promise<{ + metadata: ProjectMetadata; + schema: SchemaFile | null; + erDiagram: ERDiagramFile | null; + queries: QueriesFile | null; + } | null> { + const metadata = await this.getProject(projectId); + if (!metadata) return null; + + const [schema, erDiagram, queries] = await Promise.all([ + this.getSchema(projectId), + this.getERDiagram(projectId), + this.getQueries(projectId), + ]); + + return { metadata, schema, erDiagram, queries }; + } +} + +// Singleton instance +export const projectStoreInstance = new ProjectStore(); \ No newline at end of file diff --git a/bridge/src/utils/config.ts b/bridge/src/utils/config.ts new file mode 100644 index 0000000..c1435ec --- /dev/null +++ b/bridge/src/utils/config.ts @@ -0,0 +1,37 @@ +import path from "path"; +import os from "os"; +import fsSync from "fs"; + +export const CONFIG_FOLDER = + process.env.RELWAVE_HOME || + path.join( + os.homedir(), + process.platform === "win32" + ? "AppData\\Roaming\\relwave" + : ".relwave" + ); + +export const CONFIG_FILE = path.join(CONFIG_FOLDER, "databases.json"); +export const CREDENTIALS_FILE = path.join(CONFIG_FOLDER, ".credentials"); + + +export const PROJECTS_FOLDER = path.join(CONFIG_FOLDER, "projects"); +export const PROJECTS_INDEX_FILE = path.join(PROJECTS_FOLDER, "index.json"); + +export function getConnectionDir(connectionId: string) { + return path.join(CONFIG_FOLDER, "connections", connectionId); +} + +export function getMigrationsDir(connectionId: string) { + return path.join(CONFIG_FOLDER, "migrations", connectionId); +} + +export function getProjectDir(projectId: string) { + return path.join(PROJECTS_FOLDER, projectId); +} + +export function ensureDir(dir: string) { + if (!fsSync.existsSync(dir)) { + fsSync.mkdirSync(dir, { recursive: true }); + } +} \ No newline at end of file From 6407a6c85a51ae6bc4cdbd9ef99c198e1e5b23ea Mon Sep 17 00:00:00 2001 From: Yash Date: Mon, 9 Feb 2026 18:00:36 +0530 Subject: [PATCH 06/19] feat: add project management features including project list, detail view, and deletion dialog - Implemented ProjectList component for displaying and managing projects. - Created ProjectDetailView component to show detailed information about a selected project. 
- Added DeleteProjectDialog component for confirming project deletions. - Integrated project-related hooks for querying, creating, updating, and deleting projects. - Updated bridgeApi service to handle project-related API calls. - Added new Project types to define project metadata, queries, and schemas. - Created Projects page to manage projects with a sidebar and detail view. - Enhanced routing to include a dedicated projects page. --- src/components/common/VerticalIconBar.tsx | 3 +- .../project/CreateProjectDialog.tsx | 172 ++++++++++++ .../project/DeleteProjectDialog.tsx | 48 ++++ src/components/project/ProjectDetailView.tsx | 204 ++++++++++++++ src/components/project/ProjectList.tsx | 160 +++++++++++ src/components/project/index.ts | 4 + src/hooks/useProjectQueries.ts | 262 ++++++++++++++++++ src/main.tsx | 2 + src/pages/Projects.tsx | 219 +++++++++++++++ src/services/bridgeApi.ts | 215 ++++++++++++++ src/types/project.ts | 104 +++++++ 11 files changed, 1392 insertions(+), 1 deletion(-) create mode 100644 src/components/project/CreateProjectDialog.tsx create mode 100644 src/components/project/DeleteProjectDialog.tsx create mode 100644 src/components/project/ProjectDetailView.tsx create mode 100644 src/components/project/ProjectList.tsx create mode 100644 src/components/project/index.ts create mode 100644 src/hooks/useProjectQueries.ts create mode 100644 src/pages/Projects.tsx create mode 100644 src/types/project.ts diff --git a/src/components/common/VerticalIconBar.tsx b/src/components/common/VerticalIconBar.tsx index b77bc86..bd11cfe 100644 --- a/src/components/common/VerticalIconBar.tsx +++ b/src/components/common/VerticalIconBar.tsx @@ -1,4 +1,4 @@ -import { Home, Database, Search, GitBranch, Settings, Layers, Terminal } from 'lucide-react'; +import { Home, Database, Search, GitBranch, Settings, Layers, Terminal, FolderOpen } from 'lucide-react'; import { Link, useLocation } from 'react-router-dom'; import { Button } from '@/components/ui/button'; import { @@ -17,6 +17,7 @@ interface VerticalIconBarProps { const globalNavigationItems = [ { icon: Home, label: 'Dashboard', path: '/' }, + { icon: FolderOpen, label: 'Projects', path: '/projects' }, { icon: Settings, label: 'Settings', path: '/settings' }, ]; diff --git a/src/components/project/CreateProjectDialog.tsx b/src/components/project/CreateProjectDialog.tsx new file mode 100644 index 0000000..c23b1b2 --- /dev/null +++ b/src/components/project/CreateProjectDialog.tsx @@ -0,0 +1,172 @@ +import { useState } from "react"; +import { Database, Link as LinkIcon } from "lucide-react"; +import { Button } from "@/components/ui/button"; +import { + Dialog, + DialogContent, + DialogDescription, + DialogHeader, + DialogTitle, +} from "@/components/ui/dialog"; +import { Input } from "@/components/ui/input"; +import { Label } from "@/components/ui/label"; +import { + Select, + SelectContent, + SelectItem, + SelectTrigger, + SelectValue, +} from "@/components/ui/select"; +import { Textarea } from "@/components/ui/textarea"; +import { DatabaseConnection } from "@/types/database"; + +interface CreateProjectDialogProps { + open: boolean; + onOpenChange: (open: boolean) => void; + onSubmit: (data: { + databaseId: string; + name: string; + description?: string; + defaultSchema?: string; + }) => void; + isLoading?: boolean; + databases: DatabaseConnection[]; +} + +export function CreateProjectDialog({ + open, + onOpenChange, + onSubmit, + isLoading, + databases, +}: CreateProjectDialogProps) { + const [name, setName] = useState(""); + const 
[description, setDescription] = useState(""); + const [databaseId, setDatabaseId] = useState(""); + const [defaultSchema, setDefaultSchema] = useState(""); + + const resetForm = () => { + setName(""); + setDescription(""); + setDatabaseId(""); + setDefaultSchema(""); + }; + + const handleSubmit = (e: React.FormEvent) => { + e.preventDefault(); + if (!name.trim() || !databaseId) return; + + onSubmit({ + databaseId, + name: name.trim(), + description: description.trim() || undefined, + defaultSchema: defaultSchema.trim() || undefined, + }); + + resetForm(); + }; + + return ( + { + onOpenChange(isOpen); + if (!isOpen) resetForm(); + }} + > + + + + + Create Project + + + Create a project to save schema, ER diagrams, and queries offline. + + + +
+ {/* Project Name */} +
+ + setName(e.target.value)} + autoFocus + /> +
+ + {/* Linked Database */} +
+ + +
+ + {/* Default Schema */} +
+ + setDefaultSchema(e.target.value)} + /> +
+ + {/* Description */} +
+ +