diff --git a/integration/extension_list_test.ts b/integration/extension_list_test.ts index b1340903..18fdc05c 100644 --- a/integration/extension_list_test.ts +++ b/integration/extension_list_test.ts @@ -138,14 +138,14 @@ Deno.test("extension list shows installed extensions", async () => { "@test/beta": { version: "2026.01.01.1", pulledAt: "2026-01-01T00:00:00.000Z", - files: ["extensions/models/beta/model.yaml"], + files: [".swamp/pulled-extensions/models/beta/model.yaml"], }, "@test/alpha": { version: "2026.02.01.1", pulledAt: "2026-02-01T00:00:00.000Z", files: [ - "extensions/models/alpha/model.yaml", - "extensions/models/alpha/handler.ts", + ".swamp/pulled-extensions/models/alpha/model.yaml", + ".swamp/pulled-extensions/models/alpha/handler.ts", ], }, }); @@ -176,7 +176,7 @@ Deno.test("extension list --json shows installed extensions", async () => { "@test/one": { version: "2026.01.15.1", pulledAt: "2026-01-15T12:00:00.000Z", - files: ["extensions/models/one/model.yaml"], + files: [".swamp/pulled-extensions/models/one/model.yaml"], }, }); @@ -207,8 +207,8 @@ Deno.test("extension list --verbose shows individual files", async () => { version: "2026.01.01.1", pulledAt: "2026-01-01T00:00:00.000Z", files: [ - "extensions/models/verbose-ext/model.yaml", - "extensions/models/verbose-ext/handler.ts", + ".swamp/pulled-extensions/models/verbose-ext/model.yaml", + ".swamp/pulled-extensions/models/verbose-ext/handler.ts", ], }, }); diff --git a/src/cli/auto_resolver_adapters.ts b/src/cli/auto_resolver_adapters.ts index 3d731372..abf28f84 100644 --- a/src/cli/auto_resolver_adapters.ts +++ b/src/cli/auto_resolver_adapters.ts @@ -45,6 +45,8 @@ interface InstallerAdapterConfig { getExtension: (name: string) => Promise; downloadArchive: (name: string, version: string) => Promise; getChecksum: (name: string, version: string) => Promise; + /** Full path to the upstream_extensions.json lockfile. 
*/ + lockfilePath: string; modelsDir: string; workflowsDir: string; vaultsDir: string; @@ -66,6 +68,7 @@ export function createAutoResolveInstallerAdapter( getExtension, downloadArchive, getChecksum, + lockfilePath, modelsDir, workflowsDir, vaultsDir, @@ -85,6 +88,7 @@ export function createAutoResolveInstallerAdapter( downloadArchive, getChecksum, logger, + lockfilePath, modelsDir, workflowsDir, vaultsDir, diff --git a/src/cli/commands/extension.ts b/src/cli/commands/extension.ts index 45274558..f45df7d0 100644 --- a/src/cli/commands/extension.ts +++ b/src/cli/commands/extension.ts @@ -20,6 +20,7 @@ import { Command } from "@cliffy/command"; import { extensionPushCommand } from "./extension_push.ts"; import { extensionPullCommand } from "./extension_pull.ts"; +import { extensionInstallCommand } from "./extension_install.ts"; import { extensionFmtCommand } from "./extension_fmt.ts"; import { extensionRemoveCommand } from "./extension_rm.ts"; import { extensionListCommand } from "./extension_list.ts"; @@ -39,6 +40,7 @@ export const extensionCommand = new Command() .command("push", extensionPushCommand) .command("fmt", extensionFmtCommand) .command("pull", extensionPullCommand) + .command("install", extensionInstallCommand) .command("rm", extensionRemoveCommand) .command("list", extensionListCommand) .command("search", extensionSearchCommand) diff --git a/src/cli/commands/extension_install.ts b/src/cli/commands/extension_install.ts new file mode 100644 index 00000000..ea3985ba --- /dev/null +++ b/src/cli/commands/extension_install.ts @@ -0,0 +1,133 @@ +// Swamp, an Automation Framework +// Copyright (C) 2026 System Initiative, Inc. +// +// This file is part of Swamp. +// +// Swamp is free software: you can redistribute it and/or modify +// it under the terms of the GNU Affero General Public License version 3 +// as published by the Free Software Foundation, with the Swamp +// Extension and Definition Exception (found in the "COPYING-EXCEPTION" +// file). 
+// +// Swamp is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU Affero General Public License for more details. +// +// You should have received a copy of the GNU Affero General Public License +// along with Swamp. If not, see . + +import { Command } from "@cliffy/command"; +import { join, resolve } from "@std/path"; +import { createContext, type GlobalOptions } from "../context.ts"; +import { requireInitializedRepo } from "../repo_context.ts"; +import { resolveModelsDir } from "../resolve_models_dir.ts"; +import { + RepoMarkerRepository, +} from "../../infrastructure/persistence/repo_marker_repository.ts"; +import { RepoPath } from "../../domain/repo/repo_path.ts"; +import { + SWAMP_SUBDIRS, + swampPath, +} from "../../infrastructure/persistence/paths.ts"; +import { + consumeStream, + createLibSwampContext, + extensionInstall, + requireCurrentExtensionLayout, + resolveServerUrl, +} from "../../libswamp/mod.ts"; +import { UserError } from "../../domain/errors.ts"; +import { ExtensionApiClient } from "../../infrastructure/http/extension_api_client.ts"; +import { createExtensionInstallRenderer } from "../../presentation/renderers/extension_install.ts"; + +// deno-lint-ignore no-explicit-any +type AnyOptions = any; + +export const extensionInstallCommand = new Command() + .name("install") + .description( + "Restore pulled extensions from the lockfile.\n\nReads upstream_extensions.json and re-pulls any extensions whose source\nfiles are missing. Use after cloning a repo or in CI.\nTo add a new extension, use 'swamp extension pull ' instead.\n\nExamples:\n swamp extension install", + ) + .arguments("[unexpected:string]") + .option("--repo-dir ", "Repository directory", { default: "." 
}) + .action(async function (options: AnyOptions, unexpected?: string) { + if (unexpected) { + throw new UserError( + `'swamp extension install' takes no arguments.\n` + + `To add a new extension, use: swamp extension pull ${unexpected}`, + ); + } + + const cliCtx = createContext(options as GlobalOptions, [ + "extension", + "install", + ]); + cliCtx.logger.debug`Starting extension install`; + + // 1. Validate repo + const repoDir = options.repoDir ?? "."; + await requireInitializedRepo({ + repoDir, + outputMode: cliCtx.outputMode, + }); + + // 2. Resolve lockfile path + const repoPath = RepoPath.create(repoDir); + const markerRepo = new RepoMarkerRepository(); + const marker = await markerRepo.read(repoPath); + const modelsDir = resolveModelsDir(marker); + const absoluteModelsDir = resolve(repoDir, modelsDir); + const lockfilePath = join(absoluteModelsDir, "upstream_extensions.json"); + + // 3. Check for legacy extension layout + await requireCurrentExtensionLayout(lockfilePath); + + // 4. Resolve pulled-extension dirs + const pulledModelsDir = swampPath(repoDir, SWAMP_SUBDIRS.pulledModels); + const pulledWorkflowsDir = swampPath( + repoDir, + SWAMP_SUBDIRS.pulledWorkflows, + ); + const pulledVaultsDir = swampPath(repoDir, SWAMP_SUBDIRS.pulledVaults); + const pulledDriversDir = swampPath(repoDir, SWAMP_SUBDIRS.pulledDrivers); + const pulledDatastoresDir = swampPath( + repoDir, + SWAMP_SUBDIRS.pulledDatastores, + ); + const pulledReportsDir = swampPath(repoDir, SWAMP_SUBDIRS.pulledReports); + + // 4. 
Wire deps and execute + const serverUrl = resolveServerUrl(); + const client = new ExtensionApiClient(serverUrl); + + const ctx = createLibSwampContext({ logger: cliCtx.logger }); + const renderer = createExtensionInstallRenderer(cliCtx.outputMode); + + await consumeStream( + extensionInstall(ctx, { + lockfilePath, + repoDir, + createInstallContext: (_name, _version) => ({ + getExtension: (n) => client.getExtension(n), + downloadArchive: (n, v) => client.downloadArchive(n, v), + getChecksum: (n, v) => client.getChecksum(n, v), + logger: cliCtx.logger, + lockfilePath, + modelsDir: pulledModelsDir, + workflowsDir: pulledWorkflowsDir, + vaultsDir: pulledVaultsDir, + driversDir: pulledDriversDir, + datastoresDir: pulledDatastoresDir, + reportsDir: pulledReportsDir, + repoDir, + force: true, + alreadyPulled: new Set(), + depth: 0, + }), + }), + renderer.handlers(), + ); + + cliCtx.logger.debug("Extension install command completed"); + }); diff --git a/src/cli/commands/extension_list.ts b/src/cli/commands/extension_list.ts index 18103743..2fbe77d7 100644 --- a/src/cli/commands/extension_list.ts +++ b/src/cli/commands/extension_list.ts @@ -20,12 +20,19 @@ import { Command } from "@cliffy/command"; import { createContext, type GlobalOptions } from "../context.ts"; import { requireInitializedRepoReadOnly } from "../repo_context.ts"; +import { join, resolve } from "@std/path"; import { consumeStream, createExtensionListDeps, createLibSwampContext, extensionList, + requireCurrentExtensionLayout, } from "../../libswamp/mod.ts"; +import { resolveModelsDir } from "../resolve_models_dir.ts"; +import { + RepoMarkerRepository, +} from "../../infrastructure/persistence/repo_marker_repository.ts"; +import { RepoPath } from "../../domain/repo/repo_path.ts"; import { createExtensionListRenderer } from "../../presentation/renderers/extension_list.ts"; // deno-lint-ignore no-explicit-any @@ -49,6 +56,15 @@ export const extensionListCommand = new Command() outputMode: cliCtx.outputMode, 
}); + // Check for legacy extension layout + const repoPath = RepoPath.create(repoDir); + const markerRepo = new RepoMarkerRepository(); + const marker = await markerRepo.read(repoPath); + const modelsDir = resolveModelsDir(marker); + const absoluteModelsDir = resolve(repoDir, modelsDir); + const lockfilePath = join(absoluteModelsDir, "upstream_extensions.json"); + await requireCurrentExtensionLayout(lockfilePath); + const ctx = createLibSwampContext({ logger: cliCtx.logger }); const deps = await createExtensionListDeps(repoDir); diff --git a/src/cli/commands/extension_pull.ts b/src/cli/commands/extension_pull.ts index 7bee312f..4a750221 100644 --- a/src/cli/commands/extension_pull.ts +++ b/src/cli/commands/extension_pull.ts @@ -19,19 +19,19 @@ import { Command } from "@cliffy/command"; import type { Logger } from "@logtape/logtape"; +import { join, resolve } from "@std/path"; import { createContext, type GlobalOptions } from "../context.ts"; import { requireInitializedRepo } from "../repo_context.ts"; import { resolveModelsDir } from "../resolve_models_dir.ts"; -import { resolveVaultsDir } from "../resolve_vaults_dir.ts"; -import { resolveWorkflowsDir } from "../resolve_workflows_dir.ts"; -import { resolveDriversDir } from "../resolve_drivers_dir.ts"; -import { resolveDatastoresDir } from "../resolve_datastores_dir.ts"; -import { resolveReportsDir } from "../resolve_reports_dir.ts"; import { RepoMarkerRepository, } from "../../infrastructure/persistence/repo_marker_repository.ts"; import { RepoPath } from "../../domain/repo/repo_path.ts"; import { UserError } from "../../domain/errors.ts"; +import { + SWAMP_SUBDIRS, + swampPath, +} from "../../infrastructure/persistence/paths.ts"; import { ConflictError, consumeStream, @@ -41,6 +41,7 @@ import { type ExtensionPullDeps, type ExtensionRegistryInfo, parseExtensionRef, + requireCurrentExtensionLayout, resolveServerUrl, validateExtensionName, } from "../../libswamp/mod.ts"; @@ -88,6 +89,8 @@ export interface 
PullContext { downloadArchive: (name: string, version: string) => Promise; getChecksum: (name: string, version: string) => Promise; logger: Logger; + /** Full path to the upstream_extensions.json lockfile. */ + lockfilePath: string; modelsDir: string; workflowsDir: string; vaultsDir: string; @@ -114,6 +117,7 @@ export async function pullExtension( getExtension: ctx.getExtension, downloadArchive: ctx.downloadArchive, getChecksum: ctx.getChecksum, + lockfilePath: ctx.lockfilePath, modelsDir: ctx.modelsDir, workflowsDir: ctx.workflowsDir, vaultsDir: ctx.vaultsDir, @@ -160,7 +164,6 @@ export async function pullExtension( export const extensionPullCommand = new Command() .name("pull") - .alias("install") .description("Pull an extension from the swamp registry") .arguments("") .option("--repo-dir ", "Repository directory", { default: "." }) @@ -182,27 +185,42 @@ export const extensionPullCommand = new Command() // 3. Validate name format validateExtensionName(ref.name); - // 4. Resolve dirs from .swamp.yaml + // 4. Resolve lockfile path (stays in committed extensions/models/ dir) const repoPath = RepoPath.create(repoDir); const markerRepo = new RepoMarkerRepository(); const marker = await markerRepo.read(repoPath); const modelsDir = resolveModelsDir(marker); - const workflowsDir = resolveWorkflowsDir(marker); - const vaultsDir = resolveVaultsDir(marker); - const driversDir = resolveDriversDir(marker); - const datastoresDir = resolveDatastoresDir(marker); - const reportsDir = resolveReportsDir(marker); + const absoluteModelsDir = resolve(repoDir, modelsDir); + const lockfilePath = join(absoluteModelsDir, "upstream_extensions.json"); + + // 5. Check for legacy extension layout + await requireCurrentExtensionLayout(lockfilePath); + + // 6. 
Resolve pulled-extension dirs (.swamp/pulled-extensions/{type}/) + const pulledModelsDir = swampPath(repoDir, SWAMP_SUBDIRS.pulledModels); + const pulledWorkflowsDir = swampPath( + repoDir, + SWAMP_SUBDIRS.pulledWorkflows, + ); + const pulledVaultsDir = swampPath(repoDir, SWAMP_SUBDIRS.pulledVaults); + const pulledDriversDir = swampPath(repoDir, SWAMP_SUBDIRS.pulledDrivers); + const pulledDatastoresDir = swampPath( + repoDir, + SWAMP_SUBDIRS.pulledDatastores, + ); + const pulledReportsDir = swampPath(repoDir, SWAMP_SUBDIRS.pulledReports); - // 5. Create deps via factory and pull + // 7. Create deps via factory and pull const serverUrl = resolveServerUrl(); const deps = createExtensionPullDeps( serverUrl, - modelsDir, - workflowsDir, - vaultsDir, - driversDir, - datastoresDir, - reportsDir, + lockfilePath, + pulledModelsDir, + pulledWorkflowsDir, + pulledVaultsDir, + pulledDriversDir, + pulledDatastoresDir, + pulledReportsDir, repoDir, ); @@ -211,12 +229,13 @@ export const extensionPullCommand = new Command() downloadArchive: deps.downloadArchive, getChecksum: deps.getChecksum, logger: ctx.logger, - modelsDir, - workflowsDir, - vaultsDir, - driversDir, - datastoresDir, - reportsDir, + lockfilePath, + modelsDir: pulledModelsDir, + workflowsDir: pulledWorkflowsDir, + vaultsDir: pulledVaultsDir, + driversDir: pulledDriversDir, + datastoresDir: pulledDatastoresDir, + reportsDir: pulledReportsDir, repoDir, force: options.force ?? 
false, outputMode: ctx.outputMode, diff --git a/src/cli/commands/extension_pull_test.ts b/src/cli/commands/extension_pull_test.ts index 347e3a64..3415cd20 100644 --- a/src/cli/commands/extension_pull_test.ts +++ b/src/cli/commands/extension_pull_test.ts @@ -59,11 +59,12 @@ Deno.test("parseExtensionRef throws on empty version after @", () => { Deno.test("updateUpstreamExtensions persists files array", async () => { const tmpDir = await Deno.makeTempDir({ prefix: "swamp_test_" }); try { + const lockfilePath = join(tmpDir, "upstream_extensions.json"); const files = [ "extensions/models/foo/bar.yaml", "extensions/models/foo/baz.ts", ]; - await updateUpstreamExtensions(tmpDir, "@test/ext", "1.0.0", files); + await updateUpstreamExtensions(lockfilePath, "@test/ext", "1.0.0", files); const content = await Deno.readTextFile( join(tmpDir, "upstream_extensions.json"), @@ -81,10 +82,15 @@ Deno.test("updateUpstreamExtensions persists files array", async () => { Deno.test("updateUpstreamExtensions preserves existing entries", async () => { const tmpDir = await Deno.makeTempDir({ prefix: "swamp_test_" }); try { + const lockfilePath = join(tmpDir, "upstream_extensions.json"); // Write first extension - await updateUpstreamExtensions(tmpDir, "@test/first", "1.0.0", ["a.yaml"]); + await updateUpstreamExtensions(lockfilePath, "@test/first", "1.0.0", [ + "a.yaml", + ]); // Write second extension - await updateUpstreamExtensions(tmpDir, "@test/second", "2.0.0", ["b.yaml"]); + await updateUpstreamExtensions(lockfilePath, "@test/second", "2.0.0", [ + "b.yaml", + ]); const content = await Deno.readTextFile( join(tmpDir, "upstream_extensions.json"), @@ -103,7 +109,8 @@ Deno.test("updateUpstreamExtensions preserves existing entries", async () => { Deno.test("updateUpstreamExtensions handles empty files array", async () => { const tmpDir = await Deno.makeTempDir({ prefix: "swamp_test_" }); try { - await updateUpstreamExtensions(tmpDir, "@test/empty", "1.0.0", []); + const lockfilePath = 
join(tmpDir, "upstream_extensions.json"); + await updateUpstreamExtensions(lockfilePath, "@test/empty", "1.0.0", []); const content = await Deno.readTextFile( join(tmpDir, "upstream_extensions.json"), diff --git a/src/cli/commands/extension_rm.ts b/src/cli/commands/extension_rm.ts index 593f6700..67262733 100644 --- a/src/cli/commands/extension_rm.ts +++ b/src/cli/commands/extension_rm.ts @@ -18,7 +18,7 @@ // along with Swamp. If not, see . import { Command } from "@cliffy/command"; -import { resolve } from "@std/path"; +import { join, resolve } from "@std/path"; import { createContext, type GlobalOptions } from "../context.ts"; import { requireInitializedRepo } from "../repo_context.ts"; import { resolveModelsDir } from "../resolve_models_dir.ts"; @@ -33,6 +33,7 @@ import { extensionRm, extensionRmPreview, parseExtensionRef, + requireCurrentExtensionLayout, validateExtensionName, } from "../../libswamp/mod.ts"; import { @@ -84,10 +85,14 @@ export const extensionRemoveCommand = new Command() const marker = await markerRepo.read(repoPath); const modelsDir = resolveModelsDir(marker); const absoluteModelsDir = resolve(repoDir, modelsDir); + const lockfilePath = join(absoluteModelsDir, "upstream_extensions.json"); + + // Check for legacy extension layout + await requireCurrentExtensionLayout(lockfilePath); // Create libswamp context, deps, renderer const libCtx = createLibSwampContext({ logger: ctx.logger }); - const deps = createExtensionRmDeps(repoDir, absoluteModelsDir); + const deps = createExtensionRmDeps(repoDir, lockfilePath); const renderer = createExtensionRmRenderer(ctx.outputMode); const input = { extensionName: ref.name }; diff --git a/src/cli/commands/extension_rm_test.ts b/src/cli/commands/extension_rm_test.ts index dfacd7c4..c5f5aded 100644 --- a/src/cli/commands/extension_rm_test.ts +++ b/src/cli/commands/extension_rm_test.ts @@ -31,12 +31,17 @@ import { Deno.test("removeUpstreamExtension removes entry and preserves others", async () => { const tmpDir = 
await Deno.makeTempDir({ prefix: "swamp_test_" }); try { + const lockfilePath = join(tmpDir, "upstream_extensions.json"); // Set up two extensions - await updateUpstreamExtensions(tmpDir, "@test/first", "1.0.0", ["a.yaml"]); - await updateUpstreamExtensions(tmpDir, "@test/second", "2.0.0", ["b.yaml"]); + await updateUpstreamExtensions(lockfilePath, "@test/first", "1.0.0", [ + "a.yaml", + ]); + await updateUpstreamExtensions(lockfilePath, "@test/second", "2.0.0", [ + "b.yaml", + ]); // Remove the first one - await removeUpstreamExtension(tmpDir, "@test/first"); + await removeUpstreamExtension(lockfilePath, "@test/first"); const content = await Deno.readTextFile( join(tmpDir, "upstream_extensions.json"), @@ -54,10 +59,13 @@ Deno.test("removeUpstreamExtension removes entry and preserves others", async () Deno.test("removeUpstreamExtension handles non-existent extension gracefully", async () => { const tmpDir = await Deno.makeTempDir({ prefix: "swamp_test_" }); try { - await updateUpstreamExtensions(tmpDir, "@test/first", "1.0.0", ["a.yaml"]); + const lockfilePath = join(tmpDir, "upstream_extensions.json"); + await updateUpstreamExtensions(lockfilePath, "@test/first", "1.0.0", [ + "a.yaml", + ]); // Removing a non-existent entry should not throw - await removeUpstreamExtension(tmpDir, "@test/nonexistent"); + await removeUpstreamExtension(lockfilePath, "@test/nonexistent"); const content = await Deno.readTextFile( join(tmpDir, "upstream_extensions.json"), @@ -73,8 +81,9 @@ Deno.test("removeUpstreamExtension handles non-existent extension gracefully", a Deno.test("removeUpstreamExtension handles missing JSON file", async () => { const tmpDir = await Deno.makeTempDir({ prefix: "swamp_test_" }); try { + const lockfilePath = join(tmpDir, "upstream_extensions.json"); // Should not throw even when file doesn't exist - await removeUpstreamExtension(tmpDir, "@test/nonexistent"); + await removeUpstreamExtension(lockfilePath, "@test/nonexistent"); const content = await 
Deno.readTextFile( join(tmpDir, "upstream_extensions.json"), @@ -90,11 +99,12 @@ Deno.test("removeUpstreamExtension handles missing JSON file", async () => { Deno.test("readUpstreamExtensions reads existing entries", async () => { const tmpDir = await Deno.makeTempDir({ prefix: "swamp_test_" }); try { - await updateUpstreamExtensions(tmpDir, "@test/ext", "1.0.0", [ + const lockfilePath = join(tmpDir, "upstream_extensions.json"); + await updateUpstreamExtensions(lockfilePath, "@test/ext", "1.0.0", [ "extensions/models/foo.yaml", ]); - const data = await readUpstreamExtensions(tmpDir); + const data = await readUpstreamExtensions(lockfilePath); assertEquals(data["@test/ext"].version, "1.0.0"); assertEquals(data["@test/ext"].files, ["extensions/models/foo.yaml"]); @@ -106,7 +116,8 @@ Deno.test("readUpstreamExtensions reads existing entries", async () => { Deno.test("readUpstreamExtensions returns empty map when file missing", async () => { const tmpDir = await Deno.makeTempDir({ prefix: "swamp_test_" }); try { - const data = await readUpstreamExtensions(tmpDir); + const lockfilePath = join(tmpDir, "upstream_extensions.json"); + const data = await readUpstreamExtensions(lockfilePath); assertEquals(Object.keys(data).length, 0); } finally { await Deno.remove(tmpDir, { recursive: true }); diff --git a/src/cli/commands/extension_search.ts b/src/cli/commands/extension_search.ts index f19212e9..1aa25bcd 100644 --- a/src/cli/commands/extension_search.ts +++ b/src/cli/commands/extension_search.ts @@ -18,6 +18,7 @@ // along with Swamp. If not, see . 
import { Command } from "@cliffy/command"; +import { join, resolve } from "@std/path"; import { createContext, type GlobalOptions, @@ -25,11 +26,10 @@ import { } from "../context.ts"; import { requireInitializedRepo } from "../repo_context.ts"; import { resolveModelsDir } from "../resolve_models_dir.ts"; -import { resolveVaultsDir } from "../resolve_vaults_dir.ts"; -import { resolveDriversDir } from "../resolve_drivers_dir.ts"; -import { resolveDatastoresDir } from "../resolve_datastores_dir.ts"; -import { resolveReportsDir } from "../resolve_reports_dir.ts"; -import { resolveWorkflowsDir } from "../resolve_workflows_dir.ts"; +import { + SWAMP_SUBDIRS, + swampPath, +} from "../../infrastructure/persistence/paths.ts"; import { RepoMarkerRepository, } from "../../infrastructure/persistence/repo_marker_repository.ts"; @@ -44,6 +44,7 @@ import { createLibSwampContext, extensionSearch, type ExtensionSearchDeps, + requireCurrentExtensionLayout, } from "../../libswamp/mod.ts"; import { createExtensionSearchRenderer } from "../../presentation/renderers/extension_search.tsx"; @@ -194,11 +195,14 @@ export const extensionSearchCommand = new Command() const markerRepo = new RepoMarkerRepository(); const marker = await markerRepo.read(repoPath); const modelsDir = resolveModelsDir(marker); - const workflowsDir = resolveWorkflowsDir(marker); - const vaultsDir = resolveVaultsDir(marker); - const driversDir = resolveDriversDir(marker); - const datastoresDir = resolveDatastoresDir(marker); - const reportsDir = resolveReportsDir(marker); + const absoluteModelsDir = resolve(repoDir, modelsDir); + const lockfilePath = join( + absoluteModelsDir, + "upstream_extensions.json", + ); + + // Check for legacy extension layout before pulling + await requireCurrentExtensionLayout(lockfilePath); const pullCtx: PullContext = { getExtension: (name) => client.getExtension(name), @@ -206,12 +210,13 @@ export const extensionSearchCommand = new Command() client.downloadArchive(name, version), 
getChecksum: (name, version) => client.getChecksum(name, version), logger: ctx.logger, - modelsDir, - workflowsDir, - vaultsDir, - driversDir, - datastoresDir, - reportsDir, + lockfilePath, + modelsDir: swampPath(repoDir, SWAMP_SUBDIRS.pulledModels), + workflowsDir: swampPath(repoDir, SWAMP_SUBDIRS.pulledWorkflows), + vaultsDir: swampPath(repoDir, SWAMP_SUBDIRS.pulledVaults), + driversDir: swampPath(repoDir, SWAMP_SUBDIRS.pulledDrivers), + datastoresDir: swampPath(repoDir, SWAMP_SUBDIRS.pulledDatastores), + reportsDir: swampPath(repoDir, SWAMP_SUBDIRS.pulledReports), repoDir, force: false, outputMode: ctx.outputMode, diff --git a/src/cli/commands/extension_update.ts b/src/cli/commands/extension_update.ts index 613f76f3..9444b002 100644 --- a/src/cli/commands/extension_update.ts +++ b/src/cli/commands/extension_update.ts @@ -18,15 +18,14 @@ // along with Swamp. If not, see . import { Command } from "@cliffy/command"; -import { resolve } from "@std/path"; +import { join, resolve } from "@std/path"; import { createContext, type GlobalOptions } from "../context.ts"; import { requireInitializedRepo } from "../repo_context.ts"; import { resolveModelsDir } from "../resolve_models_dir.ts"; -import { resolveVaultsDir } from "../resolve_vaults_dir.ts"; -import { resolveDriversDir } from "../resolve_drivers_dir.ts"; -import { resolveDatastoresDir } from "../resolve_datastores_dir.ts"; -import { resolveReportsDir } from "../resolve_reports_dir.ts"; -import { resolveWorkflowsDir } from "../resolve_workflows_dir.ts"; +import { + SWAMP_SUBDIRS, + swampPath, +} from "../../infrastructure/persistence/paths.ts"; import { RepoMarkerRepository, } from "../../infrastructure/persistence/repo_marker_repository.ts"; @@ -41,6 +40,7 @@ import { createExtensionUpdateDeps, createLibSwampContext, extensionUpdate, + requireCurrentExtensionLayout, } from "../../libswamp/mod.ts"; import { createExtensionUpdateRenderer } from "../../presentation/renderers/extension_update.ts"; @@ -80,14 +80,27 @@ 
export const extensionUpdateCommand = new Command() const markerRepo = new RepoMarkerRepository(); const marker = await markerRepo.read(repoPath); const modelsDir = resolveModelsDir(marker); - const workflowsDir = resolveWorkflowsDir(marker); - const vaultsDir = resolveVaultsDir(marker); - const driversDir = resolveDriversDir(marker); - const datastoresDir = resolveDatastoresDir(marker); - const reportsDir = resolveReportsDir(marker); const absoluteModelsDir = resolve(repoDir, modelsDir); + const lockfilePath = join(absoluteModelsDir, "upstream_extensions.json"); + + // Pulled-extension dirs for install + const pulledModelsDir = swampPath(repoDir, SWAMP_SUBDIRS.pulledModels); + const pulledWorkflowsDir = swampPath( + repoDir, + SWAMP_SUBDIRS.pulledWorkflows, + ); + const pulledVaultsDir = swampPath(repoDir, SWAMP_SUBDIRS.pulledVaults); + const pulledDriversDir = swampPath(repoDir, SWAMP_SUBDIRS.pulledDrivers); + const pulledDatastoresDir = swampPath( + repoDir, + SWAMP_SUBDIRS.pulledDatastores, + ); + const pulledReportsDir = swampPath(repoDir, SWAMP_SUBDIRS.pulledReports); + + // 3. Check for legacy extension layout + await requireCurrentExtensionLayout(lockfilePath); - // 3. Parse extension name if given + // 4. 
Parse extension name if given let extensionName: string | undefined; if (extensionArg) { const ref = parseExtensionRef(extensionArg); @@ -99,17 +112,18 @@ export const extensionUpdateCommand = new Command() const ctx = createLibSwampContext({ logger: cliCtx.logger }); const deps = createExtensionUpdateDeps({ - absoluteModelsDir, + lockfilePath, serverUrl, installExtension: async (name: string, version: string) => { const installCtx = createInstallContext(serverUrl, { logger: cliCtx.logger, - modelsDir, - workflowsDir, - vaultsDir, - driversDir, - datastoresDir, - reportsDir, + lockfilePath, + modelsDir: pulledModelsDir, + workflowsDir: pulledWorkflowsDir, + vaultsDir: pulledVaultsDir, + driversDir: pulledDriversDir, + datastoresDir: pulledDatastoresDir, + reportsDir: pulledReportsDir, repoDir, force: true, }); diff --git a/src/cli/mod.ts b/src/cli/mod.ts index 9f176947..a80f1346 100644 --- a/src/cli/mod.ts +++ b/src/cli/mod.ts @@ -19,7 +19,12 @@ import { Command } from "@cliffy/command"; import { setColorEnabled } from "@std/fmt/colors"; -import { isAbsolute, resolve } from "@std/path"; +import { isAbsolute, join, resolve } from "@std/path"; +import { + SWAMP_SUBDIRS, + swampPath, +} from "../infrastructure/persistence/paths.ts"; +import { readUpstreamExtensions } from "../infrastructure/persistence/upstream_extensions.ts"; import { getLogger, parseLogLevel } from "@logtape/logtape"; import { initializeLogging } from "../infrastructure/logging/logger.ts"; import { VERSION, versionCommand } from "./commands/version.ts"; @@ -193,7 +198,7 @@ export function commandNeedsExtensions(args: string[]): boolean { /** A deferred warning message to emit after logging is initialized. 
*/ export interface DeferredWarning { - kind: "model" | "vault" | "driver" | "datastore" | "report"; + kind: "model" | "vault" | "driver" | "datastore" | "report" | "extensions"; file: string; error: string; } @@ -215,7 +220,11 @@ async function loadUserModels( : resolve(repoDir, modelsDir); const loader = new UserModelLoader(denoRuntime, repoDir); - const result = await loader.loadModels(absoluteModelsDir); + const pulledDir = swampPath(repoDir, SWAMP_SUBDIRS.pulledModels); + const result = await loader.loadModels(absoluteModelsDir, { + additionalDirs: [pulledDir], + skipAlreadyRegistered: true, + }); // Collect failures for deferred logging (logging not yet initialized) for (const failure of result.failed) { @@ -246,7 +255,11 @@ async function loadUserVaults( : resolve(repoDir, vaultsDir); const loader = new UserVaultLoader(denoRuntime, repoDir); - const result = await loader.loadVaults(absoluteVaultsDir); + const pulledDir = swampPath(repoDir, SWAMP_SUBDIRS.pulledVaults); + const result = await loader.loadVaults(absoluteVaultsDir, { + additionalDirs: [pulledDir], + skipAlreadyRegistered: true, + }); for (const failure of result.failed) { deferredWarnings.push({ @@ -273,7 +286,11 @@ async function loadUserDrivers( : resolve(repoDir, driversDir); const loader = new UserDriverLoader(denoRuntime, repoDir); - const result = await loader.loadDrivers(absoluteDriversDir); + const pulledDir = swampPath(repoDir, SWAMP_SUBDIRS.pulledDrivers); + const result = await loader.loadDrivers(absoluteDriversDir, { + additionalDirs: [pulledDir], + skipAlreadyRegistered: true, + }); for (const failure of result.failed) { deferredWarnings.push({ @@ -300,7 +317,11 @@ async function loadUserDatastores( : resolve(repoDir, datastoresDir); const loader = new UserDatastoreLoader(denoRuntime, repoDir); - const result = await loader.loadDatastores(absoluteDatastoresDir); + const pulledDir = swampPath(repoDir, SWAMP_SUBDIRS.pulledDatastores); + const result = await 
loader.loadDatastores(absoluteDatastoresDir, { + additionalDirs: [pulledDir], + skipAlreadyRegistered: true, + }); for (const failure of result.failed) { deferredWarnings.push({ @@ -327,7 +348,11 @@ async function loadUserReports( : resolve(repoDir, reportsDir); const loader = new UserReportLoader(denoRuntime, repoDir); - const result = await loader.loadReports(absoluteReportsDir); + const pulledDir = swampPath(repoDir, SWAMP_SUBDIRS.pulledReports); + const result = await loader.loadReports(absoluteReportsDir, { + additionalDirs: [pulledDir], + skipAlreadyRegistered: true, + }); for (const failure of result.failed) { deferredWarnings.push({ @@ -341,6 +366,62 @@ async function loadUserReports( } } +/** + * Check if upstream_extensions.json has entries whose source files are + * missing from disk. This catches cases where pulled extensions weren't + * restored (e.g. after git clone without running `swamp extension install`). + */ +async function checkForMissingPulledExtensions( + repoDir: string, + marker: RepoMarkerData | null, + deferredWarnings: DeferredWarning[], +): Promise { + try { + const modelsDir = resolveModelsDir(marker); + const absoluteModelsDir = isAbsolute(modelsDir) + ? 
modelsDir + : resolve(repoDir, modelsDir); + const lockfilePath = join(absoluteModelsDir, "upstream_extensions.json"); + + const upstream = await readUpstreamExtensions(lockfilePath); + const extensionNames = Object.keys(upstream); + if (extensionNames.length === 0) return; + + // Check for any missing source files (skip bundle files — they're cached) + const missingExtensions: string[] = []; + for (const [name, entry] of Object.entries(upstream)) { + if (!entry.files) continue; + const sourceFiles = entry.files.filter((f) => + !f.endsWith(".js") && !f.endsWith(".md") && !f.endsWith(".txt") + ); + for (const file of sourceFiles) { + const absolutePath = join(repoDir, file); + try { + await Deno.stat(absolutePath); + } catch (error) { + if (error instanceof Deno.errors.NotFound) { + missingExtensions.push(name); + break; // One missing file is enough to flag this extension + } + } + } + } + + if (missingExtensions.length > 0) { + deferredWarnings.push({ + kind: "extensions", + file: lockfilePath, + error: + `${missingExtensions.length} pulled extension(s) have missing source files: ${ + missingExtensions.join(", ") + }. Run 'swamp extension install' to restore them.`, + }); + } + } catch { + // Non-fatal — don't block startup for lockfile read errors + } +} + /** Default telemetry endpoint */ const DEFAULT_TELEMETRY_ENDPOINT = "https://telemetry.swamp.club"; @@ -505,6 +586,13 @@ export async function runCli(args: string[]): Promise { loadUserDatastores(repoDir, marker, denoRuntime, deferredWarnings), loadUserReports(repoDir, marker, denoRuntime, deferredWarnings), ]); + + // Warn if lockfile has entries but pulled extension files are missing + await checkForMissingPulledExtensions( + repoDir, + marker, + deferredWarnings, + ); } // Load cached auth collectives for membership-based trust @@ -524,8 +612,6 @@ export async function runCli(args: string[]): Promise { const serverUrl = Deno.env.get("SWAMP_CLUB_URL") ?? 
"https://swamp.club"; const extensionClient = new ExtensionApiClient(serverUrl); const modelsDir = resolveModelsDir(marker); - const workflowsDir = resolveWorkflowsDir(marker); - const vaultsDir = resolveVaultsDir(marker); const denoRuntime = new EmbeddedDenoRuntime(); setAutoResolver( new ExtensionAutoResolver({ @@ -537,12 +623,16 @@ export async function runCli(args: string[]): Promise { extensionClient.downloadArchive(name, version), getChecksum: (name, version) => extensionClient.getChecksum(name, version), - modelsDir, - workflowsDir, - vaultsDir, - driversDir: resolveDriversDir(marker), - datastoresDir: resolveDatastoresDir(marker), - reportsDir: resolveReportsDir(marker), + lockfilePath: join( + resolve(repoDir, modelsDir), + "upstream_extensions.json", + ), + modelsDir: swampPath(repoDir, SWAMP_SUBDIRS.pulledModels), + workflowsDir: swampPath(repoDir, SWAMP_SUBDIRS.pulledWorkflows), + vaultsDir: swampPath(repoDir, SWAMP_SUBDIRS.pulledVaults), + driversDir: swampPath(repoDir, SWAMP_SUBDIRS.pulledDrivers), + datastoresDir: swampPath(repoDir, SWAMP_SUBDIRS.pulledDatastores), + reportsDir: swampPath(repoDir, SWAMP_SUBDIRS.pulledReports), repoDir, denoRuntime, }), @@ -604,8 +694,12 @@ export async function runCli(args: string[]): Promise { // Emit deferred warnings now that logging is initialized for (const warning of deferredWarnings) { - logger - .warn`Failed to load user ${warning.kind} ${warning.file}: ${warning.error}`; + if (warning.kind === "extensions") { + logger.warn`${warning.error}`; + } else { + logger + .warn`Failed to load user ${warning.kind} ${warning.file}: ${warning.error}`; + } } }) .error(unknownCommandErrorHandler) diff --git a/src/cli/repo_context.ts b/src/cli/repo_context.ts index 14cb5139..9f953387 100644 --- a/src/cli/repo_context.ts +++ b/src/cli/repo_context.ts @@ -41,6 +41,10 @@ import { import { UserError } from "../domain/errors.ts"; import { VERSION } from "./commands/version.ts"; import { resolveWorkflowsDir } from 
"./resolve_workflows_dir.ts"; +import { + SWAMP_SUBDIRS, + swampPath, +} from "../infrastructure/persistence/paths.ts"; import { resolveDatastoreConfig } from "./resolve_datastore.ts"; import { DefaultDatastorePathResolver } from "../infrastructure/persistence/default_datastore_path_resolver.ts"; import type { DatastorePathResolver } from "../domain/datastore/datastore_path_resolver.ts"; @@ -218,6 +222,9 @@ export async function requireInitializedRepoReadOnly( const repoContext = createRepositoryContext({ repoDir: repoPath.value, workflowsDir, + additionalWorkflowsDirs: [ + swampPath(repoPath.value, SWAMP_SUBDIRS.pulledWorkflows), + ], definitionsDir, yamlWorkflowsDir, vaultsDir, @@ -432,6 +439,9 @@ export async function requireInitializedRepoUnlocked( const repoContext = createRepositoryContext({ repoDir: repoPath.value, workflowsDir, + additionalWorkflowsDirs: [ + swampPath(repoPath.value, SWAMP_SUBDIRS.pulledWorkflows), + ], definitionsDir, yamlWorkflowsDir, vaultsDir, diff --git a/src/domain/datastore/user_datastore_loader.ts b/src/domain/datastore/user_datastore_loader.ts index c66d559d..8f7f3148 100644 --- a/src/domain/datastore/user_datastore_loader.ts +++ b/src/domain/datastore/user_datastore_loader.ts @@ -103,27 +103,41 @@ export class UserDatastoreLoader { * @param datastoresDir - The directory containing user datastore files * @returns Result containing lists of loaded and failed files */ - async loadDatastores(datastoresDir: string): Promise { + async loadDatastores( + datastoresDir: string, + options?: { + skipAlreadyRegistered?: boolean; + /** Additional directories to scan (e.g. pulled extensions). */ + additionalDirs?: string[]; + }, + ): Promise { const result: DatastoreLoadResult = { loaded: [], failed: [] }; // Ensure swamp's Zod is available on globalThis before importing bundles. 
installZodGlobal(); - // Check if directory exists - try { - await Deno.stat(datastoresDir); - } catch { - return result; // No user datastores directory - not an error - } - // Ensure deno is available before bundling const denoPath = await this.denoRuntime.ensureDeno(); - const files = await this.discoverFiles(datastoresDir); + // Discover files from primary dir and any additional dirs + const allFiles: Array<{ file: string; baseDir: string }> = []; + for ( + const dir of [datastoresDir, ...(options?.additionalDirs ?? [])] + ) { + try { + await Deno.stat(dir); + } catch { + continue; + } + const files = await this.discoverFiles(dir); + for (const file of files) { + allFiles.push({ file, baseDir: dir }); + } + } - for (const file of files) { + for (const { file, baseDir } of allFiles) { try { - const absolutePath = resolve(datastoresDir, file); + const absolutePath = resolve(baseDir, file); // Pre-check: only bundle files that declare a datastore export. const source = await Deno.readTextFile(absolutePath); @@ -136,7 +150,7 @@ export class UserDatastoreLoader { absolutePath, file, denoPath, - datastoresDir, + baseDir, ); const module = await this.importBundle(js, file); @@ -158,6 +172,9 @@ export class UserDatastoreLoader { // Register with the datastore type registry if (datastoreTypeRegistry.has(userDatastore.type)) { + if (options?.skipAlreadyRegistered) { + continue; + } result.failed.push({ file, error: diff --git a/src/domain/drivers/user_driver_loader.ts b/src/domain/drivers/user_driver_loader.ts index ea09050b..cd9b39d5 100644 --- a/src/domain/drivers/user_driver_loader.ts +++ b/src/domain/drivers/user_driver_loader.ts @@ -103,27 +103,41 @@ export class UserDriverLoader { * @param driversDir - The directory containing user driver files * @returns Result containing lists of loaded and failed files */ - async loadDrivers(driversDir: string): Promise { + async loadDrivers( + driversDir: string, + options?: { + skipAlreadyRegistered?: boolean; + /** Additional 
directories to scan (e.g. pulled extensions). */ + additionalDirs?: string[]; + }, + ): Promise { const result: DriverLoadResult = { loaded: [], failed: [] }; // Ensure swamp's Zod is available on globalThis before importing bundles. installZodGlobal(); - // Check if directory exists - try { - await Deno.stat(driversDir); - } catch { - return result; // No user drivers directory - not an error - } - // Ensure deno is available before bundling const denoPath = await this.denoRuntime.ensureDeno(); - const files = await this.discoverFiles(driversDir); + // Discover files from primary dir and any additional dirs + const allFiles: Array<{ file: string; baseDir: string }> = []; + for ( + const dir of [driversDir, ...(options?.additionalDirs ?? [])] + ) { + try { + await Deno.stat(dir); + } catch { + continue; + } + const files = await this.discoverFiles(dir); + for (const file of files) { + allFiles.push({ file, baseDir: dir }); + } + } - for (const file of files) { + for (const { file, baseDir } of allFiles) { try { - const absolutePath = resolve(driversDir, file); + const absolutePath = resolve(baseDir, file); // Pre-check: only bundle files that declare a driver export. 
const source = await Deno.readTextFile(absolutePath); @@ -136,7 +150,7 @@ export class UserDriverLoader { absolutePath, file, denoPath, - driversDir, + baseDir, ); const module = await this.importBundle(js, file); @@ -158,6 +172,9 @@ export class UserDriverLoader { // Register with the driver type registry if (driverTypeRegistry.has(userDriver.type)) { + if (options?.skipAlreadyRegistered) { + continue; + } result.failed.push({ file, error: `Driver type '${userDriver.type}' is already registered`, diff --git a/src/domain/models/user_model_loader.ts b/src/domain/models/user_model_loader.ts index 672f90e2..16d387ca 100644 --- a/src/domain/models/user_model_loader.ts +++ b/src/domain/models/user_model_loader.ts @@ -275,7 +275,11 @@ export class UserModelLoader { */ async loadModels( modelsDir: string, - options?: { skipAlreadyRegistered?: boolean }, + options?: { + skipAlreadyRegistered?: boolean; + /** Additional directories to scan (e.g. pulled extensions). */ + additionalDirs?: string[]; + }, ): Promise { const result: LoadResult = { loaded: [], extended: [], failed: [] }; @@ -283,17 +287,26 @@ export class UserModelLoader { // This prevents dual-instance issues in the compiled binary. installZodGlobal(); - // Check if directory exists - try { - await Deno.stat(modelsDir); - } catch { - return result; // No user models directory - not an error - } - // Ensure deno is available before bundling const denoPath = await this.denoRuntime.ensureDeno(); - const files = await this.discoverFiles(modelsDir); + // Discover files from primary dir and any additional dirs, merging into + // a single list of { file (relative), baseDir (absolute root) } tuples. + // Primary dir files come first so user extensions take precedence. + const allFiles: Array<{ file: string; baseDir: string }> = []; + for ( + const dir of [modelsDir, ...(options?.additionalDirs ?? 
[])] + ) { + try { + await Deno.stat(dir); + } catch { + continue; // Directory doesn't exist — skip + } + const files = await this.discoverFiles(dir); + for (const file of files) { + allFiles.push({ file, baseDir: dir }); + } + } // Import all files and classify by export name const modelFiles: Array<{ @@ -306,9 +319,9 @@ export class UserModelLoader { module: Record; }> = []; - for (const file of files) { + for (const { file, baseDir } of allFiles) { try { - const absolutePath = resolve(modelsDir, file); + const absolutePath = resolve(baseDir, file); // Pre-check: only bundle files that declare a model or extension export. // This avoids attempting to bundle helper scripts with unbundleable @@ -323,7 +336,7 @@ export class UserModelLoader { absolutePath, file, denoPath, - modelsDir, + baseDir, ); const module = await this.importBundle(js, file); diff --git a/src/domain/repo/repo_service.ts b/src/domain/repo/repo_service.ts index 107176f4..f57ea5d8 100644 --- a/src/domain/repo/repo_service.ts +++ b/src/domain/repo/repo_service.ts @@ -17,9 +17,12 @@ // You should have received a copy of the GNU Affero General Public License // along with Swamp. If not, see . 
-import { join } from "@std/path"; +import { dirname, join, resolve } from "@std/path"; import { ensureDir } from "@std/fs"; import { atomicWriteTextFile } from "../../infrastructure/persistence/atomic_write.ts"; +import { + readUpstreamExtensions, +} from "../../infrastructure/persistence/upstream_extensions.ts"; import type { RepoPath } from "./repo_path.ts"; import { SWAMP_SUBDIRS, @@ -340,6 +343,9 @@ export class RepoService { // Migrate from symlink-based layout to datastore layout await this.migrateFromSymlinks(repoPath); + // Migrate pulled extensions from extensions/ to .swamp/pulled-extensions/ + await this.migrateExtensionLayout(repoPath); + await this.markerRepo.write(repoPath, updatedMarker); // createUpgradeMarker always sets upgradedAt, but TypeScript doesn't know this @@ -1511,6 +1517,144 @@ export const SwampAudit: Plugin = async ({ directory }) => { } } + /** + * Migrates pulled extension files from extensions/{type}/ to + * .swamp/pulled-extensions/{type}/. Reads upstream_extensions.json + * to find tracked files, moves them, and updates the lockfile paths. + */ + private async migrateExtensionLayout(repoPath: RepoPath): Promise { + // Find the lockfile — use marker's modelsDir or default + const marker = await this.markerRepo.read(repoPath); + const modelsDir = marker?.modelsDir ?? 
"extensions/models"; + const absoluteModelsDir = resolve(repoPath.value, modelsDir); + const lockfilePath = join(absoluteModelsDir, "upstream_extensions.json"); + + const upstream = await readUpstreamExtensions(lockfilePath); + if (Object.keys(upstream).length === 0) { + return; // No extensions to migrate + } + + // Check if any files use the old layout (not in .swamp/) + const swampPrefix = ".swamp/"; + let hasLegacyFiles = false; + for (const entry of Object.values(upstream)) { + if (entry.files?.some((f) => !f.startsWith(swampPrefix))) { + hasLegacyFiles = true; + break; + } + } + + if (!hasLegacyFiles) { + return; // Already on new layout + } + + // Mapping from old extension dir prefixes to new pulled-extension subdirs + const dirMapping: Record = { + "extensions/models/": SWAMP_SUBDIRS.pulledModels + "/", + "extensions/vaults/": SWAMP_SUBDIRS.pulledVaults + "/", + "extensions/workflows/": SWAMP_SUBDIRS.pulledWorkflows + "/", + "extensions/drivers/": SWAMP_SUBDIRS.pulledDrivers + "/", + "extensions/datastores/": SWAMP_SUBDIRS.pulledDatastores + "/", + "extensions/reports/": SWAMP_SUBDIRS.pulledReports + "/", + }; + + const gitTrackedFiles: string[] = []; + + // Move files and update paths + for (const [name, entry] of Object.entries(upstream)) { + if (!entry.files) continue; + + const updatedFiles: string[] = []; + for (const file of entry.files) { + if (file.startsWith(swampPrefix)) { + // Already in .swamp/ — keep as-is (bundles, etc.) 
+ updatedFiles.push(file); + continue; + } + + // Find matching dir prefix and compute new path + let newPath: string | null = null; + for (const [oldPrefix, newPrefix] of Object.entries(dirMapping)) { + if (file.startsWith(oldPrefix)) { + const relativePart = file.slice(oldPrefix.length); + newPath = swampPrefix + newPrefix + relativePart; + break; + } + } + + if (!newPath) { + // Unknown prefix — keep as-is but note it + updatedFiles.push(file); + continue; + } + + // Move the file + const srcAbsolute = join(repoPath.value, file); + const destAbsolute = join(repoPath.value, newPath); + + try { + await ensureDir(dirname(destAbsolute)); + await Deno.rename(srcAbsolute, destAbsolute); + gitTrackedFiles.push(file); + } catch (error) { + if (error instanceof Deno.errors.NotFound) { + // Source file missing — already removed or never committed + } else { + throw error; + } + } + + updatedFiles.push(newPath); + } + + upstream[name] = { ...entry, files: updatedFiles }; + } + + // Write updated lockfile + await atomicWriteTextFile( + lockfilePath, + JSON.stringify(upstream, null, 2) + "\n", + ); + + // Prune empty directories left behind + for (const oldPrefix of Object.keys(dirMapping)) { + const dirPath = join(repoPath.value, oldPrefix); + try { + await this.pruneEmptyDirsUp(dirPath, repoPath.value); + } catch { + // Non-fatal + } + } + } + + /** + * Removes empty directories walking upward until reaching stopDir + * or a non-empty directory. 
+ */ + private async pruneEmptyDirsUp( + dir: string, + stopDir: string, + ): Promise { + let current = dir; + const resolvedStop = resolve(stopDir); + while (resolve(current) !== resolvedStop) { + try { + const entries: Deno.DirEntry[] = []; + for await (const entry of Deno.readDir(current)) { + entries.push(entry); + } + if (entries.length === 0) { + await Deno.remove(current); + current = dirname(current); + } else { + break; + } + } catch { + break; + } + } + } + /** * Migrates to top-level directory layout: * - Replaces `latest` symlinks in data dirs with text files diff --git a/src/domain/reports/user_report_loader.ts b/src/domain/reports/user_report_loader.ts index 27c79715..67cda80f 100644 --- a/src/domain/reports/user_report_loader.ts +++ b/src/domain/reports/user_report_loader.ts @@ -103,27 +103,41 @@ export class UserReportLoader { * @param reportsDir - The directory containing user report files * @returns Result containing lists of loaded and failed files */ - async loadReports(reportsDir: string): Promise { + async loadReports( + reportsDir: string, + options?: { + skipAlreadyRegistered?: boolean; + /** Additional directories to scan (e.g. pulled extensions). */ + additionalDirs?: string[]; + }, + ): Promise { const result: ReportLoadResult = { loaded: [], failed: [] }; // Ensure swamp's Zod is available on globalThis before importing bundles. installZodGlobal(); - // Check if directory exists - try { - await Deno.stat(reportsDir); - } catch { - return result; // No user reports directory - not an error - } - // Ensure deno is available before bundling const denoPath = await this.denoRuntime.ensureDeno(); - const files = await this.discoverFiles(reportsDir); + // Discover files from primary dir and any additional dirs + const allFiles: Array<{ file: string; baseDir: string }> = []; + for ( + const dir of [reportsDir, ...(options?.additionalDirs ?? 
[])] + ) { + try { + await Deno.stat(dir); + } catch { + continue; + } + const files = await this.discoverFiles(dir); + for (const file of files) { + allFiles.push({ file, baseDir: dir }); + } + } - for (const file of files) { + for (const { file, baseDir } of allFiles) { try { - const absolutePath = resolve(reportsDir, file); + const absolutePath = resolve(baseDir, file); // Pre-check: only bundle files that declare a report export. const source = await Deno.readTextFile(absolutePath); @@ -136,7 +150,7 @@ export class UserReportLoader { absolutePath, file, denoPath, - reportsDir, + baseDir, ); const module = await this.importBundle(js, file); @@ -158,6 +172,9 @@ export class UserReportLoader { // Register with the report registry if (reportRegistry.has(userReport.name)) { + if (options?.skipAlreadyRegistered) { + continue; + } result.failed.push({ file, error: `Report name '${userReport.name}' is already registered`, diff --git a/src/domain/vaults/user_vault_loader.ts b/src/domain/vaults/user_vault_loader.ts index d56b0740..471dbed1 100644 --- a/src/domain/vaults/user_vault_loader.ts +++ b/src/domain/vaults/user_vault_loader.ts @@ -105,28 +105,39 @@ export class UserVaultLoader { */ async loadVaults( vaultsDir: string, - options?: { skipAlreadyRegistered?: boolean }, + options?: { + skipAlreadyRegistered?: boolean; + /** Additional directories to scan (e.g. pulled extensions). */ + additionalDirs?: string[]; + }, ): Promise { const result: VaultLoadResult = { loaded: [], failed: [] }; // Ensure swamp's Zod is available on globalThis before importing bundles. 
installZodGlobal(); - // Check if directory exists - try { - await Deno.stat(vaultsDir); - } catch { - return result; // No user vaults directory - not an error - } - // Ensure deno is available before bundling const denoPath = await this.denoRuntime.ensureDeno(); - const files = await this.discoverFiles(vaultsDir); + // Discover files from primary dir and any additional dirs + const allFiles: Array<{ file: string; baseDir: string }> = []; + for ( + const dir of [vaultsDir, ...(options?.additionalDirs ?? [])] + ) { + try { + await Deno.stat(dir); + } catch { + continue; + } + const files = await this.discoverFiles(dir); + for (const file of files) { + allFiles.push({ file, baseDir: dir }); + } + } - for (const file of files) { + for (const { file, baseDir } of allFiles) { try { - const absolutePath = resolve(vaultsDir, file); + const absolutePath = resolve(baseDir, file); // Pre-check: only bundle files that declare a vault export. const source = await Deno.readTextFile(absolutePath); @@ -139,7 +150,7 @@ export class UserVaultLoader { absolutePath, file, denoPath, - vaultsDir, + baseDir, ); const module = await this.importBundle(js, file); diff --git a/src/infrastructure/persistence/extension_workflow_repository.ts b/src/infrastructure/persistence/extension_workflow_repository.ts index 3598b108..cf5d9402 100644 --- a/src/infrastructure/persistence/extension_workflow_repository.ts +++ b/src/infrastructure/persistence/extension_workflow_repository.ts @@ -41,9 +41,14 @@ const logger = getLogger(["extension-workflow-repo"]); * Any `*.yaml` file in the directory tree is treated as a workflow definition. */ export class ExtensionWorkflowRepository implements WorkflowRepository { + private readonly workflowsDirs: string[]; + constructor( - private readonly workflowsDir: string, - ) {} + workflowsDir: string, + additionalDirs?: string[], + ) { + this.workflowsDirs = [workflowsDir, ...(additionalDirs ?? 
[])]; + } async findById(id: WorkflowId): Promise { const workflows = await this.findAll(); @@ -57,31 +62,39 @@ export class ExtensionWorkflowRepository implements WorkflowRepository { async findAll(): Promise { const workflows: Workflow[] = []; + const seenNames = new Set(); - try { - for await ( - const entry of walk(this.workflowsDir, { - exts: [".yaml"], - includeDirs: false, - }) - ) { - try { - const content = await Deno.readTextFile(entry.path); - const data = parseYaml(content) as WorkflowData; - workflows.push(Workflow.fromData(data)); - } catch (parseError) { - const errorMsg = parseError instanceof Error - ? parseError.message - : String(parseError); - logger - .warn`Skipping broken extension workflow "${entry.path}": ${errorMsg}`; + for (const dir of this.workflowsDirs) { + try { + for await ( + const entry of walk(dir, { + exts: [".yaml"], + includeDirs: false, + }) + ) { + try { + const content = await Deno.readTextFile(entry.path); + const data = parseYaml(content) as WorkflowData; + const workflow = Workflow.fromData(data); + // Deduplicate: first directory wins (user dir before pulled dir) + if (!seenNames.has(workflow.name)) { + seenNames.add(workflow.name); + workflows.push(workflow); + } + } catch (parseError) { + const errorMsg = parseError instanceof Error + ? 
parseError.message + : String(parseError); + logger + .warn`Skipping broken extension workflow "${entry.path}": ${errorMsg}`; + } } + } catch (error) { + if (error instanceof Deno.errors.NotFound) { + continue; // Directory doesn't exist — skip + } + throw error; } - } catch (error) { - if (error instanceof Deno.errors.NotFound) { - return []; - } - throw error; } return workflows; @@ -110,7 +123,7 @@ export class ExtensionWorkflowRepository implements WorkflowRepository { getPath(id: WorkflowId): string { // Scan the directory to find the file for a given workflow ID // This is a synchronous fallback — for display purposes only - return `${this.workflowsDir}/workflow-${id}.yaml`; + return `${this.workflowsDirs[0]}/workflow-${id}.yaml`; } /** @@ -118,25 +131,27 @@ export class ExtensionWorkflowRepository implements WorkflowRepository { * Returns null if not found. */ async findPath(id: WorkflowId): Promise { - try { - for await ( - const entry of walk(this.workflowsDir, { - exts: [".yaml"], - includeDirs: false, - }) - ) { - try { - const content = await Deno.readTextFile(entry.path); - const data = parseYaml(content) as WorkflowData; - if (data.id === id) { - return entry.path; + for (const dir of this.workflowsDirs) { + try { + for await ( + const entry of walk(dir, { + exts: [".yaml"], + includeDirs: false, + }) + ) { + try { + const content = await Deno.readTextFile(entry.path); + const data = parseYaml(content) as WorkflowData; + if (data.id === id) { + return entry.path; + } + } catch { + // Skip broken files } - } catch { - // Skip broken files } + } catch { + // Directory doesn't exist } - } catch { - // Directory doesn't exist } return null; } diff --git a/src/infrastructure/persistence/paths.ts b/src/infrastructure/persistence/paths.ts index 8f7f9dcb..5018982f 100644 --- a/src/infrastructure/persistence/paths.ts +++ b/src/infrastructure/persistence/paths.ts @@ -78,6 +78,18 @@ export const SWAMP_SUBDIRS = { audit: "audit", /** Legacy: resource definitions 
*/ resources: "resources", + /** Pulled extension source: models */ + pulledModels: "pulled-extensions/models", + /** Pulled extension source: vaults */ + pulledVaults: "pulled-extensions/vaults", + /** Pulled extension source: workflows */ + pulledWorkflows: "pulled-extensions/workflows", + /** Pulled extension source: drivers */ + pulledDrivers: "pulled-extensions/drivers", + /** Pulled extension source: datastores */ + pulledDatastores: "pulled-extensions/datastores", + /** Pulled extension source: reports */ + pulledReports: "pulled-extensions/reports", } as const; /** diff --git a/src/infrastructure/persistence/repository_factory.ts b/src/infrastructure/persistence/repository_factory.ts index 3086f9c3..40d79108 100644 --- a/src/infrastructure/persistence/repository_factory.ts +++ b/src/infrastructure/persistence/repository_factory.ts @@ -187,6 +187,8 @@ export interface RepositoryFactoryConfig { repoDir: string; enableIndexing?: boolean; workflowsDir?: string; + /** Additional workflow directories to scan (e.g. pulled extensions). */ + additionalWorkflowsDirs?: string[]; definitionsDir?: string; yamlWorkflowsDir?: string; vaultsDir?: string; @@ -222,6 +224,7 @@ export function createRepositoryContext( repoDir, enableIndexing = true, workflowsDir, + additionalWorkflowsDirs, definitionsDir, yamlWorkflowsDir, vaultsDir, @@ -250,7 +253,7 @@ export function createRepositoryContext( // Create composite workflow repo if extension workflows dir is provided const extensionWorkflowRepo = workflowsDir - ? new ExtensionWorkflowRepository(workflowsDir) + ? 
new ExtensionWorkflowRepository(workflowsDir, additionalWorkflowsDirs) : null; const workflowRepo: WorkflowRepository = new CompositeWorkflowRepository( yamlWorkflowRepo, diff --git a/src/infrastructure/persistence/upstream_extensions.ts b/src/infrastructure/persistence/upstream_extensions.ts index e4562abb..db501898 100644 --- a/src/infrastructure/persistence/upstream_extensions.ts +++ b/src/infrastructure/persistence/upstream_extensions.ts @@ -17,14 +17,16 @@ // You should have received a copy of the GNU Affero General Public License // along with Swamp. If not, see . -import { join } from "@std/path"; - /** Entry in upstream_extensions.json. */ export interface UpstreamExtensionEntry { version: string; pulledAt: string; files?: string[]; include?: string[]; + /** SHA-256 checksum of the extension archive, for verification on re-install. */ + checksum?: string; + /** Registry server URL used when pulling, for non-default registries. */ + serverUrl?: string; } /** Shape of upstream_extensions.json. */ @@ -32,13 +34,14 @@ export type UpstreamExtensionsMap = Record; /** * Reads upstream_extensions.json and returns the parsed map. + * + * @param lockfilePath Full path to the upstream_extensions.json file. */ export async function readUpstreamExtensions( - modelsDir: string, + lockfilePath: string, ): Promise { - const jsonPath = join(modelsDir, "upstream_extensions.json"); try { - const content = await Deno.readTextFile(jsonPath); + const content = await Deno.readTextFile(lockfilePath); return JSON.parse(content) as UpstreamExtensionsMap; } catch (error) { if (error instanceof Deno.errors.NotFound) { diff --git a/src/libswamp/extensions/install.ts b/src/libswamp/extensions/install.ts new file mode 100644 index 00000000..31fcbb56 --- /dev/null +++ b/src/libswamp/extensions/install.ts @@ -0,0 +1,145 @@ +// Swamp, an Automation Framework +// Copyright (C) 2026 System Initiative, Inc. +// +// This file is part of Swamp. 
+// +// Swamp is free software: you can redistribute it and/or modify +// it under the terms of the GNU Affero General Public License version 3 +// as published by the Free Software Foundation, with the Swamp +// Extension and Definition Exception (found in the "COPYING-EXCEPTION" +// file). +// +// Swamp is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU Affero General Public License for more details. +// +// You should have received a copy of the GNU Affero General Public License +// along with Swamp. If not, see . + +import { join } from "@std/path"; +import { readUpstreamExtensions } from "../../infrastructure/persistence/upstream_extensions.ts"; +import type { LibSwampContext } from "../context.ts"; +import type { SwampError } from "../errors.ts"; +import { withGeneratorSpan } from "../../infrastructure/tracing/mod.ts"; +import { + type InstallContext, + installExtension, + parseExtensionRef, +} from "./pull.ts"; + +/** Result of installing a single extension during bulk install. */ +export interface ExtensionInstallEntry { + name: string; + version: string; + status: "installed" | "up_to_date" | "failed"; + error?: string; +} + +/** Data for the completed event. */ +export interface ExtensionInstallData { + entries: ExtensionInstallEntry[]; + installed: number; + upToDate: number; + failed: number; +} + +export type ExtensionInstallEvent = + | { kind: "resolving" } + | { kind: "installing"; name: string; version: string } + | { kind: "completed"; data: ExtensionInstallData } + | { kind: "error"; error: SwampError }; + +/** Dependencies for the extension install operation. 
*/ +export interface ExtensionInstallDeps { + lockfilePath: string; + repoDir: string; + createInstallContext: ( + name: string, + version: string, + ) => InstallContext; +} + +/** + * Reads upstream_extensions.json and re-pulls any extensions whose files + * are missing from disk. Analogous to `npm install` restoring node_modules + * from package-lock.json. + */ +export async function* extensionInstall( + _ctx: LibSwampContext, + deps: ExtensionInstallDeps, +): AsyncIterable { + yield* withGeneratorSpan( + "swamp.extension.install", + {}, + (async function* () { + yield { kind: "resolving" }; + + const upstream = await readUpstreamExtensions(deps.lockfilePath); + const entries: ExtensionInstallEntry[] = []; + let installed = 0; + let upToDate = 0; + let failed = 0; + + for (const [name, entry] of Object.entries(upstream)) { + const version = entry.version; + + // Check if any source files are missing + const isMissing = await hasAnyMissingFiles( + entry.files ?? [], + deps.repoDir, + ); + + if (!isMissing) { + entries.push({ name, version, status: "up_to_date" }); + upToDate++; + continue; + } + + yield { kind: "installing", name, version }; + + try { + const installCtx = deps.createInstallContext(name, version); + const ref = parseExtensionRef(`${name}@${version}`); + await installExtension(ref, installCtx); + entries.push({ name, version, status: "installed" }); + installed++; + } catch (error) { + entries.push({ + name, + version, + status: "failed", + error: String(error), + }); + failed++; + } + } + + yield { + kind: "completed", + data: { entries, installed, upToDate, failed }, + }; + })(), + ); +} + +/** + * Checks if any of the given file paths are missing from disk. 
+ */ +async function hasAnyMissingFiles( + files: string[], + repoDir: string, +): Promise { + for (const file of files) { + const absolutePath = join(repoDir, file); + try { + await Deno.stat(absolutePath); + } catch (error) { + if (error instanceof Deno.errors.NotFound) { + return true; + } + throw error; + } + } + return false; +} diff --git a/src/libswamp/extensions/install_test.ts b/src/libswamp/extensions/install_test.ts new file mode 100644 index 00000000..9407137a --- /dev/null +++ b/src/libswamp/extensions/install_test.ts @@ -0,0 +1,205 @@ +// Swamp, an Automation Framework +// Copyright (C) 2026 System Initiative, Inc. +// +// This file is part of Swamp. +// +// Swamp is free software: you can redistribute it and/or modify +// it under the terms of the GNU Affero General Public License version 3 +// as published by the Free Software Foundation, with the Swamp +// Extension and Definition Exception (found in the "COPYING-EXCEPTION" +// file). +// +// Swamp is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU Affero General Public License for more details. +// +// You should have received a copy of the GNU Affero General Public License +// along with Swamp. If not, see . 
+
+import { assertEquals } from "@std/assert";
+import { join } from "@std/path";
+import { ensureDir } from "@std/fs";
+import { extensionInstall, type ExtensionInstallEvent } from "./install.ts";
+import { createLibSwampContext } from "../context.ts";
+
+async function collectEvents(
+  gen: AsyncIterable<ExtensionInstallEvent>,
+): Promise<ExtensionInstallEvent[]> {
+  const events: ExtensionInstallEvent[] = [];
+  for await (const event of gen) {
+    events.push(event);
+  }
+  return events;
+}
+
+Deno.test("extensionInstall: empty lockfile yields all up to date", async () => {
+  const tmpDir = await Deno.makeTempDir({ prefix: "swamp_test_" });
+  try {
+    const lockfilePath = join(tmpDir, "upstream_extensions.json");
+    // Write empty lockfile
+    await Deno.writeTextFile(lockfilePath, "{}");
+
+    const ctx = createLibSwampContext({});
+    const events = await collectEvents(
+      extensionInstall(ctx, {
+        lockfilePath,
+        repoDir: tmpDir,
+        createInstallContext: () => {
+          throw new Error("should not be called");
+        },
+      }),
+    );
+
+    const completed = events.find((e) => e.kind === "completed");
+    assertEquals(completed?.kind, "completed");
+    if (completed?.kind === "completed") {
+      assertEquals(completed.data.installed, 0);
+      assertEquals(completed.data.upToDate, 0);
+      assertEquals(completed.data.failed, 0);
+      assertEquals(completed.data.entries, []);
+    }
+  } finally {
+    await Deno.remove(tmpDir, { recursive: true });
+  }
+});
+
+Deno.test("extensionInstall: skips extensions with all files present", async () => {
+  const tmpDir = await Deno.makeTempDir({ prefix: "swamp_test_" });
+  try {
+    // Create the files on disk
+    const pulledDir = join(tmpDir, ".swamp", "pulled-extensions", "models");
+    await ensureDir(pulledDir);
+    await Deno.writeTextFile(join(pulledDir, "test.ts"), "// test");
+
+    const lockfilePath = join(tmpDir, "upstream_extensions.json");
+    await Deno.writeTextFile(
+      lockfilePath,
+      JSON.stringify({
+        "@test/ext": {
+          version: "1.0.0",
+          pulledAt: "2026-01-01T00:00:00Z",
+          files:
[".swamp/pulled-extensions/models/test.ts"], + }, + }), + ); + + const ctx = createLibSwampContext({}); + const events = await collectEvents( + extensionInstall(ctx, { + lockfilePath, + repoDir: tmpDir, + createInstallContext: () => { + throw new Error("should not be called for up-to-date"); + }, + }), + ); + + const completed = events.find((e) => e.kind === "completed"); + assertEquals(completed?.kind, "completed"); + if (completed?.kind === "completed") { + assertEquals(completed.data.upToDate, 1); + assertEquals(completed.data.installed, 0); + assertEquals(completed.data.entries[0].status, "up_to_date"); + assertEquals(completed.data.entries[0].name, "@test/ext"); + } + } finally { + await Deno.remove(tmpDir, { recursive: true }); + } +}); + +Deno.test("extensionInstall: detects missing files and calls install", async () => { + const tmpDir = await Deno.makeTempDir({ prefix: "swamp_test_" }); + try { + const lockfilePath = join(tmpDir, "upstream_extensions.json"); + await Deno.writeTextFile( + lockfilePath, + JSON.stringify({ + "@test/missing": { + version: "2.0.0", + pulledAt: "2026-01-01T00:00:00Z", + files: [".swamp/pulled-extensions/models/missing.ts"], + }, + }), + ); + + let installCalled = false; + const ctx = createLibSwampContext({}); + const events = await collectEvents( + extensionInstall(ctx, { + lockfilePath, + repoDir: tmpDir, + createInstallContext: (_name, _version) => { + installCalled = true; + // Return a minimal context that won't actually pull + // (installExtension will fail, which we catch) + return { + getExtension: () => Promise.resolve(null), + downloadArchive: () => Promise.reject(new Error("test stub")), + getChecksum: () => Promise.resolve(null), + lockfilePath, + modelsDir: join(tmpDir, ".swamp/pulled-extensions/models"), + workflowsDir: join(tmpDir, ".swamp/pulled-extensions/workflows"), + vaultsDir: join(tmpDir, ".swamp/pulled-extensions/vaults"), + driversDir: join(tmpDir, ".swamp/pulled-extensions/drivers"), + datastoresDir: 
join( + tmpDir, + ".swamp/pulled-extensions/datastores", + ), + reportsDir: join(tmpDir, ".swamp/pulled-extensions/reports"), + repoDir: tmpDir, + force: true, + alreadyPulled: new Set(), + depth: 0, + }; + }, + }), + ); + + assertEquals(installCalled, true); + + // Should have an "installing" event + const installing = events.find((e) => e.kind === "installing"); + assertEquals(installing?.kind, "installing"); + if (installing?.kind === "installing") { + assertEquals(installing.name, "@test/missing"); + assertEquals(installing.version, "2.0.0"); + } + + // Install will fail since our stub rejects — that's fine, we're testing detection + const completed = events.find((e) => e.kind === "completed"); + assertEquals(completed?.kind, "completed"); + if (completed?.kind === "completed") { + assertEquals(completed.data.failed, 1); + assertEquals(completed.data.entries[0].status, "failed"); + } + } finally { + await Deno.remove(tmpDir, { recursive: true }); + } +}); + +Deno.test("extensionInstall: missing lockfile yields empty result", async () => { + const tmpDir = await Deno.makeTempDir({ prefix: "swamp_test_" }); + try { + const lockfilePath = join(tmpDir, "nonexistent.json"); + + const ctx = createLibSwampContext({}); + const events = await collectEvents( + extensionInstall(ctx, { + lockfilePath, + repoDir: tmpDir, + createInstallContext: () => { + throw new Error("should not be called"); + }, + }), + ); + + const completed = events.find((e) => e.kind === "completed"); + assertEquals(completed?.kind, "completed"); + if (completed?.kind === "completed") { + assertEquals(completed.data.entries.length, 0); + } + } finally { + await Deno.remove(tmpDir, { recursive: true }); + } +}); diff --git a/src/libswamp/extensions/layout.ts b/src/libswamp/extensions/layout.ts new file mode 100644 index 00000000..1bc494f0 --- /dev/null +++ b/src/libswamp/extensions/layout.ts @@ -0,0 +1,71 @@ +// Swamp, an Automation Framework +// Copyright (C) 2026 System Initiative, Inc. 
+//
+// This file is part of Swamp.
+//
+// Swamp is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Affero General Public License version 3
+// as published by the Free Software Foundation, with the Swamp
+// Extension and Definition Exception (found in the "COPYING-EXCEPTION"
+// file).
+//
+// Swamp is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+// GNU Affero General Public License for more details.
+//
+// You should have received a copy of the GNU Affero General Public License
+// along with Swamp. If not, see <https://www.gnu.org/licenses/>.
+
+import { SWAMP_DATA_DIR } from "../../infrastructure/persistence/paths.ts";
+import { readUpstreamExtensions } from "../../infrastructure/persistence/upstream_extensions.ts";
+import { UserError } from "../../domain/errors.ts";
+
+/**
+ * Detects whether a repository has pulled extensions in the legacy layout
+ * (files in extensions/{type}/ instead of .swamp/pulled-extensions/{type}/).
+ *
+ * Checks the upstream_extensions.json lockfile for file paths that don't
+ * start with the .swamp/ prefix, indicating the old layout.
+ *
+ * @param lockfilePath Full path to upstream_extensions.json
+ * @returns List of legacy file paths, or empty array if layout is current
+ */
+export async function detectLegacyExtensionLayout(
+  lockfilePath: string,
+): Promise<string[]> {
+  const upstream = await readUpstreamExtensions(lockfilePath);
+  const legacyFiles: string[] = [];
+
+  for (const [_name, entry] of Object.entries(upstream)) {
+    if (!entry.files) continue;
+    for (const file of entry.files) {
+      // Files in the new layout start with .swamp/
+      // Bundle files already live in .swamp/ and are fine
+      if (!file.startsWith(`${SWAMP_DATA_DIR}/`)) {
+        legacyFiles.push(file);
+      }
+    }
+  }
+
+  return legacyFiles;
+}
+
+/**
+ * Checks for legacy extension layout and throws a UserError if detected.
+ * Call this at the start of extension commands to prevent operations on
+ * repos that haven't been migrated.
+ *
+ * @param lockfilePath Full path to upstream_extensions.json
+ */
+export async function requireCurrentExtensionLayout(
+  lockfilePath: string,
+): Promise<void> {
+  const legacyFiles = await detectLegacyExtensionLayout(lockfilePath);
+  if (legacyFiles.length > 0) {
+    throw new UserError(
+      `This repo has pulled extensions in the old layout (extensions/).\n` +
+        `Run 'swamp repo upgrade' to migrate them to .swamp/pulled-extensions/.`,
+    );
+  }
+}
diff --git a/src/libswamp/extensions/layout_test.ts b/src/libswamp/extensions/layout_test.ts
new file mode 100644
index 00000000..26bb42a4
--- /dev/null
+++ b/src/libswamp/extensions/layout_test.ts
@@ -0,0 +1,134 @@
+// Swamp, an Automation Framework
+// Copyright (C) 2026 System Initiative, Inc.
+//
+// This file is part of Swamp.
+//
+// Swamp is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Affero General Public License version 3
+// as published by the Free Software Foundation, with the Swamp
+// Extension and Definition Exception (found in the "COPYING-EXCEPTION"
+// file).
+//
+// Swamp is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+// GNU Affero General Public License for more details.
+//
+// You should have received a copy of the GNU Affero General Public License
+// along with Swamp. If not, see <https://www.gnu.org/licenses/>.
+ +import { assertEquals, assertRejects } from "@std/assert"; +import { join } from "@std/path"; +import { + detectLegacyExtensionLayout, + requireCurrentExtensionLayout, +} from "./layout.ts"; +import { UserError } from "../../domain/errors.ts"; + +Deno.test("detectLegacyExtensionLayout: returns empty for new layout", async () => { + const tmpDir = await Deno.makeTempDir({ prefix: "swamp_test_" }); + try { + const lockfilePath = join(tmpDir, "upstream_extensions.json"); + await Deno.writeTextFile( + lockfilePath, + JSON.stringify({ + "@test/ext": { + version: "1.0.0", + pulledAt: "2026-01-01T00:00:00Z", + files: [ + ".swamp/pulled-extensions/models/ext.ts", + ".swamp/bundles/ext.js", + ], + }, + }), + ); + + const legacyFiles = await detectLegacyExtensionLayout(lockfilePath); + assertEquals(legacyFiles, []); + } finally { + await Deno.remove(tmpDir, { recursive: true }); + } +}); + +Deno.test("detectLegacyExtensionLayout: detects old layout files", async () => { + const tmpDir = await Deno.makeTempDir({ prefix: "swamp_test_" }); + try { + const lockfilePath = join(tmpDir, "upstream_extensions.json"); + await Deno.writeTextFile( + lockfilePath, + JSON.stringify({ + "@test/ext": { + version: "1.0.0", + pulledAt: "2026-01-01T00:00:00Z", + files: [ + "extensions/models/ext.ts", + ".swamp/bundles/ext.js", + ], + }, + }), + ); + + const legacyFiles = await detectLegacyExtensionLayout(lockfilePath); + assertEquals(legacyFiles, ["extensions/models/ext.ts"]); + } finally { + await Deno.remove(tmpDir, { recursive: true }); + } +}); + +Deno.test("detectLegacyExtensionLayout: returns empty when no lockfile", async () => { + const tmpDir = await Deno.makeTempDir({ prefix: "swamp_test_" }); + try { + const lockfilePath = join(tmpDir, "upstream_extensions.json"); + const legacyFiles = await detectLegacyExtensionLayout(lockfilePath); + assertEquals(legacyFiles, []); + } finally { + await Deno.remove(tmpDir, { recursive: true }); + } +}); + 
+Deno.test("requireCurrentExtensionLayout: throws on legacy layout", async () => { + const tmpDir = await Deno.makeTempDir({ prefix: "swamp_test_" }); + try { + const lockfilePath = join(tmpDir, "upstream_extensions.json"); + await Deno.writeTextFile( + lockfilePath, + JSON.stringify({ + "@test/ext": { + version: "1.0.0", + pulledAt: "2026-01-01T00:00:00Z", + files: ["extensions/models/ext.ts"], + }, + }), + ); + + await assertRejects( + () => requireCurrentExtensionLayout(lockfilePath), + UserError, + "old layout", + ); + } finally { + await Deno.remove(tmpDir, { recursive: true }); + } +}); + +Deno.test("requireCurrentExtensionLayout: passes on current layout", async () => { + const tmpDir = await Deno.makeTempDir({ prefix: "swamp_test_" }); + try { + const lockfilePath = join(tmpDir, "upstream_extensions.json"); + await Deno.writeTextFile( + lockfilePath, + JSON.stringify({ + "@test/ext": { + version: "1.0.0", + pulledAt: "2026-01-01T00:00:00Z", + files: [".swamp/pulled-extensions/models/ext.ts"], + }, + }), + ); + + // Should not throw + await requireCurrentExtensionLayout(lockfilePath); + } finally { + await Deno.remove(tmpDir, { recursive: true }); + } +}); diff --git a/src/libswamp/extensions/list.ts b/src/libswamp/extensions/list.ts index 6ae2a862..faffeba3 100644 --- a/src/libswamp/extensions/list.ts +++ b/src/libswamp/extensions/list.ts @@ -17,7 +17,7 @@ // You should have received a copy of the GNU Affero General Public License // along with Swamp. If not, see . -import { resolve } from "@std/path"; +import { join, resolve } from "@std/path"; import { RepoPath } from "../../domain/repo/repo_path.ts"; import { RepoMarkerRepository, @@ -67,8 +67,9 @@ export async function createExtensionListDeps( const envModelsDir = Deno.env.get("SWAMP_MODELS_DIR"); const modelsDir = envModelsDir ?? marker?.modelsDir ?? 
"extensions/models"; const absoluteModelsDir = resolve(repoDir, modelsDir); + const lockfilePath = join(absoluteModelsDir, "upstream_extensions.json"); return { - readUpstreamExtensions: () => readUpstreamExtensions(absoluteModelsDir), + readUpstreamExtensions: () => readUpstreamExtensions(lockfilePath), }; } diff --git a/src/libswamp/extensions/pull.ts b/src/libswamp/extensions/pull.ts index a060214b..890d8bd5 100644 --- a/src/libswamp/extensions/pull.ts +++ b/src/libswamp/extensions/pull.ts @@ -82,6 +82,8 @@ export interface InstallContext { version: string, ) => Promise; logger?: Logger; + /** Full path to the upstream_extensions.json lockfile. */ + lockfilePath: string; modelsDir: string; workflowsDir: string; vaultsDir: string; @@ -123,6 +125,8 @@ export interface ExtensionPullDeps { getExtension: (name: string) => Promise; downloadArchive: (name: string, version: string) => Promise; getChecksum: (name: string, version: string) => Promise; + /** Full path to the upstream_extensions.json lockfile. */ + lockfilePath: string; modelsDir: string; workflowsDir: string; vaultsDir: string; @@ -220,17 +224,27 @@ async function acquireLock(lockPath: string): Promise { /** * Updates upstream_extensions.json with a new entry, using a lockfile * for concurrency safety and atomicWriteTextFile for crash safety. + * + * @param lockfilePath Full path to the upstream_extensions.json file. 
*/ export async function updateUpstreamExtensions( - modelsDir: string, + lockfilePath: string, name: string, version: string, files: string[], - include?: string[], + options?: { + include?: string[]; + checksum?: string; + serverUrl?: string; + }, ): Promise { - const jsonPath = join(modelsDir, "upstream_extensions.json"); + const jsonPath = lockfilePath; const lockPath = `${jsonPath}.lock`; + // Ensure parent directory exists (lockfile may be in extensions/models/ + // which doesn't exist in a fresh repo that only has .swamp/) + await Deno.mkdir(dirname(jsonPath), { recursive: true }); + const lockFile = await acquireLock(lockPath); try { let data: UpstreamExtensionsMap = {}; @@ -247,7 +261,11 @@ export async function updateUpstreamExtensions( version, pulledAt: new Date().toISOString(), files, - ...(include && include.length > 0 ? { include } : {}), + ...(options?.include && options.include.length > 0 + ? { include: options.include } + : {}), + ...(options?.checksum ? { checksum: options.checksum } : {}), + ...(options?.serverUrl ? { serverUrl: options.serverUrl } : {}), }; await atomicWriteTextFile(jsonPath, JSON.stringify(data, null, 2) + "\n"); @@ -264,12 +282,14 @@ export async function updateUpstreamExtensions( /** * Removes an extension entry from upstream_extensions.json, using a lockfile * for concurrency safety and atomicWriteTextFile for crash safety. + * + * @param lockfilePath Full path to the upstream_extensions.json file. 
*/ export async function removeUpstreamExtension( - modelsDir: string, + lockfilePath: string, name: string, ): Promise { - const jsonPath = join(modelsDir, "upstream_extensions.json"); + const jsonPath = lockfilePath; const lockPath = `${jsonPath}.lock`; const lockFile = await acquireLock(lockPath); @@ -895,11 +915,15 @@ export async function installExtension( : undefined; await updateUpstreamExtensions( - absoluteModelsDir, + ctx.lockfilePath, ref.name, version, extractedFiles, - includeFiles, + { + include: includeFiles, + checksum: localChecksum, + serverUrl: resolveServerUrl(), + }, ); const dependencyResults: InstallResult[] = []; @@ -909,13 +933,9 @@ export async function installExtension( continue; } - const upstreamPath = join( - absoluteModelsDir, - "upstream_extensions.json", - ); let isInstalled = false; try { - const upstreamContent = await Deno.readTextFile(upstreamPath); + const upstreamContent = await Deno.readTextFile(ctx.lockfilePath); const upstream = JSON.parse( upstreamContent, ) as UpstreamExtensionsMap; @@ -978,6 +998,7 @@ export async function* extensionPull( downloadArchive: deps.downloadArchive, getChecksum: deps.getChecksum, logger: ctx.logger, + lockfilePath: deps.lockfilePath, modelsDir: deps.modelsDir, workflowsDir: deps.workflowsDir, vaultsDir: deps.vaultsDir, @@ -1002,6 +1023,7 @@ export async function* extensionPull( /** Wires real infrastructure into ExtensionPullDeps. 
*/ export function createExtensionPullDeps( serverUrl: string, + lockfilePath: string, modelsDir: string, workflowsDir: string, vaultsDir: string, @@ -1015,6 +1037,7 @@ export function createExtensionPullDeps( getExtension: (name) => client.getExtension(name), downloadArchive: (name, version) => client.downloadArchive(name, version), getChecksum: (name, version) => client.getChecksum(name, version), + lockfilePath, modelsDir, workflowsDir, vaultsDir, @@ -1031,6 +1054,7 @@ export function createExtensionPullDeps( export function createInstallContext( serverUrl: string, opts: { + lockfilePath: string; modelsDir: string; workflowsDir: string; vaultsDir: string; @@ -1048,6 +1072,7 @@ export function createInstallContext( downloadArchive: (name, version) => client.downloadArchive(name, version), getChecksum: (name, version) => client.getChecksum(name, version), logger: opts.logger, + lockfilePath: opts.lockfilePath, modelsDir: opts.modelsDir, workflowsDir: opts.workflowsDir, vaultsDir: opts.vaultsDir, diff --git a/src/libswamp/extensions/pull_test.ts b/src/libswamp/extensions/pull_test.ts index 08c90f8c..c7d26670 100644 --- a/src/libswamp/extensions/pull_test.ts +++ b/src/libswamp/extensions/pull_test.ts @@ -19,6 +19,7 @@ import { assertEquals, assertThrows } from "@std/assert"; import { assertStringIncludes } from "@std/assert/string-includes"; +import { join } from "@std/path"; import { parseExtensionRef, updateUpstreamExtensions, @@ -82,13 +83,12 @@ Deno.test("validateExtensionName: rejects invalid names", () => { Deno.test("updateUpstreamExtensions: writes and updates entries", async () => { const tmpDir = await Deno.makeTempDir({ prefix: "swamp_test_" }); try { - await updateUpstreamExtensions(tmpDir, "@test/first", "1.0.0", [ + const lockfilePath = join(tmpDir, "upstream_extensions.json"); + await updateUpstreamExtensions(lockfilePath, "@test/first", "1.0.0", [ "a.yaml", ]); - const content = await Deno.readTextFile( - `${tmpDir}/upstream_extensions.json`, - ); + 
const content = await Deno.readTextFile(lockfilePath); const data = JSON.parse(content); assertEquals(data["@test/first"].version, "1.0.0"); assertEquals(data["@test/first"].files, ["a.yaml"]); diff --git a/src/libswamp/extensions/rm.ts b/src/libswamp/extensions/rm.ts index 40a7ecad..70445041 100644 --- a/src/libswamp/extensions/rm.ts +++ b/src/libswamp/extensions/rm.ts @@ -69,7 +69,7 @@ export interface ExtensionRmInput { /** Dependencies for the extension rm operation. */ export interface ExtensionRmDeps { - readUpstreamExtensions: (modelsDir: string) => Promise; + readUpstreamExtensions: (lockfilePath: string) => Promise; findDependents: ( repoDir: string, upstreamData: UpstreamMap, @@ -78,8 +78,12 @@ export interface ExtensionRmDeps { removeFile: (path: string) => Promise; readDirEntries: (path: string) => Promise; removeDir: (path: string) => Promise; - removeUpstreamExtension: (modelsDir: string, name: string) => Promise; - modelsDir: string; + removeUpstreamExtension: ( + lockfilePath: string, + name: string, + ) => Promise; + /** Full path to the upstream_extensions.json lockfile. */ + lockfilePath: string; repoDir: string; } @@ -183,12 +187,14 @@ async function acquireLock(lockPath: string): Promise { /** * Removes an extension entry from upstream_extensions.json, using a lockfile * for concurrency safety and atomicWriteTextFile for crash safety. + * + * @param lockfilePath Full path to the upstream_extensions.json file. 
*/ export async function removeUpstreamExtension( - modelsDir: string, + lockfilePath: string, name: string, ): Promise { - const jsonPath = join(modelsDir, "upstream_extensions.json"); + const jsonPath = lockfilePath; const lockPath = `${jsonPath}.lock`; const lockFile = await acquireLock(lockPath); @@ -224,7 +230,7 @@ export async function extensionRmPreview( ): Promise { ctx.logger.debug`Looking up extension: ${input.extensionName}`; - const upstreamData = await deps.readUpstreamExtensions(deps.modelsDir); + const upstreamData = await deps.readUpstreamExtensions(deps.lockfilePath); const entry = upstreamData[input.extensionName]; if (!entry) { @@ -265,7 +271,7 @@ export async function* extensionRm( (async function* () { yield { kind: "deleting" }; - const upstreamData = await deps.readUpstreamExtensions(deps.modelsDir); + const upstreamData = await deps.readUpstreamExtensions(deps.lockfilePath); const entry = upstreamData[input.extensionName]; if (!entry || !entry.files) { @@ -301,7 +307,10 @@ export async function* extensionRm( const dirsRemoved = await pruneEmptyDirs(parentDirs, deps.repoDir, deps); - await deps.removeUpstreamExtension(deps.modelsDir, input.extensionName); + await deps.removeUpstreamExtension( + deps.lockfilePath, + input.extensionName, + ); yield { kind: "completed", @@ -320,7 +329,7 @@ export async function* extensionRm( /** Wires real infrastructure into ExtensionRmDeps. 
*/ export function createExtensionRmDeps( repoDir: string, - modelsDir: string, + lockfilePath: string, ): ExtensionRmDeps { return { readUpstreamExtensions, @@ -335,7 +344,7 @@ export function createExtensionRmDeps( }, removeDir: (path: string) => Deno.remove(path), removeUpstreamExtension, - modelsDir, + lockfilePath, repoDir, }; } diff --git a/src/libswamp/extensions/rm_test.ts b/src/libswamp/extensions/rm_test.ts index c9b80d6a..9d27de53 100644 --- a/src/libswamp/extensions/rm_test.ts +++ b/src/libswamp/extensions/rm_test.ts @@ -51,7 +51,7 @@ function fakeDeps( readDirEntries: () => Promise.resolve([]), removeDir: () => Promise.resolve(), removeUpstreamExtension: () => Promise.resolve(), - modelsDir: "/fake/models", + lockfilePath: "/fake/models/upstream_extensions.json", repoDir: "/fake/repo", ...overrides, }; diff --git a/src/libswamp/extensions/update.ts b/src/libswamp/extensions/update.ts index fc9a0005..79b2df50 100644 --- a/src/libswamp/extensions/update.ts +++ b/src/libswamp/extensions/update.ts @@ -68,7 +68,7 @@ export interface ExtensionUpdateDeps { /** Wires real infrastructure into ExtensionUpdateDeps. */ export function createExtensionUpdateDeps(options: { - absoluteModelsDir: string; + lockfilePath: string; serverUrl?: string; installExtension: (name: string, version: string) => Promise; }): ExtensionUpdateDeps { @@ -76,8 +76,7 @@ export function createExtensionUpdateDeps(options: { options.serverUrl ?? 
resolveServerUrl(), ); return { - readUpstreamExtensions: () => - readUpstreamExtensions(options.absoluteModelsDir), + readUpstreamExtensions: () => readUpstreamExtensions(options.lockfilePath), getExtension: async (name) => { try { const info = await extensionClient.getExtension(name); diff --git a/src/libswamp/mod.ts b/src/libswamp/mod.ts index 06cc5dea..19359867 100644 --- a/src/libswamp/mod.ts +++ b/src/libswamp/mod.ts @@ -537,6 +537,21 @@ export { removeUpstreamExtension, } from "./extensions/rm.ts"; +// Extension layout detection +export { + detectLegacyExtensionLayout, + requireCurrentExtensionLayout, +} from "./extensions/layout.ts"; + +// Extension install (restore from lockfile) +export { + extensionInstall, + type ExtensionInstallData, + type ExtensionInstallDeps, + type ExtensionInstallEntry, + type ExtensionInstallEvent, +} from "./extensions/install.ts"; + // Model edit operations export { createModelEditDeps, diff --git a/src/presentation/renderers/extension_install.ts b/src/presentation/renderers/extension_install.ts new file mode 100644 index 00000000..307eb411 --- /dev/null +++ b/src/presentation/renderers/extension_install.ts @@ -0,0 +1,106 @@ +// Swamp, an Automation Framework +// Copyright (C) 2026 System Initiative, Inc. +// +// This file is part of Swamp. +// +// Swamp is free software: you can redistribute it and/or modify +// it under the terms of the GNU Affero General Public License version 3 +// as published by the Free Software Foundation, with the Swamp +// Extension and Definition Exception (found in the "COPYING-EXCEPTION" +// file). +// +// Swamp is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU Affero General Public License for more details. +// +// You should have received a copy of the GNU Affero General Public License +// along with Swamp. If not, see . 
+
+import type {
+  EventHandlers,
+  ExtensionInstallEvent,
+} from "../../libswamp/mod.ts";
+import type { Renderer } from "../renderer.ts";
+import type { OutputMode } from "../output/output.ts";
+import { getSwampLogger } from "../../infrastructure/logging/logger.ts";
+import { UserError } from "../../domain/errors.ts";
+
+const logger = getSwampLogger(["extension", "install"]);
+
+class LogExtensionInstallRenderer implements Renderer<ExtensionInstallEvent> {
+  handlers(): EventHandlers<ExtensionInstallEvent> {
+    return {
+      resolving: () => {
+        logger.info("Reading lockfile...");
+      },
+      installing: (e) => {
+        logger.info("Installing {name}@{version}...", {
+          name: e.name,
+          version: e.version,
+        });
+      },
+      completed: (e) => {
+        const { installed, upToDate, failed } = e.data;
+        if (e.data.entries.length === 0) {
+          logger.info("No extensions in lockfile.");
+          return;
+        }
+        if (installed === 0 && failed === 0) {
+          logger.info("All extensions up to date.");
+          return;
+        }
+        if (installed > 0) {
+          logger.info("Installed {count} extension(s).", { count: installed });
+        }
+        if (upToDate > 0) {
+          logger.info("{count} extension(s) already up to date.", {
+            count: upToDate,
+          });
+        }
+        if (failed > 0) {
+          logger.warn("{count} extension(s) failed to install.", {
+            count: failed,
+          });
+          for (const entry of e.data.entries) {
+            if (entry.status === "failed") {
+              logger.warn(" {name}: {error}", {
+                name: entry.name,
+                error: entry.error ??
"unknown error", + }); + } + } + } + }, + error: (e) => { + throw new UserError(e.error.message); + }, + }; + } +} + +class JsonExtensionInstallRenderer implements Renderer { + handlers(): EventHandlers { + return { + resolving: () => {}, + installing: () => {}, + completed: (e) => { + console.log(JSON.stringify(e.data, null, 2)); + }, + error: (e) => { + throw new UserError(e.error.message); + }, + }; + } +} + +export function createExtensionInstallRenderer( + mode: OutputMode, +): Renderer { + switch (mode) { + case "json": + return new JsonExtensionInstallRenderer(); + case "log": + return new LogExtensionInstallRenderer(); + } +}