diff --git a/integration-tests/cli/CHANGELOG.md b/integration-tests/cli/CHANGELOG.md index 4f7679a6f..cf293004a 100644 --- a/integration-tests/cli/CHANGELOG.md +++ b/integration-tests/cli/CHANGELOG.md @@ -1,5 +1,17 @@ # @openfn/integration-tests-cli +## 1.0.7 + +### Patch Changes + +- Updated dependencies [4cc799b] +- Updated dependencies [4cc799b] +- Updated dependencies [3e63c08] +- Updated dependencies [4cc799b] +- Updated dependencies [6689ad0] + - @openfn/lightning-mock@2.4.0 + - @openfn/project@0.10.0 + ## 1.0.6 ### Patch Changes diff --git a/integration-tests/cli/package.json b/integration-tests/cli/package.json index 75cdb892c..7d3708edc 100644 --- a/integration-tests/cli/package.json +++ b/integration-tests/cli/package.json @@ -1,7 +1,7 @@ { "name": "@openfn/integration-tests-cli", "private": true, - "version": "1.0.6", + "version": "1.0.7", "description": "CLI integration tests", "author": "Open Function Group ", "license": "ISC", @@ -16,6 +16,7 @@ }, "dependencies": { "@openfn/lightning-mock": "workspace:^", + "@openfn/project": "workspace:*", "@types/node": "^18.19.127", "ava": "5.3.1", "date-fns": "^2.30.0", diff --git a/integration-tests/cli/test/deploy.test.ts b/integration-tests/cli/test/deploy.test.ts index 250f931f2..6fd11dd3f 100644 --- a/integration-tests/cli/test/deploy.test.ts +++ b/integration-tests/cli/test/deploy.test.ts @@ -1,6 +1,8 @@ import test from 'ava'; import run from '../src/run'; -import createLightningServer from '@openfn/lightning-mock'; +import createLightningServer, { + DEFAULT_PROJECT_ID, +} from '@openfn/lightning-mock'; import { extractLogs, assertLog } from '../src/util'; import { rimraf } from 'rimraf'; @@ -18,7 +20,7 @@ test.before(async () => { // This should fail against the built CLI right now test.serial( - `OPENFN_ENDPOINT=${endpoint} openfn pull 123 --log-json`, + `OPENFN_ENDPOINT=${endpoint} openfn pull ${DEFAULT_PROJECT_ID} --log-json`, async (t) => { const { stdout, stderr } = await run(t.title); 
t.falsy(stderr); diff --git a/integration-tests/cli/test/execute-workflow.test.ts b/integration-tests/cli/test/execute-workflow.test.ts index da069b938..5d5adacbc 100644 --- a/integration-tests/cli/test/execute-workflow.test.ts +++ b/integration-tests/cli/test/execute-workflow.test.ts @@ -1,9 +1,24 @@ import test from 'ava'; import { rm, mkdir } from 'node:fs/promises'; import path from 'node:path'; + +import createLightningServer from '@openfn/lightning-mock'; + import run from '../src/run'; import { getJSON } from '../src/util'; +// set up a lightning mock +let server: any; + +const port = 8968; + +test.before(async () => { + server = await createLightningServer({ port }); + server.collections.createCollection('stuff'); + // Important: the collection value MUST be as string + server.collections.upsert('stuff', 'x', JSON.stringify({ id: 'x' })); +}); + const jobsPath = path.resolve('test/fixtures'); // Note that these tests are STATEFUL @@ -147,6 +162,17 @@ test.serial( } ); +test.serial( + `openfn ${jobsPath}/wf-creds.json --credentials ${jobsPath}/creds.json`, + async (t) => { + const { err, stdout, stderr } = await run(t.title); + t.falsy(err); + + const out = getJSON(); + t.is(out.value, 'admin:admin'); + } +); + test.serial( `openfn ${jobsPath}/wf-errors.json -S "{ \\"data\\": { \\"number\\": 2 } }"`, async (t) => { @@ -273,3 +299,16 @@ test.serial( }); } ); + +// collections basic test +test.serial( + `openfn ${jobsPath}/collections.json --endpoint http://localhost:${port} --api-key xyz`, + async (t) => { + const { err } = await run(t.title); + t.falsy(err); + + const out = getJSON(); + + t.deepEqual(out.data, { id: 'x' }); + } +); diff --git a/integration-tests/cli/test/fixtures/collections.json b/integration-tests/cli/test/fixtures/collections.json new file mode 100644 index 000000000..a0a3692f1 --- /dev/null +++ b/integration-tests/cli/test/fixtures/collections.json @@ -0,0 +1,10 @@ +{ + "workflow": { + "steps": [ + { + "adaptor": "common", + 
"expression": "collections.get('stuff', 'x')" + } + ] + } +} diff --git a/integration-tests/cli/test/fixtures/creds.js b/integration-tests/cli/test/fixtures/creds.js new file mode 100644 index 000000000..a9b4dc048 --- /dev/null +++ b/integration-tests/cli/test/fixtures/creds.js @@ -0,0 +1,4 @@ +fn((s) => { + s.value = `${s.configuration.user}:${s.configuration.password}`; + return s; +}); diff --git a/integration-tests/cli/test/fixtures/creds.json b/integration-tests/cli/test/fixtures/creds.json new file mode 100644 index 000000000..a757301e8 --- /dev/null +++ b/integration-tests/cli/test/fixtures/creds.json @@ -0,0 +1,6 @@ +{ + "08089249-0890-4a73-8799-e2ec2b9e5d77": { + "user": "admin", + "password": "admin" + } +} diff --git a/integration-tests/cli/test/fixtures/wf-creds.json b/integration-tests/cli/test/fixtures/wf-creds.json new file mode 100644 index 000000000..afc9f339b --- /dev/null +++ b/integration-tests/cli/test/fixtures/wf-creds.json @@ -0,0 +1,11 @@ +{ + "workflow": { + "steps": [ + { + "adaptor": "common", + "configuration": "08089249-0890-4a73-8799-e2ec2b9e5d77", + "expression": "creds.js" + } + ] + } +} diff --git a/integration-tests/cli/test/sync.test.ts b/integration-tests/cli/test/sync.test.ts new file mode 100644 index 000000000..13e407b5a --- /dev/null +++ b/integration-tests/cli/test/sync.test.ts @@ -0,0 +1,287 @@ +import test from 'ava'; +import path from 'node:path'; +import fs from 'node:fs'; +import run from '../src/run'; +import { generateProject } from '@openfn/project'; +import createLightningServer from '@openfn/lightning-mock'; +import { rimraf } from 'rimraf'; + +let PORT = 5353; +let lightning; +const endpoint = `http://localhost:${PORT}/api/provision`; + +test.before(async () => { + await rimraf('tmp/sync'); + + lightning = createLightningServer({ + port: PORT, + }); +}); + +const initWorkspace = (t: any) => { + const id = t.title.replaceAll(' ', '_').toLowerCase(); + const p = path.resolve('tmp/sync', id); + + return { + 
workspace: p, + read: (filePath: string) => { + return fs.readFileSync(path.resolve(p, filePath), 'utf8'); + }, + }; +}; + +const gen = (name = 'patients', workflows = ['trigger-job(body="fn()")']) => { + // generate a project + const project = generateProject(name, workflows, { + openfnUuid: true, + }); + const state = project.serialize('state', { format: 'json' }); + lightning.addProject(state); + return project; +}; + +test('fetch a new project', async (t) => { + const { workspace, read } = initWorkspace(t); + const project = gen(); + + await run( + `openfn project fetch \ + --workspace ${workspace} \ + --endpoint ${endpoint} \ + --api-key abc \ + ${project.openfn.uuid}` + ); + + // now check that the filesystem is roughly right + const pyaml = read('.projects/main@localhost.yaml'); + + t.regex(pyaml, /id: patients/); + t.regex(pyaml, new RegExp(`uuid: ${project.openfn.uuid}`)); +}); + +test('fetch a new project with an alias', async (t) => { + const { workspace, read } = initWorkspace(t); + const project = gen(); + + await run( + `openfn project fetch \ + --workspace ${workspace} \ + --endpoint ${endpoint} \ + --api-key abc \ + --alias staging\ + ${project.openfn.uuid}` + ); + + // now check that the filesystem is roughly right + const pyaml = read('.projects/staging@localhost.yaml'); + + t.regex(pyaml, /id: patients/); + t.regex(pyaml, new RegExp(`uuid: ${project.openfn.uuid}`)); +}); + +test('fetch a new project to a path', async (t) => { + const { workspace, read } = initWorkspace(t); + const project = gen(); + + await run( + `openfn project fetch \ + --workspace ${workspace} \ + --endpoint ${endpoint} \ + --api-key abc \ + --output ${workspace}/project.yaml\ + ${project.openfn.uuid}` + ); + + // now check that the filesystem is roughly right + const pyaml = read('project.yaml'); + + t.regex(pyaml, /id: patients/); + t.regex(pyaml, new RegExp(`uuid: ${project.openfn.uuid}`)); +}); + +test.todo('fetch throws if writing a new project UUID to an existing 
file'); + +test('fetch an existing project with an alias', async (t) => { + const { workspace, read } = initWorkspace(t); + const project = gen(); + + // fetch the project locally + await run( + `openfn project fetch \ + --workspace ${workspace} \ + --endpoint ${endpoint} \ + --api-key abc \ + --alias staging \ + ${project.openfn.uuid}` + ); + + const before = read('.projects/staging@localhost.yaml'); + t.regex(before, /fn\(\)/); + + // now update the remote project + project.workflows[0].steps[0].expression = 'fn(x)'; + const state = project.serialize('state', { format: 'json' }); + lightning.addProject(state); + + // Now run another fetch but only use the alias - no uuid + await run( + `openfn project fetch \ + --workspace ${workspace} \ + --endpoint ${endpoint} \ + --api-key abc \ + staging` + ); + + // now check that the filesystem is roughly right + const after = read('.projects/staging@localhost.yaml'); + + t.regex(after, /fn\(x\)/); +}); + +test('pull a new project', async (t) => { + const { workspace, read } = initWorkspace(t); + const project = gen(); + + await run( + `openfn project pull \ + --workspace ${workspace} \ + --endpoint ${endpoint} \ + --api-key abc \ + --log debug \ + ${project.openfn.uuid}` + ); + + // now check that the filesystem is roughly right + const proj_yaml = read('.projects/main@localhost.yaml'); + + t.regex(proj_yaml, /id: patients/); + t.regex(proj_yaml, new RegExp(`uuid: ${project.openfn.uuid}`)); + + const openfn_yaml = read('openfn.yaml'); + t.regex(openfn_yaml, new RegExp(`uuid: ${project.openfn.uuid}`)); + t.regex(openfn_yaml, new RegExp(`endpoint: ${endpoint}`)); + + const job = read('workflows/workflow/job.js'); + t.is(job, 'fn()'); +}); + +test('pull a new project with an alias', async (t) => { + const { workspace, read } = initWorkspace(t); + const project = gen(); + + await run( + `openfn project pull \ + --workspace ${workspace} \ + --endpoint ${endpoint} \ + --api-key abc \ + --log debug \ + --alias staging \ + 
${project.openfn.uuid}` + ); + + // now check that the filesystem is roughly right + const proj_yaml = read('.projects/staging@localhost.yaml'); + + t.regex(proj_yaml, /id: patients/); + t.regex(proj_yaml, new RegExp(`uuid: ${project.openfn.uuid}`)); + + const openfn_yaml = read('openfn.yaml'); + t.regex(openfn_yaml, new RegExp(`uuid: ${project.openfn.uuid}`)); + t.regex(openfn_yaml, new RegExp(`endpoint: ${endpoint}`)); + + const job = read('workflows/workflow/job.js'); + t.is(job, 'fn()'); +}); + +test('pull an update to project', async (t) => { + const { workspace, read } = initWorkspace(t); + const project = gen(); + + // fetch the project once to set up the repo + await run( + `openfn project pull \ + --workspace ${workspace} \ + --endpoint ${endpoint} \ + --api-key abc \ + ${project.openfn.uuid}` + ); + + const job = read('workflows/workflow/job.js'); + t.is(job, 'fn()'); + + // now update the remote project + project.workflows[0].steps[0].expression = 'fn(x)'; + const state = project.serialize('state', { format: 'json' }); + lightning.addProject(state); + // (note that the verison hash hasn't updated so not the best test) + + // and refetch + await run( + `openfn project pull \ + --workspace ${workspace} \ + --endpoint ${endpoint} \ + --api-key abc \ + ${project.openfn.uuid}` + ); + + const proj_yaml = read('.projects/main@localhost.yaml'); + t.regex(proj_yaml, /fn\(x\)/); + t.regex(proj_yaml, new RegExp(`uuid: ${project.openfn.uuid}`)); + + const openfn_yaml = read('openfn.yaml'); + t.regex(openfn_yaml, new RegExp(`uuid: ${project.openfn.uuid}`)); + t.regex(openfn_yaml, new RegExp(`endpoint: ${endpoint}`)); + + const job_updated = read('workflows/workflow/job.js'); + t.is(job_updated, 'fn()'); +}); + +test('checkout by alias', async (t) => { + const { workspace, read } = initWorkspace(t); + const main = gen(); + const staging = gen('patients-staging', ['trigger-job(body="fn(x)")']); + + await run( + `openfn project fetch \ + --workspace ${workspace} \ + 
--endpoint ${endpoint} \ + --api-key abc \ + --alias main\ + ${main.openfn.uuid}` + ); + await run( + `openfn project fetch \ + --workspace ${workspace} \ + --endpoint ${endpoint} \ + --api-key abc \ + --alias staging\ + ${staging.openfn.uuid}` + ); + + // Ensure the repo is set up correctly + const main_yaml = read('.projects/main@localhost.yaml'); + t.regex(main_yaml, /fn\(\)/); + const staging_yaml = read('.projects/staging@localhost.yaml'); + t.regex(staging_yaml, /fn\(x\)/); + + await run( + `openfn project checkout main \ + --workspace ${workspace}` + ); + + // only do a rough check of the file system + // local tests can be more thorough - at this level + // I just want to see that the command has basically worked + let job = read('workflows/workflow/job.js'); + t.is(job, 'fn()'); + + await run( + `openfn project checkout staging \ + --workspace ${workspace}` + ); + + job = read('workflows/workflow/job.js'); + t.is(job, 'fn(x)'); +}); + +test.todo('merge by alias'); diff --git a/packages/cli/CHANGELOG.md b/packages/cli/CHANGELOG.md index 24118df44..ebd0f73ad 100644 --- a/packages/cli/CHANGELOG.md +++ b/packages/cli/CHANGELOG.md @@ -1,5 +1,19 @@ # @openfn/cli +## 1.21.0 + +### Minor Changes + +- 6689ad0: Add support for aliases on all project subcommands (ie, `openfn project fetch --staging && openfn project checkout staging`) +- 3e63c08: Allow credential map, as json or yaml, to be passed via --credentials +- 6689ad0: Full native support for Collections (no need to manually set `adaptors` key to an array) + +### Patch Changes + +- 4cc799b: Refactor pull into a project command +- Updated dependencies [4cc799b] + - @openfn/project@0.10.0 + ## 1.20.3 ### Patch Changes diff --git a/packages/cli/package.json b/packages/cli/package.json index 5ead44d0d..3db8ed66b 100644 --- a/packages/cli/package.json +++ b/packages/cli/package.json @@ -1,13 +1,13 @@ { "name": "@openfn/cli", - "version": "1.20.3", + "version": "1.21.0", "description": "CLI devtools for the OpenFn 
toolchain", "engines": { "node": ">=18", "pnpm": ">=7" }, "scripts": { - "test": "pnpm ava", + "test": "pnpm ava --timeout 10m", "test:watch": "pnpm ava -w", "test:types": "pnpm tsc --project tsconfig.test.json", "build": "tsup --config ./tsup.config.js", diff --git a/packages/cli/src/collections/command.ts b/packages/cli/src/collections/command.ts index 4d872c1a8..4dd518ad0 100644 --- a/packages/cli/src/collections/command.ts +++ b/packages/cli/src/collections/command.ts @@ -88,14 +88,6 @@ const key = { }, }; -const token = { - name: 'pat', - yargs: { - alias: ['token'], - description: 'Lightning Personal Access Token (PAT)', - }, -}; - const endpoint = { name: 'endpoint', yargs: { @@ -160,7 +152,7 @@ const updatedAfter = { const getOptions = [ collectionName, key, - token, + o.apiKey, endpoint, pageSize, limit, @@ -201,7 +193,7 @@ const dryRun = { const removeOptions = [ collectionName, key, - token, + o.apiKey, endpoint, dryRun, @@ -243,7 +235,7 @@ const setOptions = [ override(key as any, { demand: false, }), - token, + o.apiKey, endpoint, value, items, diff --git a/packages/cli/src/commands.ts b/packages/cli/src/commands.ts index cbded7e4f..b0cfa0880 100644 --- a/packages/cli/src/commands.ts +++ b/packages/cli/src/commands.ts @@ -39,6 +39,7 @@ export type CommandList = | 'repo-install' | 'repo-list' | 'repo-pwd' + | 'project-pull' | 'project-list' | 'project-version' | 'project-merge' @@ -66,6 +67,7 @@ const handlers = { ['repo-install']: repo.install, ['repo-pwd']: repo.pwd, ['repo-list']: repo.list, + ['project-pull']: projects.pull, ['project-list']: projects.list, ['project-version']: projects.version, ['project-merge']: projects.merge, diff --git a/packages/cli/src/deploy/beta.ts b/packages/cli/src/deploy/beta.ts index 83308b5fe..834d37039 100644 --- a/packages/cli/src/deploy/beta.ts +++ b/packages/cli/src/deploy/beta.ts @@ -9,14 +9,7 @@ import { loadAppAuthConfig } from '../projects/util'; export type DeployOptionsBeta = Required< Pick< Opts, - | 'beta' 
- | 'command' - | 'log' - | 'logJson' - | 'apiKey' - | 'endpoint' - | 'path' - | 'workspace' + 'beta' | 'command' | 'log' | 'logJson' | 'apiKey' | 'endpoint' | 'path' > >; @@ -25,7 +18,10 @@ export async function handler(options: DeployOptionsBeta, logger: Logger) { // TMP use options.path to set the directory for now // We'll need to manage this a bit better - const project = await Project.from('fs', { root: options.workspace || '.' }); + // TODO this is fixed on another branch + const project = await Project.from('fs', { + root: (options as any).workspace || '.', + }); // TODO: work out if there's any diff // generate state for the provisioner diff --git a/packages/cli/src/deploy/command.ts b/packages/cli/src/deploy/command.ts index 16ea074bf..79ed15cc4 100644 --- a/packages/cli/src/deploy/command.ts +++ b/packages/cli/src/deploy/command.ts @@ -2,6 +2,7 @@ import yargs from 'yargs'; import { build, ensure, override } from '../util/command-builders'; import { Opts } from '../options'; import * as o from '../options'; +import * as o2 from '../projects/options'; export type DeployOptions = Required< Pick< @@ -27,7 +28,7 @@ const options = [ o.projectPath, o.statePath, - override(o.workspace, { hidden: true }), + override(o2.workspace, { hidden: true }), ]; const deployCommand: yargs.CommandModule = { diff --git a/packages/cli/src/execute/apply-credential-map.ts b/packages/cli/src/execute/apply-credential-map.ts new file mode 100644 index 000000000..e0c75691d --- /dev/null +++ b/packages/cli/src/execute/apply-credential-map.ts @@ -0,0 +1,55 @@ +/** + * utility to take a workflow and a credential map + * and apply credentials to each step + */ + +import { ExecutionPlan } from '@openfn/lexicon'; +import { Logger } from '../util'; + +type JobId = string; + +export type CredentialMap = Record; + +const applyCredentialMap = ( + plan: ExecutionPlan, + map: CredentialMap = {}, + logger?: Logger +) => { + const stepsWithCredentialIds = plan.workflow.steps.filter( + (step: 
any) => + typeof step.configuration === 'string' && + !step.configuration.endsWith('.json') + ) as { configuration: string; name?: string; id: string }[]; + + const unmapped: Record = {}; + + for (const step of stepsWithCredentialIds) { + if (map[step.configuration]) { + logger?.debug( + `Applying credential ${step.configuration} to "${step.name ?? step.id}"` + ); + step.configuration = map[step.configuration]; + } else { + unmapped[step.configuration] = true; + // @ts-ignore + delete step.configuration; + } + } + + if (Object.keys(unmapped).length) { + logger?.warn( + `WARNING: credential IDs were found in the workflow, but values have not been provided:` + ); + logger?.warn(' ', Object.keys(unmapped).join(',')); + if (map) { + logger?.warn( + 'If the workflow fails, add these credentials to the credential map' + ); + } else { + // TODO if running from project file this might be bad advice + logger?.warn('Pass a credential map with --credentials'); + } + } +}; + +export default applyCredentialMap; diff --git a/packages/cli/src/execute/command.ts b/packages/cli/src/execute/command.ts index 33c9be47f..71cdc65cd 100644 --- a/packages/cli/src/execute/command.ts +++ b/packages/cli/src/execute/command.ts @@ -1,5 +1,5 @@ import yargs from 'yargs'; -import { build, ensure } from '../util/command-builders'; +import { build, ensure, override } from '../util/command-builders'; import * as o from '../options'; import type { Opts } from '../options'; @@ -7,12 +7,16 @@ import type { Opts } from '../options'; export type ExecuteOptions = Required< Pick< Opts, + | 'apiKey' | 'adaptors' | 'autoinstall' | 'baseDir' | 'cacheSteps' | 'command' | 'compile' + | 'credentials' + | 'collectionsEndpoint' + | 'collectionsVersion' | 'expandAdaptors' | 'end' | 'immutable' @@ -43,9 +47,16 @@ const options = [ o.expandAdaptors, // order is important o.adaptors, + override(o.apiKey, { + description: 'API token for collections', + alias: ['collections-api-key', 'collections-token', 'pat'], + }), 
o.autoinstall, o.cacheSteps, o.compile, + o.credentials, + o.collectionsEndpoint, + o.collectionsVersion, o.end, o.ignoreImports, o.immutable, diff --git a/packages/cli/src/execute/handler.ts b/packages/cli/src/execute/handler.ts index ed4b30eee..66f0d5fcc 100644 --- a/packages/cli/src/execute/handler.ts +++ b/packages/cli/src/execute/handler.ts @@ -1,9 +1,13 @@ import type { ExecutionPlan } from '@openfn/lexicon'; +import { yamlToJson } from '@openfn/project'; +import { readFile } from 'node:fs/promises'; +import path from 'node:path'; import type { ExecuteOptions } from './command'; import execute from './execute'; import serializeOutput from './serialize-output'; import getAutoinstallTargets from './get-autoinstall-targets'; +import applyCredentialMap from './apply-credential-map'; import { install } from '../repo/handler'; import compile from '../compile/compile'; @@ -44,6 +48,35 @@ const matchStep = ( return ''; }; +const loadAndApplyCredentialMap = async ( + plan: ExecutionPlan, + options: ExecuteOptions, + logger: Logger +) => { + let creds = {}; + if (options.credentials) { + try { + const credsRaw = await readFile( + path.resolve(options.credentials), + 'utf8' + ); + if (options.credentials.endsWith('.json')) { + creds = JSON.parse(credsRaw); + } else { + creds = yamlToJson(credsRaw); + } + } catch (e) { + logger.error('Error processing credential map:'); + logger.error(e); + // probably want to exist if the credential map is invalid + process.exitCode = 1; + return; + } + logger.info('Credential map loaded '); + } + return applyCredentialMap(plan, creds, logger); +}; + const executeHandler = async (options: ExecuteOptions, logger: Logger) => { const start = new Date().getTime(); assertPath(options.path); @@ -51,7 +84,7 @@ const executeHandler = async (options: ExecuteOptions, logger: Logger) => { let plan = await loadPlan(options, logger); validatePlan(plan, logger); - + await loadAndApplyCredentialMap(plan, options, logger); if (options.cacheSteps) { 
await clearCache(plan, options, logger); } diff --git a/packages/cli/src/options.ts b/packages/cli/src/options.ts index b6f11bad5..0d050644e 100644 --- a/packages/cli/src/options.ts +++ b/packages/cli/src/options.ts @@ -8,7 +8,6 @@ import { ensureLogOpts, LogLevel, } from './util'; -import getCLIOptionObject from './util/get-cli-option-object'; // Central type definition for the main options // This represents the types coming out of yargs, @@ -31,9 +30,11 @@ export type Opts = { compile?: boolean; configPath?: string; confirm?: boolean; + credentials?: string; + collectionsEndpoint?: string; + collectionsVersion?: string; describe?: string; end?: string; // workflow end node - env?: string; expandAdaptors?: boolean; // for unit tests really expressionPath?: string; endpoint?: string; @@ -66,10 +67,6 @@ export type Opts = { trace?: boolean; useAdaptorsMonorepo?: boolean; workflow: string; - // merge options - removeUnmapped?: boolean | undefined; - workflowMappings?: Record | undefined; - workspace?: string; // deprecated workflowPath?: string; @@ -141,12 +138,17 @@ export const autoinstall: CLIOption = { }, }; -export const apikey: CLIOption = { +export const apiKey: CLIOption = { name: 'apikey', yargs: { - alias: ['key', 'pat', 'token'], + alias: ['pat', 'token', 'api-key'], description: - '[beta only] API Key, Personal Access Token (Pat), or other access token', + 'API Key, Personal Access Token (PAT), or other access token from Lightning', + }, + ensure: (opts: any) => { + if (!opts.apikey) { + opts.apiKey = process.env.OPENFN_API_KEY; + } }, }; @@ -245,6 +247,31 @@ export const configPath: CLIOption = { }, }; +export const collectionsVersion: CLIOption = { + name: 'collections-version', + yargs: { + description: + 'The version of the collections adaptor to use. Defaults to latest. 
Use OPENFN_COLLECTIONS_VERSION env.', + }, +}; + +export const collectionsEndpoint: CLIOption = { + name: 'collections-endpoint', + yargs: { + alias: ['endpoint'], + description: + 'The Lightning server to use for collections. Will use the project endpoint if available. Use OPENFN_COLLECTIONS_ENDPOINT env.', + }, +}; + +export const credentials: CLIOption = { + name: 'credentials', + yargs: { + alias: ['creds'], + description: 'A path which points to a credential map', + }, +}; + export const describe: CLIOption = { name: 'describe', yargs: { @@ -275,13 +302,6 @@ export const endpoint: CLIOption = { }, }; -export const env: CLIOption = { - name: 'env', - yargs: { - description: '[beta only] Environment name (eg staging, prod, branch)', - }, -}; - export const force: CLIOption = { name: 'force', yargs: { @@ -584,39 +604,3 @@ export const workflow: CLIOption = { description: 'Name of the workflow to execute', }, }; - -// merge options -export const removeUnmapped: CLIOption = { - name: 'remove-unmapped', - yargs: { - boolean: true, - description: - "Removes all workflows that didn't get mapped from the final project after merge", - }, -}; - -export const workflowMappings: CLIOption = { - name: 'workflow-mappings', - yargs: { - type: 'string', - coerce: getCLIOptionObject, - description: - 'A manual object mapping of which workflows in source and target should be matched for a merge.', - }, -}; - -export const workspace: CLIOption = { - name: 'workspace', - yargs: { - alias: ['w'], - description: 'Path to the project workspace (ie, path to openfn.yaml)', - }, - ensure: (opts) => { - const ws = opts.workspace ?? 
process.env.OPENFN_WORKSPACE; - if (!ws) { - opts.workspace = process.cwd(); - } else { - opts.workspace = nodePath.resolve(ws); - } - }, -}; diff --git a/packages/cli/src/projects/checkout.ts b/packages/cli/src/projects/checkout.ts index 652ce4af0..4163631a1 100644 --- a/packages/cli/src/projects/checkout.ts +++ b/packages/cli/src/projects/checkout.ts @@ -7,27 +7,32 @@ import { rimraf } from 'rimraf'; import { ensure, build } from '../util/command-builders'; import type { Logger } from '../util/logger'; import * as o from '../options'; +import * as po from './options'; -import type { Opts } from '../options'; +import type { Opts } from './options'; export type CheckoutOptions = Pick< Opts, - 'command' | 'projectId' | 'workspace' | 'log' + 'command' | 'project' | 'workspace' | 'log' >; -const options = [o.projectId, o.workspace, o.log]; +const options = [o.log, po.workspace]; const command: yargs.CommandModule = { - command: 'checkout ', + command: 'checkout ', describe: 'Switch to a different OpenFn project in the same workspace', handler: ensure('project-checkout', options), - builder: (yargs) => build(options, yargs), + builder: (yargs) => + build(options, yargs).positional('project', { + describe: 'The id, alias or UUID of the project to chcekout', + demandOption: true, + }), }; export default command; export const handler = async (options: CheckoutOptions, logger: Logger) => { - const projectId = options.projectId!; + const projectIdentifier = options.project!; const workspacePath = options.workspace ?? process.cwd(); const workspace = new Workspace(workspacePath, logger); @@ -37,19 +42,21 @@ export const handler = async (options: CheckoutOptions, logger: Logger) => { // get the project let switchProject; - if (/\.(yaml|json)$/.test(projectId)) { + if (/\.(yaml|json)$/.test(projectIdentifier)) { // TODO: should we allow checkout into an arbitrary folder? - const filePath = projectId.startsWith('/') - ? 
projectId - : path.join(workspacePath, projectId); + const filePath = projectIdentifier.startsWith('/') + ? projectIdentifier + : path.join(workspacePath, projectIdentifier); logger.debug('Loading project from path ', filePath); switchProject = await Project.from('path', filePath, config); } else { - switchProject = workspace.get(projectId); + switchProject = workspace.get(projectIdentifier); } if (!switchProject) { - throw new Error(`Project with id ${projectId} not found in the workspace`); + throw new Error( + `Project with id ${projectIdentifier} not found in the workspace` + ); } // delete workflow dir before expanding project diff --git a/packages/cli/src/projects/command.ts b/packages/cli/src/projects/command.ts index fe666789b..258ce0f21 100644 --- a/packages/cli/src/projects/command.ts +++ b/packages/cli/src/projects/command.ts @@ -3,6 +3,7 @@ import version from './version'; import merge from './merge'; import checkout from './checkout'; import fetch from './fetch'; +import { command as pull } from './pull'; import type yargs from 'yargs'; @@ -13,6 +14,7 @@ export const projectsCommand = { handler: () => {}, builder: (yargs: yargs.Argv) => yargs + .command(pull) .command(list) .command(version) .command(merge) diff --git a/packages/cli/src/projects/fetch.ts b/packages/cli/src/projects/fetch.ts index 9cbbbcf8c..6358a2602 100644 --- a/packages/cli/src/projects/fetch.ts +++ b/packages/cli/src/projects/fetch.ts @@ -2,12 +2,14 @@ import yargs from 'yargs'; import path from 'node:path'; import Project, { Workspace } from '@openfn/project'; +import resolvePath from '../util/resolve-path'; import { build, ensure, override } from '../util/command-builders'; import type { Logger } from '../util/logger'; import * as o from '../options'; +import * as po from './options'; -import type { Opts } from '../options'; -import { serialize, getProject, loadAppAuthConfig } from './util'; +import type { Opts } from './options'; +import { serialize, fetchProject, 
loadAppAuthConfig } from './util'; // TODO need to implement these // type Config = { @@ -17,6 +19,7 @@ import { serialize, getProject, loadAppAuthConfig } from './util'; export type FetchOptions = Pick< Opts, + | 'alias' | 'apiKey' | 'command' | 'endpoint' @@ -24,38 +27,36 @@ export type FetchOptions = Pick< | 'force' | 'log' | 'logJson' + | 'snapshots' | 'outputPath' - | 'projectId' + | 'project' | 'workspace' >; const options = [ - o.apikey, - o.configPath, + po.alias, + o.apiKey, o.endpoint, - o.env, o.log, - override(o.outputPath, { - description: 'Path to output the fetched project to', - }), o.logJson, - o.workspace, - o.snapshots, - o.statePath, + o.snapshots, // TODO need to add support for this override(o.force, { description: 'Overwrite local file contents with the fetched contents', }), + + po.outputPath, + po.env, + po.workspace, ]; const command: yargs.CommandModule = { - command: 'fetch [projectId]', - describe: `Fetch a project's state and spec from a Lightning Instance to the local state file without expanding to the filesystem.`, + command: 'fetch [project]', + describe: `Download the latest version of a project from a lightning server (does not expand the project, use checkout)`, builder: (yargs: yargs.Argv) => build(options, yargs) - .positional('projectId', { + .positional('project', { describe: - 'The id of the project that should be fetched, should be a UUID', - demandOption: true, + 'The id, alias or UUID of the project to fetch. If not set, will default to the active project', }) .example( 'fetch 57862287-23e6-4650-8d79-e1dd88b24b1c', @@ -66,32 +67,33 @@ const command: yargs.CommandModule = { export default command; -export const handler = async (options: FetchOptions, logger: Logger) => { - const workspacePath = path.resolve(options.workspace ?? 
process.cwd()); - const workspace = new Workspace(workspacePath); - const { projectId, outputPath } = options; +const printProjectName = (project: Project) => + `${project.qname} (${project.id})`; - const config = loadAppAuthConfig(options, logger); +export const handler = async (options: FetchOptions, logger: Logger) => { + const workspacePath = options.workspace ?? process.cwd(); + logger.debug('Using workspace at', workspacePath); - const { data } = await getProject(logger, config, projectId!); + const workspace = new Workspace(workspacePath, logger, false); + const { outputPath } = options; - const project = await Project.from( - 'state', - data!, - { - endpoint: config.endpoint, - env: options.env || 'project', - }, - workspace.getConfig() + const localTargetProject = await resolveOutputProject( + workspace, + options, + logger ); - // Work out where and how to serialize the project - const outputRoot = path.resolve(outputPath || workspacePath); - const projectFileName = project.getIdentifier(); - const projectsDir = project.config.dirs.projects ?? '.projects'; + const remoteProject = await fetchRemoteProject(workspace, options, logger); + + ensureTargetCompatible(options, remoteProject, localTargetProject); + // TODO should we use the local target project for output? + + // Work out where and how to serialize the project + const outputRoot = resolvePath(outputPath || workspacePath); + const projectsDir = remoteProject?.config.dirs.projects ?? '.projects'; const finalOutputPath = - outputPath ?? `${outputRoot}/${projectsDir}/${projectFileName}`; + outputPath ?? 
`${outputRoot}/${projectsDir}/${remoteProject.qname}`; let format: undefined | 'json' | 'yaml' = undefined; if (outputPath) { @@ -100,45 +102,186 @@ export const handler = async (options: FetchOptions, logger: Logger) => { if (ext.length) { format = ext; } + + if (options.alias) { + logger.warn( + `WARNING: alias "${options.alias}" was set, but will be ignored as output path was provided` + ); + } } - // See if a project already exists there - const finalOutput = await serialize( - project, - finalOutputPath!, - format, - true // dry run - this won't trigger an actual write! + // TODO report whether we've updated or not + + // finally, write it! + await serialize(remoteProject, finalOutputPath!, format as any); + + logger.success( + `Fetched project file to ${finalOutputPath}.${format ?? 'yaml'}` ); - // If a project already exists at the output path, make sure it's compatible - let current: Project | null = null; - try { - current = await Project.from('path', finalOutput); - } catch (e) { - // Do nothing - project doesn't exist + return remoteProject; +}; + +// Work out the existing target project, if any, to fetch to +async function resolveOutputProject( + workspace: Workspace, + options: FetchOptions, + logger: Logger +) { + logger.debug('Checking for local copy of project...'); + + // If the user is writing to an explicit path, + // check to see if anything exists there + if (options.outputPath) { + try { + const customProject = await Project.from('path', options.outputPath); + logger.debug( + `Found existing local project ${printProjectName(customProject)} at`, + options.outputPath + ); + return customProject; + } catch (e) { + logger.debug('No project found at', options.outputPath); + } + } + // if an alias is specified, we use that as the output + if (options.alias) { + const aliasProject = workspace.get(options.alias); + if (aliasProject) { + logger.debug( + `Found local project from alias:`, + printProjectName(aliasProject) + ); + return aliasProject; + }
else { + logger.debug(`No local project found with alias ${options.alias}`); + } } - const hasAnyHistory = project.workflows.find( - (w) => w.workflow.history?.length - ); + // Otherwise we try and resolve to the project identifier to something in the workspace + const project = workspace.get(options.project!); + if (project) { + logger.debug( + `Found local project from identifier:`, + printProjectName(project) + ); + return project; + } else { + logger.debug( + `No local project found matching identifier: `, + options.project + ); + } +} + +// This will fetch the remote project the user wants - // Skip version checking if: - const skipVersionCheck = - options.force || // The user forced the checkout - !current || // there is no project on disk - !hasAnyHistory; // the remote project has no history (can happen in old apps) +async function fetchRemoteProject( + workspace: Workspace, + options: FetchOptions, + logger: Logger +) { + logger.debug(`Fetching latest project data from app`); - if (!skipVersionCheck && !project.canMergeInto(current!)) { - // TODO allow rename - throw new Error('Error! An incompatible project exists at this location'); + const config = loadAppAuthConfig(options, logger); + + let projectUUID: string = options.project!; + + // First, we need to see if the project argument, which might be a UUID, id or alias, + // resolves to anything + const localProject = workspace.get(options.project!); + if ( + localProject?.openfn?.uuid && + localProject.openfn.uuid !== options.project + ) { + // if we resolve the UUID to something other than what the user gave us, + // debug-log the UUID we're actually going to use + projectUUID = localProject.openfn.uuid as string; + logger.debug( + `Resolved ${ + options.project + } to UUID ${projectUUID} from local project ${printProjectName( + localProject + )}` + ); } - // TODO report whether we've updated or not + const projectEndpoint = localProject?.openfn?.endpoint ?? config.endpoint; - // finally, write it!
- await serialize(project, finalOutputPath!, format as any); + const { data } = await fetchProject( + projectEndpoint, + config.apiKey, + projectUUID, + logger + ); - logger.success(`Fetched project file to ${finalOutput}`); + const project = await Project.from( + 'state', + data!, + { + endpoint: projectEndpoint, + }, + { + ...workspace.getConfig(), + alias: options.alias ?? localProject?.alias ?? 'main', + } + ); + logger.debug( + `Loaded remote project ${project.openfn!.uuid} with id ${ + project.id + } and alias ${project.alias}` + ); return project; -}; +} + +function ensureTargetCompatible( + options: FetchOptions, + remoteProject: Project, + localProject?: Project +) { + if (localProject) { + if (!options.force && localProject.uuid != remoteProject.uuid) { + // TODO make this prettier in output + const error: any = new Error('PROJECT_EXISTS'); + error.message = 'A project with a different UUID exists at this location'; + error.fix = `You have tried to fetch a remote project into a local project with a different UUID + +Try adding an alias to rename the new project: + + openfn fetch ${options.project} --alias ${remoteProject.id} + +To ignore this error and override the local file, pass --force (-f) + + openfn fetch ${options.project} --force +`; + error.fetched_project = { + uuid: remoteProject.uuid, + id: remoteProject.id, + alias: remoteProject.alias, + }; + error.local_project = { + uuid: localProject.uuid, + id: localProject.id, + alias: localProject.alias, + }; + delete error.stack; + + throw error; + } + + const hasAnyHistory = remoteProject.workflows.find( + (w) => w.workflow.history?.length + ); + + // Skip version checking if: + const skipVersionCheck = + options.force || // The user forced the checkout + !hasAnyHistory; // the remote project has no history (can happen in old apps) + + if (!skipVersionCheck && !remoteProject.canMergeInto(localProject!)) { + // TODO allow rename + throw new Error('Error! 
An incompatible project exists at this location'); + } + } +} diff --git a/packages/cli/src/projects/handler.ts b/packages/cli/src/projects/handler.ts index b8f1e4cc0..27b33100c 100644 --- a/packages/cli/src/projects/handler.ts +++ b/packages/cli/src/projects/handler.ts @@ -3,3 +3,4 @@ export { handler as version } from './version'; export { handler as merge } from './merge'; export { handler as checkout } from './checkout'; export { handler as fetch } from './fetch'; +export { handler as pull } from './pull'; diff --git a/packages/cli/src/projects/list.ts b/packages/cli/src/projects/list.ts index c6dd2fe39..dc1e84848 100644 --- a/packages/cli/src/projects/list.ts +++ b/packages/cli/src/projects/list.ts @@ -4,12 +4,13 @@ import Project, { Workspace } from '@openfn/project'; import { ensure, build } from '../util/command-builders'; import type { Logger } from '../util/logger'; import * as o from '../options'; +import * as po from './options'; -import type { Opts } from '../options'; +import type { Opts } from './options'; -export type ProjectsOptions = Required>; +export type ProjectListOptions = Pick; -const options = [o.log, o.workspace]; +const options = [o.log, po.workspace]; const command: yargs.CommandModule = { command: 'list [project-path]', @@ -21,21 +22,24 @@ const command: yargs.CommandModule = { export default command; -export const handler = async (options: ProjectsOptions, logger: Logger) => { +export const handler = async (options: ProjectListOptions, logger: Logger) => { logger.info('Searching for projects in workspace at:'); logger.info(' ', options.workspace); logger.break(); - const workspace = new Workspace(options.workspace); + const workspace = new Workspace(options.workspace!); if (!workspace.valid) { // TODO how can we be more helpful here? 
+ // eg, this will happen if there's no openfn.yaml file + // basically we need the workspace to return a reason + // (again, I'm thinking of removing the validation entirely) throw new Error('No OpenFn projects found'); } logger.always(`Available openfn projects\n\n${workspace .list() - .map((p) => describeProject(p, p.id === workspace.activeProjectId)) + .map((p) => describeProject(p, p === workspace.getActiveProject())) .join('\n\n')} `); }; @@ -43,7 +47,9 @@ export const handler = async (options: ProjectsOptions, logger: Logger) => { function describeProject(project: Project, active = false) { // @ts-ignore const uuid = project.openfn?.uuid; - return `${project.id} ${active ? '(active)' : ''}\n ${ - uuid || '' - }\n workflows:\n${project.workflows.map((w) => ' - ' + w.id).join('\n')}`; + return `${project.alias || '(no alias)'} | ${project.id} ${ + active ? '(active)' : '' + }\n ${uuid || ''}\n workflows:\n${project.workflows + .map((w) => ' - ' + w.id) + .join('\n')}`; } diff --git a/packages/cli/src/projects/merge.ts b/packages/cli/src/projects/merge.ts index efe6ac155..af237a646 100644 --- a/packages/cli/src/projects/merge.ts +++ b/packages/cli/src/projects/merge.ts @@ -6,30 +6,27 @@ import fs from 'node:fs/promises'; import { ensure, build, override } from '../util/command-builders'; import type { Logger } from '../util/logger'; import * as o from '../options'; +import * as po from './options'; -import type { Opts } from '../options'; +import type { Opts } from './options'; import { handler as checkout } from './checkout'; export type MergeOptions = Required< Pick< Opts, - | 'command' - | 'projectId' - | 'workspace' - | 'removeUnmapped' - | 'workflowMappings' + 'command' | 'project' | 'workspace' | 'removeUnmapped' | 'workflowMappings' > > & Pick & { base?: string }; const options = [ - o.projectId, - o.removeUnmapped, - o.workflowMappings, + po.removeUnmapped, + po.workflowMappings, + po.workspace, o.log, - o.workspace, // custom output because we don't 
want defaults or anything { + // TODO presumably if we do this we don't also checkout? name: 'output-path', yargs: { alias: 'o', @@ -51,9 +48,9 @@ const options = [ ]; const command: yargs.CommandModule = { - command: 'merge ', + command: 'merge ', describe: - 'Merges the specified project into the currently checked out project', + 'Merges the specified project (by UUID, id or alias) into the currently checked out project', handler: ensure('project-merge', options), builder: (yargs) => build(options, yargs), }; @@ -61,8 +58,8 @@ const command: yargs.CommandModule = { export default command; export const handler = async (options: MergeOptions, logger: Logger) => { - const commandPath = options.workspace; - const workspace = new Workspace(commandPath); + const workspacePath = options.workspace; + const workspace = new Workspace(workspacePath); if (!workspace.valid) { logger.error('Command was run in an invalid openfn workspace'); return; @@ -82,18 +79,24 @@ export const handler = async (options: MergeOptions, logger: Logger) => { logger.debug(`Loading target project from workspace (${targetProject.id})`); } + const sourceProjectIdentifier = options.project; + // Lookup the source project - the thing we are getting changes from let sourceProject; - if (/\.(yaml|json)$/.test(options.projectId)) { - const filePath = path.join(commandPath, options.projectId); + if (/\.(ya?ml|json)$/.test(sourceProjectIdentifier)) { + const filePath = path.join(workspacePath, sourceProjectIdentifier); logger.debug('Loading source project from path ', filePath); sourceProject = await Project.from('path', filePath); } else { - logger.debug(`Loading source project from workspace ${options.projectId}`); - sourceProject = workspace.get(options.projectId); + logger.debug( + `Loading source project from workspace ${sourceProjectIdentifier}` + ); + sourceProject = workspace.get(sourceProjectIdentifier); } if (!sourceProject) { - logger.error(`Project "${options.projectId}" not found in the 
workspace`); + logger.error( + `Project "${sourceProjectIdentifier}" not found in the workspace` + ); return; } @@ -143,13 +146,13 @@ export const handler = async (options: MergeOptions, logger: Logger) => { // Checkout after merge to expand updated files into filesystem await checkout( { - command: 'project-checkout', - workspace: commandPath, - projectId: options.outputPath ? finalPath : final.id, + workspace: workspacePath, + project: options.outputPath ? finalPath : final.id, log: options.log, }, logger ); + logger.success( `Project ${sourceProject.id} has been merged into Project ${targetProject.id}` ); diff --git a/packages/cli/src/projects/options.ts b/packages/cli/src/projects/options.ts new file mode 100644 index 000000000..be95eeb90 --- /dev/null +++ b/packages/cli/src/projects/options.ts @@ -0,0 +1,74 @@ +import resolvePath from '../util/resolve-path'; +import { Opts as BaseOpts, CLIOption } from '../options'; +import getCLIOptionObject from '../util/get-cli-option-object'; + +export type Opts = BaseOpts & { + alias?: string; + env?: string; + workspace?: string; + removeUnmapped?: boolean | undefined; + workflowMappings?: Record<string, string> | undefined; + project?: string; +}; + +// project specific options +export const env: CLIOption = { + name: 'env', + yargs: { + description: 'Environment name (eg staging, prod, branch)', + hidden: true, + }, +}; + +export const alias: CLIOption = { + name: 'alias', + yargs: { + description: 'A local alias for the project (eg staging, prod)', + }, +}; + +export const removeUnmapped: CLIOption = { + name: 'remove-unmapped', + yargs: { + boolean: true, + description: + "Removes all workflows that didn't get mapped from the final project after merge", + }, +}; + +export const workflowMappings: CLIOption = { + name: 'workflow-mappings', + yargs: { + type: 'string', + coerce: getCLIOptionObject, + description: + 'A manual object mapping of which workflows in source and target should be matched for a merge.', + }, +}; + +// We declare a
new output path here, overriding the default cli one, +// because default rules are different +export const outputPath: CLIOption = { + name: 'output-path', + yargs: { + alias: ['output'], + type: 'string', + description: 'Path to output the fetched project to', + }, +}; + +export const workspace: CLIOption = { + name: 'workspace', + yargs: { + alias: ['w'], + description: 'Path to the project workspace (ie, path to openfn.yaml)', + }, + ensure: (opts: any) => { + const ws = opts.workspace ?? process.env.OPENFN_WORKSPACE; + if (!ws) { + opts.workspace = process.cwd(); + } else { + opts.workspace = resolvePath(ws); + } + }, +}; diff --git a/packages/cli/src/projects/pull.ts b/packages/cli/src/projects/pull.ts index 07f8ae75c..12bf8e6d8 100644 --- a/packages/cli/src/projects/pull.ts +++ b/packages/cli/src/projects/pull.ts @@ -1,32 +1,68 @@ +import yargs from 'yargs'; +import { build, ensure, override } from '../util/command-builders'; import { handler as fetch } from './fetch'; import { handler as checkout } from './checkout'; +import * as o from '../options'; +import * as o2 from './options'; import type { Logger } from '../util/logger'; -import type { Opts } from '../options'; +import type { Opts } from './options'; export type PullOptions = Pick< Opts, - | 'apiKey' - | 'endpoint' - | 'env' - | 'force' + | 'beta' + | 'command' | 'log' | 'logJson' - | 'projectId' - | 'workspace' + | 'statePath' + | 'projectPath' + | 'configPath' + | 'project' + | 'confirm' + | 'snapshots' >; +const options = [ + // local options + // TODO: need to port more of these + o2.alias, + o2.env, + o2.workspace, + + // general options + o.apiKey, + o.endpoint, + o.log, + override(o.path, { + description: 'path to output the project to', + }), + o.logJson, + o.projectPath, + o.snapshots, + o.path, + o.force, +]; + +export const command: yargs.CommandModule = { + command: 'pull [project]', + describe: `Pull a project from a Lightning Instance and expand to the file system (ie fetch + 
checkout)`, + builder: (yargs: yargs.Argv) => + build(options, yargs) + .positional('project', { + describe: 'The UUID, local id or local alias of the project to pull', + }) + .example( + 'pull 57862287-23e6-4650-8d79-e1dd88b24b1c', + 'Pull project with a UUID from a lightning instance' + ), + handler: ensure('project-pull', options), +}; + export async function handler(options: PullOptions, logger: Logger) { - const project = await fetch(options, logger); + await fetch(options, logger); logger.success(`Downloaded latest project version`); - await checkout( - { - ...options, - projectId: project.id, - }, - logger - ); + await checkout(options, logger); logger.success(`Checked out project locally`); } diff --git a/packages/cli/src/projects/util.ts b/packages/cli/src/projects/util.ts index 3763c9f69..503d2076a 100644 --- a/packages/cli/src/projects/util.ts +++ b/packages/cli/src/projects/util.ts @@ -73,31 +73,29 @@ export const serialize = async ( }; export const getLightningUrl = ( - config: AuthOptions, + endpoint: string, path: string = '', snapshots?: string[] ) => { const params = new URLSearchParams(); snapshots?.forEach((snapshot) => params.append('snapshots[]', snapshot)); - return new URL( - `/api/provision/${path}?${params.toString()}`, - config.endpoint - ); + return new URL(`/api/provision/${path}?${params.toString()}`, endpoint); }; -export async function getProject( - logger: Logger, - config: AuthOptions, +export async function fetchProject( + endpoint: string, + apiKey: string, projectId: string, + logger?: Logger, snapshots?: string[] ): Promise<{ data: Provisioner.Project | null }> { - const url = getLightningUrl(config, projectId, snapshots); - logger.info(`Checking ${url} for existing project`); + const url = getLightningUrl(endpoint, projectId, snapshots); + logger?.info(`Checking ${url} for existing project`); try { const response = await fetch(url, { headers: { - Authorization: `Bearer ${config.apiKey}`, + Authorization: `Bearer ${apiKey}`, 
Accept: 'application/json', }, }); @@ -105,7 +103,7 @@ export async function getProject( if (!response.ok) { if (response.status === 401 || response.status === 403) { throw new CLIError( - `Failed to authorize request with endpoint ${config.endpoint}, got ${response.status} ${response.statusText}` + `Failed to authorize request with endpoint ${endpoint}, got ${response.status} ${response.statusText}` ); } if (response.status === 404) { @@ -116,11 +114,10 @@ export async function getProject( `Failed to fetch project ${projectId}: ${response.statusText}` ); } - - logger.info('Project found'); + logger?.info(`Project retrieved from ${endpoint}`); return response.json(); } catch (error: any) { - handleCommonErrors(config, error); + handleCommonErrors({ endpoint, apiKey }, error); throw error; } diff --git a/packages/cli/src/projects/version.ts b/packages/cli/src/projects/version.ts index e3197b5e1..dfe0c90bf 100644 --- a/packages/cli/src/projects/version.ts +++ b/packages/cli/src/projects/version.ts @@ -4,14 +4,15 @@ import { Workspace } from '@openfn/project'; import { ensure, build } from '../util/command-builders'; import type { Logger } from '../util/logger'; import * as o from '../options'; +import * as po from './options'; -import type { Opts } from '../options'; +import type { Opts } from './options'; export type VersionOptions = Required< Pick >; -const options = [o.workflow, o.workspace, o.workflowMappings]; +const options = [o.workflow, po.workspace, po.workflowMappings]; const command: yargs.CommandModule = { command: 'version [workflow]', diff --git a/packages/cli/src/pull/command.ts b/packages/cli/src/pull/command.ts index 2ddb8c1f3..ba6dd6620 100644 --- a/packages/cli/src/pull/command.ts +++ b/packages/cli/src/pull/command.ts @@ -2,6 +2,7 @@ import yargs from 'yargs'; import { build, ensure, override } from '../util/command-builders'; import { Opts } from '../options'; import * as o from '../options'; +import * as po from '../projects/options'; export 
type PullOptions = Required< Pick< @@ -20,12 +21,11 @@ export type PullOptions = Required< >; const options = [ - o.apikey, - o.beta, + o.apiKey, o.beta, o.configPath, o.endpoint, - o.env, + po.env, o.log, override(o.path, { description: 'path to output the project to', @@ -39,7 +39,7 @@ const options = [ // These are hidden commands used only by beta // The need to be declared here to be initialised and defaulted properly override(o.force, { hidden: true }), - override(o.workspace, { hidden: true }), + override(po.workspace, { hidden: true }), ]; const pullCommand: yargs.CommandModule = { @@ -49,7 +49,7 @@ const pullCommand: yargs.CommandModule = { build(options, yargs) .positional('projectId', { describe: - 'The id of the project that should be pulled shouled be a UUID', + 'The id of the project that should be pulled should be a UUID', demandOption: true, }) .example( diff --git a/packages/cli/src/util/command-builders.ts b/packages/cli/src/util/command-builders.ts index 6c839c659..a678788e4 100644 --- a/packages/cli/src/util/command-builders.ts +++ b/packages/cli/src/util/command-builders.ts @@ -13,10 +13,14 @@ const expandYargs = (y: {} | (() => any)) => { // build helper to chain options export function build(opts: CLIOption[], yargs: yargs.Argv) { - return opts.reduce( - (_y, o) => yargs.option(o.name, expandYargs(o.yargs)), - yargs - ); + return opts.reduce((_y, o) => { + if (!o?.name) { + console.error(`ERROR: INVALID COMMAND OPTION PASSED`, o); + console.error('Check the options passed to the command builder'); + throw new Error('Invalid command'); + } + return yargs.option(o.name, expandYargs(o.yargs)); + }, yargs); } // Mutate the incoming argv with defaults etc diff --git a/packages/cli/src/util/load-plan.ts b/packages/cli/src/util/load-plan.ts index fa7fe7811..00dba22db 100644 --- a/packages/cli/src/util/load-plan.ts +++ b/packages/cli/src/util/load-plan.ts @@ -10,6 +10,7 @@ import type { ExecutionPlan, Job, WorkflowOptions } from '@openfn/lexicon'; 
import type { Opts } from '../options'; import type { Logger } from './logger'; import type { CLIExecutionPlan, CLIJobNode, OldCLIWorkflow } from '../types'; +import resolvePath from './resolve-path'; const loadPlan = async ( options: Pick< @@ -226,10 +227,7 @@ const fetchFile = async ( ) => { const { rootDir = '', filePath, name } = fileInfo; try { - // Special handling for ~ feels like a necessary evil - const fullPath = filePath.startsWith('~') - ? filePath - : path.resolve(rootDir, filePath); + const fullPath = resolvePath(filePath, rootDir); const result = await fs.readFile(fullPath, 'utf8'); log.debug('Loaded file', fullPath); return result; @@ -333,11 +331,70 @@ const ensureAdaptors = (plan: CLIExecutionPlan) => { }); }; +type ensureCollectionsOptions = { + endpoint?: string; + version?: string; + apiKey?: string; +}; + +const ensureCollections = ( + plan: CLIExecutionPlan, + { + endpoint = 'https://app.openfn.org', + version = 'latest', + apiKey = 'null', + }: ensureCollectionsOptions = {}, + logger?: Logger +) => { + let collectionsFound = false; + + Object.values(plan.workflow.steps) + .filter((step) => (step as any).expression?.match(/(collections\.)/)) + .forEach((step) => { + const job = step as CLIJobNode; + if ( + !job.adaptors?.find((v: string) => + v.startsWith('@openfn/language-collections') + ) + ) { + collectionsFound = true; + job.adaptors ??= []; + job.adaptors.push( + `@openfn/language-collections@${version || 'latest'}` + ); + + job.configuration = Object.assign({}, job.configuration, { + collections_endpoint: `${endpoint}/collections`, + collections_token: apiKey, + }); + } + }); + + if (collectionsFound) { + if (!apiKey || apiKey === 'null') { + logger?.warn( + 'WARNING: collections API was not set. 
Pass --api-key or OPENFN_API_KEY' + ); + } + logger?.info( + `Configured collections to use endpoint ${endpoint} and API Key ending with ${apiKey?.substring( + apiKey.length - 10 + )}` + ); + } +}; + const loadXPlan = async ( plan: CLIExecutionPlan, options: Pick< Opts, - 'monorepoPath' | 'baseDir' | 'expandAdaptors' | 'globals' + | 'monorepoPath' + | 'baseDir' + | 'expandAdaptors' + | 'globals' + | 'collectionsVersion' + | 'collectionsEndpoint' + | 'apiKey' >, logger: Logger, defaultName: string = '' @@ -350,6 +407,15 @@ const loadXPlan = async ( plan.workflow.name = defaultName; } ensureAdaptors(plan); + ensureCollections( + plan, + { + version: options.collectionsVersion, + apiKey: options.apiKey, + endpoint: options.collectionsEndpoint, + }, + logger + ); // import global functions // if globals is provided via cli argument. it takes precedence diff --git a/packages/cli/src/util/resolve-path.ts b/packages/cli/src/util/resolve-path.ts new file mode 100644 index 000000000..13168b37c --- /dev/null +++ b/packages/cli/src/util/resolve-path.ts @@ -0,0 +1,9 @@ +import nodepath from 'node:path'; +import os from 'node:os'; + +export default (path: string, root?: string) => { + // Special handling for ~ feels like a necessary evil + return path.startsWith('~') + ? path.replace(`~`, os.homedir) + : nodepath.resolve(root ?? 
'', path); +}; diff --git a/packages/cli/test/commands.test.ts b/packages/cli/test/commands.test.ts index 3679e01c9..b1ff19154 100644 --- a/packages/cli/test/commands.test.ts +++ b/packages/cli/test/commands.test.ts @@ -1,5 +1,7 @@ import { createMockLogger } from '@openfn/logger'; -import createLightningServer from '@openfn/lightning-mock'; +import createLightningServer, { + DEFAULT_PROJECT_ID, +} from '@openfn/lightning-mock'; import test from 'ava'; import mock from 'mock-fs'; import { execSync } from 'node:child_process'; @@ -16,7 +18,7 @@ import type { Opts } from '../src/options'; // because of all the FS mocking // To make them more robust in CI, all the tests in this file need // an increased timeout -const TIMEOUT = 1000 * 20; +const TIMEOUT = 1000 * 30; const logger = createMockLogger('', { level: 'debug' }); @@ -845,7 +847,7 @@ test.serial('pull: should pull a simple project', async (t) => { }); process.env.OPENFN_ENDPOINT = endpoint; - const opts = cmd.parse('pull 123') as Opts; + const opts = cmd.parse(`pull ${DEFAULT_PROJECT_ID}`) as Opts; await commandParser(opts, logger); const last = logger._parse(logger._history.at(-1)); diff --git a/packages/cli/test/execute/apply-credential-map.test.ts b/packages/cli/test/execute/apply-credential-map.test.ts new file mode 100644 index 000000000..e4eb72c10 --- /dev/null +++ b/packages/cli/test/execute/apply-credential-map.test.ts @@ -0,0 +1,95 @@ +import test from 'ava'; +import applyCredentialMap from '../../src/execute/apply-credential-map'; +import { createMockLogger } from '@openfn/logger/dist'; + +const fn = `const fn = (fn) => (s) => fn(s); +`; + +const createWorkflow = (steps?: any[]) => ({ + workflow: { + steps: steps ?? [ + { + id: 'a', + expression: `${fn}fn(() => ({ data: { count: 42 } }));`, + // project_credential_id must map here + // what about keychain_credential_id ? + // Should we map to credential, rather than configuration? 
I don't think so + configuration: 'A', + next: { b: true }, + }, + ], + }, +}); + +test('do nothing if map is undefined', (t) => { + const wf = createWorkflow(); + delete wf.workflow.steps[0].configuration; + + applyCredentialMap(wf); + + t.falsy(wf.workflow.steps[0].configuration); +}); + +test('do nothing if map is empty', (t) => { + const wf = createWorkflow(); + delete wf.workflow.steps[0].configuration; + + applyCredentialMap(wf, {}); + + t.falsy(wf.workflow.steps[0].configuration); +}); + +test('apply a credential to a single step', (t) => { + const wf = createWorkflow(); + const map = { + A: { user: 'Anne Arnold' }, + }; + + t.is(wf.workflow.steps[0].configuration, 'A'); + + applyCredentialMap(wf, map); + + t.deepEqual(wf.workflow.steps[0].configuration, map.A); +}); + +test('apply a credential to several steps', (t) => { + const wf = createWorkflow([ + { id: 'a', configuration: 'A' }, + { id: 'b', configuration: 'B' }, + ]); + const map = { + A: { user: 'Anne Arnold' }, + B: { user: 'Belle Bellvue' }, + }; + + t.is(wf.workflow.steps[0].configuration, 'A'); + t.is(wf.workflow.steps[1].configuration, 'B'); + + applyCredentialMap(wf, map); + + t.deepEqual(wf.workflow.steps[0].configuration, map.A); + t.deepEqual(wf.workflow.steps[1].configuration, map.B); +}); + +test('wipe string credential if unmapped', (t) => { + const wf = createWorkflow(); + + t.truthy(wf.workflow.steps[0].configuration); + + applyCredentialMap(wf, {}); + + t.falsy(wf.workflow.steps[0].configuration); +}); + +test('warn if credential unmapped', (t) => { + const wf = createWorkflow(); + + const logger = createMockLogger(); + t.truthy(wf.workflow.steps[0].configuration); + + applyCredentialMap(wf, {}, logger); + + t.truthy( + logger._find('warn', /WARNING: credential IDs were found in the workflow/i) + ); +}); diff --git a/packages/cli/test/execute/execute.test.ts b/packages/cli/test/execute/execute.test.ts index a1e801661..1e28d46d4 100644 --- a/packages/cli/test/execute/execute.test.ts 
+++ b/packages/cli/test/execute/execute.test.ts @@ -93,6 +93,80 @@ test.serial('run a workflow', async (t) => { t.is(result.data.count, 84); }); +test.serial('run a workflow with a JSON credential map', async (t) => { + const workflow = { + workflow: { + steps: [ + { + id: 'a', + // The two steps in this workflow will just write the credential to state + expression: `${fn}fn(s => { s.a = s.configuration.password; return s; })`, + configuration: 'A', + next: { b: true }, + }, + { + id: 'b', + expression: `${fn}fn(s => { s.b = s.configuration.password; return s; })`, + configuration: 'B', + }, + ], + }, + }; + mockFs({ + '/workflow.json': JSON.stringify(workflow), + '/creds.json': JSON.stringify({ + A: { password: 'a' }, + B: { password: 'b' }, + }), + }); + + const options = { + ...defaultOptions, + workflowPath: '/workflow.json', + credentials: '/creds.json', + }; + const result = await handler(options, logger); + t.is(result.a, 'a'); + t.is(result.b, 'b'); +}); + +test.serial.skip('run a workflow with a YAML credential map', async (t) => { + const workflow = { + workflow: { + steps: [ + { + id: 'a', + // The two steps in this workflow will just write the credential to state + expression: `${fn}fn(s => { s.a = s.configuration.password; return s; })`, + configuration: 'A', + next: { b: true }, + }, + { + id: 'b', + expression: `${fn}fn(s => { s.b = s.configuration.password; return s; })`, + configuration: 'B', + }, + ], + }, + }; + mockFs({ + '/workflow.json': JSON.stringify(workflow), + '/creds.yaml': `A: + password: a +B: + password: b`, + }); + + const options = { + ...defaultOptions, + workflowPath: '/workflow.json', + credentials: '/creds.yaml', + }; + const result = await handler(options, logger); + t.is(result.a, 'a'); + t.is(result.b, 'b'); +}); + test.serial('run a workflow with state', async (t) => { const workflow = { workflow: { diff --git a/packages/cli/test/projects/checkout.test.ts b/packages/cli/test/projects/checkout.test.ts index 
a6543cd64..cfe2a911d 100644 --- a/packages/cli/test/projects/checkout.test.ts +++ b/packages/cli/test/projects/checkout.test.ts @@ -150,19 +150,13 @@ test.beforeEach(() => { const logger = createMockLogger('', { level: 'debug' }); -test.serial('get active project', (t) => { - const workspace = new Workspace('/ws'); - t.is(workspace.valid, true); - t.is(workspace.activeProjectId, 'my-project'); -}); - test.serial('checkout: invalid project id', async (t) => { await t.throwsAsync( () => checkoutHandler( { command: 'project-checkout', - projectId: 'not-known', + project: 'not-known', workspace: '/ws', }, logger @@ -176,10 +170,10 @@ test.serial('checkout: invalid project id', async (t) => { test.serial('checkout: to a different valid project', async (t) => { // before checkout. my-project is active and expanded const bcheckout = new Workspace('/ws'); - t.is(bcheckout.activeProject.id, 'my-project'); + t.is(bcheckout.activeProject!.id, 'my-project'); await checkoutHandler( - { command: 'project-checkout', projectId: 'my-project', workspace: '/ws' }, + { command: 'project-checkout', project: 'my-project', workspace: '/ws' }, logger ); const { message } = logger._parse(logger._last); @@ -187,7 +181,7 @@ test.serial('checkout: to a different valid project', async (t) => { // after checkout. my-project is active and expanded const acheckout = new Workspace('/ws'); - t.is(acheckout.activeProject.id, 'my-project'); + t.is(acheckout.activeProject!.id, 'my-project'); // check if files where well expanded t.deepEqual( @@ -199,12 +193,12 @@ test.serial('checkout: to a different valid project', async (t) => { test.serial('checkout: same id as active', async (t) => { // before checkout. 
my-project is active and expanded const bcheckout = new Workspace('/ws'); - t.is(bcheckout.activeProject.id, 'my-project'); + t.is(bcheckout.activeProject!.id, 'my-project'); await checkoutHandler( { command: 'project-checkout', - projectId: 'my-project', + project: 'my-project', workspace: '/ws', }, logger @@ -214,7 +208,7 @@ test.serial('checkout: same id as active', async (t) => { // after checkout. my-project is active and expanded const acheckout = new Workspace('/ws'); - t.is(acheckout.activeProject.id, 'my-project'); + t.is(acheckout.activeProject!.id, 'my-project'); // check if files where well expanded t.deepEqual( @@ -226,11 +220,11 @@ test.serial('checkout: same id as active', async (t) => { test.serial('checkout: switching to and back between projects', async (t) => { // before checkout. my-project is active and expanded const bcheckout = new Workspace('/ws'); - t.is(bcheckout.activeProject.id, 'my-project'); + t.is(bcheckout.activeProject!.id, 'my-project'); // 1. switch from my-project to my-staging await checkoutHandler( - { command: 'project-checkout', projectId: 'my-staging', workspace: '/ws' }, + { command: 'project-checkout', project: 'my-staging', workspace: '/ws' }, logger ); const { message } = logger._parse(logger._last); @@ -238,7 +232,7 @@ test.serial('checkout: switching to and back between projects', async (t) => { // after checkout. my-staging is active and expanded const acheckout = new Workspace('/ws'); - t.is(acheckout.activeProject.id, 'my-staging'); + t.is(acheckout.activeProject!.id, 'my-staging'); // check if files where well expanded t.deepEqual( @@ -250,7 +244,7 @@ test.serial('checkout: switching to and back between projects', async (t) => { await checkoutHandler( { command: 'project-checkout', - projectId: 'my-project', + project: 'my-project', workspace: '/ws', }, logger @@ -260,7 +254,7 @@ test.serial('checkout: switching to and back between projects', async (t) => { // after checkout. 
my-project is active and expanded const fcheckout = new Workspace('/ws'); - t.is(fcheckout.activeProject.id, 'my-project'); + t.is(fcheckout.activeProject!.id, 'my-project'); // check if files where well expanded t.deepEqual( @@ -269,6 +263,67 @@ test.serial('checkout: switching to and back between projects', async (t) => { ); }); +test.serial('checkout: switch with id', async (t) => { + const before = new Workspace('/ws'); + t.is(before.activeProject!.id, 'my-project'); + + await checkoutHandler( + { + command: 'project-checkout', + project: 'my-staging', + workspace: '/ws', + }, + logger + ); + const { message } = logger._parse(logger._last); + t.is(message, 'Expanded project to /ws'); + + const after = new Workspace('/ws'); + t.is(after.activeProject!.id, 'my-staging'); +}); + +test.serial('checkout: switch with alias', async (t) => { + const before = new Workspace('/ws'); + t.is(before.activeProject!.id, 'my-project'); + + await checkoutHandler( + { + command: 'project-checkout', + project: 'staging', // this is actually an alias + workspace: '/ws', + }, + logger + ); + const { message } = logger._parse(logger._last); + t.is(message, 'Expanded project to /ws'); + + const after = new Workspace('/ws'); + t.is(after.activeProject!.id, 'my-staging'); +}); + +// TODO this doesn't work locally because the serialized files in are in v1, +// and have no domain information attached +// This fuzzy match is better covered in the projects testing though +test.serial.skip('checkout: switch with alias and domain', async (t) => { + const before = new Workspace('/ws'); + t.is(before.activeProject!.id, 'my-project'); + + await checkoutHandler( + { + command: 'project-checkout', + project: 'staging@app.openfn.org', // this is actually an alias + workspace: '/ws', + }, + logger + ); + const { message } = logger._parse(logger._last); + t.is(message, 'Expanded project to /ws'); + + // after checkout: staging is active and expanded + const after = new Workspace('/ws'); + 
t.is(after.activeProject!.id, 'my-staging'); +}); + test.serial('respect openfn.yaml settings', async (t) => { mock({ '/ws1/w': {}, @@ -361,7 +416,7 @@ test.serial('respect openfn.yaml settings', async (t) => { await checkoutHandler( { command: 'project-checkout', - projectId: 'staging', + project: 'staging', workspace: '/ws1', }, logger diff --git a/packages/cli/test/projects/fetch.test.ts b/packages/cli/test/projects/fetch.test.ts index fc880f42e..0072984f5 100644 --- a/packages/cli/test/projects/fetch.test.ts +++ b/packages/cli/test/projects/fetch.test.ts @@ -1,4 +1,4 @@ -import { readFile } from 'node:fs/promises'; +import { readFile, writeFile } from 'node:fs/promises'; import test from 'ava'; import mock from 'mock-fs'; import { MockAgent, setGlobalDispatcher } from 'undici'; @@ -10,7 +10,16 @@ import { myProject_v1, myProject_yaml } from './fixtures'; const logger = createMockLogger('', { level: 'debug' }); const ENDPOINT = 'https://app.openfn.org'; -const PROJECT_ID = 'e16c5f09-f0cb-4ba7-a4c2-73fcb2f29d00'; +const PROJECT_UUID = 'e16c5f09-f0cb-4ba7-a4c2-73fcb2f29d00'; + +// Track two different versions of a project yaml +// v1 might be stored locallym ready to be updated +const yaml_v1 = myProject_yaml.replace('fn()', 'alterState()'); +// v2 is always returned by the mock lightning +const yaml_v2 = myProject_yaml; + +const getYamlPath = (alias = 'main') => + `/ws/.projects/${alias}@app.openfn.org.yaml`; let mockAgent = new MockAgent(); mockAgent.disableNetConnect(); @@ -20,7 +29,7 @@ test.before(() => { const mockPool = mockAgent.get(ENDPOINT); mockPool .intercept({ - path: `/api/provision/${PROJECT_ID}?`, + path: `/api/provision/${PROJECT_UUID}?`, method: 'GET', }) .reply(200, { @@ -41,41 +50,228 @@ test.afterEach(() => { mock.restore(); }); -test.serial('fetch from lightning and save as v2 yaml file', async (t) => { +test.serial('fetch by UUID to default new alias', async (t) => { + t.throwsAsync(() => readFile(getYamlPath('main'), 'utf-8')); + await 
fetchHandler( { - projectId: PROJECT_ID, + project: PROJECT_UUID, + endpoint: ENDPOINT, apiKey: 'test-api-key', + workspace: '/ws', + } as any, + logger + ); + const fileContent = await readFile(getYamlPath('main'), 'utf-8'); + + t.is(fileContent.trim(), yaml_v2); +}); + +test.serial('fetch by UUID to new custom alias', async (t) => { + t.throwsAsync(() => readFile(getYamlPath('staging'), 'utf-8')); + + await fetchHandler( + { + project: PROJECT_UUID, + alias: 'staging', + + endpoint: ENDPOINT, + apiKey: 'test-api-key', + workspace: '/ws', + } as any, + logger + ); + + const fileContent = await readFile(getYamlPath('staging'), 'utf-8'); + + t.is(fileContent.trim(), yaml_v2); +}); + +test.serial('fetch by UUID to existing custom alias', async (t) => { + // Set up a v1 project file + await writeFile(getYamlPath('staging'), yaml_v1); + const beforeContents = await readFile(getYamlPath('staging'), 'utf-8'); + t.regex(beforeContents, /alterState\(\)/); + + // Now fetch + await fetchHandler( + { + project: PROJECT_UUID, + alias: 'staging', + + endpoint: ENDPOINT, + apiKey: 'test-api-key', + workspace: '/ws', + } as any, + logger + ); + + // Now ensure the yaml is updated + const fileContent = await readFile(getYamlPath('staging'), 'utf-8'); + t.is(fileContent.trim(), yaml_v2); +}); + +test.serial('error: fetch by UUID to incompatible custom alias ', async (t) => { + // Set up a v1 project file with different UUID + await writeFile( + getYamlPath('staging'), + yaml_v1.replace(PROJECT_UUID, 'abcdefg') + ); + + // The fetch should now throw + await t.throwsAsync( + () => + fetchHandler( + { + project: PROJECT_UUID, + alias: 'staging', + + endpoint: ENDPOINT, + apiKey: 'test-api-key', + workspace: '/ws', + } as any, + logger + ), + { + message: /A project with a different UUID exists at this location/i, + } + ); +}); + +test.serial('force fetch by UUID to incompatible custom alias ', async (t) => { + // Set up a v1 project file with different UUID + await writeFile( + 
getYamlPath('staging'), + yaml_v1.replace(PROJECT_UUID, 'abcdefg') + ); + + await fetchHandler( + { + project: PROJECT_UUID, + alias: 'staging', + force: true, + + endpoint: ENDPOINT, + apiKey: 'test-api-key', + workspace: '/ws', + } as any, + logger + ); + + // Now ensure the yaml is updated + const fileContent = await readFile(getYamlPath('staging'), 'utf-8'); + t.is(fileContent.trim(), yaml_v2); +}); + +test.serial('fetch by existing alias', async (t) => { + // first set up the file system with a preloaded project file + const filePath = '/ws/.projects/staging@app.openfn.org.yaml'; + await writeFile(filePath, myProject_yaml.replace('fn()', 'alterState()')); + + // Now fetch with an alias value + await fetchHandler( + { + project: 'staging', // alias + + endpoint: ENDPOINT, + apiKey: 'test-api-key', workspace: '/ws', - env: 'project', } as any, logger ); - const filePath = '/ws/.projects/project@app.openfn.org.yaml'; const fileContent = await readFile(filePath, 'utf-8'); - const yaml = myProject_yaml; + // Content should be restored to the default + t.is(fileContent.trim(), myProject_yaml); +}); - t.is(fileContent.trim(), yaml); +test.serial('fetch by alias and save to a different alias', async (t) => { + await writeFile(getYamlPath('staging'), yaml_v1); + const beforeContents = await readFile(getYamlPath('staging'), 'utf-8'); + t.regex(beforeContents, /alterState\(\)/); - const { message, level } = logger._parse(logger._last); - t.is(level, 'success'); - t.regex(message, /Fetched project file to/); + await fetchHandler( + { + project: PROJECT_UUID, + alias: 'testing', + + endpoint: ENDPOINT, + apiKey: 'test-api-key', + workspace: '/ws', + } as any, + logger + ); + + // Now ensure the yaml is updated + const fileContent = await readFile(getYamlPath('testing'), 'utf-8'); + t.is(fileContent.trim(), yaml_v2); + + // Now ensure that the staging alias is unchanged + const stagingContents = await readFile(getYamlPath('staging'), 'utf-8'); + t.is(stagingContents.trim(), 
beforeContents); }); -test.serial('save to a custom location', async (t) => { +test.serial('fetch by local id', async (t) => { + // create a local staging project + await writeFile(getYamlPath('staging'), yaml_v1); + const beforeContents = await readFile(getYamlPath('staging'), 'utf-8'); + t.regex(beforeContents, /alterState\(\)/); + await fetchHandler( { - projectId: PROJECT_ID, + // use the project id but specify no alias + project: 'my-project', + endpoint: ENDPOINT, apiKey: 'test-api-key', + workspace: '/ws', + } as any, + logger + ); + + const fileContent = await readFile(getYamlPath('staging'), 'utf-8'); + t.is(fileContent.trim(), yaml_v2); +}); + +test.serial('fetch by local id and save to a new alias', async (t) => { + // create a local staging project + await writeFile(getYamlPath('staging'), yaml_v1); + const beforeContents = await readFile(getYamlPath('staging'), 'utf-8'); + t.regex(beforeContents, /alterState\(\)/); + + await fetchHandler( + { + // use the project id but specify no alias + project: 'my-project', + alias: 'testing', + endpoint: ENDPOINT, + apiKey: 'test-api-key', workspace: '/ws', - env: 'project', + } as any, + logger + ); + + const fileContent = await readFile(getYamlPath('testing'), 'utf-8'); + t.is(fileContent.trim(), yaml_v2); + + // Now ensure that the staging alias is unchanged + const stagingContents = await readFile(getYamlPath('staging'), 'utf-8'); + t.is(stagingContents.trim(), beforeContents); +}); + +test.serial('save to a local file with --out', async (t) => { + await fetchHandler( + { + project: PROJECT_UUID, outputPath: '/ws/out.yaml', + + workspace: '/ws', + endpoint: ENDPOINT, + apiKey: 'test-api-key', } as any, logger ); @@ -92,18 +288,110 @@ test.serial('save to a custom location', async (t) => { t.regex(message, /Fetched project file to/); }); +test.serial('warn if --out and --alias are both set', async (t) => { + await fetchHandler( + { + project: PROJECT_UUID, + endpoint: ENDPOINT, + apiKey: 'test-api-key', + + 
workspace: '/ws', + outputPath: '/ws/out.yaml', + alias: 'jam', + } as any, + logger + ); + + const warn = logger._find('warn', /alias "jam" was set/i); + t.truthy(warn); + + // Should still output to the right place + const filePath = '/ws/out.yaml'; + const fileContent = await readFile(filePath, 'utf-8'); + + t.is(fileContent.trim(), myProject_yaml); +}); + +test.todo('throw if identifier resolution is ambiguous'); + +test.serial('fetch using endpoint in project file', async (t) => { + // first set up the file system with a preloaded project file + const filePath = '/ws/.projects/staging@app.openfn.org.yaml'; + await writeFile(filePath, myProject_yaml); + + await fetchHandler( + { + project: 'staging', + apiKey: 'test-api-key', + workspace: '/ws', + // No endpoint provided! + } as any, + logger + ); + + const fileContent = await readFile(filePath, 'utf-8'); + + t.is(fileContent.trim(), myProject_yaml); +}); + +test.serial('fetch by alias and domain', async (t) => { + // set up a mock at localhost + const mockPool = mockAgent.get('http://localhost'); + mockPool + .intercept({ + path: `/api/provision/${PROJECT_UUID}?`, + method: 'GET', + }) + .reply(200, { + data: myProject_v1, + }); + + // first set up the file system with preloaded project files + await writeFile( + '/ws/.projects/staging@app.openfn.org.yaml', + myProject_yaml.replace('fn()', 'jam()') + ); + + await writeFile( + '/ws/.projects/staging@localhost.yaml', + myProject_yaml + .replace('fn()', 'alterState()') + .replace('https://app.openfn.org', 'http://localhost') + ); + + // Now fetch with an alias value and no endoint + await fetchHandler( + { + project: 'staging@localhost', + + apiKey: 'test-api-key', + workspace: '/ws', + } as any, + logger + ); + + const fileContent = await readFile( + '/ws/.projects/staging@localhost.yaml', + 'utf-8' + ); + + t.is( + fileContent.trim(), + myProject_yaml.replace('https://app.openfn.org', 'http://localhost') + ); +}); + test.serial( 'save JSON to a custom location, 
overriding project defaults', async (t) => { await fetchHandler( { - projectId: PROJECT_ID, + project: PROJECT_UUID, + outputPath: '/ws/out.json', + endpoint: ENDPOINT, apiKey: 'test-api-key', - workspace: '/ws', - env: 'project', - outputPath: '/ws/out.json', } as any, logger ); @@ -114,14 +402,15 @@ test.serial( const json = { id: 'my-project', name: 'My Project', - version: 2, + cli: { + version: 2, + }, description: 'my lovely project', collections: [], credentials: [], openfn: { uuid: 'e16c5f09-f0cb-4ba7-a4c2-73fcb2f29d00', endpoint: 'https://app.openfn.org', - env: 'project', inserted_at: '2025-04-23T11:15:59Z', updated_at: '2025-04-23T11:15:59Z', }, @@ -168,7 +457,7 @@ test.serial( lock_version: 1, }, id: 'my-workflow', - history: ['a'], + history: ['cli:02582f3bb088'], }, ], }; @@ -180,76 +469,50 @@ test.serial( t.regex(message, /Fetched project file to/); } ); - -test.serial('Override a compatible project', async (t) => { - // Change project.yaml - const modified = myProject_yaml.replace('my lovely project', 'renamed'); - - mock({ - '/ws/.projects': {}, - '/ws/openfn.yaml': '', - '/ws/.projects/project@app.openfn.org.yaml': modified, - }); - - await fetchHandler( - { - projectId: PROJECT_ID, - endpoint: ENDPOINT, - apiKey: 'test-api-key', - - workspace: '/ws', - env: 'project', - } as any, - logger - ); - - const filePath = '/ws/.projects/project@app.openfn.org.yaml'; - const fileContent = await readFile(filePath, 'utf-8'); - - // This should overwrite the renamed value back to the default - t.regex(fileContent, /my lovely project/); -}); - // In this test, the file on disk has diverged from the remove // This means changes could be lost, so we throw! 
-test.serial('throw for an incompatible project', async (t) => { - // Change project.yaml - const modified = myProject_yaml - .replace('fn()', 'fn(x)') // arbitrary edit so that we can track the change - .replace(' - a', ' - z'); // change the local history to be incompatible - - mock({ - '/ws/.projects': {}, - '/ws/openfn.yaml': '', - '/ws/.projects/project@app.openfn.org.yaml': modified, - }); - - await t.throwsAsync( - () => - fetchHandler( - { - projectId: PROJECT_ID, - endpoint: ENDPOINT, - apiKey: 'test-api-key', - - workspace: '/ws', - env: 'project', - } as any, - logger - ), - { - message: /incompatible project/, - } - ); +test.serial( + 'error: throw if fetching a project that has diverged', + async (t) => { + // Change project.yaml + const modified = myProject_yaml + .replace('fn()', 'fn(x)') // arbitrary edit so that we can track the change + .replace(' - a', ' - z'); // change the local history to be incompatible + + // Make it look like we've checked out hte project + mock({ + '/ws/.projects': {}, + '/ws/openfn.yaml': '', + '/ws/.projects/project@app.openfn.org.yaml': modified, + }); + + await t.throwsAsync( + () => + fetchHandler( + { + project: PROJECT_UUID, + alias: 'project', + + endpoint: ENDPOINT, + apiKey: 'test-api-key', + workspace: '/ws', + } as any, + logger + ), + { + message: /incompatible project/, + } + ); - const filePath = '/ws/.projects/project@app.openfn.org.yaml'; - const fileContent = await readFile(filePath, 'utf-8'); + const filePath = '/ws/.projects/project@app.openfn.org.yaml'; + const fileContent = await readFile(filePath, 'utf-8'); - // The file should NOT be overwritten - t.regex(fileContent, /fn\(x\)/); -}); + // The file should NOT be overwritten + t.regex(fileContent, /fn\(x\)/); + } +); -test.serial('force merge an incompatible project', async (t) => { +test.serial('force merge a diverged project', async (t) => { // Change project.yaml const modified = myProject_yaml.replace('fn()', 'fn(x)'); @@ -261,13 +524,13 @@ 
test.serial('force merge an incompatible project', async (t) => { await fetchHandler( { - projectId: PROJECT_ID, + project: PROJECT_UUID, + alias: 'project', + force: true, + endpoint: ENDPOINT, apiKey: 'test-api-key', - workspace: '/ws', - env: 'project', - force: true, } as any, logger ); diff --git a/packages/cli/test/projects/fixtures.ts b/packages/cli/test/projects/fixtures.ts index de1f06896..a8013466a 100644 --- a/packages/cli/test/projects/fixtures.ts +++ b/packages/cli/test/projects/fixtures.ts @@ -42,7 +42,9 @@ export const myProject_v1: Provisioner.Project = { ], lock_version: 1, deleted_at: null, - version_history: ['a'], // TODO not implemented yet? needed for tests + version_history: [ + 'cli:02582f3bb088', // alterstate + ], }, ], updated_at: '2025-04-23T11:15:59Z', @@ -57,14 +59,14 @@ export const myProject_v1: Provisioner.Project = { export const myProject_yaml = `id: my-project name: My Project -version: 2 +cli: + version: 2 description: my lovely project collections: [] credentials: [] openfn: uuid: e16c5f09-f0cb-4ba7-a4c2-73fcb2f29d00 endpoint: https://app.openfn.org - env: project inserted_at: 2025-04-23T11:15:59Z updated_at: 2025-04-23T11:15:59Z options: @@ -92,7 +94,7 @@ workflows: openfn: uuid: a9a3adef-b394-4405-814d-3ac4323f4b4b history: - - a + - cli:02582f3bb088 openfn: uuid: 72ca3eb0-042c-47a0-a2a1-a545ed4a8406 inserted_at: 2025-04-23T11:19:32Z diff --git a/packages/cli/test/projects/list.test.ts b/packages/cli/test/projects/list.test.ts index a08ff6697..dbc4e7e4a 100644 --- a/packages/cli/test/projects/list.test.ts +++ b/packages/cli/test/projects/list.test.ts @@ -9,6 +9,7 @@ mock({ '/ws/openfn.yaml': jsonToYaml({ project: { id: 'my-project', + uuid: '', }, workspace: { dirs: { @@ -157,12 +158,12 @@ test('valid workspace', async (t) => { t.is( `Available openfn projects -my-project (active) +main | my-project (active) workflows: - simple-workflow -my-project (active) +main | my-project workflows: - simple-workflow diff --git 
a/packages/cli/test/projects/merge.test.ts b/packages/cli/test/projects/merge.test.ts index 44a99fe98..e27aa59f6 100644 --- a/packages/cli/test/projects/merge.test.ts +++ b/packages/cli/test/projects/merge.test.ts @@ -108,7 +108,7 @@ test.serial('merging into the same project', async (t) => { { command: 'project-merge', workspace: '/ws', - projectId: 'my-project', + project: 'my-project', removeUnmapped: false, workflowMappings: {}, }, @@ -117,13 +117,14 @@ test.serial('merging into the same project', async (t) => { const { message, level } = logger._parse(logger._last); t.is(level, 'error'); - t.regex(message, /Merging into the same project not allowed/); + t.regex(message as string, /Merging into the same project not allowed/); }); test.serial('merging a different project into checked-out', async (t) => { // state of main projects workflow before sandbox is merged in const beforeWs = new Workspace('/ws'); - t.is(beforeWs.activeProject.id, 'my-project'); + t.is(beforeWs.activeProject!.id, 'my-project'); + const beforeProjects = beforeWs.list(); t.is(beforeProjects[0].workflows[0].steps.length, 2); t.is(beforeProjects[0].workflows[0].steps[1].name, 'Job A'); @@ -133,7 +134,7 @@ test.serial('merging a different project into checked-out', async (t) => { { command: 'project-merge', workspace: '/ws', - projectId: 'my-sandbox', + project: 'my-sandbox', removeUnmapped: false, workflowMappings: {}, }, @@ -142,7 +143,8 @@ test.serial('merging a different project into checked-out', async (t) => { // state of main projects workflow AFTER sandbox is merged in const afterWorkspace = new Workspace('/ws'); - t.is(afterWorkspace.activeProject.id, 'my-project'); + t.is(afterWorkspace.activeProject!.id, 'my-project'); + const afterProjects = afterWorkspace.list(); const wf = afterProjects[0].workflows[0]; t.is(wf.steps.length, 3); @@ -159,14 +161,14 @@ test.serial('merging a different project into checked-out', async (t) => { test.serial('Write to a different project file', async 
(t) => { // state of main projects workflow before sandbox is merged in const before = new Workspace('/ws'); - t.is(before.activeProject.id, 'my-project'); + t.is(before.activeProject!.id, 'my-project'); // do merging await mergeHandler( { command: 'project-merge', workspace: '/ws', - projectId: 'my-sandbox', + project: 'my-sandbox', removeUnmapped: false, workflowMappings: {}, outputPath: '/ws/backup.yaml', @@ -186,14 +188,14 @@ test.serial( async (t) => { // state of main projects workflow before sandbox is merged in const before = new Workspace('/ws'); - t.is(before.activeProject.id, 'my-project'); + t.is(before.activeProject!.id, 'my-project'); // do merging await mergeHandler( { command: 'project-merge', workspace: '/ws', - projectId: 'my-sandbox', + project: 'my-sandbox', removeUnmapped: false, workflowMappings: {}, outputPath: '/ws/backup.json', @@ -233,7 +235,7 @@ test.serial('Write to JSON using project config', async (t) => { // state of main projects workflow before sandbox is merged in const before = new Workspace('/ws'); - t.is(before.activeProject.id, 'my-project'); + t.is(before.activeProject!.id, 'my-project'); t.is(before.list()[0].workflows[0].steps[1].name, 'Job A'); t.is(before.list()[0].workflows[0].steps[1].openfn?.uuid, 'job-a'); // id Aot retained @@ -243,7 +245,7 @@ test.serial('Write to JSON using project config', async (t) => { { command: 'project-merge', workspace: '/ws', - projectId: 'my-sandbox', + project: 'my-sandbox', removeUnmapped: false, workflowMappings: {}, }, @@ -291,17 +293,17 @@ test.serial('merge with custom base', async (t) => { // state of main projects workflow before sandbox is merged in const before = new Workspace('/ws'); - t.is(before.activeProject.id, 'my-project'); + t.is(before.activeProject!.id, 'my-project'); - t.is(before.list()[0].workflows[0].steps[1].name, 'Job A'); - t.is(before.list()[0].workflows[0].steps[1].openfn?.uuid, 'job-a'); // id Aot retained + const [_trigger, step] = 
before.list()[0].workflows[0].steps; + t.is(step.name, 'Job A'); + t.is(step.openfn?.uuid, 'job-a'); - // do merging await mergeHandler( { command: 'project-merge', workspace: '/ws', - projectId: 'my-sandbox', + project: 'my-sandbox', base: '/ws/.projects/project@app.openfn.org.yaml', removeUnmapped: false, workflowMappings: {}, diff --git a/packages/cli/test/util/load-plan.test.ts b/packages/cli/test/util/load-plan.test.ts index 190dbb118..6e50dde63 100644 --- a/packages/cli/test/util/load-plan.test.ts +++ b/packages/cli/test/util/load-plan.test.ts @@ -4,7 +4,11 @@ import { createMockLogger } from '@openfn/logger'; import type { Job } from '@openfn/lexicon'; import loadPlan from '../../src/util/load-plan'; -import { Opts } from '../../src/options'; +import { + collectionsEndpoint, + collectionsVersion, + Opts, +} from '../../src/options'; const logger = createMockLogger(undefined, { level: 'debug' }); @@ -28,6 +32,7 @@ const createPlan = (steps: Partial[] = []) => ({ test.beforeEach(() => { mock({ 'test/job.js': 'x', + 'test/collections.js': 'collections.get()', 'test/wf-old.json': JSON.stringify({ start: 'a', jobs: [{ id: 'a', expression: 'x()' }], @@ -114,6 +119,50 @@ test.serial('expression: set a start on the plan', async (t) => { t.is(plan.options.start, 'x'); }); +test.serial('expression: load the collections adaptor', async (t) => { + const opts = { + expressionPath: 'test/collections.js', + } as Partial; + + const plan = await loadPlan(opts as Opts, logger); + + t.deepEqual(plan.workflow.steps[0].adaptors, [ + '@openfn/language-collections@latest', + ]); +}); + +test.serial( + 'expression: load the collections adaptor with another', + async (t) => { + const opts = { + expressionPath: 'test/collections.js', + adaptors: ['@openfn/language-common@latest'], + } as Partial; + + const plan = await loadPlan(opts as Opts, logger); + + t.deepEqual(plan.workflow.steps[0].adaptors, [ + '@openfn/language-common@latest', + '@openfn/language-collections@latest', + ]); + 
} +); +test.serial( + 'expression: load the collections adaptor with a specific version', + async (t) => { + const opts = { + expressionPath: 'test/collections.js', + collectionsVersion: '1.1.1', + } as Partial; + + const plan = await loadPlan(opts as Opts, logger); + + t.deepEqual(plan.workflow.steps[0].adaptors, [ + '@openfn/language-collections@1.1.1', + ]); + } +); + test.serial('xplan: load a plan from workflow path', async (t) => { const opts = { workflowPath: 'test/wf.json', @@ -343,3 +392,40 @@ test.serial('xplan: support multiple adaptors', async (t) => { // @ts-ignore t.is(step.adaptor, undefined); }); + +test.serial('xplan: append collections', async (t) => { + const opts = { + workflowPath: 'test/wf.json', + collectionsVersion: '1.1.1', + collectionsEndpoint: 'https://localhost:4000/', + apiKey: 'abc', + }; + + const plan = createPlan([ + { + id: 'a', + expression: 'collections.get()', + adaptors: ['@openfn/language-common@1.0.0'], + }, + ]); + + mock({ + 'test/wf.json': JSON.stringify(plan), + }); + + const result = await loadPlan(opts, logger); + t.truthy(result); + + const step = result.workflow.steps[0] as Job; + t.deepEqual(step.adaptors, [ + '@openfn/language-common@1.0.0', + '@openfn/language-collections@1.1.1', + ]); + // @ts-ignore + t.is(step.adaptor, undefined); + + t.deepEqual(step.configuration, { + collections_endpoint: `${opts.collectionsEndpoint}/collections`, + collections_token: opts.apiKey, + }); +}); diff --git a/packages/cli/test/util/resolve-path.test.ts b/packages/cli/test/util/resolve-path.test.ts new file mode 100644 index 000000000..ca0fa7971 --- /dev/null +++ b/packages/cli/test/util/resolve-path.test.ts @@ -0,0 +1,32 @@ +import test from 'ava'; +import nodepath from 'node:path'; +import { homedir } from 'node:os'; + +import resolvePath from '../../src/util/resolve-path'; + +const workingDir = nodepath.resolve(); + +test('should resolve a relative path', (t) => { + const path = resolvePath('a/b/c'); + t.is(path, workingDir + 
'/a/b/c'); +}); + +test('should resolve an absolute path', (t) => { + const path = resolvePath('/a/b/c'); + t.is(path, '/a/b/c'); +}); + +test('should resolve a home path', (t) => { + const path = resolvePath('~/a/b/c'); + t.is(path, homedir + '/a/b/c'); +}); + +test('should resolve path relative to a relative root', (t) => { + const path = resolvePath('a/b/c', 'tmp'); + t.is(path, workingDir + '/tmp/a/b/c'); +}); + +test('should resolve path relative to an absolute root', (t) => { + const path = resolvePath('a/b/c', '/tmp'); + t.is(path, '/tmp/a/b/c'); +}); diff --git a/packages/engine-multi/src/worker/pool.ts b/packages/engine-multi/src/worker/pool.ts index 54d398279..7a306b974 100644 --- a/packages/engine-multi/src/worker/pool.ts +++ b/packages/engine-multi/src/worker/pool.ts @@ -294,6 +294,7 @@ function createPool(script: string, options: PoolOptions = {}, logger: Logger) { const killPromises: Promise[] = []; // Drain the pool + // Workers should always be idl while (pool.length) { const worker = pool.pop(); if (worker) { @@ -304,7 +305,7 @@ function createPool(script: string, options: PoolOptions = {}, logger: Logger) { if (immediate) { Object.values(allWorkers).forEach((worker) => { - killPromises.push(waitForWorkerExit(worker, 1000)); + killPromises.push(waitForWorkerExit(worker, 1)); delete allWorkers[worker.pid!]; }); } diff --git a/packages/engine-multi/test/worker/pool.test.ts b/packages/engine-multi/test/worker/pool.test.ts index 190dacbbe..fe162d249 100644 --- a/packages/engine-multi/test/worker/pool.test.ts +++ b/packages/engine-multi/test/worker/pool.test.ts @@ -201,7 +201,8 @@ test('destroy should handle un-initialised workers', async (t) => { t.is(pool._pool.length, 0); }); -test('destroy should close all child processes', async (t) => { +// Flaky - see https://github.com/OpenFn/kit/issues/1192 +test.skip('destroy should close all child processes', async (t) => { // warm up a pool const pool = createPool(workerPath, { capacity: 10 }, logger); diff 
--git a/packages/lexicon/core.d.ts b/packages/lexicon/core.d.ts index e26ef3385..15a198974 100644 --- a/packages/lexicon/core.d.ts +++ b/packages/lexicon/core.d.ts @@ -34,7 +34,7 @@ export type Project = { config: WorkspaceConfig; /** Stuff only used by the CLI for this project */ - meta?: LocalMeta; + cli?: LocalMeta; }; export interface LocalMeta { diff --git a/packages/lightning-mock/CHANGELOG.md b/packages/lightning-mock/CHANGELOG.md index 371833a90..e9c6b0ad4 100644 --- a/packages/lightning-mock/CHANGELOG.md +++ b/packages/lightning-mock/CHANGELOG.md @@ -1,5 +1,12 @@ # @openfn/lightning-mock +## 2.4.0 + +### Minor Changes + +- 4cc799b: Update the provisioner API to support real data +- 6689ad0: Add basic collections support (GET only) + ## 2.3.10 ### Patch Changes diff --git a/packages/lightning-mock/package.json b/packages/lightning-mock/package.json index 997fee8f8..8a46ae650 100644 --- a/packages/lightning-mock/package.json +++ b/packages/lightning-mock/package.json @@ -1,6 +1,6 @@ { "name": "@openfn/lightning-mock", - "version": "2.3.10", + "version": "2.4.0", "private": true, "description": "A mock Lightning server", "main": "dist/index.js", @@ -18,6 +18,7 @@ "dependencies": { "@koa/router": "^12.0.2", "@openfn/engine-multi": "workspace:*", + "@openfn/language-collections": "0.8.0", "@openfn/lexicon": "workspace:^", "@openfn/logger": "workspace:*", "@openfn/runtime": "workspace:*", diff --git a/packages/lightning-mock/src/api-dev.ts b/packages/lightning-mock/src/api-dev.ts index 4441eb91a..473c02343 100644 --- a/packages/lightning-mock/src/api-dev.ts +++ b/packages/lightning-mock/src/api-dev.ts @@ -9,6 +9,7 @@ import { Logger } from '@openfn/logger'; import type { LightningPlan, RunCompletePayload, + Provisioner, } from '@openfn/lexicon/lightning'; import { ServerState } from './server'; @@ -64,6 +65,10 @@ const setupDevAPI = ( app.getState = () => state; + app.addProject = (project: Provisioner.Project_v1) => { + state.projects[project.id] = project; + 
}; + // Promise which returns when a workflow is complete app.waitForResult = (runId: string) => { return new Promise((resolve) => { @@ -85,6 +90,7 @@ const setupDevAPI = ( app.reset = () => { state.queue = []; state.results = {}; + state.projects = {}; state.events.removeAllListeners(); }; diff --git a/packages/lightning-mock/src/api-rest.ts b/packages/lightning-mock/src/api-rest.ts index 2593ce37a..19fe283b7 100644 --- a/packages/lightning-mock/src/api-rest.ts +++ b/packages/lightning-mock/src/api-rest.ts @@ -1,12 +1,15 @@ import Koa from 'koa'; import Router from '@koa/router'; +import bodyParser from 'koa-bodyparser'; import { Logger } from '@openfn/logger'; import { ServerState } from './server'; import type { DevServer } from './types'; +export const DEFAULT_PROJECT_ID = 'e16c5f09-f0cb-4ba7-a4c2-73fcb2f29d00'; + const proj = { - id: 'e16c5f09-f0cb-4ba7-a4c2-73fcb2f29d00', + id: DEFAULT_PROJECT_ID, name: 'aaa', description: 'a project', concurrency: null, @@ -81,12 +84,16 @@ workflows: `; export default ( - _app: DevServer, - _state: ServerState, + app: DevServer, + state: ServerState, _logger: Logger, _api: any ): Koa.Middleware => { const router = new Router(); + router.use(bodyParser()); + + // load a sample project as a default + state.projects[DEFAULT_PROJECT_ID] = proj as any; // we also need to provide a yaml endpoint router.get('/api/provision/:id', (ctx) => { @@ -96,21 +103,45 @@ export default ( // just return a hard-coded project for now ctx.response.body = yaml; } else { - ctx.response.body = { - data: { - ...proj, - id: ctx.params.id, - }, - }; + // TODO what if doesn't exist? 
+ ctx.response.body = { data: state.projects[ctx.params.id] }; } }); router.post('/api/provision', (ctx) => { - // const project = ctx.request.body as LightningPlan; - // TODO just return 200 for now + const proj: any = ctx.request.body; + state.projects[proj.id] = proj; ctx.response.status = 200; }); + // list with query + router.get('/collections/:name', (ctx) => { + const { query, ...opts } = ctx.query; + try { + ctx.body = app.collections.fetch(ctx.params.name, query, opts); + } catch (e: any) { + if ((e.message = 'COLLECTION_NOT_FOUND')) { + ctx.status = 404; + } + } + }); + + // get by key + router.get('/collections/:name/:key', (ctx) => { + const { name, key } = ctx.params; + try { + const result = app.collections.byKey(name, key); + ctx.body = { + key, + value: result, + }; + } catch (e: any) { + if ((e.message = 'COLLECTION_NOT_FOUND')) { + ctx.status = 404; + } + } + }); + return router.routes() as unknown as Koa.Middleware; }; diff --git a/packages/lightning-mock/src/index.ts b/packages/lightning-mock/src/index.ts index 94cc4ef21..13c2ccd59 100644 --- a/packages/lightning-mock/src/index.ts +++ b/packages/lightning-mock/src/index.ts @@ -2,3 +2,5 @@ import createLightningServer from './server'; export default createLightningServer; export { toBase64, generateKeys } from './util'; + +export { DEFAULT_PROJECT_ID } from './api-rest'; diff --git a/packages/lightning-mock/src/server.ts b/packages/lightning-mock/src/server.ts index 5a041a903..22b6568a7 100644 --- a/packages/lightning-mock/src/server.ts +++ b/packages/lightning-mock/src/server.ts @@ -7,8 +7,13 @@ import createLogger, { LogLevel, Logger, } from '@openfn/logger'; +import { collections } from '@openfn/language-collections'; import type { StepId } from '@openfn/lexicon'; -import type { LightningPlan, RunLogLine } from '@openfn/lexicon/lightning'; +import type { + LightningPlan, + Provisioner, + RunLogLine, +} from '@openfn/lexicon/lightning'; import createWebSocketAPI from './api-sockets'; import 
createDevAPI from './api-dev'; @@ -47,6 +52,11 @@ export type ServerState = { events: EventEmitter; options: LightningOptions; + + projects: Record; + + /** Mock collections API (imported from the adaptor) */ + collections: any; }; export type LightningOptions = { @@ -70,11 +80,13 @@ const createLightningServer = (options: LightningOptions = {}) => { const runPrivateKey = options.runPrivateKey ? fromBase64(options.runPrivateKey) : undefined; + const state = { credentials: {}, runs: {}, dataclips: {}, pending: {}, + projects: {}, queue: [] as RunId[], results: {}, @@ -91,6 +103,8 @@ const createLightningServer = (options: LightningOptions = {}) => { app.state = state; + app.collections = collections.createMockAPI(); + const port = options.port || 8888; const server = app.listen(port); logger.info('Listening on ', port); diff --git a/packages/lightning-mock/src/types.ts b/packages/lightning-mock/src/types.ts index 92d7e5626..389ed7c2d 100644 --- a/packages/lightning-mock/src/types.ts +++ b/packages/lightning-mock/src/types.ts @@ -3,6 +3,7 @@ import type { LightningPlan, DataClip, Credential, + Provisioner, } from '@openfn/lexicon/lightning'; import type { ServerState } from './server'; import { PhoenixEvent } from './socket-server'; @@ -13,6 +14,7 @@ export type DevServer = Koa & { state: ServerState; addCredential(id: string, cred: Credential): void; addDataclip(id: string, data: DataClip): void; + addProject(proj: Provisioner.Project_v1): void; enqueueRun(run: LightningPlan): void; destroy: () => Promise; getRun(id: string): LightningPlan; @@ -35,4 +37,7 @@ export type DevServer = Koa & { reset(): void; startRun(id: string): any; waitForResult(runId: string): Promise; + + /** Collections API (from the adaptor) */ + collections: any; }; diff --git a/packages/lightning-mock/test/rest.test.ts b/packages/lightning-mock/test/rest.test.ts index 21631bcc7..cbe7da23c 100644 --- a/packages/lightning-mock/test/rest.test.ts +++ b/packages/lightning-mock/test/rest.test.ts @@ 
-2,6 +2,7 @@ import test from 'ava'; import { setup } from './util'; +import { DEFAULT_PROJECT_ID } from '../src/api-rest'; // @ts-ignore let server: any; @@ -13,10 +14,12 @@ const endpoint = `http://localhost:${port}`; test.before(async () => ({ server } = await setup(port))); test.serial('should pull a project', async (t) => { - const response = await fetch(`${endpoint}/api/provision/123`); + const response = await fetch( + `${endpoint}/api/provision/${DEFAULT_PROJECT_ID}` + ); const { data: proj } = await response.json(); - t.is(proj.id, '123'); + t.is(proj.id, DEFAULT_PROJECT_ID); t.is(proj.name, 'aaa'); t.truthy(proj.workflows); }); @@ -29,14 +32,48 @@ test.serial('should pull a project as yaml', async (t) => { t.regex(proj, /name: wf1/); }); -test.serial('should deploy a project', async (t) => { +test.serial('should deploy a project and fetch it back', async (t) => { const response = await fetch(`${endpoint}/api/provision`, { method: 'POST', - body: JSON.stringify({}), // Not a very good test right now!! 
+ body: JSON.stringify({ + id: 'abc', + name: 'my project', + }), headers: { 'content-type': 'application/json', }, }); t.is(response.status, 200); + + const res2 = await fetch(`${endpoint}/api/provision/abc`); + const { data: proj } = await res2.json(); + t.is(proj.id, 'abc'); + t.is(proj.name, 'my project'); +}); + +test.serial('should fetch many items from a collection', async (t) => { + server.collections.createCollection('stuff'); + server.collections.upsert('stuff', 'x', { id: 'x' }); + + const response = await fetch(`${endpoint}/collections/stuff?query=*`); + const { items } = await response.json(); + t.is(items.length, 1); + t.deepEqual(items[0], { key: 'x', value: { id: 'x' } }); +}); + +test.serial('should fetch a single item from a collection', async (t) => { + server.collections.createCollection('stuff'); + server.collections.upsert('stuff', 'x', { id: 'x' }); + + const response = await fetch(`${endpoint}/collections/stuff/x`); + const result = await response.json(); + t.deepEqual(result, { key: 'x', value: { id: 'x' } }); }); + +test.serial("should return 404 if a collection isn't found", async (t) => { + const response = await fetch(`${endpoint}/collections/nope/*`); + t.is(response.status, 404); +}); + +test.todo("should return 403 if a collection isn't authorized"); diff --git a/packages/project/CHANGELOG.md b/packages/project/CHANGELOG.md index 44d014c79..452c31c2a 100644 --- a/packages/project/CHANGELOG.md +++ b/packages/project/CHANGELOG.md @@ -1,5 +1,16 @@ # @openfn/project +## 0.10.0 + +### Minor Changes + +- 4cc799b: Add support for aliases (replaces env) + +### Patch Changes + +- 3e63c08: Map project_credential_id to configuration +- 4cc799b: Project: remove `getIdentifier()` in favour of `qname` (qualified name) + ## 0.9.3 ### Patch Changes diff --git a/packages/project/package.json b/packages/project/package.json index 035a64ae0..d06678098 100644 --- a/packages/project/package.json +++ b/packages/project/package.json @@ -1,6 +1,6 @@ { 
"name": "@openfn/project", - "version": "0.9.3", + "version": "0.10.0", "description": "Read, serialize, replicate and sync OpenFn projects", "scripts": { "test": "pnpm ava", diff --git a/packages/project/src/Project.ts b/packages/project/src/Project.ts index 6555143b3..6e72ee896 100644 --- a/packages/project/src/Project.ts +++ b/packages/project/src/Project.ts @@ -7,7 +7,6 @@ import fromPath, { FromPathConfig } from './parse/from-path'; // TODO this naming clearly isn't right import { parseProject as fromFs, FromFsConfig } from './parse/from-fs'; import fromProject, { SerializedProject } from './parse/from-project'; -import getIdentifier from './util/get-identifier'; import slugify from './util/slugify'; import { getUuidForEdge, getUuidForStep } from './util/uuid'; import { merge, MergeProjectOptions } from './merge/merge-project'; @@ -28,6 +27,11 @@ type UUIDMap = { }; }; +type CLIMeta = { + version?: number; + alias?: string; +}; + export class Project { // what schema version is this? // And how are we tracking this? @@ -50,10 +54,10 @@ export class Project { // these are all (?) unused clientside options: any; - // local metadata used by the CLI - // This stuff is not synced back to lightning - // TODO maybe rename cli or local - meta: any; + /** + * Local metadata used by the CLI but not synced to Lightning + */ + cli: CLIMeta; // this contains meta about the connected openfn project openfn?: l.ProjectMeta; @@ -126,15 +130,26 @@ export class Project { // stuff that's external to the actual project and managed by the repo // TODO maybe the constructor is (data, Workspace) - constructor(data: Partial, config?: Partial) { - this.config = buildConfig(config); - + constructor( + data: Partial = {}, + meta?: Partial & CLIMeta + ) { this.id = data.id ?? (data.name ? slugify(data.name) : humanId({ separator: '-', capitalize: false })); + const { version, alias = 'main', ...otherConfig } = meta ?? 
{}; + this.cli = Object.assign( + { + alias, + }, + data.cli + ); + + this.config = buildConfig(otherConfig); + this.name = data.name; this.description = data.description ?? undefined; @@ -143,7 +158,23 @@ export class Project { this.workflows = data.workflows?.map(maybeCreateWorkflow) ?? []; this.collections = data.collections; this.credentials = data.credentials; - // this.meta = data.meta ?? {}; + } + + /** Local alias for the project. Comes from the file name. Not shared with Lightning. */ + get alias() { + return this.cli.alias ?? 'main'; + } + + get uuid() { + return this.openfn?.uuid ? `${this.openfn.uuid}` : undefined; + } + + // Helper to extract hostname from endpoint + get host() { + const { endpoint } = this.openfn ?? {}; + if (endpoint) { + return new URL(endpoint).hostname; + } } setConfig(config: Partial) { @@ -170,11 +201,13 @@ export class Project { ); } - // it's the name of the project.yaml file - // qualified name? Remote name? App name? - // every project in a repo need a unique identifier - getIdentifier() { - return getIdentifier(this.openfn); + /** Returns a fully qualified name for the project, id, alias@domain */ + get qname() { + const { alias, host } = this; + if (host) { + return `${alias}@${host}`; + } + return alias; } // Compare this project with another and return a diff diff --git a/packages/project/src/Workspace.ts b/packages/project/src/Workspace.ts index 7e48d16d6..2e06265fc 100644 --- a/packages/project/src/Workspace.ts +++ b/packages/project/src/Workspace.ts @@ -12,10 +12,14 @@ import { } from './util/config'; import fromProject from './parse/from-project'; import type { Logger } from '@openfn/logger'; +import matchProject from './util/match-project'; +import { extractAliasFromFilename } from './parse/from-path'; export class Workspace { // @ts-ignore config not definitely assigned - it sure is config: l.WorkspaceConfig; + + // TODO activeProject should be the actual project activeProject?: l.ProjectMeta; private projects: 
Project[] = []; @@ -23,7 +27,9 @@ export class Workspace { private isValid: boolean = false; private logger: Logger; - constructor(workspacePath: string, logger?: Logger) { + // Set validate to false to suppress warnings if a Workspace doesn't exist + // This is appropriate if, say, fetching a project for the first time + constructor(workspacePath: string, logger?: Logger, validate = true) { this.logger = logger ?? createLogger('Workspace', { level: 'info' }); let context = { workspace: undefined, project: undefined }; @@ -32,9 +38,11 @@ export class Workspace { context = loadWorkspaceFile(content, type as any); this.isValid = true; } catch (e) { - this.logger.warn( - `Could not find openfn.yaml at ${workspacePath}. Using default values.` - ); + if (validate) { + this.logger.warn( + `Could not find openfn.yaml at ${workspacePath}. Using default values.` + ); + } } this.config = buildConfig(context.workspace); this.activeProject = context.project; @@ -50,14 +58,15 @@ export class Workspace { path.extname(fileName) === ext && path.parse(fileName).name !== 'openfn' ); - this.projects = stateFiles .map((file) => { const stateFilePath = path.join(projectsPath, file); try { const data = fs.readFileSync(stateFilePath, 'utf-8'); + const alias = extractAliasFromFilename(file); const project = fromProject(data, { ...this.config, + alias, }); this.projectPaths.set(project.id, stateFilePath); return project; @@ -68,9 +77,11 @@ export class Workspace { }) .filter((s) => s) as Project[]; } else { - this.logger.warn( - `No projects found: directory at ${projectsPath} does not exist` - ); + if (validate) { + this.logger.warn( + `No projects found: directory at ${projectsPath} does not exist` + ); + } } } @@ -85,12 +96,9 @@ export class Workspace { return this.projects; } - /** Get a project by its id or UUID */ - get(id: string) { - return ( - this.projects.find((p) => p.id === id) ?? 
- this.projects.find((p) => p.openfn?.uuid === id) - ); + /** Get a project by its alias, id or UUID. Can also include a UUID */ + get(nameyThing: string) { + return matchProject(nameyThing, this.projects); } getProjectPath(id: string) { @@ -99,8 +107,8 @@ export class Workspace { getActiveProject() { return ( - this.projects.find((p) => p.id === this.activeProject?.id) ?? - this.projects.find((p) => p.openfn?.uuid === this.activeProject?.uuid) + this.projects.find((p) => p.openfn?.uuid === this.activeProject?.uuid) ?? + this.projects.find((p) => p.id === this.activeProject?.id) ); } diff --git a/packages/project/src/parse/from-app-state.ts b/packages/project/src/parse/from-app-state.ts index ff8b01c5d..24d246c93 100644 --- a/packages/project/src/parse/from-app-state.ts +++ b/packages/project/src/parse/from-app-state.ts @@ -10,6 +10,7 @@ import ensureJson from '../util/ensure-json'; export type fromAppStateConfig = Partial & { format?: 'yaml' | 'json'; + alias?: string; }; export default ( @@ -124,7 +125,13 @@ export const mapWorkflow = (workflow: Provisioner.Workflow) => { (e) => e.source_job_id === step.id || e.source_trigger_id === step.id ); - const { body: expression, name, adaptor, ...remoteProps } = step; + const { + body: expression, + name, + adaptor, + project_credential_id, + ...remoteProps + } = step; const s: any /*l.Job*/ = { id: slugify(name), @@ -133,6 +140,9 @@ export const mapWorkflow = (workflow: Provisioner.Workflow) => { adaptor, // TODO is this wrong? 
openfn: renameKeys(remoteProps, { id: 'uuid' }), }; + if (project_credential_id) { + s.configuration = project_credential_id; + } if (outboundEdges.length) { s.next = outboundEdges.reduce((next, edge) => { diff --git a/packages/project/src/parse/from-fs.ts b/packages/project/src/parse/from-fs.ts index 1c0e805fc..612c2b838 100644 --- a/packages/project/src/parse/from-fs.ts +++ b/packages/project/src/parse/from-fs.ts @@ -4,53 +4,34 @@ import { glob } from 'glob'; import * as l from '@openfn/lexicon'; import { Project } from '../Project'; -import getIdentifier from '../util/get-identifier'; import { yamlToJson } from '../util/yaml'; import { buildConfig, loadWorkspaceFile, findWorkspaceFile, } from '../util/config'; -import fromProject from './from-project'; import { omit } from 'lodash-es'; +import { Logger } from '@openfn/logger'; export type FromFsConfig = { root: string; + logger?: Logger; }; // Parse a single project from a root folder +// Note that this does NOT attempt to load UUIDS from the project file +// It just builds the project on disk +// I suppose we could take an option? export const parseProject = async (options: FromFsConfig) => { - const { root } = options; + const { root, logger } = options; const { type, content } = findWorkspaceFile(root); const context = loadWorkspaceFile(content, type as any); const config = buildConfig(context.workspace); - // Now we need to look for the corresponding state file - // Need to load UUIDs and other app settings from this - // If we load it as a Project, uuid tracking is way easier - let state: Project | null = null; - const identifier = getIdentifier({ - endpoint: context.project?.endpoint, - env: context.project?.env, - }); - try { - const format = config.formats?.project ?? config.formats?.project ?? 'yaml'; - const statePath = path.join( - root, - config.dirs?.projects ?? 
'.projects', - `${identifier}.${format}` - ); - const stateFile = await fs.readFile(statePath, 'utf8'); - - state = fromProject(stateFile, config); - } catch (e) { - console.warn(`Failed to find state file for ${identifier}`); - // console.warn(e); - } - const proj: any = { - name: state?.name, + id: context.project?.id, + name: context.project?.name, openfn: omit(context.project, ['id']), config: config, workflows: [], @@ -74,30 +55,21 @@ export const parseProject = async (options: FromFsConfig) => { const wf = fileType === 'yaml' ? yamlToJson(candidate) : JSON.parse(candidate); if (wf.id && Array.isArray(wf.steps)) { - // load settings from the state file - const wfState = state?.getWorkflow(wf.id); - - wf.openfn = Object.assign({}, wfState?.openfn, { - uuid: wfState?.openfn?.uuid ?? null, - }); - - //console.log('Loading workflow at ', filePath); // TODO logger.debug + //logger?.log('Loading workflow at ', filePath); // TODO logger.debug for (const step of wf.steps) { // This is the saved, remote view of the step // TODO if the id has changed, how do we track? - const stateStep = wfState?.get(step.id); if (step.expression && step.expression.endsWith('.js')) { const dir = path.dirname(filePath); const exprPath = path.join(dir, step.expression); try { - console.debug(`Loaded expression from ${exprPath}`); + logger?.debug(`Loaded expression from ${exprPath}`); step.expression = await fs.readFile(exprPath, 'utf-8'); } catch (e) { - console.error(`Error loading expression from ${exprPath}`); + logger?.error(`Error loading expression from ${exprPath}`); // throw? } } - step.openfn = Object.assign({}, stateStep?.openfn); // Now track UUIDs for edges against state for (const target in step.next || {}) { @@ -105,15 +77,13 @@ export const parseProject = async (options: FromFsConfig) => { const bool = step.next[target]; step.next[target] = { condition: bool }; } - const uuid = state?.getUUID(wf.id, step.id, target) ?? 
null; - step.next[target].openfn = { uuid }; } } proj.workflows.push(wf); } } catch (e) { - console.log(e); + logger?.log(e); // not valid json // should probably maybe a big deal about this huh? continue; diff --git a/packages/project/src/parse/from-path.ts b/packages/project/src/parse/from-path.ts index 37f446a20..745bb1c65 100644 --- a/packages/project/src/parse/from-path.ts +++ b/packages/project/src/parse/from-path.ts @@ -1,16 +1,38 @@ import * as l from '@openfn/lexicon'; import { readFile } from 'node:fs/promises'; +import path from 'node:path'; import fromProject from './from-project'; export type FromPathConfig = l.WorkspaceConfig & { format: 'json' | 'yaml'; + alias?: string; +}; + +// Extract alias from filename in format: alias@domain.yaml or alias.yaml +// If format is alias@domain.ext, returns the alias part +// Otherwise returns the filename without extension +export const extractAliasFromFilename = (filename: string): string => { + const basename = path.basename(filename, path.extname(filename)); + // Check for alias@domain format + const atIndex = basename.indexOf('@'); + if (atIndex > 0) { + return basename.substring(0, atIndex); + } + + // Otherwise return the basename as-is + return basename; }; // Load a project from a file path. // Pass config optionally -export default async (path: string, config: Partial = {}) => { - const source = await readFile(path, 'utf8'); +export default async ( + filePath: string, + config: Partial = {} +) => { + const source = await readFile(filePath, 'utf8'); + + const alias = config.alias ?? 
extractAliasFromFilename(filePath); - return fromProject(source, config); + return fromProject(source, { ...config, alias }); }; diff --git a/packages/project/src/parse/from-project.ts b/packages/project/src/parse/from-project.ts index 5ac129b6a..68399abe1 100644 --- a/packages/project/src/parse/from-project.ts +++ b/packages/project/src/parse/from-project.ts @@ -27,13 +27,13 @@ export type SerializedWorkflow = { export default ( data: l.Project | SerializedProject | string, - config?: Partial + config?: Partial & { alias?: string; version?: number } ) => { // first ensure the data is in JSON format let rawJson = ensureJson(data); let json; - if (rawJson.version) { + if (rawJson.cli?.version ?? rawJson.version /*deprecated*/) { // If there's any version key at all, its at least v2 json = from_v2(rawJson as SerializedProject); } else { diff --git a/packages/project/src/serialize/to-app-state.ts b/packages/project/src/serialize/to-app-state.ts index d4f5cfdea..6886ff424 100644 --- a/packages/project/src/serialize/to-app-state.ts +++ b/packages/project/src/serialize/to-app-state.ts @@ -10,6 +10,8 @@ import Workflow from '../Workflow'; type Options = { format?: 'json' | 'yaml' }; const defaultJobProps = { + // TODO why does the provisioner throw if these keys are not set? + // Ok, 90% of jobs will have a credenial, but it's still optional right? keychain_credential_id: null, project_credential_id: null, }; @@ -102,6 +104,17 @@ const mapWorkflow = (workflow: Workflow) => { if (s.expression) { node.body = s.expression; } + if ( + typeof s.configuration === 'string' && + !s.configuration.endsWith('.json') + ) { + // TODO do I need to ensure that this gets added to project_credntials? 
+ // not really - if the credential hasn't been added yet, users have to go into + // the app and do it + // Maybe there's a feature-request to auto-add credentials if the user + // has access + otherOpenFnProps.project_credential_id = s.configuration; + } Object.assign(node, defaultJobProps, otherOpenFnProps); diff --git a/packages/project/src/serialize/to-project.ts b/packages/project/src/serialize/to-project.ts index 4463e3e93..e6fd528cd 100644 --- a/packages/project/src/serialize/to-project.ts +++ b/packages/project/src/serialize/to-project.ts @@ -8,6 +8,7 @@ import type { import Project from '../Project'; import { jsonToYaml } from '../util/yaml'; import { WithMeta } from '../Workflow'; +import { tidyOpenfn } from '../util/omit-nil'; const SERIALIZE_VERSION = 2; @@ -21,37 +22,35 @@ type ToProjectOptions = { export default (project: Project, options: ToProjectOptions = {}) => { // return a compatible json structure + const { alias, ...cliWithoutAlias } = project.cli; const proj: SerializedProject = omitBy( { id: project.id, name: project.name, - version: SERIALIZE_VERSION, // important! + cli: { + ...cliWithoutAlias, + version: SERIALIZE_VERSION, // important! + }, description: project.description, collections: project.collections, credentials: project.credentials, openfn: omitBy(project.openfn, isNil), - meta: project.meta, options: omitBy(project.options, isNil), workflows: project.workflows.map((w) => { const obj = w.toJSON() as SerializedWorkflow; - if (obj.openfn) { - obj.openfn = omitBy(obj.openfn, isNil); - } + tidyOpenfn(obj); if (obj.steps) { obj.steps = obj.steps.sort((a: any, b: any) => { return a.id < b.id ? -1 : a.id > b.id ? 
1 : 0; }); obj.steps.forEach((s: WithMeta) => { - s.openfn = omitBy(s.openfn, isNil); + tidyOpenfn(s); if (s.next && typeof s.next !== 'string') { for (const id in s.next) { - const edge = s.next[id] as any; - if (edge.openfn) { - edge.openfn = omitBy(edge.openfn, isNil); - } + tidyOpenfn(s.next[id]); } } }); diff --git a/packages/project/src/util/get-identifier.ts b/packages/project/src/util/get-identifier.ts deleted file mode 100644 index 496a24b85..000000000 --- a/packages/project/src/util/get-identifier.ts +++ /dev/null @@ -1,14 +0,0 @@ -export default (config: { endpoint?: string; env?: string } = {}) => { - const endpoint = config.endpoint || 'local'; - const name = config.env ?? 'main'; - - let host; - try { - host = new URL(endpoint).hostname; - } catch (e) { - // if an invalid endpoint is passed, assume it's local - // this may not be fair?? - host = endpoint; - } - return `${name}@${host}`; -}; diff --git a/packages/project/src/util/match-project.ts b/packages/project/src/util/match-project.ts new file mode 100644 index 000000000..1032df5ab --- /dev/null +++ b/packages/project/src/util/match-project.ts @@ -0,0 +1,46 @@ +import { UUID } from '@openfn/lexicon'; +import Project from '../Project'; + +type Alias = string; +type ID = string; + +export class MultipleMatchingProjectsError extends Error {} + +const matchProject = (name: Alias | ID | UUID, candidates: Project[]) => { + const [searchTerm, domain] = `${name}`.split('@'); + + // Collect all matching projects + const matchingProjects: Record = {}; + let multipleIdMatches = false; + + // Filter candidates by domain + candidates = candidates.filter( + (project) => !domain || project.host === domain + ); + + const re = new RegExp(searchTerm, 'i'); + for (const project of candidates) { + if ( + project.id === searchTerm || + project.alias === searchTerm || + (project.uuid && re.test(project.uuid)) + ) { + matchingProjects[project.id] ??= []; + matchingProjects[project.id].push(project); + } + } + + const 
matches = Object.values(matchingProjects).flat(); + + // Multiple matches - throw error + if (multipleIdMatches || matches.length > 1) { + throw new MultipleMatchingProjectsError( + `Failed to resolve unique identifier for "${name}", clashes with: ${matches + .map((p) => p.id) + .join(', ')}` + ); + } + return matches.length ? matches[0] : null; +}; + +export default matchProject; diff --git a/packages/project/src/util/omit-nil.ts b/packages/project/src/util/omit-nil.ts new file mode 100644 index 000000000..40a8974e8 --- /dev/null +++ b/packages/project/src/util/omit-nil.ts @@ -0,0 +1,10 @@ +import { omitBy, isNil } from 'lodash-es'; + +export const omitNil = (obj: any, key: string) => { + if (obj[key]) { + obj[key] = omitBy(obj[key], isNil); + } +}; +export default omitNil; + +export const tidyOpenfn = (obj: any) => omitNil(obj, 'openfn'); diff --git a/packages/project/test/fixtures/sample-v1-project.ts b/packages/project/test/fixtures/sample-v1-project.ts index 726c39bfc..26ca2e099 100644 --- a/packages/project/test/fixtures/sample-v1-project.ts +++ b/packages/project/test/fixtures/sample-v1-project.ts @@ -28,7 +28,7 @@ const state: Provisioner.Project = { { id: '66add020-e6eb-4eec-836b-20008afca816', name: 'Transform data', - body: '// Check out the Job Writing Guide for help getting started:\n// https://docs.openfn.org/documentation/jobs/job-writing-guide\n', + body: 'fn(s => s)', adaptor: '@openfn/language-common@latest', project_credential_id: null, keychain_credential_id: null, diff --git a/packages/project/test/fixtures/sample-v2-project.ts b/packages/project/test/fixtures/sample-v2-project.ts index 2213990c2..049ccb27e 100644 --- a/packages/project/test/fixtures/sample-v2-project.ts +++ b/packages/project/test/fixtures/sample-v2-project.ts @@ -6,8 +6,8 @@ import { SerializedProject } from '../../src/parse/from-project'; export const json: SerializedProject = { id: 'my-project', name: 'My Project', - version: 2, description: 'my lovely project', + cli: { 
version: 2 }, openfn: { uuid: '1234', endpoint: 'https://app.openfn.org' }, options: { allow_support_access: false }, workflows: [ @@ -37,7 +37,8 @@ export const json: SerializedProject = { export const yaml = `id: my-project name: My Project -version: 2 +cli: + version: 2 description: my lovely project openfn: uuid: "1234" diff --git a/packages/project/test/parse/from-app-state.test.ts b/packages/project/test/parse/from-app-state.test.ts index 182b86624..fe696dec8 100644 --- a/packages/project/test/parse/from-app-state.test.ts +++ b/packages/project/test/parse/from-app-state.test.ts @@ -3,6 +3,7 @@ import fromAppState, { mapWorkflow } from '../../src/parse/from-app-state'; import { clone, cloneDeep } from 'lodash-es'; import state, { withCreds } from '../fixtures/sample-v1-project'; +import { Job } from '@openfn/lexicon'; // I don't think this file really represents anything // loosely maps to the old config file @@ -100,12 +101,10 @@ test('should create a Project from prov state with a workflow', (t) => { { id: 'transform-data', name: 'Transform data', - expression: - '// Check out the Job Writing Guide for help getting started:\n// https://docs.openfn.org/documentation/jobs/job-writing-guide\n', + expression: 'fn(s => s)', adaptor: '@openfn/language-common@latest', openfn: { uuid: '66add020-e6eb-4eec-836b-20008afca816', - project_credential_id: null, keychain_credential_id: null, }, }, @@ -167,32 +166,54 @@ test('mapWorkflow: map a simple job', (t) => { id: 'transform-data', name: 'Transform data', adaptor: '@openfn/language-common@latest', - expression: - '// Check out the Job Writing Guide for help getting started:\n// https://docs.openfn.org/documentation/jobs/job-writing-guide\n', + expression: 'fn(s => s)', openfn: { uuid: '66add020-e6eb-4eec-836b-20008afca816', - project_credential_id: null, keychain_credential_id: null, }, }); }); -// todo surprised this works -test('mapWorkflow: map a job with project and keychain credentials', (t) => { 
+test('mapWorkflow: map a job with keychain credentials onto .openfn', (t) => { const wf = withCreds.workflows[0]; - wf.jobs.map(console.log); const mapped = mapWorkflow(wf); const [_trigger, job] = mapped.steps; + + // this is the important bit + t.is((job as any).openfn.keychain_credential_id, 'k'); + + // But may as well do this too + t.deepEqual(job, { + id: 'transform-data', + name: 'Transform data', + adaptor: '@openfn/language-common@latest', + configuration: 'p', + expression: 'fn(s => s)', + openfn: { + uuid: '66add020-e6eb-4eec-836b-20008afca816', + keychain_credential_id: 'k', + }, + }); +}); + +test('mapWorkflow: map a job with projcet credentials onto job.configuration', (t) => { + const wf = withCreds.workflows[0]; + const mapped = mapWorkflow(wf); + + const [_trigger, job] = mapped.steps; + + // This is the important bit + t.is((job as Job).configuration, 'p'); + t.deepEqual(job, { id: 'transform-data', name: 'Transform data', adaptor: '@openfn/language-common@latest', - expression: - '// Check out the Job Writing Guide for help getting started:\n// https://docs.openfn.org/documentation/jobs/job-writing-guide\n', + expression: 'fn(s => s)', + configuration: 'p', openfn: { uuid: '66add020-e6eb-4eec-836b-20008afca816', - project_credential_id: 'p', keychain_credential_id: 'k', }, }); diff --git a/packages/project/test/parse/from-fs.test.ts b/packages/project/test/parse/from-fs.test.ts index 5dff39c5e..1a3d1e103 100644 --- a/packages/project/test/parse/from-fs.test.ts +++ b/packages/project/test/parse/from-fs.test.ts @@ -1,221 +1,280 @@ import test from 'ava'; import mock from 'mock-fs'; import { parseProject } from '../../src/parse/from-fs'; +import { jsonToYaml } from '../../src/util/yaml'; +import { buildConfig } from '../../src/util/config'; -const s = JSON.stringify; - -// mock several projects and use them through the tests -// TODO: the state files here are all in v1 format - need to add tests with v2 -// Probably need to rethink all these tests 
tbh -mock({ - '/p1/openfn.json': s({ - // this must be the whole deploy name right? - // else how do we know? - workflowRoot: 'workflows', - formats: { - openfn: 'json', - project: 'json', - workflow: 'json', - }, - project: { - id: 'e16c5f09-f0cb-4ba7-a4c2-73fcb2f29d00', - env: 'staging', - endpoint: 'https://app.openfn.org', - name: 'My Project', - description: '...', - // Note that we exclude app options here - // That stuff is all in the project.yaml, not useful here - }, - }), - '/p1/workflows/my-workflow': {}, - '/p1/workflows/my-workflow/my-workflow.json': s({ - id: 'my-workflow', - name: 'My Workflow', - steps: [ - { - id: 'a', - expression: 'job.js', - next: { - b: true, - }, - }, - { - id: 'b', - expression: './job.js', - next: { - c: false, - }, - }, - ], // TODO handle expressions too! - // TODO maybe test the options key though - }), - '/p1/workflows/my-workflow/job.js': `fn(s => s)`, - // keep a state file (just the stuff we need for uuids) - '/p1/.projects/staging@app.openfn.org.json': s({ - workflows: [ - { - id: '', - name: 'My Workflow', - jobs: [ - { - id: '', - name: 'a', - project_credential_id: 'p', - }, - { - id: '', - name: 'b', - }, - ], - triggers: [], - edges: [ - { - id: '', - source_job_id: '', - target_job_id: '', - }, - ], - }, - ], - }), - - // junk to throw the tests - '/p1/random.json': s({ - // not a workflow file! this should be ignored - }), - '/p1/workflows/my-workflow/random.json': s({ - // not a workflow file! 
this should be ignored - }), - - // p2 is all yaml based - '/p2/openfn.yaml': ` - workflowRoot: wfs - formats: - openfn: yaml - project: yaml - workflow: yaml - project: - env: main - id: "123" - endpoint: app.openfn.org`, - '/p2/wfs/my-workflow/my-workflow.yaml': ` - id: my-workflow - name: My Workflow - steps: - - id: job - adaptor: "@openfn/language-common@latest" - expression: ./job.js - `, - '/p2/wfs/my-workflow/job.js': `fn(s => s)`, - // TODO state here - quite a good test - - // p3 uses custom yaml - '/p3/openfn.yaml': ` -workspace: - x: 1 - y: 2 -project: -`, - '/p3/wfs/my-workflow/my-workflow.yaml': ` - id: my-workflow - name: My Workflow - steps: - - id: job - adaptor: "@openfn/language-common@latest" - expression: ./job.js - `, - '/p3/wfs/my-workflow/job.js': `fn(s => s)`, +test.afterEach(() => { + files = {}; + mock.restore(); }); -test('should load workspace config from json', async (t) => { - const project = await parseProject({ root: '/p1' }); +let files: Record = {}; + +function mockFile(path: string, content: string | object) { + if (path.endsWith('.yaml')) { + content = jsonToYaml(content); + } else if (path.endsWith('.json')) { + content = JSON.stringify(content); + } + + files[path] = content; + mock(files); +} + +test.serial('should load workspace config from json', async (t) => { + mockFile( + '/ws/openfn.json', + buildConfig({ + formats: { + openfn: 'json', + project: 'json', + workflow: 'json', + }, + // @ts-ignore ensure we include custom properties + x: 1, + }) + ); + + const project = await parseProject({ root: '/ws' }); t.deepEqual(project.config, { - workflowRoot: 'workflows', + x: 1, dirs: { projects: '.projects', workflows: 'workflows' }, formats: { openfn: 'json', project: 'json', workflow: 'json' }, }); }); -test('should load custom config props and include default', async (t) => { - const project = await parseProject({ root: '/p3' }); +test.serial('should load workspace config from yaml', async (t) => { + mockFile( + 
'/ws/openfn.yaml', + buildConfig({ + formats: { + openfn: 'yaml', + project: 'yaml', + workflow: 'yaml', + }, + // @ts-ignore ensure we include custom properties + x: 1, + }) + ); + + const project = await parseProject({ root: '/ws' }); t.deepEqual(project.config, { x: 1, - y: 2, dirs: { projects: '.projects', workflows: 'workflows' }, formats: { openfn: 'yaml', project: 'yaml', workflow: 'yaml' }, }); }); -test('should load the workspace config from json', async (t) => { - const project = await parseProject({ root: '/p1' }); +test.serial('should load single workflow', async (t) => { + mockFile('/ws/openfn.yaml', buildConfig()); - t.deepEqual(project.openfn, { - name: 'My Project', - env: 'staging', - endpoint: 'https://app.openfn.org', - description: '...', + mockFile('/ws/workflows/my-workflow/my-workflow.yaml', { + id: 'my-workflow', + name: 'My Workflow', + steps: [ + { + id: 'a', + expression: 'job.js', + }, + ], }); -}); -test('should load a workflow from the file system', async (t) => { - const project = await parseProject({ root: '/p1' }); + mockFile('/ws/workflows/my-workflow/job.js', `fn(s => s)`); + + const project = await parseProject({ root: '/ws' }); t.is(project.workflows.length, 1); - const [wf] = project.workflows; + const wf = project.getWorkflow('my-workflow'); + t.truthy(wf); t.is(wf.id, 'my-workflow'); - t.is(wf.openfn.uuid, ''); - t.is(wf.steps[0].expression, 'fn(s => s)'); + t.is(wf.name, 'My Workflow'); }); -test('should load a workflow from the file system and expand shorthand links', async (t) => { - const project = await parseProject({ root: '/p1' }); +test.serial('should load single workflow from json', async (t) => { + mockFile( + '/ws/openfn.yaml', + buildConfig({ + formats: { + workflow: 'json', + }, + }) + ); + + mockFile('/ws/workflows/my-workflow/my-workflow.json', { + id: 'my-workflow', + name: 'My Workflow', + steps: [ + { + id: 'a', + expression: 'job.js', + }, + ], + }); + + mockFile('/ws/workflows/my-workflow/job.js', `fn(s => 
s)`); + + const project = await parseProject({ root: '/ws' }); t.is(project.workflows.length, 1); - const [wf] = project.workflows; - t.is(typeof wf.steps[1].next.c, 'object'); + const wf = project.getWorkflow('my-workflow'); + t.truthy(wf); + t.is(wf.id, 'my-workflow'); + t.is(wf.name, 'My Workflow'); }); -test('should track the UUID of a step', async (t) => { - const project = await parseProject({ root: '/p1' }); +test.serial('should load single workflow from custom path', async (t) => { + mockFile( + '/ws/openfn.yaml', + buildConfig({ + dirs: { + workflows: 'custom-wfs', + projects: '.projects', + }, + }) + ); + + mockFile('/ws/custom-wfs/my-workflow/my-workflow.yaml', { + id: 'my-workflow', + name: 'My Workflow', + steps: [ + { + id: 'a', + expression: 'job.js', + }, + ], + }); + + mockFile('/ws/custom-wfs/my-workflow/job.js', `fn(s => s)`); + + const project = await parseProject({ root: '/ws' }); - const [wf] = project.workflows; + t.is(project.workflows.length, 1); - t.truthy(wf.steps[0].openfn); - t.is(wf.steps[0].openfn.uuid, ''); + const wf = project.getWorkflow('my-workflow'); + t.truthy(wf); + t.is(wf.id, 'my-workflow'); + t.is(wf.name, 'My Workflow'); }); -// TODO also test this on different openfn objects -test('should track openfn props from state file on a step', async (t) => { - const project = await parseProject({ root: '/p1' }); +test.serial('should include multiple workflows', async (t) => { + mockFile('/ws/openfn.yaml', buildConfig()); + + mockFile('/ws/workflows/workflow-1/workflow-1.yaml', { + id: 'workflow-1', + name: 'Workflow 1', + steps: [ + { + id: 'a', + expression: 'job.js', + }, + ], + }); + + mockFile('/ws/workflows/workflow-1/job.js', `fn(s => s)`); - const [wf] = project.workflows; + mockFile('/ws/workflows/workflow-2/workflow-2.yaml', { + id: 'workflow-2', + name: 'Workflow 2', + steps: [ + { + id: 'b', + expression: 'job.js', + }, + ], + }); - t.truthy(wf.steps[0].openfn); - t.is(wf.steps[0].openfn.project_credential_id, 'p'); 
-}); + mockFile('/ws/workflows/workflow-2/job.js', `fn(s => ({ data: [] }))`); -test('should track the UUID of an edge', async (t) => { - const project = await parseProject({ root: '/p1' }); + const project = await parseProject({ root: '/ws' }); - const [wf] = project.workflows; + t.is(project.workflows.length, 2); - t.truthy(wf.steps[0].next?.b.openfn); - t.is(wf.steps[0].next?.b.openfn.uuid, ''); + const wf1 = project.getWorkflow('workflow-1'); + t.truthy(wf1); + t.is(wf1.id, 'workflow-1'); + t.is(wf1.name, 'Workflow 1'); + + const wf2 = project.getWorkflow('workflow-2'); + t.truthy(wf2); + t.is(wf2.id, 'workflow-2'); + t.is(wf2.name, 'Workflow 2'); }); -test.todo('should track the UUID of a trigger'); -// maybe track other things that aren't in workflow.yaml? +test.serial('should load a workflow expression', async (t) => { + mockFile('/ws/openfn.yaml', buildConfig()); + + mockFile('/ws/workflows/my-workflow/my-workflow.yaml', { + id: 'my-workflow', + name: 'My Workflow', + steps: [ + { + id: 'a', + expression: 'job.js', + }, + ], + }); -test('should load a project from yaml', async (t) => { - const project = await parseProject({ root: '/p2' }); + mockFile('/ws/workflows/my-workflow/job.js', `fn(s => s)`); + const project = await parseProject({ root: '/ws' }); t.is(project.workflows.length, 1); - const [wf] = project.workflows; - t.is(wf.id, 'my-workflow'); + const wf = project.getWorkflow('my-workflow'); + + t.truthy(wf); + t.is(wf.steps[0].expression, 'fn(s => s)'); }); + +test.serial( + 'should return empty workflows array when no workflows found', + async (t) => { + mockFile('/ws/openfn.yaml', buildConfig()); + + const project = await parseProject({ root: '/ws' }); + + t.is(project.workflows.length, 0); + } +); + +test.serial( + 'should load a workflow from the file system and expand shorthand links', + async (t) => { + mockFile('/ws/openfn.yaml', buildConfig()); + + mockFile('/ws/workflows/my-workflow/my-workflow.yaml', { + id: 'my-workflow', + name: 'My 
Workflow', + steps: [ + { + id: 'a', + expression: 'job.js', + next: { + b: true, + }, + }, + { + id: 'b', + expression: './job.js', + next: { + c: false, + }, + }, + ], + }); + + mockFile('/ws/workflows/my-workflow/job.js', `fn(s => s)`); + + const project = await parseProject({ root: '/ws' }); + + t.is(project.workflows.length, 1); + const [wf] = project.workflows; + + t.is(typeof wf.steps[1].next.c, 'object'); + } +); diff --git a/packages/project/test/parse/from-path.test.ts b/packages/project/test/parse/from-path.test.ts index 8b87b7caf..9d31f65bf 100644 --- a/packages/project/test/parse/from-path.test.ts +++ b/packages/project/test/parse/from-path.test.ts @@ -2,7 +2,7 @@ import test from 'ava'; import mock from 'mock-fs'; import { generateProject } from '../../src'; -import fromPath from '../../src/parse/from-path'; +import fromPath, { extractAliasFromFilename } from '../../src/parse/from-path'; import * as v2 from '../fixtures/sample-v2-project'; const proj = generateProject('my-project', ['a-b'], { @@ -90,3 +90,30 @@ test.serial('should use workspace config', async (t) => { t.deepEqual(project.openfn.uuid, proj.openfn.uuid); }); + +test('extractAliasFromFilename: should extract alias from alias@domain.yaml format', (t) => { + const alias = extractAliasFromFilename('main@app.openfn.org.yaml'); + t.is(alias, 'main'); +}); + +test('extractAliasFromFilename: should extract alias from alias@domain.json format', (t) => { + const alias = extractAliasFromFilename('staging@localhost.json'); + t.is(alias, 'staging'); +}); + +test('extractAliasFromFilename: should extract alias from simple filename', (t) => { + const alias = extractAliasFromFilename('production.yaml'); + t.is(alias, 'production'); +}); + +test('extractAliasFromFilename: should handle full paths', (t) => { + const alias = extractAliasFromFilename('/path/to/dev@app.openfn.org.yaml'); + t.is(alias, 'dev'); +}); + +test('extractAliasFromFilename: should handle complex aliases', (t) => { + const alias = 
extractAliasFromFilename( + 'my-project-staging@app.openfn.org.yaml' + ); + t.is(alias, 'my-project-staging'); +}); diff --git a/packages/project/test/parse/from-project.test.ts b/packages/project/test/parse/from-project.test.ts index 96e80875e..d6c57644d 100644 --- a/packages/project/test/parse/from-project.test.ts +++ b/packages/project/test/parse/from-project.test.ts @@ -69,10 +69,11 @@ workflows: }); test('import from a v2 project as JSON', async (t) => { - const proj = await Project.from('project', v2.json); + const proj = await Project.from('project', v2.json, { alias: 'main' }); t.is(proj.id, 'my-project'); t.is(proj.name, 'My Project'); + t.is(proj.cli.alias, 'main'); t.is(proj.openfn!.uuid, '1234'); t.is(proj.openfn!.endpoint, 'https://app.openfn.org'); @@ -114,10 +115,19 @@ test('import from a v2 project as JSON', async (t) => { }); }); +test('import from a v2 project with alias', async (t) => { + const proj = await Project.from('project', v2.json, { alias: 'staging' }); + + t.is(proj.id, 'my-project'); + t.is(proj.name, 'My Project'); + t.is(proj.cli.alias, 'staging'); +}); + test('import from a v2 project as YAML', async (t) => { const proj = await Project.from('project', v2.yaml); t.is(proj.id, 'my-project'); t.is(proj.name, 'My Project'); + t.is(proj.cli.alias, 'main'); t.is(proj.openfn!.uuid, '1234'); t.is(proj.openfn!.endpoint, 'https://app.openfn.org'); // t.is(proj.options.retention_policy, 'retain_all'); @@ -167,10 +177,14 @@ test('import with custom config', async (t) => { projects: 'p', workflows: 'w', }, + alias: 'staging', }; const proj = await Project.from('project', v2.yaml, config); t.is(proj.id, 'my-project'); + t.is(proj.cli.alias, 'staging'); + + // note that alias should have been removed from config t.deepEqual(proj.config, { dirs: { projects: 'p', diff --git a/packages/project/test/project.test.ts b/packages/project/test/project.test.ts index aff15063b..c62a9e157 100644 --- a/packages/project/test/project.test.ts +++ 
b/packages/project/test/project.test.ts @@ -34,7 +34,7 @@ const state: Provisioner.Project = { { id: '66add020-e6eb-4eec-836b-20008afca816', name: 'Transform data', - body: '// Check out the Job Writing Guide for help getting started:\n// https://docs.openfn.org/documentation/jobs/job-writing-guide\n', + body: 'fn(s => s)', adaptor: '@openfn/language-common@latest', project_credential_id: null, keychain_credential_id: null, @@ -61,35 +61,55 @@ const state: Provisioner.Project = { dataclip_retention_period: null, }; -test('should generate a correct identifier with default values', (t) => { +test('should generate a correct qname with default values', (t) => { const project = new Project({}, {}); - const id = project.getIdentifier(); - t.is(id, 'main@local'); + t.is(project.qname, 'main'); }); -test('should generate a correct identifier with real values', (t) => { - const project = new Project({ - openfn: { - env: 'staging', - endpoint: 'https://app.openfn.org', +test('should generate a correct qname with real values', (t) => { + const project = new Project( + { + openfn: { + endpoint: 'https://app.openfn.org', + }, }, - }); + { + alias: 'staging', + } + ); - const id = project.getIdentifier(); - t.is(id, 'staging@app.openfn.org'); + t.is(project.qname, 'staging@app.openfn.org'); }); -test('should generate a correct identifier with weird values', (t) => { - const project = new Project({ - openfn: { - env: 'hello', - endpoint: 'https://app.com/openfn', +test('should generate a correct qname with weird values', (t) => { + const project = new Project( + { + openfn: { + endpoint: 'https://app.com/openfn', + }, }, - }); + { alias: 'hello' } + ); - const id = project.getIdentifier(); - t.is(id, 'hello@app.com'); + t.is(project.qname, 'hello@app.com'); +}); + +test('should return an alias', (t) => { + const project = new Project( + {}, + { + alias: 'staging', + } + ); + + t.is(project.alias, 'staging'); +}); + +test('should default alias to "main"', (t) => { + const project 
= new Project(); + + t.is(project.alias, 'main'); }); test('should convert a state file to a project and back again', async (t) => { @@ -111,11 +131,6 @@ test('should convert a state file to a project and back again', async (t) => { t.deepEqual(newState, state); }); -test.todo('serialize to and from yaml'); - -test.todo('serialize state as json'); -test.todo('serialize state as yaml'); - // Note that this is mostly tested under merge-project // This is testing the static function on Project, which is just a proxy test('should merge two projects', (t) => { diff --git a/packages/project/test/serialize/to-app-state.test.ts b/packages/project/test/serialize/to-app-state.test.ts index 6451f1644..e0c3e6613 100644 --- a/packages/project/test/serialize/to-app-state.test.ts +++ b/packages/project/test/serialize/to-app-state.test.ts @@ -31,11 +31,9 @@ const state: Provisioner.Project = { { id: '66add020-e6eb-4eec-836b-20008afca816', name: 'Transform data', - body: '// Check out the Job Writing Guide for help getting started:\n// https://docs.openfn.org/documentation/jobs/job-writing-guide\n', + body: 'fn(s => s)', adaptor: '@openfn/language-common@latest', - - // TODO make sure these get serialized back - project_credential_id: null, + project_credential_id: '', keychain_credential_id: null, }, ], @@ -51,7 +49,7 @@ const state: Provisioner.Project = { }, ], updated_at: '2025-04-23T11:15:59Z', - project_credentials: [], + project_credentials: [''], scheduled_deletion: null, allow_support_access: false, requires_mfa: false, @@ -242,9 +240,9 @@ test('should handle credentials', (t) => { { id: 'step', expression: '.', + configuration: 'p', openfn: { keychain_credential_id: 'k', - project_credential_id: 'p', }, }, ], @@ -341,7 +339,8 @@ test('should convert a project back to app state in json', (t) => { const data = { name: 'aaa', description: 'a project', - credentials: [], + // TODO I think we might need more automation of this? 
+ credentials: [''], collections: [], openfn: { env: 'project', @@ -388,12 +387,11 @@ test('should convert a project back to app state in json', (t) => { { id: 'transform-data', name: 'Transform data', - expression: - '// Check out the Job Writing Guide for help getting started:\n// https://docs.openfn.org/documentation/jobs/job-writing-guide\n', + expression: 'fn(s => s)', adaptor: '@openfn/language-common@latest', + configuration: '', openfn: { uuid: '66add020-e6eb-4eec-836b-20008afca816', - project_credential_id: null, }, }, ], @@ -472,8 +470,7 @@ test.skip('should convert a project back to app state in yaml', (t) => { { id: 'transform-data', name: 'Transform data', - expression: - '// Check out the Job Writing Guide for help getting started:\n// https://docs.openfn.org/documentation/jobs/job-writing-guide\n', + expression: 'fn(s => s)', adaptor: '@openfn/language-common@latest', openfn: { uuid: '66add020-e6eb-4eec-836b-20008afca816', diff --git a/packages/project/test/serialize/to-project.test.ts b/packages/project/test/serialize/to-project.test.ts index 524af75a0..533350059 100644 --- a/packages/project/test/serialize/to-project.test.ts +++ b/packages/project/test/serialize/to-project.test.ts @@ -9,6 +9,9 @@ const createProject = () => { id: 'my-project', name: 'My Project', description: 'my lovely project', + cli: { + alias: 'main', + }, openfn: { uuid: '1234', endpoint: 'https://app.openfn.org', diff --git a/packages/project/test/util/match-project.test.ts b/packages/project/test/util/match-project.test.ts new file mode 100644 index 000000000..8362f9223 --- /dev/null +++ b/packages/project/test/util/match-project.test.ts @@ -0,0 +1,231 @@ +import test from 'ava'; +import matchProject, { + MultipleMatchingProjectsError, +} from '../../src/util/match-project'; +import Project from '../../src/Project'; + +const p = ( + uuid: any, + alias: string, + id: string, + domain: string = 'app.openfn.org' +) => { + return new Project( + { + id, + openfn: { + endpoint: 
`https://${domain}/abc`, + uuid, + }, + }, + { alias: alias } + ); +}; + +test('match by alias', (t) => { + const projects = [p('', 'staging', 'my-project')]; + + const result = matchProject('staging', projects); + + t.is(result?.id, 'my-project'); + t.is(result?.alias, 'staging'); +}); + +test('match by id', (t) => { + const projects = [p('', 'staging', 'my-project')]; + + const result = matchProject('my-project', projects); + + t.is(result?.id, 'my-project'); +}); + +test('match by uuid', (t) => { + const projects = [p('', 'staging', 'my-project')]; + + const result = matchProject('', projects); + + t.is(result?.id, 'my-project'); +}); + +test('return null if there is no match', (t) => { + const projects = [p('', 'staging', 'my-project')]; + + const result = matchProject('non-existent', projects); + + t.is(result, null); +}); + +test('match by partial uuid - prefix', (t) => { + const projects = [ + p('abcd1234-5678-90ef-ghij-klmnopqrstuv', 'staging', 'my-project'), + ]; + + const result = matchProject('abcd', projects); + + t.is(result?.id, 'my-project'); +}); + +test('match by partial uuid - middle section', (t) => { + const projects = [ + p('abcd1234-5678-90ef-ghij-klmnopqrstuv', 'staging', 'my-project'), + ]; + + const result = matchProject('90ef', projects); + + t.is(result?.id, 'my-project'); +}); + +test('match by partial uuid - case insensitive', (t) => { + const projects = [ + p('abcd1234-5678-90ef-ghij-klmnopqrstuv', 'staging', 'my-project'), + ]; + + const result = matchProject('ABCD', projects); + + t.is(result?.id, 'my-project'); +}); + +test('do not match by partial alias', (t) => { + const projects = [p('', 'staging', 'my-project')]; + + const result = matchProject('stag', projects); + + t.is(result, null); +}); + +test('do not match by partial id', (t) => { + const projects = [p('', 'staging', 'my-project')]; + + const result = matchProject('my-proj', projects); + + t.is(result, null); +}); + +test('throw if ambiguous alias', (t) => { + const 
projects = [ + p('', 'staging', 'project-a'), + p('', 'staging', 'project-b'), + ]; + + t.throws(() => matchProject('staging', projects), { + instanceOf: MultipleMatchingProjectsError, + }); +}); + +test('throw if ambiguous id', (t) => { + const projects = [ + p('', 'staging-a', 'my-project'), + p('', 'staging-b', 'my-project'), + ]; + + t.throws(() => matchProject('my-project', projects), { + instanceOf: MultipleMatchingProjectsError, + }); +}); + +test('match when id and alias are the same', (t) => { + const projects = [p('', 'staging', 'staging')]; + + const result = matchProject('staging', projects); + + t.is(result?.id, 'staging'); + t.is(result?.alias, 'staging'); +}); + +test('throw if ambiguous - id matches one, alias matches another', (t) => { + const projects = [ + p('', 'my-project', 'staging'), + p('', 'other', 'my-project'), + ]; + + t.throws(() => matchProject('my-project', projects), { + instanceOf: MultipleMatchingProjectsError, + }); +}); + +test('throw if ambiguous uuid', (t) => { + const projects = [ + p('abcd1234-5678-90ef-ghij-klmnopqrstuv', 'staging-a', 'project-a'), + p('abcd5678-1234-90ef-ghij-klmnopqrstuv', 'staging-b', 'project-b'), + ]; + + t.throws(() => matchProject('abcd', projects), { + instanceOf: MultipleMatchingProjectsError, + }); +}); + +test('match with domain - by alias', (t) => { + const projects = [p('', 'staging', 'my-project', 'app.openfn.org')]; + + const result = matchProject('staging@app.openfn.org', projects); + + t.is(result?.id, 'my-project'); +}); + +test('match with domain - by id', (t) => { + const projects = [p('', 'staging', 'my-project', 'app.openfn.org')]; + + const result = matchProject('my-project@app.openfn.org', projects); + + t.is(result?.id, 'my-project'); +}); + +test('no match when domain does not match', (t) => { + const projects = [p('', 'staging', 'my-project', 'app.openfn.org')]; + + const result = matchProject('staging@other-domain.com', projects); + + t.is(result, null); +}); + +test('filter by 
domain when multiple projects have same alias', (t) => { + const projects = [ + p('', 'staging', 'project-a', 'app.openfn.org'), + p('', 'staging', 'project-b', 'other-domain.com'), + ]; + + const result = matchProject('staging@app.openfn.org', projects); + + t.is(result?.id, 'project-a'); +}); + +test('filter by domain when multiple projects have same id', (t) => { + const projects = [ + p('', 'staging-a', 'my-project', 'app.openfn.org'), + p('', 'staging-b', 'my-project', 'other-domain.com'), + ]; + + const result = matchProject('my-project@app.openfn.org', projects); + + t.is(result?.id, 'my-project'); + t.is(result?.alias, 'staging-a'); +}); + +test('filter by domain when multiple projects match same partial uuid', (t) => { + const projects = [ + p( + 'abcd1234-5678-90ef-ghij-klmnopqrstuv', + 'staging-a', + 'project-a', + 'app.openfn.org' + ), + p( + 'abcd5678-9012-34ef-ghij-klmnopqrstuv', + 'staging-b', + 'project-b', + 'other-domain.com' + ), + ]; + + const result = matchProject('abcd@app.openfn.org', projects); + + t.is(result?.id, 'project-a'); +}); + +test('return null for empty projects array', (t) => { + const projects: Project[] = []; + + const result = matchProject('anything', projects); + + t.is(result, null); +}); diff --git a/packages/project/test/util/version-workflow.test.ts b/packages/project/test/util/version-workflow.test.ts index 96e16fae1..fc9b23dae 100644 --- a/packages/project/test/util/version-workflow.test.ts +++ b/packages/project/test/util/version-workflow.test.ts @@ -11,13 +11,11 @@ test('generate an 12 character version hash for a basic workflow', (t) => { ` @name a @id some-id - webhook-transform_data(name="Transform data",expression="// Check out the Job Writing Guide for help getting started:\n// https://docs.openfn.org/documentation/jobs/job-writing-guide\n") + webhook-transform_data(name="Transform data",expression="fn(s => s)") ` ); - const hash = workflow.getVersionHash(); - t.log(hash); - t.is(hash, 'cli:7e5ca7843721'); + 
t.is(hash, 'cli:518f491717a7'); }); test('unique hash but different steps order', (t) => { diff --git a/packages/project/test/workspace.test.ts b/packages/project/test/workspace.test.ts index 93b0f61b3..141b3cb96 100644 --- a/packages/project/test/workspace.test.ts +++ b/packages/project/test/workspace.test.ts @@ -2,6 +2,17 @@ import mock from 'mock-fs'; import { jsonToYaml, Workspace } from '../src'; import test from 'ava'; +const gen = (uuid: any, alias: string, id: string, domain: string) => + jsonToYaml({ + id, + name: id.toUpperCase(), + version: 2, + openfn: { + uuid: `${uuid}`, + }, + workflows: [], + }); + // TODO need a test on the legacy and new yaml formats here mock({ '/ws/openfn.yaml': jsonToYaml({ @@ -178,6 +189,19 @@ mock({ }, ], }), + + // aliasing + '/ws4/openfn.yaml': '', + '/ws4/.projects/main@openfn.org.yaml': gen( + 111, + 'main', + 'proj-1', + 'openfn.org' + ), + // prettier-ignore + '/ws4/.projects/main@somewhere.com.yaml': gen(112, 'main', 'proj-1', 'somewhere.com'), + // prettier-ignore + '/ws4/.projects/staging@openfn.org.yaml': gen(113, 'staging', 'proj-1-staging', 'openfn.org'), }); test('workspace-path: valid workspace path', (t) => { @@ -256,3 +280,40 @@ test('load project meta', (t) => { id: 'project-1', }); }); + +test('load v2 projects with multiple matching ids', (t) => { + const ws = new Workspace('/ws4'); + + t.is(ws.projects.length, 3); +}); + +test('get project by id', (t) => { + const ws = new Workspace('/ws4'); + const project = ws.get('proj-1-staging'); + + t.truthy(project); + t.is(project?.id, 'proj-1-staging'); +}); + +test('get project by partial uuid', (t) => { + const ws = new Workspace('/ws4'); + const project = ws.get('3'); + + t.truthy(project); + t.is(project?.uuid, '113'); +}); + +test('get project returns null when not found', (t) => { + const ws = new Workspace('/ws4'); + const project = ws.get('non-existent'); + + t.is(project, null); +}); + +test('get project throws on ambiguous match', (t) => { + const ws = 
new Workspace('/ws4'); + const error = t.throws(() => ws.get('main')); + + t.truthy(error); + t.regex(error!.message, /Failed to resolve unique identifier/); +}); diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index ddac7b8ab..34a8e6953 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -87,6 +87,9 @@ importers: '@openfn/lightning-mock': specifier: workspace:^ version: link:../../packages/lightning-mock + '@openfn/project': + specifier: workspace:* + version: link:../../packages/project '@types/node': specifier: ^18.19.127 version: 18.19.127 @@ -480,13 +483,13 @@ importers: dependencies: ava: specifier: ^6.4.1 - version: 6.4.1(encoding@0.1.13)(rollup@4.52.2) + version: 6.4.1(encoding@0.1.13)(rollup@3.29.5) packages/engine-multi/tmp/repo: dependencies: ava: specifier: ^6.4.1 - version: 6.4.1(encoding@0.1.13)(rollup@4.52.2) + version: 6.4.1(encoding@0.1.13)(rollup@3.29.5) packages/lexicon: dependencies: @@ -506,6 +509,9 @@ importers: '@openfn/engine-multi': specifier: workspace:* version: link:../engine-multi + '@openfn/language-collections': + specifier: 0.8.0 + version: 0.8.0 '@openfn/lexicon': specifier: workspace:^ version: link:../lexicon @@ -1502,8 +1508,8 @@ packages: '@manypkg/get-packages@1.1.3': resolution: {integrity: sha512-fo+QhuU3qE/2TQMQmbVMqaQ6EWbMhi4ABWP+O4AM1NqPBuy0OrApV5LO6BrrgnhtAHS2NH6RrVk9OL181tTi8A==} - '@mapbox/node-pre-gyp@2.0.0': - resolution: {integrity: sha512-llMXd39jtP0HpQLVI37Bf1m2ADlEb35GYSh1SDSLsBhR+5iCxiNGlT31yqbNtVHygHAtMy6dWFERpU2JgufhPg==} + '@mapbox/node-pre-gyp@2.0.3': + resolution: {integrity: sha512-uwPAhccfFJlsfCxMYTwOdVfOz3xqyj8xYL3zJj8f0pb30tLohnnFPhLuqp4/qoEz8sNxe4SESZedcBojRefIzg==} engines: {node: '>=18'} hasBin: true @@ -1537,6 +1543,9 @@ packages: '@openfn/language-collections@0.6.2': resolution: {integrity: sha512-EyXuXvYGBmBXgF95snuxWCd+HgZsT57ghqzDUnhYC+qaUNe9p0aIlFdfA2tTokce04KWI8hc7HKnvm0yPd5H7A==} + '@openfn/language-collections@0.8.0': + resolution: {integrity: 
sha512-iTCNkKZDnWBE5b8Ca3zZJ7BlZRIxDPZM2BKBbWwsNqarDzQpDbANF5F63nTBTDgUYPsrzb4jeGWUG3I0Ej9cNQ==} + '@openfn/language-common@2.0.0-rc3': resolution: {integrity: sha512-7kwhBnCd1idyTB3MD9dXmUqROAhoaUIkz2AGDKuv9vn/cbZh7egEv9/PzKkRcDJYFV9qyyS+cVT3Xbgsg2ii5g==} bundledDependencies: [] @@ -1547,6 +1556,9 @@ packages: '@openfn/language-common@2.1.1': resolution: {integrity: sha512-qIUPjdx+AIM3LW3nXhFcfnhGlgaK5np8utQuzaOSb9FYJiR5hxMFfTl1o0CPkVtUdZ/UfcTFL66cNPuEbGWabA==} + '@openfn/language-common@3.2.1': + resolution: {integrity: sha512-SOTBmmLvtO1kkWw3imyviFpd5ZW8GOirql4mNCv23BthoRyiOC+o2RBzpCeNBL8cVJtWwS+51PEyIlG+UMRBsQ==} + '@openfn/language-http@6.4.3': resolution: {integrity: sha512-8ihgIYId+ewMuNU9hbe5JWEWvaJInDrIEiy4EyO7tbzu5t/f1kO18JIzQWm6r7dcHiMfcG2QaXe6O3br1xOrDA==} @@ -1859,127 +1871,6 @@ packages: rollup: optional: true - '@rollup/rollup-android-arm-eabi@4.52.2': - resolution: {integrity: sha512-o3pcKzJgSGt4d74lSZ+OCnHwkKBeAbFDmbEm5gg70eA8VkyCuC/zV9TwBnmw6VjDlRdF4Pshfb+WE9E6XY1PoQ==} - cpu: [arm] - os: [android] - - '@rollup/rollup-android-arm64@4.52.2': - resolution: {integrity: sha512-cqFSWO5tX2vhC9hJTK8WAiPIm4Q8q/cU8j2HQA0L3E1uXvBYbOZMhE2oFL8n2pKB5sOCHY6bBuHaRwG7TkfJyw==} - cpu: [arm64] - os: [android] - - '@rollup/rollup-darwin-arm64@4.52.2': - resolution: {integrity: sha512-vngduywkkv8Fkh3wIZf5nFPXzWsNsVu1kvtLETWxTFf/5opZmflgVSeLgdHR56RQh71xhPhWoOkEBvbehwTlVA==} - cpu: [arm64] - os: [darwin] - - '@rollup/rollup-darwin-x64@4.52.2': - resolution: {integrity: sha512-h11KikYrUCYTrDj6h939hhMNlqU2fo/X4NB0OZcys3fya49o1hmFaczAiJWVAFgrM1NCP6RrO7lQKeVYSKBPSQ==} - cpu: [x64] - os: [darwin] - - '@rollup/rollup-freebsd-arm64@4.52.2': - resolution: {integrity: sha512-/eg4CI61ZUkLXxMHyVlmlGrSQZ34xqWlZNW43IAU4RmdzWEx0mQJ2mN/Cx4IHLVZFL6UBGAh+/GXhgvGb+nVxw==} - cpu: [arm64] - os: [freebsd] - - '@rollup/rollup-freebsd-x64@4.52.2': - resolution: {integrity: sha512-QOWgFH5X9+p+S1NAfOqc0z8qEpJIoUHf7OWjNUGOeW18Mx22lAUOiA9b6r2/vpzLdfxi/f+VWsYjUOMCcYh0Ng==} - cpu: [x64] - os: 
[freebsd] - - '@rollup/rollup-linux-arm-gnueabihf@4.52.2': - resolution: {integrity: sha512-kDWSPafToDd8LcBYd1t5jw7bD5Ojcu12S3uT372e5HKPzQt532vW+rGFFOaiR0opxePyUkHrwz8iWYEyH1IIQA==} - cpu: [arm] - os: [linux] - libc: [glibc] - - '@rollup/rollup-linux-arm-musleabihf@4.52.2': - resolution: {integrity: sha512-gKm7Mk9wCv6/rkzwCiUC4KnevYhlf8ztBrDRT9g/u//1fZLapSRc+eDZj2Eu2wpJ+0RzUKgtNijnVIB4ZxyL+w==} - cpu: [arm] - os: [linux] - libc: [musl] - - '@rollup/rollup-linux-arm64-gnu@4.52.2': - resolution: {integrity: sha512-66lA8vnj5mB/rtDNwPgrrKUOtCLVQypkyDa2gMfOefXK6rcZAxKLO9Fy3GkW8VkPnENv9hBkNOFfGLf6rNKGUg==} - cpu: [arm64] - os: [linux] - libc: [glibc] - - '@rollup/rollup-linux-arm64-musl@4.52.2': - resolution: {integrity: sha512-s+OPucLNdJHvuZHuIz2WwncJ+SfWHFEmlC5nKMUgAelUeBUnlB4wt7rXWiyG4Zn07uY2Dd+SGyVa9oyLkVGOjA==} - cpu: [arm64] - os: [linux] - libc: [musl] - - '@rollup/rollup-linux-loong64-gnu@4.52.2': - resolution: {integrity: sha512-8wTRM3+gVMDLLDdaT6tKmOE3lJyRy9NpJUS/ZRWmLCmOPIJhVyXwjBo+XbrrwtV33Em1/eCTd5TuGJm4+DmYjw==} - cpu: [loong64] - os: [linux] - libc: [glibc] - - '@rollup/rollup-linux-ppc64-gnu@4.52.2': - resolution: {integrity: sha512-6yqEfgJ1anIeuP2P/zhtfBlDpXUb80t8DpbYwXQ3bQd95JMvUaqiX+fKqYqUwZXqdJDd8xdilNtsHM2N0cFm6A==} - cpu: [ppc64] - os: [linux] - libc: [glibc] - - '@rollup/rollup-linux-riscv64-gnu@4.52.2': - resolution: {integrity: sha512-sshYUiYVSEI2B6dp4jMncwxbrUqRdNApF2c3bhtLAU0qA8Lrri0p0NauOsTWh3yCCCDyBOjESHMExonp7Nzc0w==} - cpu: [riscv64] - os: [linux] - libc: [glibc] - - '@rollup/rollup-linux-riscv64-musl@4.52.2': - resolution: {integrity: sha512-duBLgd+3pqC4MMwBrKkFxaZerUxZcYApQVC5SdbF5/e/589GwVvlRUnyqMFbM8iUSb1BaoX/3fRL7hB9m2Pj8Q==} - cpu: [riscv64] - os: [linux] - libc: [musl] - - '@rollup/rollup-linux-s390x-gnu@4.52.2': - resolution: {integrity: sha512-tzhYJJidDUVGMgVyE+PmxENPHlvvqm1KILjjZhB8/xHYqAGeizh3GBGf9u6WdJpZrz1aCpIIHG0LgJgH9rVjHQ==} - cpu: [s390x] - os: [linux] - libc: [glibc] - - '@rollup/rollup-linux-x64-gnu@4.52.2': - resolution: 
{integrity: sha512-opH8GSUuVcCSSyHHcl5hELrmnk4waZoVpgn/4FDao9iyE4WpQhyWJ5ryl5M3ocp4qkRuHfyXnGqg8M9oKCEKRA==} - cpu: [x64] - os: [linux] - libc: [glibc] - - '@rollup/rollup-linux-x64-musl@4.52.2': - resolution: {integrity: sha512-LSeBHnGli1pPKVJ79ZVJgeZWWZXkEe/5o8kcn23M8eMKCUANejchJbF/JqzM4RRjOJfNRhKJk8FuqL1GKjF5oQ==} - cpu: [x64] - os: [linux] - libc: [musl] - - '@rollup/rollup-openharmony-arm64@4.52.2': - resolution: {integrity: sha512-uPj7MQ6/s+/GOpolavm6BPo+6CbhbKYyZHUDvZ/SmJM7pfDBgdGisFX3bY/CBDMg2ZO4utfhlApkSfZ92yXw7Q==} - cpu: [arm64] - os: [openharmony] - - '@rollup/rollup-win32-arm64-msvc@4.52.2': - resolution: {integrity: sha512-Z9MUCrSgIaUeeHAiNkm3cQyst2UhzjPraR3gYYfOjAuZI7tcFRTOD+4cHLPoS/3qinchth+V56vtqz1Tv+6KPA==} - cpu: [arm64] - os: [win32] - - '@rollup/rollup-win32-ia32-msvc@4.52.2': - resolution: {integrity: sha512-+GnYBmpjldD3XQd+HMejo+0gJGwYIOfFeoBQv32xF/RUIvccUz20/V6Otdv+57NE70D5pa8W/jVGDoGq0oON4A==} - cpu: [ia32] - os: [win32] - - '@rollup/rollup-win32-x64-gnu@4.52.2': - resolution: {integrity: sha512-ApXFKluSB6kDQkAqZOKXBjiaqdF1BlKi+/eqnYe9Ee7U2K3pUDKsIyr8EYm/QDHTJIM+4X+lI0gJc3TTRhd+dA==} - cpu: [x64] - os: [win32] - - '@rollup/rollup-win32-x64-msvc@4.52.2': - resolution: {integrity: sha512-ARz+Bs8kY6FtitYM96PqPEVvPXqEZmPZsSkXvyX19YzDqkCaIlhCieLLMI5hxO9SRZ2XtCtm8wxhy0iJ2jxNfw==} - cpu: [x64] - os: [win32] - '@sentry/core@9.46.0': resolution: {integrity: sha512-it7JMFqxVproAgEtbLgCVBYtQ9fIb+Bu0JD+cEplTN/Ukpe6GaolyYib5geZqslVxhp2sQgT+58aGvfd/k0N8Q==} engines: {node: '>=18'} @@ -2645,8 +2536,8 @@ packages: resolution: {integrity: sha512-NIxF55hv4nSqQswkAeiOi1r83xy8JldOFDTWiug55KBu9Jnblncd2U6ViHmYgHf01TPZS77NJBhBMKdWj9HQMQ==} engines: {node: '>=8'} - ci-info@4.3.0: - resolution: {integrity: sha512-l+2bNRMiQgcfILUi33labAZYIWlH1kWDp+ecNo5iisRKrbm0xcRyCww71/YU0Fkw0mAFpz9bJayXPjey6vkmaQ==} + ci-info@4.3.1: + resolution: {integrity: sha512-Wdy2Igu8OcBpI2pZePZ5oWjPC38tmDVx5WKUXKwlLYkA0ozo85sLsLvkBbBn/sZaSCMFOGZJ14fvW9t5/d7kdA==} engines: {node: '>=8'} 
ci-parallel-vars@1.0.1: @@ -2897,8 +2788,8 @@ packages: resolution: {integrity: sha512-reYkTUJAZb9gUuZ2RvVCNhVHdg62RHnJ7WJl8ftMi4diZ6NWlciOzQN88pUhSELEwflJht4oQDv0F0BMlwaYtA==} engines: {node: '>=8'} - detect-libc@2.1.0: - resolution: {integrity: sha512-vEtk+OcP7VBRtQZ1EJ3bdgzSfBjgnEalLTp5zjJrS+2Z1w2KZly4SBdac/WDU3hhsNAZ9E8SC96ME4Ey8MZ7cg==} + detect-libc@2.1.2: + resolution: {integrity: sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ==} engines: {node: '>=8'} didyoumean@1.2.2: @@ -2961,8 +2852,8 @@ packages: resolution: {integrity: sha512-KxdRyyFcS85pH3dnU8Y5yFUm2YJdaHwcBZWrfG8o89ZY9a13/f9itbN+YG3ELbBo9Pg5zvIozstmuV8bX13q6g==} engines: {node: '>=14.16'} - emoji-regex@10.5.0: - resolution: {integrity: sha512-lb49vf1Xzfx080OKA0o6l8DQQpV+6Vg95zyCJX9VB/BqKYlhG7N4wgROUUHRA+ZPUefLnteQOad7z1kT2bV7bg==} + emoji-regex@10.6.0: + resolution: {integrity: sha512-toUI84YS5YmxW219erniWD0CIVOo46xGKColeNQRgOzDorgBi1v4D71/OFzgD9GO2UGKIv1C3Sp8DAn0+j5w7A==} emoji-regex@8.0.0: resolution: {integrity: sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==} @@ -3887,8 +3778,8 @@ packages: resolution: {integrity: sha512-F2t4YIv9XQUBHt6AOJ0y7lSmP1+cY7Fm1DRh9GClTGzKST7UWLMx6ly9WZdLH/G/ppM5RL4MlQfRT71ri9t19A==} engines: {node: '>=12.20'} - memoize@10.1.0: - resolution: {integrity: sha512-MMbFhJzh4Jlg/poq1si90XRlTZRDHVqdlz2mPyGJ6kqMpyHUyVpDd5gpFAvVehW64+RA1eKE9Yt8aSLY7w2Kgg==} + memoize@10.2.0: + resolution: {integrity: sha512-DeC6b7QBrZsRs3Y02A6A7lQyzFbsQbqgjI6UW0GigGWV+u1s25TycMr0XHZE4cJce7rY/vyw2ctMQqfDkIhUEA==} engines: {node: '>=18'} merge-descriptors@1.0.3: @@ -4184,8 +4075,8 @@ packages: resolution: {integrity: sha512-VFqfGDHlx87K66yZrNdI4YGtD70IRyd+zSvgks6mzHPRNkoKy+9EKP4SFC77/vTTQYmRmti7dvqC+m5jBrBAcg==} engines: {node: '>=12'} - p-map@7.0.3: - resolution: {integrity: sha512-VkndIv2fIB99swvQoA65bm+fsmt6UNdGeIB0oxBs+WhAhdh08QA04JXpI7rbB9r08/nkbysKoya9rtDERYOYMA==} + p-map@7.0.4: + resolution: 
{integrity: sha512-tkAQEw8ysMzmkhgw8k+1U/iPhWNhykKnSk4Rd5zLoPJCuJaGRPo6YposrZgaxHKzDHdDWWZvE/Sk7hsL2X/CpQ==} engines: {node: '>=18'} p-queue@6.6.2: @@ -4585,11 +4476,6 @@ packages: engines: {node: '>=14.18.0', npm: '>=8.0.0'} hasBin: true - rollup@4.52.2: - resolution: {integrity: sha512-I25/2QgoROE1vYV+NQ1En9T9UFB9Cmfm2CJ83zZOlaDpvz29wGQSZXWKw7MiNXau7wYgB/T9fVIdIuEQ+KbiiA==} - engines: {node: '>=18.0.0', npm: '>=8.0.0'} - hasBin: true - run-async@3.0.0: resolution: {integrity: sha512-540WwVDOMxA6dN6We19EcT9sc3hkXPw5mzRNGM3FkdN/vtE9NFvj5lFAPNwUDmJjXidm3v7TC1cTE7t17Ulm1Q==} engines: {node: '>=0.12.0'} @@ -4834,8 +4720,8 @@ packages: resolution: {integrity: sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A==} engines: {node: '>=10'} - tar@7.4.4: - resolution: {integrity: sha512-O1z7ajPkjTgEgmTGz0v9X4eqeEXTDREPTO77pVC1Nbs86feBU1Zhdg+edzavPmYW1olxkwsqA2v4uOw6E8LeDg==} + tar@7.5.2: + resolution: {integrity: sha512-7NyxrTE4Anh8km8iEy7o0QYPs+0JKBTj5ZaqHg6B39erLg0qYXN3BijtShwbsNSvQ+LN75+KV+C4QR/f6Gwnpg==} engines: {node: '>=18'} temp-dir@3.0.0: @@ -5019,6 +4905,10 @@ packages: resolution: {integrity: sha512-GrKEsc3ughskmGA9jevVlIOPMiiAHJ4OFUtaAH+NhfTUSiZ1wMPIQqQvAJUrJspFXJt3EBWgpAeoHEDVT1IBug==} engines: {node: '>=20.18.1'} + undici@7.16.0: + resolution: {integrity: sha512-QEg3HPMll0o3t2ourKwOeUAZ159Kn9mx5pnzHRQO8+Wixmh88YdZRiIwat0iNzNNXn0yoEtXJqFpyW7eM8BV7g==} + engines: {node: '>=20.18.1'} + unicorn-magic@0.3.0: resolution: {integrity: sha512-+QBBXBCvifc56fsbuxZQ6Sic3wqqc3WWaqxs58gvJrcOuN83HGTCwz3oS5phzU9LthRNE9VrJCFCLUgHeeFnfA==} engines: {node: '>=18'} @@ -5787,15 +5677,15 @@ snapshots: globby: 11.1.0 read-yaml-file: 1.1.0 - '@mapbox/node-pre-gyp@2.0.0(encoding@0.1.13)': + '@mapbox/node-pre-gyp@2.0.3(encoding@0.1.13)': dependencies: consola: 3.4.2 - detect-libc: 2.1.0 + detect-libc: 2.1.2 https-proxy-agent: 7.0.6 node-fetch: 2.7.0(encoding@0.1.13) nopt: 8.1.0 semver: 7.7.2 - tar: 7.4.4 + tar: 7.5.2 transitivePeerDependencies: 
- encoding - supports-color @@ -5841,6 +5731,12 @@ snapshots: stream-json: 1.9.1 undici: 5.29.0 + '@openfn/language-collections@0.8.0': + dependencies: + '@openfn/language-common': 3.2.1 + stream-json: 1.9.1 + undici: 5.29.0 + '@openfn/language-common@2.0.0-rc3': {} '@openfn/language-common@2.0.1': @@ -5865,6 +5761,17 @@ snapshots: lodash: 4.17.21 undici: 5.29.0 + '@openfn/language-common@3.2.1': + dependencies: + ajv: 8.17.1 + csv-parse: 5.6.0 + csvtojson: 2.0.10 + date-fns: 2.30.0 + http-status-codes: 2.3.0 + jsonpath-plus: 10.3.0 + lodash: 4.17.21 + undici: 7.16.0 + '@openfn/language-http@6.4.3': dependencies: '@openfn/language-common': 2.0.1 @@ -6182,79 +6089,13 @@ snapshots: transitivePeerDependencies: - supports-color - '@rollup/pluginutils@5.3.0(rollup@4.52.2)': + '@rollup/pluginutils@5.3.0(rollup@3.29.5)': dependencies: '@types/estree': 1.0.8 estree-walker: 2.0.2 picomatch: 4.0.3 optionalDependencies: - rollup: 4.52.2 - - '@rollup/rollup-android-arm-eabi@4.52.2': - optional: true - - '@rollup/rollup-android-arm64@4.52.2': - optional: true - - '@rollup/rollup-darwin-arm64@4.52.2': - optional: true - - '@rollup/rollup-darwin-x64@4.52.2': - optional: true - - '@rollup/rollup-freebsd-arm64@4.52.2': - optional: true - - '@rollup/rollup-freebsd-x64@4.52.2': - optional: true - - '@rollup/rollup-linux-arm-gnueabihf@4.52.2': - optional: true - - '@rollup/rollup-linux-arm-musleabihf@4.52.2': - optional: true - - '@rollup/rollup-linux-arm64-gnu@4.52.2': - optional: true - - '@rollup/rollup-linux-arm64-musl@4.52.2': - optional: true - - '@rollup/rollup-linux-loong64-gnu@4.52.2': - optional: true - - '@rollup/rollup-linux-ppc64-gnu@4.52.2': - optional: true - - '@rollup/rollup-linux-riscv64-gnu@4.52.2': - optional: true - - '@rollup/rollup-linux-riscv64-musl@4.52.2': - optional: true - - '@rollup/rollup-linux-s390x-gnu@4.52.2': - optional: true - - '@rollup/rollup-linux-x64-gnu@4.52.2': - optional: true - - '@rollup/rollup-linux-x64-musl@4.52.2': - optional: true - - 
'@rollup/rollup-openharmony-arm64@4.52.2': - optional: true - - '@rollup/rollup-win32-arm64-msvc@4.52.2': - optional: true - - '@rollup/rollup-win32-ia32-msvc@4.52.2': - optional: true - - '@rollup/rollup-win32-x64-gnu@4.52.2': - optional: true - - '@rollup/rollup-win32-x64-msvc@4.52.2': - optional: true + rollup: 3.29.5 '@sentry/core@9.46.0': {} @@ -6662,10 +6503,10 @@ snapshots: transitivePeerDependencies: - supports-color - '@vercel/nft@0.29.4(encoding@0.1.13)(rollup@4.52.2)': + '@vercel/nft@0.29.4(encoding@0.1.13)(rollup@3.29.5)': dependencies: - '@mapbox/node-pre-gyp': 2.0.0(encoding@0.1.13) - '@rollup/pluginutils': 5.3.0(rollup@4.52.2) + '@mapbox/node-pre-gyp': 2.0.3(encoding@0.1.13) + '@rollup/pluginutils': 5.3.0(rollup@3.29.5) acorn: 8.15.0 acorn-import-attributes: 1.9.5(acorn@8.15.0) async-sema: 3.1.1 @@ -6874,9 +6715,9 @@ snapshots: transitivePeerDependencies: - supports-color - ava@6.4.1(encoding@0.1.13)(rollup@4.52.2): + ava@6.4.1(encoding@0.1.13)(rollup@3.29.5): dependencies: - '@vercel/nft': 0.29.4(encoding@0.1.13)(rollup@4.52.2) + '@vercel/nft': 0.29.4(encoding@0.1.13)(rollup@3.29.5) acorn: 8.15.0 acorn-walk: 8.3.4 ansi-styles: 6.2.3 @@ -6886,7 +6727,7 @@ snapshots: cbor: 10.0.11 chalk: 5.6.2 chunkd: 2.0.1 - ci-info: 4.3.0 + ci-info: 4.3.1 ci-parallel-vars: 1.0.1 cli-truncate: 4.0.0 code-excerpt: 4.0.0 @@ -6902,9 +6743,9 @@ snapshots: is-plain-object: 5.0.0 is-promise: 4.0.0 matcher: 5.0.0 - memoize: 10.1.0 + memoize: 10.2.0 ms: 2.1.3 - p-map: 7.0.3 + p-map: 7.0.4 package-config: 5.0.0 picomatch: 4.0.3 plur: 5.1.0 @@ -7126,7 +6967,7 @@ snapshots: ci-info@3.9.0: {} - ci-info@4.3.0: {} + ci-info@4.3.1: {} ci-parallel-vars@1.0.1: {} @@ -7344,7 +7185,7 @@ snapshots: detect-indent@6.1.0: {} - detect-libc@2.1.0: {} + detect-libc@2.1.2: {} didyoumean@1.2.2: {} @@ -7405,7 +7246,7 @@ snapshots: emittery@1.2.0: {} - emoji-regex@10.5.0: {} + emoji-regex@10.6.0: {} emoji-regex@8.0.0: {} @@ -8388,7 +8229,7 @@ snapshots: map-age-cleaner: 0.1.3 mimic-fn: 4.0.0 - 
memoize@10.1.0: + memoize@10.2.0: dependencies: mimic-function: 5.0.1 @@ -8677,7 +8518,7 @@ snapshots: dependencies: aggregate-error: 4.0.1 - p-map@7.0.3: {} + p-map@7.0.4: {} p-queue@6.6.2: dependencies: @@ -9060,35 +8901,6 @@ snapshots: optionalDependencies: fsevents: 2.3.3 - rollup@4.52.2: - dependencies: - '@types/estree': 1.0.8 - optionalDependencies: - '@rollup/rollup-android-arm-eabi': 4.52.2 - '@rollup/rollup-android-arm64': 4.52.2 - '@rollup/rollup-darwin-arm64': 4.52.2 - '@rollup/rollup-darwin-x64': 4.52.2 - '@rollup/rollup-freebsd-arm64': 4.52.2 - '@rollup/rollup-freebsd-x64': 4.52.2 - '@rollup/rollup-linux-arm-gnueabihf': 4.52.2 - '@rollup/rollup-linux-arm-musleabihf': 4.52.2 - '@rollup/rollup-linux-arm64-gnu': 4.52.2 - '@rollup/rollup-linux-arm64-musl': 4.52.2 - '@rollup/rollup-linux-loong64-gnu': 4.52.2 - '@rollup/rollup-linux-ppc64-gnu': 4.52.2 - '@rollup/rollup-linux-riscv64-gnu': 4.52.2 - '@rollup/rollup-linux-riscv64-musl': 4.52.2 - '@rollup/rollup-linux-s390x-gnu': 4.52.2 - '@rollup/rollup-linux-x64-gnu': 4.52.2 - '@rollup/rollup-linux-x64-musl': 4.52.2 - '@rollup/rollup-openharmony-arm64': 4.52.2 - '@rollup/rollup-win32-arm64-msvc': 4.52.2 - '@rollup/rollup-win32-ia32-msvc': 4.52.2 - '@rollup/rollup-win32-x64-gnu': 4.52.2 - '@rollup/rollup-win32-x64-msvc': 4.52.2 - fsevents: 2.3.3 - optional: true - run-async@3.0.0: {} run-parallel@1.2.0: @@ -9293,7 +9105,7 @@ snapshots: string-width@7.2.0: dependencies: - emoji-regex: 10.5.0 + emoji-regex: 10.6.0 get-east-asian-width: 1.4.0 strip-ansi: 7.1.2 @@ -9392,7 +9204,7 @@ snapshots: mkdirp: 1.0.4 yallist: 4.0.0 - tar@7.4.4: + tar@7.5.2: dependencies: '@isaacs/fs-minipass': 4.0.1 chownr: 3.0.0 @@ -9662,6 +9474,8 @@ snapshots: undici@7.12.0: {} + undici@7.16.0: {} + unicorn-magic@0.3.0: {} unique-filename@3.0.0: diff --git a/pnpm-workspace.yaml b/pnpm-workspace.yaml index f749802d7..55e6aa971 100644 --- a/pnpm-workspace.yaml +++ b/pnpm-workspace.yaml @@ -1,15 +1,11 @@ -minimumReleaseAge: 4320 # 3 days 
packages: - # exclude the integration test repos - '!integration-tests/worker/dummy-repo/**' - '!integration-tests/worker/tmp/**' - '!integration-tests/cli/tmp/**' - '!integration-tests/cli/repo/**' - - # all packages in subdirs of packages/ and components/ - - 'packages/**' - - 'examples/**' - - 'integration-tests/**' - - # exclude packages that are inside test directories + - packages/** + - examples/** + - integration-tests/** - '!**/test/**' + +minimumReleaseAge: 1440