diff --git a/package.json b/package.json
index 3b21728d..765f49d2 100644
--- a/package.json
+++ b/package.json
@@ -2,7 +2,7 @@
   "author": "Transcend Inc.",
   "name": "@transcend-io/cli",
   "description": "A command line interface for programmatic operations across Transcend.",
-  "version": "8.1.1",
+  "version": "8.1.2",
   "homepage": "https://github.com/transcend-io/cli",
   "repository": {
     "type": "git",
diff --git a/src/commands/consent/upload-preferences/artifacts/receipts/applyReceiptSummary.ts b/src/commands/consent/upload-preferences/artifacts/receipts/applyReceiptSummary.ts
new file mode 100644
index 00000000..3a267c74
--- /dev/null
+++ b/src/commands/consent/upload-preferences/artifacts/receipts/applyReceiptSummary.ts
@@ -0,0 +1,60 @@
+import type { AnyTotals } from '../../ui';
+import { readFailingUpdatesFromReceipt } from './readFailingUpdatesFromReceipt';
+import { resolveReceiptPath } from './resolveReceiptPath';
+import { summarizeReceipt } from './summarizeReceipt';
+
+/**
+ * Applies the summary of a receipt to the overall aggregation.
+ *
+ * @param opts - Options for applying the receipt summary
+ */
+export function applyReceiptSummary(opts: {
+  /** Folder where receipts are stored */
+  receiptsFolder: string;
+  /** Path to the file being processed */
+  filePath: string;
+  /** Path to the receipt file, if different from the default */
+  receiptFilepath?: string | null;
+  /** Aggregation object to update */
+  agg: AnyTotals;
+  /** Whether this is a dry run (no actual updates) */
+  dryRun: boolean;
+  /** Array to collect failing updates from the receipt */
+  failingUpdatesMem: Array<unknown>;
+}): void {
+  const {
+    receiptsFolder,
+    filePath,
+    receiptFilepath,
+    agg,
+    dryRun,
+    failingUpdatesMem,
+  } = opts;
+
+  const resolved =
+    (typeof receiptFilepath === 'string' && receiptFilepath) ||
+    resolveReceiptPath(receiptsFolder, filePath);
+
+  if (!resolved) return;
+
+  const summary = summarizeReceipt(resolved, dryRun);
+
+  // collect failing updates
+  failingUpdatesMem.push(...readFailingUpdatesFromReceipt(resolved, filePath));
+
+  // merge totals
+  if (summary.mode === 'upload' && agg.mode === 'upload') {
+    agg.success += summary.success;
+    agg.skipped += summary.skipped;
+    agg.error += summary.error;
+    Object.entries(summary.errors).forEach(([k, v]) => {
+      (agg.errors as Record<string, number>)[k] =
+        (agg.errors[k] ?? 
0) + (v as number); + }); + } else if (summary.mode === 'check' && agg.mode === 'check') { + agg.totalPending += summary.totalPending; + agg.pendingConflicts += summary.pendingConflicts; + agg.pendingSafe += summary.pendingSafe; + agg.skipped += summary.skipped; + } +} diff --git a/src/commands/consent/upload-preferences/artifacts/receipts/index.ts b/src/commands/consent/upload-preferences/artifacts/receipts/index.ts new file mode 100644 index 00000000..fa9c6f7d --- /dev/null +++ b/src/commands/consent/upload-preferences/artifacts/receipts/index.ts @@ -0,0 +1,4 @@ +export * from './readFailingUpdatesFromReceipt'; +export * from './summarizeReceipt'; +export * from './resolveReceiptPath'; +export * from './applyReceiptSummary'; diff --git a/src/commands/consent/upload-preferences/artifacts/receipts/readFailingUpdatesFromReceipt.ts b/src/commands/consent/upload-preferences/artifacts/receipts/readFailingUpdatesFromReceipt.ts new file mode 100644 index 00000000..ebdeaf1f --- /dev/null +++ b/src/commands/consent/upload-preferences/artifacts/receipts/readFailingUpdatesFromReceipt.ts @@ -0,0 +1,36 @@ +import { readFileSync } from 'node:fs'; +import type { FailingUpdateRow } from '..'; + +/** + * Parse failing updates out of a receipts.json file. + * Returns rows you can merge into your in-memory buffer. + * + * @param receiptPath - The path to the receipts.json file + * @param sourceFile - Optional source file for context + * @returns An array of FailingUpdateRow objects + */ +export function readFailingUpdatesFromReceipt( + receiptPath: string, + sourceFile?: string, +): FailingUpdateRow[] { + try { + const raw = readFileSync(receiptPath, 'utf8'); + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const json = JSON.parse(raw) as any; + const failing = json?.failingUpdates ?? {}; + const out: FailingUpdateRow[] = []; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + for (const [primaryKey, val] of Object.entries(failing)) { + out.push({ + primaryKey, + uploadedAt: val?.uploadedAt ?? '', + error: val?.error ?? '', + updateJson: val?.update ? JSON.stringify(val.update) : '', + sourceFile, + }); + } + return out; + } catch { + return []; + } +} diff --git a/src/commands/consent/upload-preferences/artifacts/receipts/resolveReceiptPath.ts b/src/commands/consent/upload-preferences/artifacts/receipts/resolveReceiptPath.ts new file mode 100644 index 00000000..cfa234a5 --- /dev/null +++ b/src/commands/consent/upload-preferences/artifacts/receipts/resolveReceiptPath.ts @@ -0,0 +1,39 @@ +import { join } from 'node:path'; +import { getFilePrefix } from '../computeFiles'; +import { existsSync, readdirSync, statSync } from 'node:fs'; + +/** + * Find the receipt JSON for a given input file (supports suffixes like __1). 
+ *
+ * @param receiptsFolder - Where to look for receipts
+ * @param filePath - The input file path to match against
+ * @returns The path to the receipt file, or null if not found
+ */
+export function resolveReceiptPath(
+  receiptsFolder: string,
+  filePath: string,
+): string | null {
+  const base = `${getFilePrefix(filePath)}-receipts.json`;
+  const exact = join(receiptsFolder, base);
+  if (existsSync(exact)) return exact;
+
+  const prefix = `${getFilePrefix(filePath)}-receipts`;
+  try {
+    const entries = readdirSync(receiptsFolder)
+      .filter((n) => n.startsWith(prefix) && n.endsWith('.json'))
+      .map((name) => {
+        const full = join(receiptsFolder, name);
+        let mtime = 0;
+        try {
+          mtime = statSync(full).mtimeMs;
+        } catch {
+          // ignore if stat fails
+        }
+        return { full, mtime };
+      })
+      .sort((a, b) => b.mtime - a.mtime);
+    return entries[0]?.full ?? null;
+  } catch {
+    return null;
+  }
+}
diff --git a/src/commands/consent/upload-preferences/artifacts/receipts/summarizeReceipt.ts b/src/commands/consent/upload-preferences/artifacts/receipts/summarizeReceipt.ts
new file mode 100644
index 00000000..104d5b51
--- /dev/null
+++ b/src/commands/consent/upload-preferences/artifacts/receipts/summarizeReceipt.ts
@@ -0,0 +1,64 @@
+import { readFileSync } from 'node:fs';
+import type { AnyTotals } from '../../ui';
+
+/**
+ * Summarize a receipts JSON into dashboard counters.
+ *
+ * @param receiptPath - The path to the receipt file
+ * @param dryRun - Whether this is a dry run (no actual upload)
+ * @returns An object summarizing the receipt data
+ */
+export function summarizeReceipt(
+  receiptPath: string,
+  dryRun: boolean,
+): AnyTotals {
+  try {
+    const raw = readFileSync(receiptPath, 'utf8');
+    // eslint-disable-next-line @typescript-eslint/no-explicit-any
+    const json = JSON.parse(raw) as any;
+
+    const skippedCount = Object.values(json?.skippedUpdates ?? {}).length;
+
+    if (!dryRun) {
+      const success = Object.values(json?.successfulUpdates ?? {}).length;
+      const failed = Object.values(json?.failingUpdates ?? {}).length;
+      const errors: Record<string, number> = {};
+      Object.values(json?.failingUpdates ?? {}).forEach((v) => {
+        // eslint-disable-next-line @typescript-eslint/no-explicit-any
+        const msg = (v as any)?.error ?? 'Unknown error';
+        errors[msg] = (errors[msg] ?? 0) + 1;
+      });
+      return {
+        mode: 'upload',
+        success,
+        skipped: skippedCount,
+        error: failed,
+        errors,
+      };
+    }
+
+    const totalPending = Object.values(json?.pendingUpdates ?? {}).length;
+    const pendingConflicts = Object.values(
+      json?.pendingConflictUpdates ?? {},
+    ).length;
+    const pendingSafe = Object.values(json?.pendingSafeUpdates ?? {}).length;
+
+    return {
+      mode: 'check',
+      totalPending,
+      pendingConflicts,
+      pendingSafe,
+      skipped: skippedCount,
+    };
+  } catch {
+    return !dryRun
+      ? 
{ mode: 'upload', success: 0, skipped: 0, error: 0, errors: {} }
+      : {
+          mode: 'check',
+          totalPending: 0,
+          pendingConflicts: 0,
+          pendingSafe: 0,
+          skipped: 0,
+        };
+  }
+}
diff --git a/src/commands/consent/upload-preferences/artifacts/receipts/tests/applyReceiptSummary.test.ts b/src/commands/consent/upload-preferences/artifacts/receipts/tests/applyReceiptSummary.test.ts
new file mode 100644
index 00000000..1296170e
--- /dev/null
+++ b/src/commands/consent/upload-preferences/artifacts/receipts/tests/applyReceiptSummary.test.ts
@@ -0,0 +1,341 @@
+import { describe, it, expect, vi, beforeEach } from 'vitest';
+
+import { applyReceiptSummary } from '../applyReceiptSummary';
+import type { AnyTotals, CheckModeTotals, UploadModeTotals } from '../../../ui';
+
+const H = vi.hoisted(() => {
+  // capture last calls/args
+  const calls = {
+    summarize: [] as Array<unknown[]>,
+    resolve: [] as Array<unknown[]>,
+    readFailing: [] as Array<unknown[]>,
+  };
+
+  // default stubs (tests can override per-case)
+  const stubs = {
+    summarizeReceipt: vi.fn((): UploadModeTotals | CheckModeTotals => ({
+      mode: 'upload' as const,
+      success: 1,
+      skipped: 2,
+      error: 3,
+      errors: { E1: 1 },
+    })),
+    resolveReceiptPath: vi.fn(() => '/resolved.json'),
+    readFailingUpdatesFromReceipt: vi.fn(() => ['f1', 'f2']),
+  };
+
+  return { calls, stubs };
+});
+
+vi.mock('../summarizeReceipt', () => ({
+  summarizeReceipt: vi.fn((...a: unknown[]) => {
+    H.calls.summarize.push(a);
+    return H.stubs.summarizeReceipt();
+  }),
+}));
+
+vi.mock('../resolveReceiptPath', () => ({
+  resolveReceiptPath: vi.fn((...a: unknown[]) => {
+    H.calls.resolve.push(a);
+    return H.stubs.resolveReceiptPath();
+  }),
+}));
+
+vi.mock('../readFailingUpdatesFromReceipt', () => ({
+  readFailingUpdatesFromReceipt: vi.fn((...a: unknown[]) => {
+    H.calls.readFailing.push(a);
+    return H.stubs.readFailingUpdatesFromReceipt();
+  }),
+}));
+
+describe('applyReceiptSummary', () => {
+  beforeEach(() => {
+    vi.clearAllMocks();
+    H.calls.summarize = [];
+    H.calls.resolve = [];
+    H.calls.readFailing = [];
+    // reset stubs to defaults
+    H.stubs.summarizeReceipt.mockImplementation(() => ({
+      mode: 'upload',
+      success: 1,
+      skipped: 2,
+      error: 3,
+      errors: { E1: 1 },
+    }));
+    H.stubs.resolveReceiptPath.mockImplementation(() => '/resolved.json');
+    H.stubs.readFailingUpdatesFromReceipt.mockImplementation(() => [
+      'f1',
+      'f2',
+    ]);
+  });
+
+  it('returns early when resolved path is falsy', () => {
+    H.stubs.resolveReceiptPath.mockReturnValue('');
+
+    const agg: AnyTotals = {
+      mode: 'upload',
+      success: 0,
+      skipped: 0,
+      error: 0,
+      errors: {},
+    };
+    const failing: unknown[] = [];
+
+    applyReceiptSummary({
+      receiptsFolder: '/receipts',
+      filePath: '/abs/file.csv',
+      receiptFilepath: undefined,
+      agg,
+      dryRun: false,
+      failingUpdatesMem: failing,
+    });
+
+    // no downstream calls
+    expect(H.calls.summarize.length).toBe(0);
+    expect(H.calls.readFailing.length).toBe(0);
+    // no changes
+    expect(agg).toEqual({
+      mode: 'upload',
+      success: 0,
+      skipped: 0,
+      error: 0,
+      errors: {},
+    });
+    expect(failing).toEqual([]);
+  });
+
+  it('uses provided receiptFilepath when present and non-empty', () => {
+    const agg: AnyTotals = {
+      mode: 'upload',
+      success: 0,
+      skipped: 0,
+      error: 0,
+      errors: {},
+    };
+    const failing: unknown[] = [];
+
+    applyReceiptSummary({
+      receiptsFolder: '/receipts',
+      filePath: '/abs/file.csv',
+      receiptFilepath: '/explicit.json',
+      agg,
+      dryRun: true,
+      failingUpdatesMem: failing,
+    });
+
+    // resolveReceiptPath should NOT be called when receiptFilepath is provided
expect(H.calls.resolve.length).toBe(0); + + // summarizeReceipt receives explicit path and dryRun flag + expect(H.calls.summarize[0]).toEqual(['/explicit.json', true]); + + // readFailingUpdatesFromReceipt called with explicit path and original filePath + expect(H.calls.readFailing[0]).toEqual(['/explicit.json', '/abs/file.csv']); + + // defaults (upload) merged into agg + expect(agg).toEqual({ + mode: 'upload', + success: 1, + skipped: 2, + error: 3, + errors: { E1: 1 }, + }); + + // failing entries pushed + expect(failing).toEqual(['f1', 'f2']); + }); + + it('resolves path when receiptFilepath is undefined/null/empty', () => { + const agg: AnyTotals = { + mode: 'upload', + success: 0, + skipped: 0, + error: 0, + errors: {}, + }; + const failing: unknown[] = []; + + applyReceiptSummary({ + receiptsFolder: '/receipts', + filePath: '/abs/file.csv', + receiptFilepath: null, + agg, + dryRun: false, + failingUpdatesMem: failing, + }); + + expect(H.calls.resolve[0]).toEqual(['/receipts', '/abs/file.csv']); + expect(H.calls.summarize[0]).toEqual(['/resolved.json', false]); + expect(H.calls.readFailing[0]).toEqual(['/resolved.json', '/abs/file.csv']); + }); + + it('merges upload totals and accumulates errors per key', () => { + // First receipt: upload with two error keys + H.stubs.summarizeReceipt.mockReturnValueOnce({ + mode: 'upload' as const, + success: 5, + skipped: 1, + error: 2, + errors: { E1: 2, E2: 1 }, + }); + H.stubs.readFailingUpdatesFromReceipt.mockReturnValueOnce(['a']); + + // Second receipt: upload with overlapping keys + H.stubs.summarizeReceipt.mockReturnValueOnce({ + mode: 'upload' as const, + success: 3, + skipped: 0, + error: 1, + errors: { E1: 4, E3: 1 }, + }); + H.stubs.readFailingUpdatesFromReceipt.mockReturnValueOnce(['b', 'c']); + + const agg: UploadModeTotals = { + mode: 'upload', + success: 10, + skipped: 2, + error: 0, + errors: { E1: 1 }, + }; + const failing: unknown[] = []; + + // apply twice to simulate multiple receipts + applyReceiptSummary({ + receiptsFolder: '/r', + filePath: '/f1.csv', + agg, + dryRun: false, + failingUpdatesMem: failing, + }); + applyReceiptSummary({ + receiptsFolder: '/r', + filePath: '/f2.csv', + agg, + dryRun: false, + failingUpdatesMem: failing, + }); + + expect(agg).toEqual({ + mode: 'upload', + success: 10 + 5 + 3, // 18 + skipped: 2 + 1 + 0, // 3 + error: 0 + 2 + 1, // 3 + errors: { + // start with E1:1, add receipt1(E1:2), receipt2(E1:4) + E1: 1 + 2 + 4, // 7 + E2: 1, // from receipt1 + E3: 1, // from receipt2 + }, + }); + + expect(failing).toEqual(['a', 'b', 'c']); + }); + + it('merges check totals correctly', () => { + H.stubs.summarizeReceipt.mockReturnValue({ + mode: 'check' as const, + totalPending: 9, + pendingConflicts: 4, + pendingSafe: 5, + skipped: 2, + }); + H.stubs.readFailingUpdatesFromReceipt.mockReturnValue(['x']); + + const agg: CheckModeTotals = { + mode: 'check', + totalPending: 1, + pendingConflicts: 2, + pendingSafe: 3, + skipped: 4, + }; + const failing: unknown[] = []; + + applyReceiptSummary({ + receiptsFolder: '/r', + filePath: '/f.csv', + agg, + dryRun: false, + failingUpdatesMem: failing, + }); + + expect(agg).toEqual({ + mode: 'check', + totalPending: 1 + 9, // 10 + pendingConflicts: 2 + 4, // 6 + pendingSafe: 3 + 5, // 8 + skipped: 4 + 2, // 6 + }); + + expect(failing).toEqual(['x']); + }); + + it('does not cross-merge mismatched modes (upload summary into check agg and vice versa)', () => { + // upload summary + check agg → no merge changes + H.stubs.summarizeReceipt.mockReturnValueOnce({ + mode: 
'upload' as const, + success: 10, + skipped: 1, + error: 2, + errors: { E: 10 }, + }); + + const aggCheck: CheckModeTotals = { + mode: 'check', + totalPending: 1, + pendingConflicts: 1, + pendingSafe: 1, + skipped: 1, + }; + const failing1: unknown[] = []; + + applyReceiptSummary({ + receiptsFolder: '/r', + filePath: '/f.csv', + agg: aggCheck, + dryRun: false, + failingUpdatesMem: failing1, + }); + + expect(aggCheck).toEqual({ + mode: 'check', + totalPending: 1, + pendingConflicts: 1, + pendingSafe: 1, + skipped: 1, + }); + + // check summary + upload agg → no merge changes + H.stubs.summarizeReceipt.mockReturnValueOnce({ + mode: 'check' as const, + totalPending: 7, + pendingConflicts: 2, + pendingSafe: 5, + skipped: 0, + }); + + const aggUpload: UploadModeTotals = { + mode: 'upload', + success: 1, + skipped: 2, + error: 3, + errors: { E0: 1 }, + }; + const failing2: unknown[] = []; + + applyReceiptSummary({ + receiptsFolder: '/r', + filePath: '/f2.csv', + agg: aggUpload, + dryRun: false, + failingUpdatesMem: failing2, + }); + + expect(aggUpload).toEqual({ + mode: 'upload', + success: 1, + skipped: 2, + error: 3, + errors: { E0: 1 }, + }); + }); +}); diff --git a/src/commands/consent/upload-preferences/artifacts/receipts/tests/readFailingUpdatesFromReceipt.test.ts b/src/commands/consent/upload-preferences/artifacts/receipts/tests/readFailingUpdatesFromReceipt.test.ts new file mode 100644 index 00000000..83668e71 --- /dev/null +++ b/src/commands/consent/upload-preferences/artifacts/receipts/tests/readFailingUpdatesFromReceipt.test.ts @@ -0,0 +1,123 @@ +import { describe, it, expect, vi, beforeEach } from 'vitest'; + +import { readFailingUpdatesFromReceipt } from '../readFailingUpdatesFromReceipt'; + +const H = vi.hoisted(() => ({ + readFileSync: vi.fn() as unknown as (path: string, enc: string) => string, +})); + +// mock MUST come before importing SUT +vi.mock('node:fs', () => ({ + // eslint-disable-next-line @typescript-eslint/no-explicit-any + readFileSync: (...a: unknown[]) => (H.readFileSync as any)(...a), +})); + +describe('readFailingUpdatesFromReceipt', () => { + beforeEach(() => { + vi.clearAllMocks(); + }); + + it('parses failing updates (happy path) and includes sourceFile', () => { + H.readFileSync = vi.fn().mockReturnValueOnce( + JSON.stringify({ + failingUpdates: { + 'pk-1': { + uploadedAt: '2025-08-15T00:00:00.000Z', + error: 'Bad thing', + update: { purpose: 'Marketing', enabled: false }, + }, + 'pk-2': { + uploadedAt: '2025-08-16T10:11:12.000Z', + error: 'Oops', + update: { purpose: 'Email', enabled: true }, + }, + }, + }), + ); + + const out = readFailingUpdatesFromReceipt( + '/path/receipts.json', + '/src/file.csv', + ); + + expect(out).toEqual([ + { + primaryKey: 'pk-1', + uploadedAt: '2025-08-15T00:00:00.000Z', + error: 'Bad thing', + updateJson: JSON.stringify({ purpose: 'Marketing', enabled: false }), + sourceFile: '/src/file.csv', + }, + { + primaryKey: 'pk-2', + uploadedAt: '2025-08-16T10:11:12.000Z', + error: 'Oops', + updateJson: JSON.stringify({ purpose: 'Email', enabled: true }), + sourceFile: '/src/file.csv', + }, + ]); + + expect(H.readFileSync).toHaveBeenCalledWith('/path/receipts.json', 'utf8'); + }); + + it('fills defaults when fields are missing and omits updateJson when update is absent', () => { + H.readFileSync = vi.fn().mockReturnValueOnce( + JSON.stringify({ + failingUpdates: { + 'pk-1': {}, // all missing -> defaults + 'pk-2': { uploadedAt: 'X' }, // partial + }, + }), + ); + + const out = readFailingUpdatesFromReceipt('/path/receipts.json'); + + 
expect(out).toEqual([
+      {
+        primaryKey: 'pk-1',
+        uploadedAt: '',
+        error: '',
+        updateJson: '',
+        sourceFile: undefined,
+      },
+      {
+        primaryKey: 'pk-2',
+        uploadedAt: 'X',
+        error: '',
+        updateJson: '',
+        sourceFile: undefined,
+      },
+    ]);
+  });
+
+  it('returns [] when failingUpdates is empty object', () => {
+    H.readFileSync = vi
+      .fn()
+      .mockReturnValueOnce(JSON.stringify({ failingUpdates: {} }));
+    const out = readFailingUpdatesFromReceipt('/path/receipts.json');
+    expect(out).toEqual([]);
+  });
+
+  it('returns [] when failingUpdates key is missing entirely', () => {
+    H.readFileSync = vi
+      .fn()
+      .mockReturnValueOnce(JSON.stringify({ someOtherKey: 1 }));
+    const out = readFailingUpdatesFromReceipt('/path/receipts.json');
+    expect(out).toEqual([]);
+  });
+
+  it('returns [] on invalid JSON', () => {
+    H.readFileSync = vi.fn().mockReturnValueOnce('{not json}');
+    const out = readFailingUpdatesFromReceipt('/path/receipts.json');
+    expect(out).toEqual([]);
+  });
+
+  it('returns [] when readFileSync throws', () => {
+    H.readFileSync = vi.fn(() => {
+      throw new Error('ENOENT');
+      // eslint-disable-next-line @typescript-eslint/no-explicit-any
+    }) as any;
+    const out = readFailingUpdatesFromReceipt('/path/missing.json');
+    expect(out).toEqual([]);
+  });
+});
diff --git a/src/commands/consent/upload-preferences/artifacts/receipts/tests/resolveReceiptPath.test.ts b/src/commands/consent/upload-preferences/artifacts/receipts/tests/resolveReceiptPath.test.ts
new file mode 100644
index 00000000..8501d7a1
--- /dev/null
+++ b/src/commands/consent/upload-preferences/artifacts/receipts/tests/resolveReceiptPath.test.ts
@@ -0,0 +1,162 @@
+import { describe, it, expect, vi, beforeEach } from 'vitest';
+import { join } from 'node:path';
+
+// Now import the SUT
+import { resolveReceiptPath } from '../resolveReceiptPath';
+import type { getFilePrefix } from '../../computeFiles';
+
+const H = vi.hoisted(() => {
+  const existsSync = vi.fn();
+  const readdirSync = vi.fn();
+  const statSync = vi.fn();
+
+  const getFilePrefix = vi.fn(
+    // eslint-disable-next-line @typescript-eslint/no-unused-vars
+    (...args: string[]) => 'FILE',
+  );
+
+  const resetFs = (): void => {
+    existsSync.mockReset();
+    readdirSync.mockReset();
+    statSync.mockReset();
+  };
+  const resetCF = (): void => {
+    getFilePrefix.mockReset();
+    getFilePrefix.mockReturnValue('FILE');
+  };
+
+  return { existsSync, readdirSync, statSync, getFilePrefix, resetFs, resetCF };
+});
+
+vi.mock('node:fs', () => ({
+  existsSync: (...a: unknown[]) => H.existsSync(...a),
+  readdirSync: (...a: unknown[]) => H.readdirSync(...a),
+  statSync: (...a: unknown[]) => H.statSync(...a),
+}));
+
+vi.mock('../../computeFiles', () => ({
+  // forward the arguments so the spy records them
+  getFilePrefix: (...a: Parameters<typeof getFilePrefix>) =>
+    H.getFilePrefix(...a),
+}));
+
+// -----------------------------------------------------------------------------
+
+describe('resolveReceiptPath', () => {
+  const folder = '/receipts';
+  const file = '/some/path/input.csv';
+
+  beforeEach(() => {
+    H.resetFs();
+    H.resetCF();
+  });
+
+  it('returns the exact receipt path when it exists (short-circuit, no directory scan)', () => {
+    const expectedBase = 'FILE-receipts.json';
+    const expected = join(folder, expectedBase);
+
+    H.existsSync.mockReturnValueOnce(true);
+
+    const out = resolveReceiptPath(folder, file);
+
+    expect(H.getFilePrefix).toHaveBeenCalledWith(file);
+    expect(H.existsSync).toHaveBeenCalledWith(expected);
+    expect(out).toBe(expected);
+
+    expect(H.readdirSync).not.toHaveBeenCalled();
+ expect(H.statSync).not.toHaveBeenCalled(); + }); + + it('scans directory and returns the most recent matching suffix when exact does not exist', () => { + H.existsSync.mockReturnValueOnce(false); + + H.readdirSync.mockReturnValueOnce([ + 'FILE-receipts__1.json', + 'FILE-receipts__3.json', + 'FILE-receipts__2.json', + 'unrelated.txt', + 'OTHER-receipts.json', + ]); + + H.statSync.mockImplementation((full: string) => { + const mtimeMs = full.endsWith('FILE-receipts__3.json') + ? 3000 + : full.endsWith('FILE-receipts__2.json') + ? 2000 + : full.endsWith('FILE-receipts__1.json') + ? 1000 + : 0; + return { mtimeMs } as unknown as import('node:fs').Stats; + }); + + const out = resolveReceiptPath(folder, file); + + expect(H.readdirSync).toHaveBeenCalledWith(folder); + expect(out).toBe(join(folder, 'FILE-receipts__3.json')); + + expect(H.statSync).toHaveBeenCalledTimes(3); + expect(H.statSync).toHaveBeenCalledWith( + join(folder, 'FILE-receipts__1.json'), + ); + expect(H.statSync).toHaveBeenCalledWith( + join(folder, 'FILE-receipts__2.json'), + ); + expect(H.statSync).toHaveBeenCalledWith( + join(folder, 'FILE-receipts__3.json'), + ); + }); + + it('ignores stat errors but still picks the newest among remaining candidates', () => { + H.existsSync.mockReturnValueOnce(false); + + H.readdirSync.mockReturnValueOnce([ + 'FILE-receipts__old.json', + 'FILE-receipts__new.json', + ]); + + H.statSync.mockImplementation((full: string) => { + if (full.endsWith('__old.json')) throw new Error('EPERM'); + return { mtimeMs: 9999 } as unknown as import('node:fs').Stats; + }); + + const out = resolveReceiptPath(folder, file); + expect(out).toBe(join(folder, 'FILE-receipts__new.json')); + }); + + it('returns null when there are no matching files after filtering', () => { + H.existsSync.mockReturnValueOnce(false); + H.readdirSync.mockReturnValueOnce([ + 'unrelated.json', + 'also-unrelated.txt', + 'FILE-not-a-receipt.json', + ]); + + const out = resolveReceiptPath(folder, file); + expect(out).toBeNull(); + expect(H.statSync).not.toHaveBeenCalled(); + }); + + it('returns null when directory read throws', () => { + H.existsSync.mockReturnValueOnce(false); + H.readdirSync.mockImplementationOnce(() => { + throw new Error('ENOENT'); + }); + + const out = resolveReceiptPath(folder, file); + expect(out).toBeNull(); + }); + + it('uses getFilePrefix value for both exact and prefix computations', () => { + // It’s called twice in the SUT (exact and prefix), so persist the value across both calls. 
+ H.getFilePrefix.mockReturnValue('DYNAMIC'); + + H.existsSync.mockReturnValueOnce(false); + H.readdirSync.mockReturnValueOnce(['DYNAMIC-receipts__42.json']); + H.statSync.mockReturnValueOnce({ + mtimeMs: 1, + } as unknown as import('node:fs').Stats); + + const out = resolveReceiptPath(folder, file); + expect(out).toBe(join(folder, 'DYNAMIC-receipts__42.json')); + }); +}); diff --git a/src/commands/consent/upload-preferences/artifacts/receipts/tests/summarizeReceipt.test.ts b/src/commands/consent/upload-preferences/artifacts/receipts/tests/summarizeReceipt.test.ts new file mode 100644 index 00000000..07d3c49d --- /dev/null +++ b/src/commands/consent/upload-preferences/artifacts/receipts/tests/summarizeReceipt.test.ts @@ -0,0 +1,162 @@ +import { describe, it, expect, vi, beforeEach } from 'vitest'; + +import { summarizeReceipt } from '../summarizeReceipt'; + +// --- Hoisted fs mock ---------------------------------------------------------- +const H = vi.hoisted(() => ({ + readFileSync: vi.fn() as unknown as (path: string, enc: string) => string, +})); + +// Mock BEFORE SUT import +vi.mock('node:fs', () => ({ + // eslint-disable-next-line @typescript-eslint/no-explicit-any + readFileSync: (...a: unknown[]) => (H.readFileSync as any)(...a), +})); + +describe('summarizeReceipt', () => { + beforeEach(() => { + vi.clearAllMocks(); + }); + + it('upload mode: counts success/error/skipped and aggregates errors by message', () => { + H.readFileSync = vi.fn().mockReturnValueOnce( + JSON.stringify({ + successfulUpdates: { + a: {}, + b: {}, + c: {}, + }, + failingUpdates: { + f1: { error: 'Bad input' }, + f2: { error: 'Bad input' }, + f3: { error: 'Network' }, + f4: {}, // missing error -> "Unknown error" + }, + skippedUpdates: { + s1: {}, + s2: {}, + }, + }), + ); + + const out = summarizeReceipt('/r/receipt.json', /* dryRun */ false); + + expect(H.readFileSync).toHaveBeenCalledWith('/r/receipt.json', 'utf8'); + + expect(out).toEqual({ + mode: 'upload', + success: 3, // a,b,c + error: 4, // f1..f4 + skipped: 2, // s1,s2 + errors: { + 'Bad input': 2, + Network: 1, + 'Unknown error': 1, + }, + }); + }); + + it('upload mode: missing sections are treated as empty', () => { + H.readFileSync = vi.fn().mockReturnValueOnce(JSON.stringify({})); + + const out = summarizeReceipt('/r/empty.json', false); + + expect(out).toEqual({ + mode: 'upload', + success: 0, + error: 0, + skipped: 0, + errors: {}, + }); + }); + + it('check mode: counts pending, conflicts, safe, and skipped', () => { + H.readFileSync = vi.fn().mockReturnValueOnce( + JSON.stringify({ + pendingUpdates: { p1: {}, p2: {}, p3: {} }, + pendingConflictUpdates: { c1: {}, c2: {} }, + pendingSafeUpdates: { s1: {}, s2: {}, s3: {}, s4: {} }, + skippedUpdates: { k1: {} }, + }), + ); + + const out = summarizeReceipt('/r/check.json', /* dryRun */ true); + + expect(H.readFileSync).toHaveBeenCalledWith('/r/check.json', 'utf8'); + + expect(out).toEqual({ + mode: 'check', + totalPending: 3, + pendingConflicts: 2, + pendingSafe: 4, + skipped: 1, + }); + }); + + it('check mode: missing sections are treated as empty', () => { + H.readFileSync = vi.fn().mockReturnValueOnce(JSON.stringify({})); + + const out = summarizeReceipt('/r/empty.json', true); + + expect(out).toEqual({ + mode: 'check', + totalPending: 0, + pendingConflicts: 0, + pendingSafe: 0, + skipped: 0, + }); + }); + + it('returns zeroed defaults on invalid JSON (upload mode)', () => { + H.readFileSync = vi.fn().mockReturnValueOnce('{not json'); + + const out = summarizeReceipt('/r/bad.json', false); + + 
expect(out).toEqual({
+      mode: 'upload',
+      success: 0,
+      error: 0,
+      skipped: 0,
+      errors: {},
+    });
+  });
+
+  it('returns zeroed defaults on invalid JSON (check mode)', () => {
+    H.readFileSync = vi.fn().mockReturnValueOnce('{not json');
+
+    const out = summarizeReceipt('/r/bad.json', true);
+
+    expect(out).toEqual({
+      mode: 'check',
+      totalPending: 0,
+      pendingConflicts: 0,
+      pendingSafe: 0,
+      skipped: 0,
+    });
+  });
+
+  it('returns zeroed defaults when readFileSync throws (both modes)', () => {
+    H.readFileSync = vi.fn(() => {
+      throw new Error('ENOENT');
+      // eslint-disable-next-line @typescript-eslint/no-explicit-any
+    }) as any;
+
+    const upload = summarizeReceipt('/r/missing.json', false);
+    expect(upload).toEqual({
+      mode: 'upload',
+      success: 0,
+      error: 0,
+      skipped: 0,
+      errors: {},
+    });
+
+    const check = summarizeReceipt('/r/missing.json', true);
+    expect(check).toEqual({
+      mode: 'check',
+      totalPending: 0,
+      pendingConflicts: 0,
+      pendingSafe: 0,
+      skipped: 0,
+    });
+  });
+});
diff --git a/src/lib/preference-management/codecs.ts b/src/lib/preference-management/codecs.ts
index 426b7680..9687a154 100644
--- a/src/lib/preference-management/codecs.ts
+++ b/src/lib/preference-management/codecs.ts
@@ -45,6 +45,40 @@ export const PurposeRowMapping = t.type({
 /** Override type */
 export type PurposeRowMapping = t.TypeOf<typeof PurposeRowMapping>;
 
+/**
+ * Mapping of column name to purpose row mapping.
+ * This is used to map each column in the CSV to the relevant purpose and preference definitions in
+ * transcend.
+ */
+export const ColumnPurposeMap = t.record(t.string, PurposeRowMapping);
+
+/** Override type */
+export type ColumnPurposeMap = t.TypeOf<typeof ColumnPurposeMap>;
+
+export const IdentifierMetadataForPreference = t.type({
+  /** The identifier name */
+  name: t.string,
+  /** Is unique on preference store */
+  isUniqueOnPreferenceStore: t.boolean,
+});
+
+/** Override type */
+export type IdentifierMetadataForPreference = t.TypeOf<
+  typeof IdentifierMetadataForPreference
+>;
+
+/**
+ * Mapping of identifier name to the column name in the CSV file.
+ * This is used to map each identifier name to the column in the CSV file.
+ */
+export const ColumnIdentifierMap = t.record(
+  t.string,
+  IdentifierMetadataForPreference,
+);
+
+/** Override type */
+export type ColumnIdentifierMap = t.TypeOf<typeof ColumnIdentifierMap>;
+
 export const FileMetadataState = t.intersection([
   t.type({
     /**
@@ -87,6 +121,105 @@ export const FileMetadataState = t.intersection([
 /** Override type */
 export type FileMetadataState = t.TypeOf<typeof FileMetadataState>;
 
+/**
+ * This is the type of the receipts that are stored in the file
+ * that is used to track the state of the upload process.
+ * It is used to resume the upload process from where it left off.
+ * It is used to persist the state of the upload process across multiple runs.
+ */
+export const PreferenceUpdateMap = t.record(
+  t.string,
+  // This can either be true to indicate the record is pending
+  // or it can be an object showing the object
+  // We only return a fixed number of results to avoid
+  // making the JSON file too large
+  t.union([t.boolean, PreferenceUpdateItem]),
+);
+
+/** Override type */
+export type PreferenceUpdateMap = t.TypeOf<typeof PreferenceUpdateMap>;
+
+/**
+ * This is the type of the pending updates that are safe to run without
+ * conflicts with existing consent preferences.
+ *
+ * Key is primaryKey of the record in the file.
+ * The value is the row in the file that is safe to upload.
+ */ +export const PendingSafePreferenceUpdates = t.record( + t.string, + // This can either be true to indicate the record is safe + // or it can be an object showing the object + // We only return a fixed number of results to avoid + // making the JSON file too large + t.union([t.boolean, t.record(t.string, t.string)]), +); + +/** Override type */ +export type PendingSafePreferenceUpdates = t.TypeOf< + typeof PendingSafePreferenceUpdates +>; + +/** + * These are the updates that failed to be uploaded to the API. + */ +export const FailingPreferenceUpdates = t.record( + t.string, + t.type({ + /** Time upload ran at */ + uploadedAt: t.string, + /** Attempts to upload that resulted in an error */ + error: t.string, + /** The update body */ + update: PreferenceUpdateItem, + }), +); + +/** Override type */ +export type FailingPreferenceUpdates = t.TypeOf< + typeof FailingPreferenceUpdates +>; + +/** + * This is the type of the pending updates that are in conflict with existing consent preferences. + * + * Key is primaryKey of the record in the file. + * The value is the row in the file that is pending upload. + */ +export const PendingWithConflictPreferenceUpdates = t.record( + t.string, + // We always return the conflicts for investigation + t.type({ + /** Record to be inserted to transcend v1/preferences API */ + record: PreferenceQueryResponseItem, + /** The row in the file that is pending upload */ + row: t.record(t.string, t.string), + }), +); + +/** Override type */ +export type PendingWithConflictPreferenceUpdates = t.TypeOf< + typeof PendingWithConflictPreferenceUpdates +>; + +/** + * The set of preference updates that are skipped + * Key is primaryKey and value is the row in the CSV + * that is skipped. + * + * This is usually because the preferences are already in the store + * or there are duplicate rows in the CSV file that are identical. + */ +export const SkippedPreferenceUpdates = t.record( + t.string, + t.record(t.string, t.string), +); + +/** Override type */ +export type SkippedPreferenceUpdates = t.TypeOf< + typeof SkippedPreferenceUpdates +>; + /** Persist this data between runs of the script */ export const PreferenceState = t.type({ /**