diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 9521ca3..3f29ef4 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -94,6 +94,13 @@ jobs:
contents: read
steps:
- uses: actions/checkout@v4
+ with:
+ fetch-depth: 0
+
+ - name: Configure git identity for graph materialization
+ run: |
+ git config user.name "ci-bot"
+ git config user.email "ci@xyph.dev"
- name: Fetch WARP graph refs
run: git fetch origin 'refs/warp/xyph-roadmap/writers/*:refs/warp/xyph-roadmap/writers/*'
diff --git a/.gitignore b/.gitignore
index ea438b4..dce4778 100644
--- a/.gitignore
+++ b/.gitignore
@@ -11,3 +11,4 @@ coverage/
.codex/
.claude/
.xyph.json
+docs/work/
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 749b145..46b5064 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,6 +4,18 @@ All notable changes to XYPH will be documented in this file.
## [Unreleased]
+### Added — Work DAG Analysis Suite
+
+- **`DagAnalysis.ts`** — pure functions for DAG structure analysis: level assignment, DAG width, greedy worker scheduling, transitive reduction/closure, anti-chain decomposition, reverse reachability, and provenance tracing
+- **`scripts/generate-work-dag.ts`** — generates comprehensive DAG visualization suite: full/per-campaign/backlog/graveyard SVGs in both LR and TB orientations, plus `work.md` analysis document with topological sort, critical path, parallelism, scheduling, transitive reduction/closure, ancestry/impact, campaign grouping, and anti-chain waves
+- **`npm run graph:work`** — runs the generator, outputs to `docs/work/`
+- **43 new tests** — unit tests for all DagAnalysis functions (diamond, linear, empty, single-node, isolated-node graphs)
+
+### Fixed — PR #32 Code Review
+
+- **DONE tasks inflated scheduling makespan** — `scheduleWorkers` now treats DONE tasks as weight 0, matching `computeCriticalPath` semantics (Codex P1)
+- **CI traceability job failure** — added `fetch-depth: 0` and git identity config to traceability workflow; shallow clones lack commit objects needed by git-warp materialization
+
### Added — Workflow Infrastructure
- **Git hooks** — `scripts/hooks/pre-commit` (lint gate) and `scripts/hooks/pre-push` (test gate); plain shell scripts, no Husky/lint-staged
diff --git a/CLAUDE.md b/CLAUDE.md
index 385fc57..0400cdc 100644
--- a/CLAUDE.md
+++ b/CLAUDE.md
@@ -80,6 +80,15 @@ If the types are hard, that means you need to understand the code better.
before your branch — fix them. You touched the codebase; you leave it better than
you found it.
+**NEVER implement graph algorithms in userland:**
+- If you find yourself implementing graph algorithms (BFS, DFS, topological sort,
+ reachability, transitive reduction/closure, level assignment, etc.), **STOP**.
+- git-warp probably already does what you need via `graph.traverse.*` or `graph.query()`.
+- If git-warp doesn't have the primitive you need, **STOP** and request the user adds
+ the desired functionality to git-warp. You must never assume that the full DAG can
+ fit in memory at once — git-warp's traversals are designed to work incrementally
+ over the commit graph.
+
### Project Planning via the Actuator
XYPH plans and tracks its own development through the WARP graph.
The `xyph-actuator.ts` CLI is the single source of truth for what's been done,
diff --git a/docs/assets/work-dag.svg b/docs/assets/work-dag.svg
new file mode 100644
index 0000000..fb3ed4a
--- /dev/null
+++ b/docs/assets/work-dag.svg
@@ -0,0 +1,2275 @@
+
+
+
+
+
diff --git a/package.json b/package.json
index 1754049..8d340b2 100644
--- a/package.json
+++ b/package.json
@@ -12,6 +12,7 @@
"test:local": "vitest run",
"graph:pull": "git fetch origin 'refs/warp/xyph-roadmap/writers/*:refs/warp/xyph-roadmap/writers/*'",
"graph:push": "git push origin 'refs/warp/xyph-roadmap/writers/*:refs/warp/xyph-roadmap/writers/*'",
+ "graph:work": "npx tsx scripts/generate-work-dag.ts",
"postinstall": "patch-package"
},
"dependencies": {
diff --git a/scripts/generate-work-dag.ts b/scripts/generate-work-dag.ts
new file mode 100644
index 0000000..df90beb
--- /dev/null
+++ b/scripts/generate-work-dag.ts
@@ -0,0 +1,730 @@
+#!/usr/bin/env -S npx tsx
+/**
+ * Generate the XYPH Work DAG analysis suite.
+ *
+ * Produces:
+ * docs/work/all-hr.svg, all-vert.svg — full DAG in both orientations
+ *   docs/work/&lt;campaign&gt;/&lt;campaign&gt;-hr.svg, &lt;campaign&gt;-vert.svg — per-campaign views
+ * docs/work/backlog-hr.svg, backlog-vert.svg
+ * docs/work/graveyard-hr.svg, graveyard-vert.svg
+ * docs/work/work.md — full analysis document
+ *
+ * Usage: npx tsx scripts/generate-work-dag.ts
+ */
+
+import WarpGraph, { GitGraphAdapter } from '@git-stunts/git-warp';
+import Plumbing from '@git-stunts/plumbing';
+import { execSync } from 'node:child_process';
+import { mkdirSync, writeFileSync } from 'node:fs';
+import { join } from 'node:path';
+import { normalizeQuestStatus } from '../src/domain/entities/Quest.js';
+import {
+ computeFrontier,
+ computeTopBlockers,
+ computeCriticalPath,
+ type TaskSummary,
+ type DepEdge,
+} from '../src/domain/services/DepAnalysis.js';
+import {
+ computeLevels,
+ dagWidth,
+ scheduleWorkers,
+ transitiveReduction,
+ transitiveClosure,
+ computeAntiChains,
+ reverseReachability,
+ computeProvenance,
+} from '../src/domain/services/DagAnalysis.js';
+
+// ---------------------------------------------------------------------------
+// Config
+// ---------------------------------------------------------------------------
+
+const WRITER_ID = process.env['XYPH_AGENT_ID'] ?? 'agent.prime';
+const OUTPUT_DIR = join(process.cwd(), 'docs', 'work');
+const WORKERS = 4;
+
+// Dark theme colors
+const STATUS_COLORS: Record<string, { fill: string; font: string; border: string }> = {
+ DONE: { fill: '#2d5016', font: '#b8e6a0', border: '#4a8c28' },
+ IN_PROGRESS: { fill: '#1a4a6e', font: '#a0d4f7', border: '#2980b9' },
+ PLANNED: { fill: '#4a3560', font: '#c9a0f7', border: '#7b52a0' },
+ BACKLOG: { fill: '#3a3a3a', font: '#cccccc', border: '#666666' },
+ GRAVEYARD: { fill: '#2a1a1a', font: '#996666', border: '#553333' },
+};
+
+const FRONTIER_COLORS = { fill: '#5c4a00', font: '#ffd700', border: '#daa520' };
+
+const CAMPAIGN_COLORS: Record<string, string> = {
+ 'campaign:CLITOOL': '#e67e22',
+ 'campaign:DASHBOARD': '#2ecc71',
+ 'campaign:AGENT': '#e74c3c',
+ 'campaign:ORACLE': '#9b59b6',
+ 'campaign:FORGE': '#f39c12',
+ 'campaign:WEAVER': '#1abc9c',
+ 'campaign:TRIAGE': '#3498db',
+ 'campaign:BEDROCK': '#95a5a6',
+ 'campaign:HEARTBEAT': '#95a5a6',
+ 'campaign:SOVEREIGNTY': '#95a5a6',
+ 'campaign:SUBMISSION': '#95a5a6',
+ 'campaign:TRACEABILITY': '#16a085',
+};
+
+// ---------------------------------------------------------------------------
+// Types
+// ---------------------------------------------------------------------------
+
+interface TaskNode {
+ id: string;
+ title: string;
+ status: string;
+ hours: number;
+ campaign: string | null;
+ campaignTitle: string | null;
+ deps: string[];
+}
+
+interface DotOptions {
+ rankdir: 'LR' | 'TB';
+ title?: string;
+ filter?: (t: TaskNode) => boolean;
+  highlightPath?: Set<string>;
+  highlightFrontier?: Set<string>;
+  highlightBlockers?: Set<string>;
+}
+
+// ---------------------------------------------------------------------------
+// Data loading
+// ---------------------------------------------------------------------------
+
+async function loadGraph(): Promise<{
+  tasks: Map<string, TaskNode>;
+  campaigns: Map<string, string>;
+}> {
+ const plumbing = Plumbing.createDefault({ cwd: process.cwd() });
+ const persistence = new GitGraphAdapter({ plumbing });
+
+ const graph = await WarpGraph.open({
+ persistence,
+ graphName: 'xyph-roadmap',
+ writerId: WRITER_ID,
+ autoMaterialize: true,
+ });
+ await graph.syncCoverage();
+ await graph.materialize();
+
+ const allNodes = await graph.getNodes();
+ const taskIds = allNodes.filter((n) => n.startsWith('task:'));
+ const campaignIds = allNodes.filter((n) => n.startsWith('campaign:') || n.startsWith('milestone:'));
+
+ // Load campaign titles
+  const campaigns = new Map<string, string>();
+ for (const cid of campaignIds) {
+ const props = await graph.getNodeProps(cid);
+ if (props) {
+ campaigns.set(cid, (props.get('title') as string) ?? cid.replace(/^(campaign|milestone):/, ''));
+ }
+ }
+
+ // Load tasks
+  const tasks = new Map<string, TaskNode>();
+ for (const id of taskIds) {
+ const props = await graph.getNodeProps(id);
+ if (!props) continue;
+
+ const rawStatus = (props.get('status') as string) ?? 'BACKLOG';
+ const status = normalizeQuestStatus(rawStatus);
+ const title = (props.get('title') as string) ?? id;
+ const hours = Number(props.get('hours') ?? 1);
+
+ const neighbors = (await graph.neighbors(id, 'outgoing')) as Array<{
+ label: string;
+ nodeId: string;
+ }>;
+
+ const campaignEdge = neighbors.find(
+ (n) => n.label === 'belongs-to' && (n.nodeId.startsWith('campaign:') || n.nodeId.startsWith('milestone:')),
+ );
+
+ const deps = neighbors
+ .filter((n) => n.label === 'depends-on')
+ .map((n) => n.nodeId);
+
+ const campaignId = campaignEdge?.nodeId ?? null;
+ tasks.set(id, {
+ id,
+ title,
+ status,
+ hours: Number.isFinite(hours) ? hours : 1,
+ campaign: campaignId,
+ campaignTitle: campaignId ? (campaigns.get(campaignId) ?? null) : null,
+ deps,
+ });
+ }
+
+ return { tasks, campaigns };
+}
+
+// ---------------------------------------------------------------------------
+// DOT generation
+// ---------------------------------------------------------------------------
+
+function escapeLabel(s: string): string {
+ return s.replace(/"/g, '\\"').replace(/\n/g, '\\n');
+}
+
+function generateDot(
+  allTasks: Map<string, TaskNode>,
+ opts: DotOptions,
+): string {
+  const filteredIds = new Set<string>();
+ for (const [id, task] of allTasks) {
+ if (!opts.filter || opts.filter(task)) {
+ filteredIds.add(id);
+ }
+ }
+
+ // Also include deps of filtered tasks that exist
+ const inDag = new Set(filteredIds);
+ for (const id of filteredIds) {
+ const task = allTasks.get(id);
+ if (task) {
+ for (const dep of task.deps) {
+ if (allTasks.has(dep)) inDag.add(dep);
+ }
+ }
+ }
+
+ // Group by campaign
+  const byCampaign = new Map<string, string[]>();
+ for (const id of inDag) {
+ const task = allTasks.get(id);
+ if (!task) continue;
+ const key = task.campaign ?? '(none)';
+ const arr = byCampaign.get(key) ?? [];
+ arr.push(id);
+ byCampaign.set(key, arr);
+ }
+
+ const lines: string[] = [];
+ lines.push('digraph XYPH {');
+ lines.push(` rankdir=${opts.rankdir};`);
+ lines.push(' bgcolor="#1a1a2e";');
+ lines.push(' node [shape=box, style="filled,rounded", fontname="Helvetica", fontsize=10, margin="0.15,0.08"];');
+ lines.push(' edge [color="#555555", arrowsize=0.7];');
+ lines.push(' graph [fontname="Helvetica", fontsize=12, fontcolor="#cccccc"];');
+
+ if (opts.title) {
+ lines.push(` label="${escapeLabel(opts.title)}";`);
+ lines.push(' labelloc=t;');
+ lines.push(' fontsize=16;');
+ }
+
+ lines.push('');
+
+ let clusterIdx = 0;
+ for (const [campaign, ids] of byCampaign) {
+ const campaignColor = CAMPAIGN_COLORS[campaign] ?? '#666666';
+ const task0 = ids[0] ? allTasks.get(ids[0]) : undefined;
+ const label = campaign === '(none)'
+ ? 'Unassigned'
+ : (task0?.campaignTitle ?? campaign.replace(/^(campaign|milestone):/, 'M: '));
+
+ lines.push(` subgraph cluster_${clusterIdx++} {`);
+ lines.push(` label="${escapeLabel(label)}";`);
+ lines.push(' style=dashed;');
+ lines.push(` color="${campaignColor}";`);
+ lines.push(` fontcolor="${campaignColor}";`);
+ lines.push('');
+
+ for (const id of ids) {
+ const task = allTasks.get(id);
+ if (!task) continue;
+
+ const isFrontier = opts.highlightFrontier?.has(id) ?? false;
+ const isBlocker = opts.highlightBlockers?.has(id) ?? false;
+ const isCritical = opts.highlightPath?.has(id) ?? false;
+
+ const sc = isFrontier
+ ? FRONTIER_COLORS
+ : STATUS_COLORS[task.status] ?? STATUS_COLORS['BACKLOG']!;
+
+ const shortId = id.replace('task:', '');
+ const tags: string[] = [];
+ if (isCritical) tags.push('CP');
+ if (isFrontier) tags.push('*');
+ const tagStr = tags.length > 0 ? ` [${tags.join(',')}]` : '';
+ const truncTitle = task.title.length > 35 ? task.title.slice(0, 35) : task.title;
+ const label = `${shortId}${tagStr}\\n${escapeLabel(truncTitle)}`;
+
+ const penwidth = isBlocker || isCritical ? ', penwidth=3' : '';
+ const borderColor = isBlocker ? '#ff4444' : (isCritical ? '#ff8800' : sc.border);
+
+ lines.push(
+ ` "${id}" [label="${label}", fillcolor="${sc.fill}", fontcolor="${sc.font}", color="${borderColor}"${penwidth}];`,
+ );
+ }
+
+ lines.push(' }');
+ lines.push('');
+ }
+
+ // Edges: prerequisite → dependent (work flows in rankdir direction)
+ for (const id of inDag) {
+ const task = allTasks.get(id);
+ if (!task) continue;
+ for (const dep of task.deps) {
+ if (!inDag.has(dep)) continue;
+ const depTask = allTasks.get(dep);
+ const edgeColor = depTask?.status === 'DONE' ? '#4a8c28' : '#888888';
+ lines.push(` "${dep}" -> "${id}" [color="${edgeColor}"];`);
+ }
+ }
+
+ // Legend
+ lines.push('');
+ lines.push(' subgraph cluster_legend {');
+ lines.push(' label="Legend";');
+ lines.push(' style=dashed;');
+ lines.push(' color="#444444";');
+ lines.push(' fontcolor="#999999";');
+ lines.push(' "leg_done" [label="DONE", fillcolor="#2d5016", fontcolor="#b8e6a0", color="#4a8c28"];');
+ lines.push(' "leg_wip" [label="IN_PROGRESS", fillcolor="#1a4a6e", fontcolor="#a0d4f7", color="#2980b9"];');
+ lines.push(' "leg_frontier" [label="FRONTIER", fillcolor="#5c4a00", fontcolor="#ffd700", color="#daa520"];');
+ lines.push(' "leg_planned" [label="PLANNED", fillcolor="#4a3560", fontcolor="#c9a0f7", color="#7b52a0"];');
+ lines.push(' "leg_backlog" [label="BACKLOG", fillcolor="#3a3a3a", fontcolor="#cccccc", color="#666666"];');
+ lines.push(' "leg_done" -> "leg_wip" -> "leg_frontier" -> "leg_planned" -> "leg_backlog" [style=invis];');
+ lines.push(' }');
+
+ lines.push('}');
+ return lines.join('\n');
+}
+
+// ---------------------------------------------------------------------------
+// SVG rendering
+// ---------------------------------------------------------------------------
+
+function renderSvg(dotContent: string, outputPath: string): void {
+ const dotFile = outputPath.replace(/\.svg$/, '.dot');
+ writeFileSync(dotFile, dotContent);
+ try {
+ execSync(`dot -Tsvg "${dotFile}" -o "${outputPath}"`, { stdio: 'pipe' });
+ console.log(` SVG: ${outputPath}`);
+ } catch (err) {
+ console.error(` FAIL: ${outputPath} — is graphviz installed? (brew install graphviz)`);
+ console.error(err instanceof Error ? err.message : String(err));
+ }
+}
+
+function generatePair(
+  allTasks: Map<string, TaskNode>,
+ dir: string,
+ baseName: string,
+  opts: Omit<DotOptions, 'rankdir'>,
+): void {
+ const hrDot = generateDot(allTasks, { ...opts, rankdir: 'LR' });
+ const vtDot = generateDot(allTasks, { ...opts, rankdir: 'TB' });
+ renderSvg(hrDot, join(dir, `${baseName}-hr.svg`));
+ renderSvg(vtDot, join(dir, `${baseName}-vert.svg`));
+}
+
+// ---------------------------------------------------------------------------
+// Analysis helpers
+// ---------------------------------------------------------------------------
+
+function buildAnalysisInputs(tasks: Map<string, TaskNode>): {
+ summaries: TaskSummary[];
+ edges: DepEdge[];
+ sorted: string[];
+} {
+ const summaries: TaskSummary[] = [];
+ const edges: DepEdge[] = [];
+
+ for (const [, task] of tasks) {
+ summaries.push({ id: task.id, status: task.status, hours: task.hours });
+ for (const dep of task.deps) {
+ if (tasks.has(dep)) {
+ edges.push({ from: task.id, to: dep });
+ }
+ }
+ }
+
+ // Topological sort via Kahn's algorithm
+  const inDegree = new Map<string, number>();
+  const adj = new Map<string, string[]>();
+ for (const s of summaries) {
+ inDegree.set(s.id, 0);
+ adj.set(s.id, []);
+ }
+ for (const e of edges) {
+ inDegree.set(e.from, (inDegree.get(e.from) ?? 0) + 1);
+ const arr = adj.get(e.to) ?? [];
+ arr.push(e.from);
+ adj.set(e.to, arr);
+ }
+
+ const queue: string[] = [];
+ for (const [id, deg] of inDegree) {
+ if (deg === 0) queue.push(id);
+ }
+ queue.sort(); // determinism
+
+ const sorted: string[] = [];
+ while (queue.length > 0) {
+ const node = queue.shift()!;
+ sorted.push(node);
+ for (const dep of adj.get(node) ?? []) {
+ const newDeg = (inDegree.get(dep) ?? 1) - 1;
+ inDegree.set(dep, newDeg);
+ if (newDeg === 0) {
+ // Insert sorted for determinism
+ const insertIdx = queue.findIndex((q) => q > dep);
+ if (insertIdx === -1) queue.push(dep);
+ else queue.splice(insertIdx, 0, dep);
+ }
+ }
+ }
+
+ return { summaries, edges, sorted };
+}
+
+// ---------------------------------------------------------------------------
+// work.md generation
+// ---------------------------------------------------------------------------
+
+function generateWorkMd(
+  tasks: Map<string, TaskNode>,
+ summaries: TaskSummary[],
+ edges: DepEdge[],
+ sorted: string[],
+): string {
+ const lines: string[] = [];
+ const now = new Date().toISOString().slice(0, 10);
+
+ // Header
+ lines.push('# XYPH Work DAG Analysis');
+ lines.push('');
+ lines.push(`Generated: ${now} | ${summaries.length} quests | ${edges.length} dependency edges`);
+ lines.push('');
+
+ // --- Topological Sort ---
+ lines.push('## Topological Sort');
+ lines.push('');
+ lines.push('| # | Task ID | Title | Status | Campaign | Hours |');
+ lines.push('|---|---------|-------|--------|----------|-------|');
+ for (let i = 0; i < sorted.length; i++) {
+ const id = sorted[i]!;
+ const task = tasks.get(id);
+ if (!task) continue;
+ const campaign = task.campaignTitle ?? task.campaign?.replace(/^(campaign|milestone):/, '') ?? '—';
+ lines.push(`| ${i + 1} | \`${id}\` | ${task.title} | ${task.status} | ${campaign} | ${task.hours} |`);
+ }
+ lines.push('');
+
+ // --- Critical Path ---
+ const cp = computeCriticalPath(sorted, summaries, edges);
+ lines.push('## Critical Path Analysis');
+ lines.push('');
+ if (cp.path.length > 0) {
+ lines.push(`**Total hours:** ${cp.totalHours} | **Length:** ${cp.path.length} tasks`);
+ lines.push('');
+ lines.push('```');
+ lines.push(cp.path.map((id) => {
+ const task = tasks.get(id);
+ const h = task?.hours ?? 0;
+ return `${id} (${h}h)`;
+ }).join(' → '));
+ lines.push('```');
+ lines.push('');
+
+ // Bottleneck: the critical path task with most hours
+ const bottleneck = cp.path.reduce((max, id) => {
+ const t = tasks.get(id);
+ const h = t?.hours ?? 0;
+ const maxH = tasks.get(max)?.hours ?? 0;
+ return h > maxH ? id : max;
+ }, cp.path[0]!);
+ const btTask = tasks.get(bottleneck);
+ if (btTask) {
+ lines.push(`**Bottleneck:** \`${bottleneck}\` — ${btTask.title} (${btTask.hours}h)`);
+ lines.push('');
+ }
+ } else {
+ lines.push('No critical path (no dependencies or all tasks DONE).');
+ lines.push('');
+ }
+
+ // --- Parallelism and Leveling ---
+ const levels = computeLevels(sorted, edges);
+ const width = dagWidth(levels);
+ lines.push('## Parallelism and Leveling');
+ lines.push('');
+ lines.push('### Width of DAG');
+ lines.push('');
+ lines.push(`**Max parallelism:** ${width.width} tasks at level ${width.widestLevel}`);
+ lines.push('');
+
+ // Level histogram
+  const levelGroups = new Map<number, string[]>();
+ for (const [id, level] of levels) {
+ const arr = levelGroups.get(level) ?? [];
+ arr.push(id);
+ levelGroups.set(level, arr);
+ }
+ const sortedLevelKeys = [...levelGroups.keys()].sort((a, b) => a - b);
+ lines.push('| Level | Count | Tasks |');
+ lines.push('|-------|-------|-------|');
+ for (const level of sortedLevelKeys) {
+ const ids = levelGroups.get(level) ?? [];
+ const display = ids.map((id) => `\`${id.replace('task:', '')}\``).join(', ');
+ lines.push(`| ${level} | ${ids.length} | ${display} |`);
+ }
+ lines.push('');
+
+ // Scheduling
+ lines.push(`### Scheduling (${WORKERS} workers)`);
+ lines.push('');
+ const schedule = scheduleWorkers(sorted, summaries, edges, WORKERS);
+ lines.push(`**Makespan:** ${schedule.makespan}h`);
+ const serialTotal = summaries.reduce((sum, t) => sum + (t.status === 'DONE' ? 0 : t.hours), 0);
+ const utilization = schedule.makespan > 0
+ ? ((serialTotal / (schedule.makespan * WORKERS)) * 100).toFixed(1)
+ : '0.0';
+ lines.push(`**Serial total:** ${serialTotal}h | **Utilization:** ${utilization}%`);
+ lines.push('');
+
+ for (const worker of schedule.schedule) {
+ lines.push(`**Worker ${worker.workerId}:**`);
+ for (const slot of worker.tasks) {
+ const task = tasks.get(slot.id);
+ const title = task?.title ?? slot.id;
+ lines.push(` - \`${slot.id}\` [${slot.start}h–${slot.start + slot.hours}h] ${title}`);
+ }
+ lines.push('');
+ }
+
+ // --- Transitive Reduction / Closure ---
+ lines.push('## Transitive Reduction and Closure');
+ lines.push('');
+
+ const reduced = transitiveReduction(edges);
+ const redundantCount = edges.length - reduced.length;
+ lines.push('### Transitive Reduction');
+ lines.push('');
+ lines.push(`**Redundant edges:** ${redundantCount} of ${edges.length}`);
+ lines.push('');
+ if (redundantCount > 0) {
+ const reducedSet = new Set(reduced.map((e) => `${e.from}→${e.to}`));
+ const removedEdges = edges.filter((e) => !reducedSet.has(`${e.from}→${e.to}`));
+ for (const e of removedEdges) {
+ lines.push(`- \`${e.from}\` → \`${e.to}\` (redundant)`);
+ }
+ lines.push('');
+ }
+
+ const closure = transitiveClosure(edges);
+ const impliedCount = closure.length - edges.length;
+ lines.push('### Transitive Closure');
+ lines.push('');
+ lines.push(`**Implied dependencies:** ${impliedCount}`);
+ lines.push('');
+ if (impliedCount > 0) {
+ const originalSet = new Set(edges.map((e) => `${e.from}→${e.to}`));
+ const impliedEdges = closure.filter((e) => !originalSet.has(`${e.from}→${e.to}`));
+ const displayLimit = Math.min(impliedEdges.length, 20);
+ for (let i = 0; i < displayLimit; i++) {
+ const e = impliedEdges[i]!;
+ lines.push(`- \`${e.from}\` → \`${e.to}\``);
+ }
+ if (impliedEdges.length > 20) {
+ lines.push(`- ... and ${impliedEdges.length - 20} more`);
+ }
+ lines.push('');
+ }
+
+ // --- Ancestry and Impact ---
+ lines.push('## Ancestry and Impact Analysis');
+ lines.push('');
+
+ lines.push('### Reverse Reachability (Top Blockers)');
+ lines.push('');
+ const topBlockers = computeTopBlockers(summaries, edges, 15);
+ if (topBlockers.length > 0) {
+ lines.push('| Task | Title | Direct | Transitive |');
+ lines.push('|------|-------|--------|------------|');
+ for (const b of topBlockers) {
+ const task = tasks.get(b.id);
+ const title = task?.title ?? b.id;
+ lines.push(`| \`${b.id}\` | ${title} | ${b.directCount} | ${b.transitiveCount} |`);
+ }
+ lines.push('');
+ } else {
+ lines.push('No active blockers.');
+ lines.push('');
+ }
+
+ lines.push('### Provenance');
+ lines.push('');
+ const { frontier } = computeFrontier(summaries, edges);
+ const prov = computeProvenance(frontier, edges);
+ if (prov.size > 0) {
+ lines.push('| Frontier Task | Title | Root Ancestors |');
+ lines.push('|---------------|-------|----------------|');
+ for (const [id, roots] of prov) {
+ const task = tasks.get(id);
+ const title = task?.title ?? id;
+ const rootStr = roots.map((r) => `\`${r}\``).join(', ');
+ lines.push(`| \`${id}\` | ${title} | ${rootStr} |`);
+ }
+ lines.push('');
+ } else {
+ lines.push('No frontier tasks.');
+ lines.push('');
+ }
+
+ // --- Logical Grouping ---
+ lines.push('## Logical Grouping (Campaigns)');
+ lines.push('');
+
+  const campaignStats = new Map<string, { total: number; done: number; hours: number; deps: number }>();
+ for (const [, task] of tasks) {
+ const key = task.campaign ?? '(none)';
+ const stats = campaignStats.get(key) ?? { total: 0, done: 0, hours: 0, deps: 0 };
+ stats.total++;
+ if (task.status === 'DONE') stats.done++;
+ stats.hours += task.hours;
+ stats.deps += task.deps.length;
+ campaignStats.set(key, stats);
+ }
+
+ lines.push('| Campaign | Tasks | Done | % | Hours | Deps |');
+ lines.push('|----------|-------|------|---|-------|------|');
+ const sortedCampaigns = [...campaignStats.entries()].sort((a, b) => a[0].localeCompare(b[0]));
+ for (const [campaign, stats] of sortedCampaigns) {
+ const pct = stats.total > 0 ? ((stats.done / stats.total) * 100).toFixed(0) : '0';
+ const label = campaign === '(none)' ? 'Unassigned' : campaign.replace(/^(campaign|milestone):/, '');
+ lines.push(`| ${label} | ${stats.total} | ${stats.done} | ${pct}% | ${stats.hours} | ${stats.deps} |`);
+ }
+ lines.push('');
+
+ // --- Anti-chains ---
+ lines.push('## Anti-chains (Parallel Waves)');
+ lines.push('');
+ const chains = computeAntiChains(sorted, edges, summaries);
+ if (chains.length > 0) {
+ lines.push('| Wave | Parallel Tasks | Count | Total Hours |');
+ lines.push('|------|----------------|-------|-------------|');
+ for (let i = 0; i < chains.length; i++) {
+ const wave = chains[i]!;
+ const waveHours = wave.reduce((sum, id) => {
+ const t = summaries.find((s) => s.id === id);
+ return sum + (t?.hours ?? 0);
+ }, 0);
+ const display = wave.map((id) => `\`${id.replace('task:', '')}\``).join(', ');
+ lines.push(`| ${i} | ${display} | ${wave.length} | ${waveHours} |`);
+ }
+ lines.push('');
+ } else {
+ lines.push('No active anti-chains (all tasks DONE or no tasks).');
+ lines.push('');
+ }
+
+ // --- Status summary ---
+ lines.push('## Status Summary');
+ lines.push('');
+  const statusCounts = new Map<string, number>();
+ for (const [, task] of tasks) {
+ statusCounts.set(task.status, (statusCounts.get(task.status) ?? 0) + 1);
+ }
+ lines.push('| Status | Count |');
+ lines.push('|--------|-------|');
+ for (const status of ['DONE', 'IN_PROGRESS', 'PLANNED', 'BACKLOG', 'GRAVEYARD']) {
+ const count = statusCounts.get(status) ?? 0;
+ if (count > 0) lines.push(`| ${status} | ${count} |`);
+ }
+ lines.push('');
+
+ return lines.join('\n');
+}
+
+// ---------------------------------------------------------------------------
+// Main
+// ---------------------------------------------------------------------------
+
+async function main(): Promise<void> {
+ console.log('Loading WARP graph...');
+ const { tasks, campaigns } = await loadGraph();
+ console.log(`Loaded ${tasks.size} tasks, ${campaigns.size} campaigns`);
+
+ const { summaries, edges, sorted } = buildAnalysisInputs(tasks);
+
+ // Compute highlights
+ const { frontier } = computeFrontier(summaries, edges);
+ const frontierSet = new Set(frontier);
+ const cp = computeCriticalPath(sorted, summaries, edges);
+ const criticalSet = new Set(cp.path);
+ const topBlockers = computeTopBlockers(summaries, edges, 10);
+ const blockerSet = new Set(topBlockers.map((b) => b.id));
+
+  const baseOpts: Omit<DotOptions, 'rankdir'> = {
+ highlightFrontier: frontierSet,
+ highlightPath: criticalSet,
+ highlightBlockers: blockerSet,
+ };
+
+ // Create output dirs
+ mkdirSync(OUTPUT_DIR, { recursive: true });
+
+ // 1. Full DAG
+ console.log('\nGenerating full DAG...');
+ generatePair(tasks, OUTPUT_DIR, 'all', { ...baseOpts, title: 'XYPH Work DAG — All Quests' });
+
+ // 2. Per-campaign
+  const campaignIds = new Set<string>();
+ for (const [, task] of tasks) {
+ if (task.campaign) campaignIds.add(task.campaign);
+ }
+
+ for (const campaignId of [...campaignIds].sort()) {
+ const campaignName = campaigns.get(campaignId) ?? campaignId.replace(/^(campaign|milestone):/, '');
+ const safeName = campaignId.replace(/^(campaign|milestone):/, '');
+ const dir = join(OUTPUT_DIR, `${safeName}`);
+ mkdirSync(dir, { recursive: true });
+
+ console.log(`\nGenerating ${safeName}...`);
+ generatePair(tasks, dir, safeName, {
+ ...baseOpts,
+ title: `XYPH — ${campaignName}`,
+ filter: (t) => t.campaign === campaignId,
+ });
+ }
+
+ // 3. Backlog view
+ console.log('\nGenerating backlog...');
+ generatePair(tasks, OUTPUT_DIR, 'backlog', {
+ ...baseOpts,
+ title: 'XYPH — Backlog / Planned',
+ filter: (t) => t.status === 'BACKLOG' || t.status === 'PLANNED',
+ });
+
+ // 4. Graveyard view
+ console.log('\nGenerating graveyard...');
+ generatePair(tasks, OUTPUT_DIR, 'graveyard', {
+ ...baseOpts,
+ title: 'XYPH — Graveyard',
+ filter: (t) => t.status === 'GRAVEYARD',
+ });
+
+ // 5. Analysis document
+ console.log('\nGenerating work.md...');
+ const workMd = generateWorkMd(tasks, summaries, edges, sorted);
+ writeFileSync(join(OUTPUT_DIR, 'work.md'), workMd);
+ console.log(` MD: ${join(OUTPUT_DIR, 'work.md')}`);
+
+ console.log('\nDone!');
+}
+
+main().catch((err) => {
+ console.error(`[FATAL] ${err instanceof Error ? err.message : String(err)}`);
+ process.exit(1);
+});
diff --git a/src/domain/services/DagAnalysis.ts b/src/domain/services/DagAnalysis.ts
new file mode 100644
index 0000000..4a83ec0
--- /dev/null
+++ b/src/domain/services/DagAnalysis.ts
@@ -0,0 +1,481 @@
+/**
+ * DagAnalysis — Pure functions for DAG structure analysis.
+ *
+ * Complements DepAnalysis.ts (frontier, critical path, top blockers) with
+ * structural analysis: leveling, width, scheduling, transitive reduction/
+ * closure, anti-chains, reverse reachability, and provenance.
+ *
+ * All functions are pure: (sorted, edges, tasks) → result.
+ * Graph traversals (topo sort, BFS, reachability) are handled by git-warp
+ * natively — these functions operate on the extracted DepEdge[] data.
+ */
+
+import type { TaskSummary, DepEdge } from './DepAnalysis.js';
+
+// ---------------------------------------------------------------------------
+// Level assignment
+// ---------------------------------------------------------------------------
+
+/**
+ * Assigns each task to its longest-path level from roots.
+ * Level = max(level of prerequisites) + 1. Roots are level 0.
+ */
+export function computeLevels(
+ sorted: string[],
+ edges: DepEdge[],
+): Map {
+ const levels = new Map();
+
+ // Build deps map: task → [prerequisites]
+ const depsOf = new Map();
+ for (const edge of edges) {
+ const arr = depsOf.get(edge.from) ?? [];
+ arr.push(edge.to);
+ depsOf.set(edge.from, arr);
+ }
+
+ for (const node of sorted) {
+ const deps = depsOf.get(node) ?? [];
+ if (deps.length === 0) {
+ levels.set(node, 0);
+ } else {
+ let maxDepLevel = 0;
+ for (const dep of deps) {
+ const depLevel = levels.get(dep) ?? 0;
+ if (depLevel + 1 > maxDepLevel) {
+ maxDepLevel = depLevel + 1;
+ }
+ }
+ levels.set(node, maxDepLevel);
+ }
+ }
+
+ return levels;
+}
+
+// ---------------------------------------------------------------------------
+// DAG width
+// ---------------------------------------------------------------------------
+
+/**
+ * Returns the maximum number of tasks at any single level (max parallelism).
+ *
+ * @param levels - task id → level, as produced by computeLevels
+ * @returns the widest level's size and its index (width 0, level -1 when empty)
+ */
+export function dagWidth(
+  levels: Map<string, number>,
+): { width: number; widestLevel: number } {
+  if (levels.size === 0) {
+    return { width: 0, widestLevel: -1 };
+  }
+
+  // Count tasks per level.
+  const counts = new Map<number, number>();
+  for (const level of levels.values()) {
+    counts.set(level, (counts.get(level) ?? 0) + 1);
+  }
+
+  // First level reaching the max count wins ties (strict `>`).
+  let width = 0;
+  let widestLevel = -1;
+  for (const [level, count] of counts) {
+    if (count > width) {
+      width = count;
+      widestLevel = level;
+    }
+  }
+
+  return { width, widestLevel };
+}
+
+// ---------------------------------------------------------------------------
+// Worker scheduling
+// ---------------------------------------------------------------------------
+
+/** One task's placement on a worker timeline (times in hours from t=0). */
+export interface TaskSlot {
+ id: string;
+ start: number;
+ hours: number;
+}
+
+/** A single simulated worker and the slots assigned to it, in start order. */
+export interface WorkerSchedule {
+ workerId: number;
+ tasks: TaskSlot[];
+}
+
+/**
+ * Greedy list-scheduling: assigns tasks in topological order to the worker
+ * that becomes free earliest, respecting dependency constraints.
+ *
+ * DONE tasks weigh 0 hours so completed work does not inflate the makespan
+ * (matches computeCriticalPath semantics).
+ *
+ * @param sorted - task ids in topological order (prerequisites first)
+ * @param tasks - summaries supplying per-task status and hour estimates
+ * @param edges - dependency edges ({ from: dependent, to: prerequisite })
+ * @param workers - number of parallel workers to simulate
+ * @returns schedules for workers that received tasks, plus the makespan
+ */
+export function scheduleWorkers(
+  sorted: string[],
+  tasks: TaskSummary[],
+  edges: DepEdge[],
+  workers: number,
+): { schedule: WorkerSchedule[]; makespan: number } {
+  if (sorted.length === 0) {
+    return { schedule: [], makespan: 0 };
+  }
+
+  // Build hours map (DONE tasks weigh 0 — already completed)
+  const hoursMap = new Map<string, number>();
+  for (const t of tasks) {
+    hoursMap.set(t.id, t.status === 'DONE' ? 0 : t.hours);
+  }
+
+  // Build deps map: task → [prerequisites]
+  const depsOf = new Map<string, string[]>();
+  for (const edge of edges) {
+    const arr = depsOf.get(edge.from) ?? [];
+    arr.push(edge.to);
+    depsOf.set(edge.from, arr);
+  }
+
+  // Track when each task finishes
+  const finishTime = new Map<string, number>();
+
+  // Worker availability: earliest time each worker is free
+  const workerFree = new Array<number>(workers).fill(0);
+  const workerTasks = new Array<TaskSlot[]>(workers);
+  for (let i = 0; i < workers; i++) {
+    workerTasks[i] = [];
+  }
+
+  for (const taskId of sorted) {
+    // Tasks absent from `tasks` default to 1 hour.
+    const hours = hoursMap.get(taskId) ?? 1;
+
+    // Earliest start = max finish time of all prerequisites
+    let earliest = 0;
+    for (const dep of depsOf.get(taskId) ?? []) {
+      const depFinish = finishTime.get(dep) ?? 0;
+      if (depFinish > earliest) earliest = depFinish;
+    }
+
+    // Find the worker that is free earliest (but not before `earliest`)
+    let bestWorker = 0;
+    let bestStart = Math.max(workerFree[0] ?? 0, earliest);
+    for (let w = 1; w < workers; w++) {
+      const start = Math.max(workerFree[w] ?? 0, earliest);
+      if (start < bestStart) {
+        bestStart = start;
+        bestWorker = w;
+      }
+    }
+
+    const slot: TaskSlot = { id: taskId, start: bestStart, hours };
+    const wt = workerTasks[bestWorker];
+    if (wt) wt.push(slot); // guard satisfies noUncheckedIndexedAccess
+    const endTime = bestStart + hours;
+    workerFree[bestWorker] = endTime;
+    finishTime.set(taskId, endTime);
+  }
+
+  // Build schedule (only include workers that got tasks)
+  const schedule: WorkerSchedule[] = [];
+  for (let w = 0; w < workers; w++) {
+    const wTasks = workerTasks[w];
+    if (wTasks && wTasks.length > 0) {
+      schedule.push({ workerId: w, tasks: wTasks });
+    }
+  }
+
+  const makespan = Math.max(...workerFree);
+  return { schedule, makespan };
+}
+
+// ---------------------------------------------------------------------------
+// Transitive reduction
+// ---------------------------------------------------------------------------
+
+/**
+ * Removes redundant edges: A→C is redundant if a longer path A→...→C exists.
+ * Uses BFS per edge to check reachability without that edge (O(E·(V+E))).
+ *
+ * @param edges - dependency edges of an acyclic graph
+ * @returns the minimal edge set preserving reachability
+ */
+export function transitiveReduction(
+  edges: DepEdge[],
+): DepEdge[] {
+  if (edges.length === 0) return [];
+
+  // Build adjacency: from → [to] (dependency direction)
+  const adj = new Map<string, Set<string>>();
+  for (const edge of edges) {
+    const set = adj.get(edge.from) ?? new Set<string>();
+    set.add(edge.to);
+    adj.set(edge.from, set);
+  }
+
+  // For each edge (from → to), check if `from` can reach `to` via other edges
+  const result: DepEdge[] = [];
+  for (const edge of edges) {
+    // Temporarily remove this edge
+    const neighbors = adj.get(edge.from);
+    if (!neighbors) {
+      result.push(edge);
+      continue;
+    }
+    neighbors.delete(edge.to);
+
+    // BFS from edge.from to see if edge.to is still reachable
+    const reachable = bfsReachable(edge.from, edge.to, adj);
+    if (!reachable) {
+      result.push(edge); // edge is essential
+    }
+
+    // Restore edge
+    neighbors.add(edge.to);
+  }
+
+  return result;
+}
+
+/**
+ * BFS helper: true when `target` is reachable from `start` via `adj`.
+ * `start` is pre-marked visited, so it only matches `target` through an edge.
+ */
+function bfsReachable(
+  start: string,
+  target: string,
+  adj: Map<string, Set<string>>,
+): boolean {
+  const visited = new Set<string>();
+  const queue = [start];
+  visited.add(start);
+
+  while (queue.length > 0) {
+    const current = queue.shift();
+    if (current === undefined) break; // satisfies noUncheckedIndexedAccess
+    for (const neighbor of adj.get(current) ?? []) {
+      if (neighbor === target) return true;
+      if (!visited.has(neighbor)) {
+        visited.add(neighbor);
+        queue.push(neighbor);
+      }
+    }
+  }
+
+  return false;
+}
+
+// ---------------------------------------------------------------------------
+// Transitive closure
+// ---------------------------------------------------------------------------
+
+/**
+ * Adds all implied edges: if A depends on B and B depends on C, adds A→C.
+ * Returns original edges plus all transitive edges (deduped).
+ *
+ * @param edges - dependency edges of an acyclic graph
+ * @returns edges closed under transitivity; originals retained as-is
+ */
+export function transitiveClosure(
+  edges: DepEdge[],
+): DepEdge[] {
+  if (edges.length === 0) return [];
+
+  // Build adjacency: from → set of direct prerequisites
+  const adj = new Map<string, Set<string>>();
+  for (const edge of edges) {
+    const set = adj.get(edge.from) ?? new Set<string>();
+    set.add(edge.to);
+    adj.set(edge.from, set);
+  }
+
+  // Dedupe key set seeded with the original edges
+  const allEdges = new Set<string>();
+  for (const edge of edges) {
+    allEdges.add(`${edge.from}→${edge.to}`);
+  }
+
+  const result: DepEdge[] = [...edges];
+
+  // Derive node set from edges
+  const nodes = new Set<string>();
+  for (const edge of edges) {
+    nodes.add(edge.from);
+    nodes.add(edge.to);
+  }
+
+  for (const node of nodes) {
+    // BFS from node following dependency direction
+    const visited = new Set<string>();
+    const queue: string[] = [];
+
+    // Seed with direct deps
+    for (const dep of adj.get(node) ?? []) {
+      if (!visited.has(dep)) {
+        visited.add(dep);
+        queue.push(dep);
+      }
+    }
+
+    while (queue.length > 0) {
+      const current = queue.shift();
+      if (current === undefined) break;
+      for (const dep of adj.get(current) ?? []) {
+        if (!visited.has(dep)) {
+          visited.add(dep);
+          queue.push(dep);
+        }
+      }
+    }
+
+    // Add transitive edges (direct edges already in allEdges are skipped)
+    for (const reachable of visited) {
+      const key = `${node}→${reachable}`;
+      if (!allEdges.has(key)) {
+        allEdges.add(key);
+        result.push({ from: node, to: reachable });
+      }
+    }
+  }
+
+  return result;
+}
+
+// ---------------------------------------------------------------------------
+// Anti-chain decomposition (MECE parallel waves)
+// ---------------------------------------------------------------------------
+
+/**
+ * Groups non-DONE tasks into parallel waves based on dependency levels.
+ * Each wave is an anti-chain: tasks within a wave have no dependencies
+ * on each other and can run in parallel.
+ *
+ * @param sorted - task ids in topological order (prerequisites first)
+ * @param edges - dependency edges ({ from: dependent, to: prerequisite })
+ * @param tasks - summaries used to exclude DONE tasks
+ * @returns waves in execution order; each wave sorted alphabetically
+ */
+export function computeAntiChains(
+  sorted: string[],
+  edges: DepEdge[],
+  tasks: TaskSummary[],
+): string[][] {
+  if (sorted.length === 0) return [];
+
+  const doneSet = new Set(tasks.filter((t) => t.status === 'DONE').map((t) => t.id));
+  const activeSorted = sorted.filter((id) => !doneSet.has(id));
+
+  if (activeSorted.length === 0) return [];
+
+  // Filter edges to only include active tasks
+  const activeSet = new Set(activeSorted);
+  const activeEdges = edges.filter(
+    (e) => activeSet.has(e.from) && activeSet.has(e.to),
+  );
+
+  // Compute levels on active subgraph
+  const levels = computeLevels(activeSorted, activeEdges);
+
+  // Group by level
+  const byLevel = new Map<number, string[]>();
+  for (const [id, level] of levels) {
+    const arr = byLevel.get(level) ?? [];
+    arr.push(id);
+    byLevel.set(level, arr);
+  }
+
+  // Sort levels and build result
+  const sortedLevels = [...byLevel.keys()].sort((a, b) => a - b);
+  return sortedLevels.map((level) => {
+    const wave = byLevel.get(level) ?? [];
+    wave.sort();
+    return wave;
+  });
+}
+
+// ---------------------------------------------------------------------------
+// Reverse reachability
+// ---------------------------------------------------------------------------
+
+/**
+ * Returns all tasks that transitively depend on the given task.
+ * Uses BFS over the reverse dependency graph.
+ *
+ * @param taskId - the prerequisite whose dependents are sought
+ * @param edges - dependency edges ({ from: dependent, to: prerequisite })
+ * @returns sorted ids of all transitive dependents (taskId itself excluded)
+ */
+export function reverseReachability(
+  taskId: string,
+  edges: DepEdge[],
+): string[] {
+  // Build reverse map: prerequisite → [dependents]
+  const dependentsOf = new Map<string, string[]>();
+  for (const edge of edges) {
+    const arr = dependentsOf.get(edge.to) ?? [];
+    arr.push(edge.from);
+    dependentsOf.set(edge.to, arr);
+  }
+
+  // taskId is deliberately NOT added to visited, so it never appears in
+  // the result unless a (cyclic) edge leads back to it.
+  const visited = new Set<string>();
+  const queue = [taskId];
+
+  while (queue.length > 0) {
+    const current = queue.shift();
+    if (current === undefined) break;
+    for (const dep of dependentsOf.get(current) ?? []) {
+      if (!visited.has(dep)) {
+        visited.add(dep);
+        queue.push(dep);
+      }
+    }
+  }
+
+  const result = [...visited];
+  result.sort();
+  return result;
+}
+
+// ---------------------------------------------------------------------------
+// Provenance
+// ---------------------------------------------------------------------------
+
+/**
+ * For each frontier task, traces back through dependencies to find root
+ * ancestors (tasks with no prerequisites).
+ */
+export function computeProvenance(
+ frontier: string[],
+ edges: DepEdge[],
+): Map {
+ const result = new Map();
+ if (frontier.length === 0) return result;
+
+ // Build deps map: task → [prerequisites]
+ const depsOf = new Map();
+ for (const edge of edges) {
+ const arr = depsOf.get(edge.from) ?? [];
+ arr.push(edge.to);
+ depsOf.set(edge.from, arr);
+ }
+
+ // All nodes that appear in edges (to find roots)
+ const allNodes = new Set();
+ for (const edge of edges) {
+ allNodes.add(edge.from);
+ allNodes.add(edge.to);
+ }
+
+ for (const taskId of frontier) {
+ // BFS backwards through deps
+ const visited = new Set();
+ const queue: string[] = [];
+ const roots: string[] = [];
+
+ // Seed with task's own deps
+ const directDeps = depsOf.get(taskId) ?? [];
+ if (directDeps.length === 0) {
+ // This task IS a root
+ roots.push(taskId);
+ } else {
+ for (const dep of directDeps) {
+ if (!visited.has(dep)) {
+ visited.add(dep);
+ queue.push(dep);
+ }
+ }
+ }
+
+ while (queue.length > 0) {
+ const current = queue.shift();
+ if (current === undefined) break;
+ const currentDeps = depsOf.get(current) ?? [];
+ if (currentDeps.length === 0) {
+ roots.push(current);
+ } else {
+ for (const dep of currentDeps) {
+ if (!visited.has(dep)) {
+ visited.add(dep);
+ queue.push(dep);
+ }
+ }
+ }
+ }
+
+ roots.sort();
+ result.set(taskId, roots);
+ }
+
+ return result;
+}
diff --git a/test/unit/DagAnalysis.test.ts b/test/unit/DagAnalysis.test.ts
new file mode 100644
index 0000000..e2de257
--- /dev/null
+++ b/test/unit/DagAnalysis.test.ts
@@ -0,0 +1,391 @@
+import { describe, it, expect } from 'vitest';
+import {
+ computeLevels,
+ dagWidth,
+ scheduleWorkers,
+ transitiveReduction,
+ transitiveClosure,
+ computeAntiChains,
+ reverseReachability,
+ computeProvenance,
+} from '../../src/domain/services/DagAnalysis.js';
+import type { TaskSummary, DepEdge } from '../../src/domain/services/DepAnalysis.js';
+
+// Builds TaskSummary fixtures, filling in PLANNED status and 1h by default.
+function makeTasks(...specs: Array<{ id: string; status?: string; hours?: number }>): TaskSummary[] {
+  return specs.map(({ id, status = 'PLANNED', hours = 1 }) => ({ id, status, hours }));
+}
+
+// ---------------------------------------------------------------------------
+// Diamond graph: A → B, A → C, B → D, C → D
+// Edge semantics: B depends-on A means { from: 'B', to: 'A' }
+// Topological order: [A, B, C, D] (A is root, D is sink)
+// ---------------------------------------------------------------------------
+const diamondSorted = ['task:A', 'task:B', 'task:C', 'task:D'];
+const diamondEdges: DepEdge[] = [
+ { from: 'task:B', to: 'task:A' },
+ { from: 'task:C', to: 'task:A' },
+ { from: 'task:D', to: 'task:B' },
+ { from: 'task:D', to: 'task:C' },
+];
+// Hours: A=2, B=3, C=1, D=4 — longest path A(2)→B(3)→D(4) totals 9h.
+const diamondTasks = makeTasks(
+ { id: 'task:A', hours: 2 },
+ { id: 'task:B', hours: 3 },
+ { id: 'task:C', hours: 1 },
+ { id: 'task:D', hours: 4 },
+);
+
+// ---------------------------------------------------------------------------
+// Linear chain: A → B → C
+// ---------------------------------------------------------------------------
+const linearSorted = ['task:A', 'task:B', 'task:C'];
+const linearEdges: DepEdge[] = [
+ { from: 'task:B', to: 'task:A' },
+ { from: 'task:C', to: 'task:B' },
+];
+// Hours: A=2, B=3, C=1 — serial total 6h.
+const linearTasks = makeTasks(
+ { id: 'task:A', hours: 2 },
+ { id: 'task:B', hours: 3 },
+ { id: 'task:C', hours: 1 },
+);
+
+// ---------------------------------------------------------------------------
+// computeLevels
+// ---------------------------------------------------------------------------
+describe('computeLevels', () => {
+ it('assigns levels in diamond graph: A=0, B=1, C=1, D=2', () => {
+ const levels = computeLevels(diamondSorted, diamondEdges);
+ expect(levels.get('task:A')).toBe(0);
+ expect(levels.get('task:B')).toBe(1);
+ expect(levels.get('task:C')).toBe(1);
+ expect(levels.get('task:D')).toBe(2);
+ });
+
+ it('assigns levels in linear chain: A=0, B=1, C=2', () => {
+ const levels = computeLevels(linearSorted, linearEdges);
+ expect(levels.get('task:A')).toBe(0);
+ expect(levels.get('task:B')).toBe(1);
+ expect(levels.get('task:C')).toBe(2);
+ });
+
+ it('returns empty map for empty graph', () => {
+ const levels = computeLevels([], []);
+ expect(levels.size).toBe(0);
+ });
+
+ it('assigns level 0 to a single node', () => {
+ const levels = computeLevels(['task:A'], []);
+ expect(levels.get('task:A')).toBe(0);
+ });
+
+ it('assigns level 0 to isolated nodes', () => {
+ const levels = computeLevels(['task:A', 'task:B', 'task:C'], []);
+ expect(levels.get('task:A')).toBe(0);
+ expect(levels.get('task:B')).toBe(0);
+ expect(levels.get('task:C')).toBe(0);
+ });
+});
+
+// ---------------------------------------------------------------------------
+// dagWidth
+// ---------------------------------------------------------------------------
+describe('dagWidth', () => {
+  it('returns width=2 at level 1 for diamond graph', () => {
+    const { width, widestLevel } = dagWidth(computeLevels(diamondSorted, diamondEdges));
+    expect(width).toBe(2);
+    expect(widestLevel).toBe(1);
+  });
+
+  it('returns width=1 for linear chain', () => {
+    const { width } = dagWidth(computeLevels(linearSorted, linearEdges));
+    expect(width).toBe(1);
+  });
+
+  it('returns width=0 for empty graph', () => {
+    const { width, widestLevel } = dagWidth(new Map());
+    expect(width).toBe(0);
+    expect(widestLevel).toBe(-1);
+  });
+
+  it('returns width equal to node count for all-isolated nodes', () => {
+    const flat = new Map<string, number>([
+      ['task:A', 0],
+      ['task:B', 0],
+      ['task:C', 0],
+    ]);
+    const { width, widestLevel } = dagWidth(flat);
+    expect(width).toBe(3);
+    expect(widestLevel).toBe(0);
+  });
+});
+
+// ---------------------------------------------------------------------------
+// scheduleWorkers
+// ---------------------------------------------------------------------------
+describe('scheduleWorkers', () => {
+  it('schedules diamond with 4 workers: makespan = 9', () => {
+    const { makespan, schedule } = scheduleWorkers(diamondSorted, diamondTasks, diamondEdges, 4);
+    // A(2) runs first; B(3) and C(1) overlap; D(4) waits for B → 2+3+4 = 9
+    expect(makespan).toBe(9);
+    expect(schedule.length).toBeLessThanOrEqual(4);
+  });
+
+  it('schedules linear chain: makespan equals serial total', () => {
+    const { makespan } = scheduleWorkers(linearSorted, linearTasks, linearEdges, 4);
+    expect(makespan).toBe(6);
+  });
+
+  it('returns makespan=0 for empty graph', () => {
+    const { makespan, schedule } = scheduleWorkers([], [], [], 4);
+    expect(makespan).toBe(0);
+    expect(schedule).toHaveLength(0);
+  });
+
+  it('schedules single task', () => {
+    const only = makeTasks({ id: 'task:A', hours: 5 });
+    const { makespan, schedule } = scheduleWorkers(['task:A'], only, [], 2);
+    expect(makespan).toBe(5);
+    expect(schedule).toHaveLength(1);
+  });
+
+  it('schedules independent tasks across workers', () => {
+    const trio = makeTasks(
+      { id: 'task:A', hours: 3 },
+      { id: 'task:B', hours: 3 },
+      { id: 'task:C', hours: 3 },
+    );
+    const { makespan, schedule } = scheduleWorkers(['task:A', 'task:B', 'task:C'], trio, [], 3);
+    // Three independent 3h tasks on three workers finish together.
+    expect(makespan).toBe(3);
+    expect(schedule).toHaveLength(3);
+  });
+
+  it('tracks task assignments per worker', () => {
+    const { schedule } = scheduleWorkers(diamondSorted, diamondTasks, diamondEdges, 2);
+    // Every task must appear in exactly one worker's assignment.
+    const assigned = schedule.flatMap((w) => w.tasks.map((t) => t.id)).sort();
+    expect(assigned).toEqual(diamondSorted);
+  });
+
+  it('respects dependency ordering within schedule', () => {
+    const { schedule } = scheduleWorkers(diamondSorted, diamondTasks, diamondEdges, 2);
+    const slots = schedule.flatMap((w) => w.tasks);
+    const slotFor = (id: string) => slots.find((s) => s.id === id);
+    const d = slotFor('task:D');
+    const b = slotFor('task:B');
+    const c = slotFor('task:C');
+    expect(d).toBeDefined();
+    expect(b).toBeDefined();
+    expect(c).toBeDefined();
+    // D may only start once both B and C have finished.
+    expect(d!.start).toBeGreaterThanOrEqual(b!.start + b!.hours);
+    expect(d!.start).toBeGreaterThanOrEqual(c!.start + c!.hours);
+  });
+
+  it('treats DONE tasks as weight 0 (no worker time consumed)', () => {
+    const pair = makeTasks(
+      { id: 'task:A', status: 'DONE', hours: 8 },
+      { id: 'task:B', hours: 1 },
+    );
+    const deps: DepEdge[] = [{ from: 'task:B', to: 'task:A' }];
+    const { makespan } = scheduleWorkers(['task:A', 'task:B'], pair, deps, 2);
+    // A is DONE so it costs nothing; only B's single hour counts.
+    expect(makespan).toBe(1);
+  });
+});
+
+// ---------------------------------------------------------------------------
+// transitiveReduction
+// ---------------------------------------------------------------------------
+describe('transitiveReduction', () => {
+  const contains = (es: DepEdge[], from: string, to: string) =>
+    es.some((e) => e.from === from && e.to === to);
+
+  it('removes redundant edge in diamond+shortcut', () => {
+    const withShortcut: DepEdge[] = [
+      ...diamondEdges,
+      { from: 'task:D', to: 'task:A' }, // redundant: implied by D→B→A
+    ];
+    const kept = transitiveReduction(withShortcut);
+    expect(contains(kept, 'task:D', 'task:A')).toBe(false);
+    expect(kept).toHaveLength(4);
+  });
+
+  it('keeps all edges in diamond without shortcuts', () => {
+    expect(transitiveReduction(diamondEdges)).toHaveLength(4);
+  });
+
+  it('returns empty array for empty graph', () => {
+    expect(transitiveReduction([])).toEqual([]);
+  });
+
+  it('keeps edges in linear chain (no shortcuts)', () => {
+    expect(transitiveReduction(linearEdges)).toHaveLength(2);
+  });
+
+  it('removes shortcut in linear chain with skip edge', () => {
+    const withSkip: DepEdge[] = [
+      ...linearEdges,
+      { from: 'task:C', to: 'task:A' }, // redundant: implied by C→B→A
+    ];
+    const kept = transitiveReduction(withSkip);
+    expect(kept).toHaveLength(2);
+    expect(contains(kept, 'task:C', 'task:A')).toBe(false);
+  });
+});
+
+// ---------------------------------------------------------------------------
+// transitiveClosure
+// ---------------------------------------------------------------------------
+describe('transitiveClosure', () => {
+  const contains = (es: DepEdge[], from: string, to: string) =>
+    es.some((e) => e.from === from && e.to === to);
+
+  it('adds D→A in diamond graph', () => {
+    const closure = transitiveClosure(diamondEdges);
+    expect(contains(closure, 'task:D', 'task:A')).toBe(true);
+    // 4 original edges plus the single implied edge D→A.
+    expect(closure).toHaveLength(5);
+  });
+
+  it('adds C→A in linear chain', () => {
+    const closure = transitiveClosure(linearEdges);
+    expect(contains(closure, 'task:C', 'task:A')).toBe(true);
+    // 2 original edges plus the implied C→A.
+    expect(closure).toHaveLength(3);
+  });
+
+  it('returns empty array for empty graph', () => {
+    expect(transitiveClosure([])).toEqual([]);
+  });
+
+  it('returns original edge when no transitives possible', () => {
+    expect(transitiveClosure([{ from: 'task:B', to: 'task:A' }])).toHaveLength(1);
+  });
+});
+
+// ---------------------------------------------------------------------------
+// computeAntiChains
+// ---------------------------------------------------------------------------
+describe('computeAntiChains', () => {
+  // Returns a sorted copy of wave i (empty when out of range).
+  const sortedWave = (waves: string[][], i: number) => [...(waves[i] ?? [])].sort();
+
+  it('produces 3 waves for diamond: [A], [B,C], [D]', () => {
+    const waves = computeAntiChains(diamondSorted, diamondEdges, diamondTasks);
+    expect(waves).toHaveLength(3);
+    expect(waves[0]).toEqual(['task:A']);
+    expect(sortedWave(waves, 1)).toEqual(['task:B', 'task:C']);
+    expect(waves[2]).toEqual(['task:D']);
+  });
+
+  it('produces N waves for linear chain', () => {
+    const waves = computeAntiChains(linearSorted, linearEdges, linearTasks);
+    expect(waves).toEqual([['task:A'], ['task:B'], ['task:C']]);
+  });
+
+  it('returns empty array for empty graph', () => {
+    expect(computeAntiChains([], [], [])).toEqual([]);
+  });
+
+  it('puts all isolated nodes in one wave', () => {
+    const loose = makeTasks({ id: 'task:A' }, { id: 'task:B' }, { id: 'task:C' });
+    const waves = computeAntiChains(['task:A', 'task:B', 'task:C'], [], loose);
+    expect(waves).toHaveLength(1);
+    expect(sortedWave(waves, 0)).toEqual(['task:A', 'task:B', 'task:C']);
+  });
+
+  it('excludes DONE tasks from waves', () => {
+    const mixed = makeTasks(
+      { id: 'task:A', status: 'DONE' },
+      { id: 'task:B' },
+      { id: 'task:C' },
+      { id: 'task:D' },
+    );
+    const waves = computeAntiChains(diamondSorted, diamondEdges, mixed);
+    // A is DONE → dropped. B and C are unblocked → wave 0. D follows → wave 1.
+    expect(waves).toHaveLength(2);
+    expect(sortedWave(waves, 0)).toEqual(['task:B', 'task:C']);
+    expect(waves[1]).toEqual(['task:D']);
+  });
+});
+
+// ---------------------------------------------------------------------------
+// reverseReachability
+// ---------------------------------------------------------------------------
+describe('reverseReachability', () => {
+  const sortedReach = (id: string, es: DepEdge[]) => [...reverseReachability(id, es)].sort();
+
+  it('returns all downstream tasks for root in diamond', () => {
+    expect(sortedReach('task:A', diamondEdges)).toEqual(['task:B', 'task:C', 'task:D']);
+  });
+
+  it('returns only direct dependent for leaf-adjacent node', () => {
+    expect(reverseReachability('task:B', diamondEdges)).toEqual(['task:D']);
+  });
+
+  it('returns empty for leaf node', () => {
+    expect(reverseReachability('task:D', diamondEdges)).toEqual([]);
+  });
+
+  it('returns empty for unknown node', () => {
+    expect(reverseReachability('task:Z', diamondEdges)).toEqual([]);
+  });
+
+  it('returns empty for empty graph', () => {
+    expect(reverseReachability('task:A', [])).toEqual([]);
+  });
+
+  it('returns all downstream in linear chain', () => {
+    expect(sortedReach('task:A', linearEdges)).toEqual(['task:B', 'task:C']);
+  });
+});
+
+// ---------------------------------------------------------------------------
+// computeProvenance
+// ---------------------------------------------------------------------------
+describe('computeProvenance', () => {
+  it('traces frontier task D back to root A in diamond', () => {
+    expect(computeProvenance(['task:D'], diamondEdges).get('task:D')).toEqual(['task:A']);
+  });
+
+  it('traces mid-level tasks to their roots', () => {
+    const prov = computeProvenance(['task:B', 'task:C'], diamondEdges);
+    for (const id of ['task:B', 'task:C']) {
+      expect(prov.get(id)).toEqual(['task:A']);
+    }
+  });
+
+  it('returns self as root for root tasks', () => {
+    expect(computeProvenance(['task:A'], diamondEdges).get('task:A')).toEqual(['task:A']);
+  });
+
+  it('returns empty map for empty input', () => {
+    expect(computeProvenance([], diamondEdges).size).toBe(0);
+  });
+
+  it('traces through linear chain to root', () => {
+    expect(computeProvenance(['task:C'], linearEdges).get('task:C')).toEqual(['task:A']);
+  });
+
+  it('handles multiple roots correctly', () => {
+    const twoRoots: DepEdge[] = [
+      { from: 'task:C', to: 'task:A' },
+      { from: 'task:C', to: 'task:B' },
+    ];
+    const roots = computeProvenance(['task:C'], twoRoots).get('task:C');
+    expect([...(roots ?? [])].sort()).toEqual(['task:A', 'task:B']);
+  });
+});