diff --git a/.claude/pipeline.config.json b/.claude/pipeline.config.json index 983b52d..b6927a2 100644 --- a/.claude/pipeline.config.json +++ b/.claude/pipeline.config.json @@ -493,5 +493,67 @@ "showSpeedupEstimate": true, "includeTimeline": true } + }, + "caching": { + "enabled": true, + "strategy": "content-hash", + "directory": ".claude/pipeline-cache", + "maxAgedays": 7, + "inputCategories": { + "source": ["src/**/*.{ts,tsx,js,jsx}", "components/**/*.{ts,tsx}"], + "styles": ["src/**/*.css", "styles/**/*.css", "tailwind.config.*"], + "tests": ["**/*.test.{ts,tsx,js,jsx}", "**/*.spec.{ts,tsx}"], + "config": ["package.json", "tsconfig.json", "vite.config.*", "next.config.*"], + "tokens": ["design-tokens.lock.json", "tailwind.config.*"], + "figma": ["build-spec.json", "design-tokens.lock.json"] + }, + "phaseInputs": { + "token-sync": ["tokens", "config"], + "intake": ["figma"], + "token-lock": ["figma", "tokens"], + "tdd-scaffold": ["figma", "tokens", "tests"], + "component-build": ["source", "styles", "tokens", "tests", "config"], + "storybook": ["source", "styles"], + "visual-diff": ["source", "styles"], + "dark-mode": ["source", "styles"], + "e2e-tests": ["source", "tests", "config"], + "cross-browser": ["source", "styles"], + "quality-gate": ["source", "tests", "config"], + "responsive": ["source", "styles"] + }, + "invalidateOnConfigChange": true + }, + "profiling": { + "enabled": true, + "metricsDirectory": ".claude/pipeline-cache/metrics", + "historyLimit": 50, + "slowStageThresholdMs": 30000, + "alerts": { + "enabled": true, + "slowStageWarning": true, + "memoryThresholdMb": 1024, + "trendDegradationPercent": 20 + }, + "reporting": { + "generateOnComplete": true, + "formats": ["md", "json"], + "includeMemoryMetrics": true, + "includeTimeline": true + } + }, + "incrementalBuild": { + "enabled": true, + "parallelPhases": true, + "skipCachedPhases": true, + "forceRebuildOn": ["package.json", "pnpm-lock.yaml", "pipeline.config.json"], + "maxParallelPhases": 4 + 
}, + "dashboard": { + "enabled": true, + "outputDirectory": ".claude/visual-qa/dashboard", + "formats": ["html", "md"], + "autoGenerateAfterBuild": true, + "includeCharts": true, + "retentionDays": 30 } } diff --git a/.claude/visual-qa/dashboard/.gitkeep b/.claude/visual-qa/dashboard/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/CLAUDE.md b/CLAUDE.md index 9c21ece..3223cca 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -104,6 +104,24 @@ node scripts/visual-diff.js --batch [--output-dir di # Run visual regression tests against baselines ./scripts/regression-test.sh [url] [--update-baselines] [--json] + +# Incremental build with caching and profiling +./scripts/incremental-build.sh [phase|all] [--force] [--parallel] + +# Pipeline cache management +node scripts/pipeline-cache.js status # Show cache status +node scripts/pipeline-cache.js check # Check cache validity +node scripts/pipeline-cache.js invalidate # Invalidate cache + +# Stage profiling and performance analysis +node scripts/stage-profiler.js report # Generate performance report +node scripts/stage-profiler.js analyze # Analyze slow stages +node scripts/stage-profiler.js history # View build history + +# Build performance dashboard +node scripts/metrics-dashboard.js generate # Generate HTML dashboard +node scripts/metrics-dashboard.js summary # Show metrics summary +node scripts/metrics-dashboard.js trends # Show performance trends ``` ## Development Commands @@ -502,7 +520,19 @@ gh issue create # Create issue ./scripts/regression-test.sh # Visual regression testing ``` +**Build Performance & Caching:** +```bash +./scripts/incremental-build.sh # Incremental build with caching +./scripts/incremental-build.sh --parallel # Parallel execution +./scripts/incremental-build.sh --force # Force rebuild (ignore cache) +node scripts/pipeline-cache.js status # Cache status +node scripts/stage-profiler.js report # Performance report +node scripts/stage-profiler.js analyze # Slow stage analysis +node 
scripts/metrics-dashboard.js generate # HTML dashboard +node scripts/metrics-dashboard.js summary # Quick metrics summary +``` + --- -**Last Updated:** 2026-03-25 -**Architecture:** 53 agents, 19 skills, 4 plugins + gh CLI, Figma + Canva + Playwright MCP, 21 scripts, 8 hooks +**Last Updated:** 2026-03-30 +**Architecture:** 53 agents, 19 skills, 4 plugins + gh CLI, Figma + Canva + Playwright MCP, 25 scripts, 8 hooks diff --git a/scripts/README.md b/scripts/README.md index ce55f96..b118bb9 100644 --- a/scripts/README.md +++ b/scripts/README.md @@ -1,6 +1,6 @@ # Scripts Reference -**Last Updated:** 2026-03-17 +**Last Updated:** 2026-03-30 All scripts live in `scripts/` and are designed to run from the project root. @@ -79,6 +79,64 @@ All scripts live in `scripts/` and are designed to run from the project root. - **Exit codes**: 0 = pass, 1 = fail (above threshold), 2 = error - **Config**: Reads defaults from `.claude/pipeline.config.json` +## Build Performance & Caching + +### Incremental Build (`incremental-build.sh`) +- **Purpose**: Run pipeline phases with intelligent caching and profiling +- **Usage**: + ```bash + ./scripts/incremental-build.sh # Run all quality checks + ./scripts/incremental-build.sh lint # Run specific phase + ./scripts/incremental-build.sh quality # Run full quality gate + ./scripts/incremental-build.sh --force # Ignore cache, force rebuild + ./scripts/incremental-build.sh --parallel # Run independent phases in parallel + ./scripts/incremental-build.sh --no-cache # Disable caching + ``` +- **Phases**: lint, types, tests, build, bundle, a11y, tokens, quality, all +- **Features**: Hash-based caching, automatic phase skipping, stage profiling + +### Pipeline Cache (`pipeline-cache.js`) +- **Purpose**: Content-addressable caching for pipeline phases using SHA-256 hashing +- **Usage**: + ```bash + node scripts/pipeline-cache.js status # Show cache status + node scripts/pipeline-cache.js check # Check if phase cache is valid + node 
scripts/pipeline-cache.js hash # Hash a file or directory + node scripts/pipeline-cache.js invalidate # Invalidate a phase cache + node scripts/pipeline-cache.js invalidate all # Invalidate all caches + node scripts/pipeline-cache.js clean --max-age 7 # Clean old entries + ``` +- **Features**: Phase-level cache, file hash tracking, cache metrics + +### Stage Profiler (`stage-profiler.js`) +- **Purpose**: Track timing and performance metrics for each pipeline stage +- **Usage**: + ```bash + node scripts/stage-profiler.js start # Start timing a stage + node scripts/stage-profiler.js end # End timing a stage + node scripts/stage-profiler.js complete # Archive current run + node scripts/stage-profiler.js report # Generate performance report + node scripts/stage-profiler.js report --format md # Markdown report + node scripts/stage-profiler.js history --last 10 # Show recent runs + node scripts/stage-profiler.js analyze # Analyze performance trends + node scripts/stage-profiler.js status # Show current run status + ``` +- **Features**: Sub-second timing, memory tracking, slow stage detection, trend analysis + +### Metrics Dashboard (`metrics-dashboard.js`) +- **Purpose**: Generate visual build performance dashboards +- **Usage**: + ```bash + node scripts/metrics-dashboard.js generate # Generate HTML dashboard + node scripts/metrics-dashboard.js generate --format md # Markdown dashboard + node scripts/metrics-dashboard.js summary # Show performance summary + node scripts/metrics-dashboard.js trends # Show 7-day trends + node scripts/metrics-dashboard.js trends --period 30d # 30-day trends + node scripts/metrics-dashboard.js compare # Compare two runs + ``` +- **Output**: HTML/Markdown dashboards in `.claude/visual-qa/dashboard/` +- **Features**: Cache efficiency tracking, stage breakdown, historical trends + ## Project Setup ### Setup Project (`setup-project.sh`) diff --git a/scripts/incremental-build.sh b/scripts/incremental-build.sh new file mode 100644 index 
0000000..4b1fd24 --- /dev/null +++ b/scripts/incremental-build.sh @@ -0,0 +1,401 @@ +#!/usr/bin/env bash +set -euo pipefail + +# Incremental Build Runner +# Orchestrates pipeline with caching, profiling, and performance optimization +# +# Usage: +# ./scripts/incremental-build.sh [phase|all] [--force] [--profile] [--no-cache] +# +# Features: +# - Hash-based cache checking before each phase +# - Automatic phase skipping when cache is valid +# - Stage profiling and metrics collection +# - Performance dashboard generation + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +PROJECT_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)" + +# Colors +RED='\033[0;31m' +GREEN='\033[0;32m' +YELLOW='\033[0;33m' +BLUE='\033[0;34m' +MAGENTA='\033[0;35m' +CYAN='\033[0;36m' +NC='\033[0m' # No Color + +# Configuration +CACHE_SCRIPT="$SCRIPT_DIR/pipeline-cache.js" +PROFILER_SCRIPT="$SCRIPT_DIR/stage-profiler.js" +DASHBOARD_SCRIPT="$SCRIPT_DIR/metrics-dashboard.js" + +# Parse arguments +PHASE="all" +FORCE_BUILD=false +ENABLE_PROFILING=true +ENABLE_CACHE=true +VERBOSE=false +PARALLEL=false + +while [[ $# -gt 0 ]]; do + case "$1" in + --force|-f) + FORCE_BUILD=true + shift + ;; + --no-profile) + ENABLE_PROFILING=false + shift + ;; + --no-cache) + ENABLE_CACHE=false + shift + ;; + --verbose|-v) + VERBOSE=true + shift + ;; + --parallel|-p) + PARALLEL=true + shift + ;; + --help|-h) + echo "Incremental Build Runner" + echo "" + echo "Usage: ./scripts/incremental-build.sh [phase|all] [options]" + echo "" + echo "Phases:" + echo " all Run all phases (default)" + echo " lint Run linting and formatting" + echo " types Run TypeScript type checking" + echo " tests Run tests with coverage" + echo " build Run production build" + echo " bundle Check bundle size" + echo " a11y Run accessibility checks" + echo " tokens Verify design tokens" + echo " quality Run full quality gate" + echo "" + echo "Options:" + echo " --force, -f Force rebuild, ignore cache" + echo " --no-profile Disable performance 
profiling" + echo " --no-cache Disable cache checking" + echo " --verbose, -v Show detailed output" + echo " --parallel, -p Run independent phases in parallel" + echo " --help, -h Show this help" + exit 0 + ;; + *) + PHASE="$1" + shift + ;; + esac +done + +# Helper functions +log_info() { + echo -e "${BLUE}ℹ${NC} $1" +} + +log_success() { + echo -e "${GREEN}✓${NC} $1" +} + +log_warning() { + echo -e "${YELLOW}⚠${NC} $1" +} + +log_error() { + echo -e "${RED}✗${NC} $1" +} + +log_phase() { + echo "" + echo -e "${MAGENTA}━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${NC}" + echo -e "${MAGENTA}▶${NC} ${CYAN}$1${NC}" + echo -e "${MAGENTA}━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${NC}" +} + +# Check if phase cache is valid +check_cache() { + local phase="$1" + + if [[ "$ENABLE_CACHE" != true ]]; then + return 1 # Cache disabled, run phase + fi + + if [[ "$FORCE_BUILD" == true ]]; then + return 1 # Force build, run phase + fi + + if [[ -f "$CACHE_SCRIPT" ]]; then + if node "$CACHE_SCRIPT" check "$phase" > /dev/null 2>&1; then + return 0 # Cache valid, skip phase + fi + fi + + return 1 # Cache invalid or not found, run phase +} + +# Update cache after successful phase +update_cache() { + local phase="$1" + local duration="$2" + + if [[ "$ENABLE_CACHE" == true ]] && [[ -f "$CACHE_SCRIPT" ]]; then + node "$CACHE_SCRIPT" update "$phase" "$duration" > /dev/null 2>&1 || true + fi +} + +# Record cache hit for metrics +record_cache_hit() { + local phase="$1" + local saved_time="$2" + + if [[ -f "$CACHE_SCRIPT" ]]; then + node "$CACHE_SCRIPT" hit "$saved_time" --phase "$phase" > /dev/null 2>&1 || true + fi +} + +# Start profiling a phase +start_profile() { + local phase="$1" + + if [[ "$ENABLE_PROFILING" == true ]] && [[ -f "$PROFILER_SCRIPT" ]]; then + node "$PROFILER_SCRIPT" start "$phase" > /dev/null 2>&1 || true + fi +} + +# End profiling a phase +end_profile() { + local phase="$1" + local status="${2:-pass}" + + if [[ "$ENABLE_PROFILING" == 
true ]] && [[ -f "$PROFILER_SCRIPT" ]]; then + node "$PROFILER_SCRIPT" end "$phase" --status "$status" > /dev/null 2>&1 || true + fi +} + +# Run a phase with caching and profiling +run_phase() { + local phase="$1" + local description="$2" + shift 2 + local command="$@" + + log_phase "$description" + + # Check cache + if check_cache "$phase"; then + local cached_duration + cached_duration=$(node "$CACHE_SCRIPT" check "$phase" --json 2>/dev/null | grep -o '"duration":[0-9]*' | cut -d: -f2 || echo "0") + log_success "Cache HIT - skipping (saved ~${cached_duration}ms)" + record_cache_hit "$phase" "$cached_duration" + return 0 + fi + + # Run phase with profiling + start_profile "$phase" + + local start_time + start_time=$(date +%s%3N) + + local status=0 + if $VERBOSE; then + eval "$command" || status=$? + else + eval "$command" 2>&1 || status=$? + fi + + local end_time + end_time=$(date +%s%3N) + local duration=$((end_time - start_time)) + + if [[ $status -eq 0 ]]; then + end_profile "$phase" "pass" + update_cache "$phase" "$duration" + log_success "Completed in ${duration}ms" + else + end_profile "$phase" "fail" + log_error "Failed after ${duration}ms" + return $status + fi +} + +# Phase implementations +phase_lint() { + run_phase "lint" "Lint & Format" \ + "$SCRIPT_DIR/lint-and-format.sh" +} + +phase_types() { + run_phase "types" "TypeScript Type Check" \ + "$SCRIPT_DIR/check-types.sh" +} + +phase_tests() { + run_phase "tests" "Tests with Coverage" \ + "$SCRIPT_DIR/run-tests.sh" +} + +phase_build() { + run_phase "build" "Production Build" \ + "pnpm build" +} + +phase_bundle() { + run_phase "bundle" "Bundle Size Analysis" \ + "$SCRIPT_DIR/check-bundle-size.sh" +} + +phase_a11y() { + run_phase "a11y" "Accessibility Checks" \ + "$SCRIPT_DIR/check-accessibility.sh" +} + +phase_tokens() { + run_phase "tokens" "Design Token Verification" \ + "$SCRIPT_DIR/verify-tokens.sh" +} + +# Quality gate runs multiple checks +phase_quality() { + log_phase "Quality Gate" + + local 
failed=0 + local phases_run=0 + local phases_skipped=0 + local phases_failed=0 + + # Run independent phases + if [[ "$PARALLEL" == true ]]; then + log_info "Running quality checks in parallel..." + + # Run in background + (phase_lint) & + local lint_pid=$! + + (phase_types) & + local types_pid=$! + + # Wait for all + wait $lint_pid || phases_failed=$((phases_failed + 1)) + wait $types_pid || phases_failed=$((phases_failed + 1)) + + # Run dependent phases sequentially + phase_tests || phases_failed=$((phases_failed + 1)) + phase_build || phases_failed=$((phases_failed + 1)) + + # Run post-build checks in parallel + (phase_bundle) & + local bundle_pid=$! + + (phase_a11y) & + local a11y_pid=$! + + (phase_tokens) & + local tokens_pid=$! + + wait $bundle_pid || phases_failed=$((phases_failed + 1)) + wait $a11y_pid || phases_failed=$((phases_failed + 1)) + wait $tokens_pid || phases_failed=$((phases_failed + 1)) + else + # Sequential execution + phase_lint || phases_failed=$((phases_failed + 1)) + phase_types || phases_failed=$((phases_failed + 1)) + phase_tests || phases_failed=$((phases_failed + 1)) + phase_build || phases_failed=$((phases_failed + 1)) + phase_bundle || phases_failed=$((phases_failed + 1)) + phase_a11y || phases_failed=$((phases_failed + 1)) + phase_tokens || phases_failed=$((phases_failed + 1)) + fi + + echo "" + if [[ $phases_failed -eq 0 ]]; then + log_success "Quality gate passed!" 
+ else + log_error "Quality gate failed ($phases_failed phase(s) failed)" + return 1 + fi +} + +# Main execution +main() { + echo "" + echo -e "${CYAN}╔══════════════════════════════════════════════════════════════╗${NC}" + echo -e "${CYAN}║${NC} ${MAGENTA}Incremental Build System${NC} ${CYAN}║${NC}" + echo -e "${CYAN}╚══════════════════════════════════════════════════════════════╝${NC}" + echo "" + + log_info "Phase: $PHASE" + log_info "Cache: $(if [[ "$ENABLE_CACHE" == true ]]; then echo "enabled"; else echo "disabled"; fi)" + log_info "Profiling: $(if [[ "$ENABLE_PROFILING" == true ]]; then echo "enabled"; else echo "disabled"; fi)" + log_info "Parallel: $(if [[ "$PARALLEL" == true ]]; then echo "enabled"; else echo "disabled"; fi)" + + # Record overall start + local overall_start + overall_start=$(date +%s%3N) + + # Run requested phase(s) + case "$PHASE" in + lint) + phase_lint + ;; + types) + phase_types + ;; + tests) + phase_tests + ;; + build) + phase_build + ;; + bundle) + phase_bundle + ;; + a11y) + phase_a11y + ;; + tokens) + phase_tokens + ;; + quality|all) + phase_quality + ;; + *) + log_error "Unknown phase: $PHASE" + echo "Run with --help for usage information" + exit 1 + ;; + esac + + local overall_end + overall_end=$(date +%s%3N) + local overall_duration=$((overall_end - overall_start)) + + echo "" + echo -e "${GREEN}━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${NC}" + log_success "Total time: ${overall_duration}ms" + echo -e "${GREEN}━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${NC}" + + # Complete profiler run + if [[ "$ENABLE_PROFILING" == true ]] && [[ -f "$PROFILER_SCRIPT" ]]; then + node "$PROFILER_SCRIPT" complete > /dev/null 2>&1 || true + + # Generate dashboard after significant runs + if [[ -f "$DASHBOARD_SCRIPT" ]]; then + log_info "Updating metrics dashboard..." 
+ node "$DASHBOARD_SCRIPT" generate --format md > /dev/null 2>&1 || true + fi + fi + + # Show cache status + if [[ "$ENABLE_CACHE" == true ]] && [[ -f "$CACHE_SCRIPT" ]]; then + echo "" + log_info "Cache status:" + node "$CACHE_SCRIPT" status 2>/dev/null | head -15 || true + fi +} + +# Run main +main "$@" diff --git a/scripts/metrics-dashboard.js b/scripts/metrics-dashboard.js new file mode 100644 index 0000000..d90ce4a --- /dev/null +++ b/scripts/metrics-dashboard.js @@ -0,0 +1,717 @@ +#!/usr/bin/env node +/** + * metrics-dashboard.js — Build performance metrics dashboard generator + * + * Usage: + * node scripts/metrics-dashboard.js generate [--output ] [--format html|md|json] + * node scripts/metrics-dashboard.js summary [--json] + * node scripts/metrics-dashboard.js trends [--period 7d|30d|all] + * node scripts/metrics-dashboard.js compare + * + * Features: + * - Comprehensive build metrics visualization + * - Cache efficiency tracking + * - Performance trend analysis + * - Stage-by-stage breakdowns + * - Actionable optimization recommendations + */ + +import { + readFileSync, + writeFileSync, + existsSync, + mkdirSync, +} from "fs"; +import { join, dirname } from "path"; +import { fileURLToPath } from "url"; + +const __filename = fileURLToPath(import.meta.url); +const __dirname = dirname(__filename); +const PROJECT_ROOT = join(__dirname, ".."); + +// Paths +const METRICS_DIR = join(PROJECT_ROOT, ".claude", "pipeline-cache", "metrics"); +const CACHE_DIR = join(PROJECT_ROOT, ".claude", "pipeline-cache"); +const CACHE_MANIFEST = join(CACHE_DIR, "cache-manifest.json"); +const HISTORY_FILE = join(METRICS_DIR, "history.json"); +const DASHBOARD_DIR = join(PROJECT_ROOT, ".claude", "visual-qa", "dashboard"); + +// Load data files +function loadHistory() { + if (!existsSync(HISTORY_FILE)) { + return { runs: [] }; + } + return JSON.parse(readFileSync(HISTORY_FILE, "utf-8")); +} + +function loadCacheManifest() { + if (!existsSync(CACHE_MANIFEST)) { + return { phases: {}, 
metrics: { cacheHits: 0, cacheMisses: 0, timeSaved: 0 } }; + } + return JSON.parse(readFileSync(CACHE_MANIFEST, "utf-8")); +} + +// Calculate summary statistics +function calculateSummary() { + const history = loadHistory(); + const cache = loadCacheManifest(); + const runs = history.runs; + + if (runs.length === 0) { + return { + error: "No build history found", + recommendation: "Run the pipeline to collect metrics", + }; + } + + // Basic stats + const totalRuns = runs.length; + const successfulRuns = runs.filter((r) => r.status === "complete").length; + const failedRuns = runs.filter((r) => r.status !== "complete").length; + + // Duration stats + const durations = runs.filter((r) => r.totalDuration).map((r) => r.totalDuration); + const avgDuration = durations.length > 0 + ? durations.reduce((a, b) => a + b, 0) / durations.length + : 0; + const minDuration = durations.length > 0 ? Math.min(...durations) : 0; + const maxDuration = durations.length > 0 ? Math.max(...durations) : 0; + + // Recent trend (last 7 runs) + const recentRuns = runs.slice(-7); + const recentAvg = recentRuns.length > 0 + ? recentRuns.filter((r) => r.totalDuration).reduce((a, r) => a + r.totalDuration, 0) / recentRuns.length + : 0; + + // Cache efficiency + const cacheMetrics = cache.metrics || {}; + const totalCacheOps = (cacheMetrics.cacheHits || 0) + (cacheMetrics.cacheMisses || 0); + const cacheHitRate = totalCacheOps > 0 + ? 
((cacheMetrics.cacheHits || 0) / totalCacheOps) * 100 + : 0; + + // Stage analysis + const stageStats = {}; + for (const run of runs) { + if (!run.stages) continue; + for (const [stage, data] of Object.entries(run.stages)) { + if (!stageStats[stage]) { + stageStats[stage] = { durations: [], failures: 0, successes: 0 }; + } + if (data.duration != null) { + stageStats[stage].durations.push(data.duration); + } + if (data.status === "pass") stageStats[stage].successes++; + else if (data.status === "fail") stageStats[stage].failures++; + } + } + + // Find slowest stages + const stageAvgs = Object.entries(stageStats).map(([stage, stats]) => ({ + stage, + avgDuration: stats.durations.length > 0 + ? stats.durations.reduce((a, b) => a + b, 0) / stats.durations.length + : 0, + successRate: stats.successes + stats.failures > 0 + ? (stats.successes / (stats.successes + stats.failures)) * 100 + : 100, + })); + + const slowestStages = stageAvgs.sort((a, b) => b.avgDuration - a.avgDuration).slice(0, 5); + + return { + overview: { + totalRuns, + successfulRuns, + failedRuns, + successRate: ((successfulRuns / totalRuns) * 100).toFixed(1), + }, + duration: { + average: Math.round(avgDuration), + min: minDuration, + max: maxDuration, + recent: Math.round(recentAvg), + trend: recentAvg < avgDuration ? "improving" : recentAvg > avgDuration ? 
"degrading" : "stable", + }, + cache: { + hits: cacheMetrics.cacheHits || 0, + misses: cacheMetrics.cacheMisses || 0, + hitRate: cacheHitRate.toFixed(1), + timeSaved: cacheMetrics.timeSaved || 0, + }, + slowestStages, + stageCount: Object.keys(stageStats).length, + }; +} + +// Calculate trends over a period +function calculateTrends(period = "7d") { + const history = loadHistory(); + let runs = history.runs; + + // Filter by period + if (period !== "all") { + const days = parseInt(period, 10) || 7; + const cutoff = Date.now() - days * 24 * 60 * 60 * 1000; + runs = runs.filter((r) => new Date(r.timestamp).getTime() > cutoff); + } + + if (runs.length < 2) { + return { error: "Not enough data for trend analysis", runs: runs.length }; + } + + // Calculate daily averages + const dailyStats = {}; + for (const run of runs) { + const date = run.timestamp.slice(0, 10); + if (!dailyStats[date]) { + dailyStats[date] = { durations: [], successes: 0, failures: 0 }; + } + if (run.totalDuration) { + dailyStats[date].durations.push(run.totalDuration); + } + if (run.status === "complete") dailyStats[date].successes++; + else dailyStats[date].failures++; + } + + const dailyTrend = Object.entries(dailyStats) + .sort(([a], [b]) => a.localeCompare(b)) + .map(([date, stats]) => ({ + date, + avgDuration: stats.durations.length > 0 + ? Math.round(stats.durations.reduce((a, b) => a + b, 0) / stats.durations.length) + : 0, + runs: stats.durations.length, + successRate: stats.successes + stats.failures > 0 + ? 
Math.round((stats.successes / (stats.successes + stats.failures)) * 100) + : 100, + })); + + // Calculate trend direction + if (dailyTrend.length >= 2) { + const firstHalf = dailyTrend.slice(0, Math.floor(dailyTrend.length / 2)); + const secondHalf = dailyTrend.slice(Math.floor(dailyTrend.length / 2)); + + const firstAvg = firstHalf.reduce((a, d) => a + d.avgDuration, 0) / firstHalf.length; + const secondAvg = secondHalf.reduce((a, d) => a + d.avgDuration, 0) / secondHalf.length; + + const percentChange = ((secondAvg - firstAvg) / firstAvg) * 100; + + return { + period, + dataPoints: dailyTrend.length, + daily: dailyTrend, + trend: { + direction: percentChange < -5 ? "improving" : percentChange > 5 ? "degrading" : "stable", + percentChange: percentChange.toFixed(1), + }, + }; + } + + return { period, daily: dailyTrend }; +} + +// Compare two runs +function compareRuns(runId1, runId2) { + const history = loadHistory(); + const run1 = history.runs.find((r) => r.runId === runId1 || r.runId.includes(runId1)); + const run2 = history.runs.find((r) => r.runId === runId2 || r.runId.includes(runId2)); + + if (!run1 || !run2) { + return { error: "One or both runs not found" }; + } + + const comparison = { + run1: { id: run1.runId, timestamp: run1.timestamp, duration: run1.totalDuration }, + run2: { id: run2.runId, timestamp: run2.timestamp, duration: run2.totalDuration }, + durationDiff: (run2.totalDuration || 0) - (run1.totalDuration || 0), + stages: {}, + }; + + // Compare stages + const allStages = new Set([ + ...Object.keys(run1.stages || {}), + ...Object.keys(run2.stages || {}), + ]); + + for (const stage of allStages) { + const stage1 = run1.stages?.[stage] || {}; + const stage2 = run2.stages?.[stage] || {}; + + comparison.stages[stage] = { + run1: { duration: stage1.duration, status: stage1.status }, + run2: { duration: stage2.duration, status: stage2.status }, + durationDiff: (stage2.duration || 0) - (stage1.duration || 0), + improved: (stage2.duration || 0) < 
(stage1.duration || 0), + }; + } + + return comparison; +} + +// Generate HTML dashboard +function generateHtmlDashboard() { + const summary = calculateSummary(); + const trends = calculateTrends("7d"); + + if (summary.error) { + return `

No Data

${summary.error}

`; + } + + const html = ` + + + + + Pipeline Performance Dashboard + + + +
+

🚀 Pipeline Performance Dashboard

+ +
+ +
+

Build Overview

+
${summary.overview.totalRuns}
+
Total pipeline runs
+
+ ${summary.overview.successfulRuns} passed + ${summary.overview.failedRuns} failed +
+
Success rate: ${summary.overview.successRate}%
+
+ + +
+

Build Duration

+
${(summary.duration.average / 1000).toFixed(1)}s
+
Average duration
+
+
+
${(summary.duration.min / 1000).toFixed(1)}s
+
Fastest
+
+
+
${(summary.duration.max / 1000).toFixed(1)}s
+
Slowest
+
+
+
+ Trend: ${summary.duration.trend} +
+
+ + +
+

Cache Efficiency

+
${summary.cache.hitRate}%
+
Cache hit rate
+
+
Hits: ${summary.cache.hits} | Misses: ${summary.cache.misses}
+
Time saved: ${(summary.cache.timeSaved / 1000).toFixed(1)}s
+
+
+ + +
+

Slowest Stages

+
+ ${summary.slowestStages.map((s) => { + const maxDuration = summary.slowestStages[0]?.avgDuration || 1; + const pct = (s.avgDuration / maxDuration) * 100; + return ` +
+
${s.stage}
+
+
${(s.avgDuration / 1000).toFixed(1)}s
+
+
`; + }).join('')} +
+
+ + +
+

7-Day Trend

+ ${trends.daily ? ` + + + + + + + + + + + ${trends.daily.slice(-7).map((d) => ` + + + + + + + `).join('')} + +
DateRunsAvg DurationSuccess Rate
${d.date}${d.runs}${(d.avgDuration / 1000).toFixed(1)}s${d.successRate}%
+ ` : '
Not enough data for trends
'} +
+
+ +
+ Generated: ${new Date().toISOString()} +
+
+ +`; + + return html; +} + +// Generate Markdown dashboard +function generateMarkdownDashboard() { + const summary = calculateSummary(); + const trends = calculateTrends("7d"); + + if (summary.error) { + return `# Pipeline Performance Dashboard\n\n**No data available:** ${summary.error}`; + } + + const lines = [ + "# Pipeline Performance Dashboard", + "", + `*Generated: ${new Date().toISOString()}*`, + "", + "## Overview", + "", + `| Metric | Value |`, + `|--------|-------|`, + `| Total Runs | ${summary.overview.totalRuns} |`, + `| Successful | ${summary.overview.successfulRuns} |`, + `| Failed | ${summary.overview.failedRuns} |`, + `| Success Rate | ${summary.overview.successRate}% |`, + "", + "## Build Duration", + "", + `| Metric | Value |`, + `|--------|-------|`, + `| Average | ${(summary.duration.average / 1000).toFixed(1)}s |`, + `| Fastest | ${(summary.duration.min / 1000).toFixed(1)}s |`, + `| Slowest | ${(summary.duration.max / 1000).toFixed(1)}s |`, + `| Recent Avg | ${(summary.duration.recent / 1000).toFixed(1)}s |`, + `| Trend | ${summary.duration.trend} |`, + "", + "## Cache Efficiency", + "", + `| Metric | Value |`, + `|--------|-------|`, + `| Hit Rate | ${summary.cache.hitRate}% |`, + `| Hits | ${summary.cache.hits} |`, + `| Misses | ${summary.cache.misses} |`, + `| Time Saved | ${(summary.cache.timeSaved / 1000).toFixed(1)}s |`, + "", + "## Slowest Stages", + "", + "| Stage | Avg Duration | Success Rate |", + "|-------|--------------|--------------|", + ]; + + for (const s of summary.slowestStages) { + lines.push(`| ${s.stage} | ${(s.avgDuration / 1000).toFixed(1)}s | ${s.successRate.toFixed(0)}% |`); + } + + if (trends.daily && trends.daily.length > 0) { + lines.push(""); + lines.push("## 7-Day Trend"); + lines.push(""); + lines.push("| Date | Runs | Avg Duration | Success Rate |"); + lines.push("|------|------|--------------|--------------|"); + + for (const d of trends.daily.slice(-7)) { + lines.push(`| ${d.date} | ${d.runs} | ${(d.avgDuration 
/ 1000).toFixed(1)}s | ${d.successRate}% |`); + } + + if (trends.trend) { + lines.push(""); + lines.push(`**Trend:** ${trends.trend.direction} (${trends.trend.percentChange}%)`); + } + } + + return lines.join("\n"); +} + +// Parse CLI arguments +function parseArgs(args) { + const parsed = { + command: args[0], + target: undefined, + options: {}, + }; + + for (let i = 1; i < args.length; i++) { + const arg = args[i]; + if (arg === "--json") { + parsed.options.json = true; + } else if (arg === "--format" && args[i + 1]) { + parsed.options.format = args[++i]; + } else if (arg === "--output" && args[i + 1]) { + parsed.options.output = args[++i]; + } else if (arg === "--period" && args[i + 1]) { + parsed.options.period = args[++i]; + } else if (!parsed.target && !arg.startsWith("--")) { + parsed.target = arg; + } else if (!parsed.options.target2 && !arg.startsWith("--")) { + parsed.options.target2 = arg; + } + } + + return parsed; +} + +// Ensure dashboard directory exists +function ensureDashboardDir() { + if (!existsSync(DASHBOARD_DIR)) { + mkdirSync(DASHBOARD_DIR, { recursive: true }); + } +} + +// Main CLI handler +const args = parseArgs(process.argv.slice(2)); + +switch (args.command) { + case "generate": { + const format = args.options.format || "html"; + let content; + let ext; + + switch (format) { + case "html": + content = generateHtmlDashboard(); + ext = "html"; + break; + case "md": + content = generateMarkdownDashboard(); + ext = "md"; + break; + case "json": + content = JSON.stringify(calculateSummary(), null, 2); + ext = "json"; + break; + default: + console.error(`Unknown format: ${format}`); + process.exit(2); + } + + if (args.options.output) { + writeFileSync(args.options.output, content); + console.log(`✓ Dashboard written to ${args.options.output}`); + } else { + ensureDashboardDir(); + const outPath = join(DASHBOARD_DIR, `dashboard.${ext}`); + writeFileSync(outPath, content); + console.log(`✓ Dashboard written to ${outPath}`); + } + break; + } + + 
case "summary": { + const summary = calculateSummary(); + if (args.options.json) { + console.log(JSON.stringify(summary, null, 2)); + } else { + if (summary.error) { + console.log(`⚠ ${summary.error}`); + break; + } + console.log("=== Pipeline Performance Summary ==="); + console.log(""); + console.log(`Total runs: ${summary.overview.totalRuns}`); + console.log(`Success rate: ${summary.overview.successRate}%`); + console.log(""); + console.log(`Avg duration: ${(summary.duration.average / 1000).toFixed(1)}s`); + console.log(`Fastest: ${(summary.duration.min / 1000).toFixed(1)}s`); + console.log(`Slowest: ${(summary.duration.max / 1000).toFixed(1)}s`); + console.log(`Trend: ${summary.duration.trend}`); + console.log(""); + console.log(`Cache hit rate: ${summary.cache.hitRate}%`); + console.log(`Time saved: ${(summary.cache.timeSaved / 1000).toFixed(1)}s`); + console.log(""); + console.log("Slowest stages:"); + for (const s of summary.slowestStages) { + console.log(` ${s.stage.padEnd(20)} ${(s.avgDuration / 1000).toFixed(1)}s`); + } + } + break; + } + + case "trends": { + const period = args.options.period || "7d"; + const trends = calculateTrends(period); + + if (args.options.json) { + console.log(JSON.stringify(trends, null, 2)); + } else { + if (trends.error) { + console.log(`⚠ ${trends.error}`); + break; + } + console.log(`=== Performance Trends (${period}) ===`); + console.log(""); + + if (trends.daily) { + console.log("Date Runs Avg Duration Success"); + console.log("─".repeat(50)); + for (const d of trends.daily) { + console.log( + `${d.date} ${String(d.runs).padStart(4)} ${((d.avgDuration / 1000).toFixed(1) + "s").padStart(12)} ${(d.successRate + "%").padStart(6)}` + ); + } + } + + if (trends.trend) { + console.log(""); + console.log(`Overall trend: ${trends.trend.direction} (${trends.trend.percentChange}%)`); + } + } + break; + } + + case "compare": { + if (!args.target || !args.options.target2) { + console.error("Usage: metrics-dashboard.js compare "); + 
process.exit(2); + } + + const comparison = compareRuns(args.target, args.options.target2); + + if (args.options.json) { + console.log(JSON.stringify(comparison, null, 2)); + } else { + if (comparison.error) { + console.log(`✗ ${comparison.error}`); + break; + } + + console.log("=== Run Comparison ==="); + console.log(""); + console.log(`Run 1: ${comparison.run1.id}`); + console.log(` ${comparison.run1.timestamp}`); + console.log(` Duration: ${(comparison.run1.duration / 1000).toFixed(1)}s`); + console.log(""); + console.log(`Run 2: ${comparison.run2.id}`); + console.log(` ${comparison.run2.timestamp}`); + console.log(` Duration: ${(comparison.run2.duration / 1000).toFixed(1)}s`); + console.log(""); + + const diff = comparison.durationDiff / 1000; + const sign = diff > 0 ? "+" : ""; + console.log(`Difference: ${sign}${diff.toFixed(1)}s ${diff > 0 ? "(slower)" : diff < 0 ? "(faster)" : ""}`); + console.log(""); + + console.log("Stage Comparison:"); + console.log("Stage Run 1 Run 2 Diff"); + console.log("─".repeat(60)); + + for (const [stage, data] of Object.entries(comparison.stages)) { + const d1 = data.run1.duration != null ? (data.run1.duration / 1000).toFixed(1) + "s" : "N/A"; + const d2 = data.run2.duration != null ? (data.run2.duration / 1000).toFixed(1) + "s" : "N/A"; + const stageDiff = (data.durationDiff / 1000).toFixed(1); + const icon = data.improved ? "↓" : data.durationDiff > 0 ? 
"↑" : "="; + console.log(`${stage.padEnd(20)} ${d1.padStart(10)} ${d2.padStart(10)} ${icon} ${stageDiff}s`); + } + } + break; + } + + default: + console.log("Metrics Dashboard — Build performance visualization"); + console.log(""); + console.log("Usage:"); + console.log(" metrics-dashboard.js generate Generate dashboard (HTML/MD/JSON)"); + console.log(" metrics-dashboard.js summary Show performance summary"); + console.log(" metrics-dashboard.js trends Show performance trends"); + console.log(" metrics-dashboard.js compare Compare two runs"); + console.log(""); + console.log("Options:"); + console.log(" --json Output as JSON"); + console.log(" --format html|md|json Dashboard format"); + console.log(" --output Write to specific file"); + console.log(" --period 7d|30d|all Trend analysis period"); + process.exit(args.command ? 2 : 0); +} diff --git a/scripts/pipeline-cache.js b/scripts/pipeline-cache.js new file mode 100644 index 0000000..c430e5c --- /dev/null +++ b/scripts/pipeline-cache.js @@ -0,0 +1,667 @@ +#!/usr/bin/env node +/** + * pipeline-cache.js — Asset hash-based caching for pipeline optimization + * + * Usage: + * node scripts/pipeline-cache.js hash [--output ] + * node scripts/pipeline-cache.js check [--cache ] + * node scripts/pipeline-cache.js invalidate [--cache ] + * node scripts/pipeline-cache.js clean [--max-age ] + * node scripts/pipeline-cache.js status [--json] + * + * Features: + * - Content-addressable hashing using SHA-256 + * - Phase-level cache invalidation + * - Automatic cache cleanup + * - Dependency-aware cache validation + */ + +import { createHash } from "crypto"; +import { + readFileSync, + writeFileSync, + existsSync, + readdirSync, + statSync, + mkdirSync, + unlinkSync, + rmSync, +} from "fs"; +import { join, relative, resolve, extname, basename, dirname } from "path"; +import { fileURLToPath } from "url"; +import { execSync } from "child_process"; + +const __filename = fileURLToPath(import.meta.url); +const __dirname = 
dirname(__filename); +const PROJECT_ROOT = resolve(__dirname, ".."); + +// Default paths +const CACHE_DIR = join(PROJECT_ROOT, ".claude", "pipeline-cache"); +const CACHE_MANIFEST = join(CACHE_DIR, "cache-manifest.json"); +const METRICS_FILE = join(CACHE_DIR, "build-metrics.json"); + +// File patterns for different input categories +const INPUT_PATTERNS = { + source: ["src/**/*.{ts,tsx,js,jsx}", "components/**/*.{ts,tsx}"], + styles: ["src/**/*.css", "styles/**/*.css", "tailwind.config.*"], + tests: ["**/*.test.{ts,tsx,js,jsx}", "**/*.spec.{ts,tsx}"], + config: [ + "package.json", + "tsconfig.json", + "vite.config.*", + "next.config.*", + ".claude/pipeline.config.json", + ], + tokens: ["design-tokens.lock.json", "tailwind.config.*"], + figma: ["build-spec.json", "design-tokens.lock.json"], +}; + +// Phase input dependencies +const PHASE_INPUTS = { + "token-sync": ["tokens", "config"], + intake: ["figma"], + "token-lock": ["figma", "tokens"], + "tdd-scaffold": ["figma", "tokens", "tests"], + "component-build": ["source", "styles", "tokens", "tests", "config"], + storybook: ["source", "styles"], + "visual-diff": ["source", "styles"], + "dark-mode": ["source", "styles"], + "e2e-tests": ["source", "tests", "config"], + "cross-browser": ["source", "styles"], + "quality-gate": ["source", "tests", "config"], + responsive: ["source", "styles"], + report: [], +}; + +// Initialize cache directory +function ensureCacheDir() { + if (!existsSync(CACHE_DIR)) { + mkdirSync(CACHE_DIR, { recursive: true }); + } +} + +// Load cache manifest +function loadManifest() { + ensureCacheDir(); + if (!existsSync(CACHE_MANIFEST)) { + return { + version: "1.0.0", + created: new Date().toISOString(), + phases: {}, + fileHashes: {}, + metrics: { + totalBuilds: 0, + cacheHits: 0, + cacheMisses: 0, + timeSaved: 0, + }, + }; + } + return JSON.parse(readFileSync(CACHE_MANIFEST, "utf-8")); +} + +// Save cache manifest +function saveManifest(manifest) { + ensureCacheDir(); + manifest.updated = new 
Date().toISOString(); + writeFileSync(CACHE_MANIFEST, JSON.stringify(manifest, null, 2)); +} + +// Compute SHA-256 hash of file content +function hashFile(filepath) { + try { + const content = readFileSync(filepath); + return createHash("sha256").update(content).digest("hex").slice(0, 16); + } catch { + return null; + } +} + +// Compute hash of directory (combination of all file hashes) +function hashDirectory(dirpath, patterns = ["**/*"]) { + const hashes = []; + + function walkDir(dir) { + try { + const entries = readdirSync(dir, { withFileTypes: true }); + for (const entry of entries) { + const fullPath = join(dir, entry.name); + if (entry.isDirectory()) { + if (!entry.name.startsWith(".") && entry.name !== "node_modules") { + walkDir(fullPath); + } + } else if (entry.isFile()) { + const hash = hashFile(fullPath); + if (hash) { + hashes.push(`${relative(PROJECT_ROOT, fullPath)}:${hash}`); + } + } + } + } catch { + // Directory doesn't exist or can't be read + } + } + + if (existsSync(dirpath)) { + if (statSync(dirpath).isDirectory()) { + walkDir(dirpath); + } else { + const hash = hashFile(dirpath); + if (hash) { + hashes.push(`${relative(PROJECT_ROOT, dirpath)}:${hash}`); + } + } + } + + hashes.sort(); + return createHash("sha256").update(hashes.join("\n")).digest("hex").slice(0, 16); +} + +// Find files matching glob patterns +function findFiles(patterns) { + const files = []; + + function matchesPattern(filepath, pattern) { + const regex = new RegExp( + "^" + + pattern + .replace(/\*\*/g, "{{GLOBSTAR}}") + .replace(/\*/g, "[^/]*") + .replace(/\./g, "\\.") + .replace(/{{GLOBSTAR}}/g, ".*") + + "$" + ); + return regex.test(filepath); + } + + function walkDir(dir) { + try { + const entries = readdirSync(dir, { withFileTypes: true }); + for (const entry of entries) { + const fullPath = join(dir, entry.name); + const relPath = relative(PROJECT_ROOT, fullPath); + + if (entry.isDirectory()) { + if (!entry.name.startsWith(".") && entry.name !== "node_modules") { + 
// Combined fingerprint for a set of input categories: hash every file that
// matches the categories' glob patterns, sort the "path:hash" pairs so the
// result is order-independent, then digest the joined list. Truncated to 16
// hex chars to match the per-file hashes.
function computeInputHash(categories) {
  const patterns = categories.flatMap((cat) => INPUT_PATTERNS[cat] ?? []);

  const entries = findFiles(patterns)
    .map((filePath) => ({ filePath, digest: hashFile(filePath) }))
    .filter((entry) => Boolean(entry.digest)) // hashFile returns null on read failure
    .map((entry) => `${relative(PROJECT_ROOT, entry.filePath)}:${entry.digest}`)
    .sort();

  return createHash("sha256").update(entries.join("\n")).digest("hex").slice(0, 16);
}
// Drop a single phase's cache entry. Returns true when an entry existed and
// was removed (manifest persisted), false when there was nothing to do.
function invalidatePhase(phase) {
  const manifest = loadManifest();
  const hadEntry = Boolean(manifest.phases[phase]);

  if (!hadEntry) {
    return false;
  }

  delete manifest.phases[phase];
  saveManifest(manifest);
  return true;
}
= readdirSync(dir); + for (const entry of entries) { + const entryPath = join(dir, entry); + const stat = statSync(entryPath); + if (stat.mtimeMs < cutoff) { + rmSync(entryPath, { recursive: true, force: true }); + cleaned++; + } + } + } catch { + // Ignore cleanup errors + } + } + } + + return cleaned; +} + +// Get cache status summary +function getCacheStatus() { + const manifest = loadManifest(); + const phases = Object.entries(manifest.phases).map(([name, data]) => ({ + name, + valid: checkPhaseCache(name).valid, + cachedAt: data.timestamp, + duration: data.duration, + result: data.result, + })); + + const validCount = phases.filter((p) => p.valid).length; + const totalDuration = phases.reduce((sum, p) => sum + (p.duration || 0), 0); + + return { + cacheDir: CACHE_DIR, + manifestFile: CACHE_MANIFEST, + created: manifest.created, + updated: manifest.updated, + phases: { + total: phases.length, + valid: validCount, + invalid: phases.length - validCount, + list: phases, + }, + fileHashes: Object.keys(manifest.fileHashes).length, + metrics: manifest.metrics, + estimatedTimeSaved: validCount > 0 ? 
// Total size in bytes of all regular files under dirPath, skipping
// dot-directories and node_modules (same exclusions as the hash walkers).
// Iterative (explicit work stack) rather than recursive; unreadable
// directories and un-stat-able files contribute nothing.
function getDirSize(dirPath) {
  let total = 0;
  const pending = [dirPath];

  while (pending.length > 0) {
    const dir = pending.pop();
    let entries;
    try {
      entries = readdirSync(dir, { withFileTypes: true });
    } catch {
      continue; // directory vanished or is unreadable — skip it
    }

    for (const entry of entries) {
      const entryPath = join(dir, entry.name);
      if (entry.isDirectory()) {
        if (!entry.name.startsWith(".") && entry.name !== "node_modules") {
          pending.push(entryPath);
        }
      } else if (entry.isFile()) {
        try {
          total += statSync(entryPath).size;
        } catch {
          // stat race (file removed mid-walk) — skip it
        }
      }
    }
  }

  return total;
}
// Parse CLI arguments into { command, target, options }.
//
// Fixes:
//  - The `update` case reads options.duration and the `hit` case reads
//    options.phase, but neither flag was ever parsed (update fell back to
//    raw process.argv[4]). Both are now recognized.
//  - `target` was pre-seeded with args[1] while the scan started at i = 2,
//    so `status --json` put "--json" into target and never enabled JSON
//    output. Positionals are now assigned only from non-flag arguments.
function parseArgs(args) {
  const parsed = {
    command: args[0],
    target: undefined,
    options: {},
  };

  for (let i = 1; i < args.length; i++) {
    const arg = args[i];
    if (arg === "--json") {
      parsed.options.json = true;
    } else if (arg === "--output" && args[i + 1]) {
      parsed.options.output = args[++i];
    } else if (arg === "--cache" && args[i + 1]) {
      parsed.options.cache = args[++i];
    } else if (arg === "--max-age" && args[i + 1]) {
      parsed.options.maxAge = Number.parseInt(args[++i], 10);
    } else if (arg === "--duration" && args[i + 1]) {
      parsed.options.duration = Number.parseInt(args[++i], 10);
    } else if (arg === "--phase" && args[i + 1]) {
      parsed.options.phase = args[++i];
    } else if (!arg.startsWith("--") && parsed.target === undefined) {
      parsed.target = arg; // first positional after the command
    }
  }

  return parsed;
}
hashTarget(args.target, args.options.output); + console.log(args.options.json ? JSON.stringify(result, null, 2) : `Hash: ${result.hash || result.error}`); + break; + } + + case "check": { + if (!args.target) { + console.error("Usage: pipeline-cache.js check [--json]"); + process.exit(2); + } + const result = checkPhaseCache(args.target); + if (args.options.json) { + console.log(JSON.stringify(result, null, 2)); + } else { + if (result.valid) { + console.log(`✓ Cache VALID for ${args.target}`); + console.log(` Cached at: ${result.cachedAt}`); + console.log(` Duration: ${(result.duration / 1000).toFixed(1)}s`); + } else { + console.log(`✗ Cache INVALID for ${args.target}`); + console.log(` Reason: ${result.reason}`); + if (result.changed && result.changed.length > 0) { + console.log(` Changed files: ${result.changed.join(", ")}`); + } + } + } + process.exit(result.valid ? 0 : 1); + } + + case "update": { + if (!args.target) { + console.error("Usage: pipeline-cache.js update [--json]"); + process.exit(2); + } + const duration = parseInt(args.options.duration || process.argv[4], 10) || 0; + const result = updatePhaseCache(args.target, duration); + console.log( + args.options.json + ? JSON.stringify(result, null, 2) + : `✓ Cache updated for ${args.target}` + ); + break; + } + + case "invalidate": { + if (!args.target) { + console.error("Usage: pipeline-cache.js invalidate "); + process.exit(2); + } + if (args.target === "all") { + invalidateAll(); + console.log("✓ All caches invalidated"); + } else { + const success = invalidatePhase(args.target); + console.log( + success + ? `✓ Cache invalidated for ${args.target}` + : `⚠ No cache found for ${args.target}` + ); + } + break; + } + + case "clean": { + const maxAge = args.options.maxAge || 7; + const cleaned = cleanCache(maxAge); + console.log(`✓ Cleaned ${cleaned} old cache entries (older than ${maxAge} days)`); + break; + } + + case "status": { + const status = getCacheStatus(); + console.log(args.options.json ? 
JSON.stringify(status, null, 2) : formatStatus(status)); + break; + } + + case "hit": { + const savedTime = parseInt(args.target, 10) || 0; + recordCacheHit(args.options.phase, savedTime); + console.log("✓ Cache hit recorded"); + break; + } + + case "miss": { + recordCacheMiss(); + console.log("✓ Cache miss recorded"); + break; + } + + default: + console.log("Pipeline Cache Manager"); + console.log(""); + console.log("Usage:"); + console.log(" pipeline-cache.js hash Hash a file or directory"); + console.log(" pipeline-cache.js check Check if phase cache is valid"); + console.log(" pipeline-cache.js update Update phase cache with duration"); + console.log(" pipeline-cache.js invalidate Invalidate a phase cache"); + console.log(" pipeline-cache.js clean [--max-age N] Clean old cache entries"); + console.log(" pipeline-cache.js status Show cache status"); + console.log(""); + console.log("Options:"); + console.log(" --json Output as JSON"); + console.log(" --output Write result to file"); + console.log(" --max-age Max age in days for clean command (default: 7)"); + process.exit(args.command ? 
2 : 0); +} diff --git a/scripts/stage-profiler.js b/scripts/stage-profiler.js new file mode 100644 index 0000000..b525592 --- /dev/null +++ b/scripts/stage-profiler.js @@ -0,0 +1,676 @@ +#!/usr/bin/env node +/** + * stage-profiler.js — Pipeline stage timing and performance profiling + * + * Usage: + * node scripts/stage-profiler.js start + * node scripts/stage-profiler.js end [--status pass|fail] + * node scripts/stage-profiler.js report [--format md|json|ascii] + * node scripts/stage-profiler.js history [--last N] + * node scripts/stage-profiler.js analyze [--slow-threshold 30000] + * + * Features: + * - Precise stage timing with sub-second accuracy + * - Memory and CPU profiling (when available) + * - Historical trend analysis + * - Slow stage detection + * - Build performance reports + */ + +import { + readFileSync, + writeFileSync, + existsSync, + mkdirSync, + readdirSync, + statSync, +} from "fs"; +import { join, dirname } from "path"; +import { fileURLToPath } from "url"; +import { execSync } from "child_process"; + +const __filename = fileURLToPath(import.meta.url); +const __dirname = dirname(__filename); +const PROJECT_ROOT = join(__dirname, ".."); + +// Paths +const METRICS_DIR = join(PROJECT_ROOT, ".claude", "pipeline-cache", "metrics"); +const CURRENT_RUN = join(METRICS_DIR, "current-run.json"); +const HISTORY_FILE = join(METRICS_DIR, "history.json"); +const REPORT_DIR = join(PROJECT_ROOT, ".claude", "visual-qa"); + +// Ensure directories exist +function ensureDirs() { + if (!existsSync(METRICS_DIR)) { + mkdirSync(METRICS_DIR, { recursive: true }); + } +} + +// Load current run data +function loadCurrentRun() { + ensureDirs(); + if (!existsSync(CURRENT_RUN)) { + return { + runId: generateRunId(), + startTime: Date.now(), + stages: {}, + metadata: { + nodeVersion: process.version, + platform: process.platform, + arch: process.arch, + }, + }; + } + return JSON.parse(readFileSync(CURRENT_RUN, "utf-8")); +} + +// Save current run data +function 
// Run IDs are filesystem-safe timestamps of the form
// "run-YYYY-MM-DDTHH-MM-SS" (colons and dots replaced, millis dropped).
function generateRunId() {
  const stamp = new Date()
    .toISOString()
    .replace(/[:.]/g, "-")
    .slice(0, 19);
  return `run-${stamp}`;
}
Date.now(), + startMemory: getMemoryUsage(), + startSystemMemory: getSystemMemory(), + status: "running", + }; + + saveCurrentRun(run); + + console.log(`▶ Started stage: ${stageName}`); + return run.stages[stageName]; +} + +// End timing a stage +function endStage(stageName, status = "pass") { + const run = loadCurrentRun(); + const stage = run.stages[stageName]; + + if (!stage) { + console.error(`✗ Stage '${stageName}' was not started`); + return null; + } + + if (stage.endTime) { + console.warn(`⚠ Stage '${stageName}' already ended`); + return stage; + } + + stage.endTime = Date.now(); + stage.duration = stage.endTime - stage.startTime; + stage.status = status; + stage.endMemory = getMemoryUsage(); + stage.endSystemMemory = getSystemMemory(); + + // Calculate memory delta + if (stage.startMemory && stage.endMemory) { + stage.memoryDelta = { + heapUsed: stage.endMemory.heapUsed - stage.startMemory.heapUsed, + heapTotal: stage.endMemory.heapTotal - stage.startMemory.heapTotal, + rss: stage.endMemory.rss - stage.startMemory.rss, + }; + } + + saveCurrentRun(run); + + const durationSec = (stage.duration / 1000).toFixed(2); + const statusIcon = status === "pass" ? "✓" : status === "fail" ? 
"✗" : "⚠"; + console.log(`${statusIcon} Ended stage: ${stageName} (${durationSec}s)`); + + return stage; +} + +// Complete the current run and archive it +function completeRun(finalStatus = "complete") { + const run = loadCurrentRun(); + run.endTime = Date.now(); + run.totalDuration = run.endTime - run.startTime; + run.status = finalStatus; + + // Calculate totals + let passCount = 0; + let failCount = 0; + let totalStageDuration = 0; + + for (const stage of Object.values(run.stages)) { + if (stage.status === "pass") passCount++; + else if (stage.status === "fail") failCount++; + totalStageDuration += stage.duration || 0; + } + + run.summary = { + stageCount: Object.keys(run.stages).length, + passed: passCount, + failed: failCount, + totalStageDuration, + overheadDuration: run.totalDuration - totalStageDuration, + parallelSpeedup: + totalStageDuration > 0 ? (totalStageDuration / run.totalDuration).toFixed(2) : 1, + }; + + // Archive to history + const history = loadHistory(); + history.runs.push({ + runId: run.runId, + timestamp: new Date(run.startTime).toISOString(), + totalDuration: run.totalDuration, + status: run.status, + summary: run.summary, + stages: Object.fromEntries( + Object.entries(run.stages).map(([name, data]) => [ + name, + { duration: data.duration, status: data.status }, + ]) + ), + }); + + // Keep only last 50 runs + if (history.runs.length > 50) { + history.runs = history.runs.slice(-50); + } + + saveHistory(history); + + // Reset current run + saveCurrentRun({ + runId: generateRunId(), + startTime: Date.now(), + stages: {}, + metadata: run.metadata, + }); + + return run; +} + +// Generate performance report +function generateReport(format = "md") { + const run = loadCurrentRun(); + const history = loadHistory(); + + // Sort stages by duration (slowest first) + const sortedStages = Object.entries(run.stages) + .filter(([_, data]) => data.duration != null) + .sort((a, b) => (b[1].duration || 0) - (a[1].duration || 0)); + + if (format === "json") 
{ + return JSON.stringify({ current: run, history: history.runs.slice(-10) }, null, 2); + } + + const lines = []; + + if (format === "md") { + lines.push("# Pipeline Performance Report"); + lines.push(""); + lines.push(`**Run ID:** ${run.runId}`); + lines.push(`**Started:** ${new Date(run.startTime).toISOString()}`); + lines.push(""); + + lines.push("## Stage Timings"); + lines.push(""); + lines.push("| Stage | Duration | Status | Memory Delta |"); + lines.push("|-------|----------|--------|--------------|"); + + for (const [name, data] of sortedStages) { + const duration = data.duration ? `${(data.duration / 1000).toFixed(2)}s` : "N/A"; + const status = data.status === "pass" ? "✅" : data.status === "fail" ? "❌" : "⏳"; + const memDelta = data.memoryDelta + ? `${(data.memoryDelta.heapUsed / 1024 / 1024).toFixed(1)}MB` + : "N/A"; + lines.push(`| ${name} | ${duration} | ${status} | ${memDelta} |`); + } + + lines.push(""); + lines.push("## Performance Analysis"); + lines.push(""); + + // Identify slow stages (>30s) + const slowStages = sortedStages.filter(([_, d]) => (d.duration || 0) > 30000); + if (slowStages.length > 0) { + lines.push("### Slow Stages (>30s)"); + lines.push(""); + for (const [name, data] of slowStages) { + lines.push(`- **${name}**: ${(data.duration / 1000).toFixed(1)}s`); + } + lines.push(""); + } + + // Historical comparison + if (history.runs.length > 1) { + lines.push("### Historical Trend"); + lines.push(""); + const recent = history.runs.slice(-5); + lines.push("| Run | Date | Duration | Status |"); + lines.push("|-----|------|----------|--------|"); + for (const r of recent) { + const date = r.timestamp.slice(0, 10); + const duration = `${(r.totalDuration / 1000).toFixed(1)}s`; + const status = r.status === "complete" ? 
"✅" : "❌"; + lines.push(`| ${r.runId.slice(4, 20)} | ${date} | ${duration} | ${status} |`); + } + } + } else { + // ASCII format + lines.push("=== Pipeline Performance Report ==="); + lines.push(""); + lines.push(`Run ID: ${run.runId}`); + lines.push(`Started: ${new Date(run.startTime).toISOString()}`); + lines.push(""); + lines.push("Stage Timings (slowest first):"); + lines.push("─".repeat(60)); + + const maxNameLen = Math.max(...sortedStages.map(([n]) => n.length), 20); + + for (const [name, data] of sortedStages) { + const duration = data.duration ? (data.duration / 1000).toFixed(2) : "N/A"; + const statusIcon = data.status === "pass" ? "✓" : data.status === "fail" ? "✗" : "⏳"; + const bar = data.duration + ? "█".repeat(Math.min(Math.ceil(data.duration / 5000), 20)) + : ""; + lines.push( + `${statusIcon} ${name.padEnd(maxNameLen)} ${duration.padStart(8)}s ${bar}` + ); + } + + lines.push("─".repeat(60)); + + // Total + const totalDuration = sortedStages.reduce((sum, [_, d]) => sum + (d.duration || 0), 0); + lines.push(`Total stage time: ${(totalDuration / 1000).toFixed(2)}s`); + } + + return lines.join("\n"); +} + +// Analyze performance trends +function analyzePerformance(slowThreshold = 30000) { + const history = loadHistory(); + + if (history.runs.length < 2) { + return { error: "Need at least 2 runs for analysis" }; + } + + const recentRuns = history.runs.slice(-10); + + // Calculate averages per stage + const stageStats = {}; + + for (const run of recentRuns) { + for (const [stage, data] of Object.entries(run.stages)) { + if (!stageStats[stage]) { + stageStats[stage] = { durations: [], failures: 0, successes: 0 }; + } + if (data.duration != null) { + stageStats[stage].durations.push(data.duration); + } + if (data.status === "pass") stageStats[stage].successes++; + else if (data.status === "fail") stageStats[stage].failures++; + } + } + + // Compute statistics + const analysis = { + totalRuns: recentRuns.length, + stages: {}, + slowStages: [], + 
unreliableStages: [], + recommendations: [], + }; + + for (const [stage, stats] of Object.entries(stageStats)) { + const durations = stats.durations; + if (durations.length === 0) continue; + + const avg = durations.reduce((a, b) => a + b, 0) / durations.length; + const min = Math.min(...durations); + const max = Math.max(...durations); + const variance = + durations.reduce((sum, d) => sum + Math.pow(d - avg, 2), 0) / durations.length; + const stdDev = Math.sqrt(variance); + const successRate = + (stats.successes / (stats.successes + stats.failures)) * 100; + + analysis.stages[stage] = { + avgDuration: Math.round(avg), + minDuration: min, + maxDuration: max, + stdDev: Math.round(stdDev), + successRate: Math.round(successRate), + sampleCount: durations.length, + }; + + // Flag slow stages + if (avg > slowThreshold) { + analysis.slowStages.push({ + stage, + avgDuration: Math.round(avg), + recommendation: `Consider optimizing or caching ${stage}`, + }); + } + + // Flag unreliable stages (high variance or low success rate) + if (stdDev > avg * 0.5 || successRate < 80) { + analysis.unreliableStages.push({ + stage, + stdDev: Math.round(stdDev), + successRate: Math.round(successRate), + recommendation: + successRate < 80 + ? 
// Return the `count` most recent archived runs, oldest first within the
// returned slice (history.runs is append-ordered).
function getHistory(count = 10) {
  const { runs } = loadHistory();
  return runs.slice(-count);
}
args[i + 1]) { + parsed.options.last = parseInt(args[++i], 10); + } else if (arg === "--slow-threshold" && args[i + 1]) { + parsed.options.slowThreshold = parseInt(args[++i], 10); + } else if (arg === "--output" && args[i + 1]) { + parsed.options.output = args[++i]; + } else if (!parsed.target) { + parsed.target = arg; + } + } + + return parsed; +} + +// Main CLI handler +const args = parseArgs(process.argv.slice(2)); + +switch (args.command) { + case "start": { + if (!args.target) { + console.error("Usage: stage-profiler.js start <stage-name>"); + process.exit(2); + } + startStage(args.target); + break; + } + + case "end": { + if (!args.target) { + console.error("Usage: stage-profiler.js end <stage-name> [--status pass|fail]"); + process.exit(2); + } + const result = endStage(args.target, args.options.status || "pass"); + if (args.options.json && result) { + console.log(JSON.stringify(result, null, 2)); + } + break; + } + + case "complete": { + const run = completeRun(args.target || "complete"); + if (args.options.json) { + console.log(JSON.stringify(run.summary, null, 2)); + } else { + console.log("✓ Run completed and archived"); + console.log(` Stages: ${run.summary.stageCount}`); + console.log(` Passed: ${run.summary.passed}`); + console.log(` Failed: ${run.summary.failed}`); + console.log(` Total duration: ${(run.totalDuration / 1000).toFixed(1)}s`); + console.log(` Parallel speedup: ${run.summary.parallelSpeedup}x`); + } + break; + } + + case "report": { + const format = args.options.format || (args.options.json ?
"json" : "ascii"); + const report = generateReport(format); + + if (args.options.output) { + writeFileSync(args.options.output, report); + console.log(`✓ Report written to ${args.options.output}`); + } else { + console.log(report); + } + break; + } + + case "history": { + const count = args.options.last || 10; + const runs = getHistory(count); + + if (args.options.json) { + console.log(JSON.stringify(runs, null, 2)); + } else { + console.log(`=== Last ${runs.length} Pipeline Runs ===`); + console.log(""); + for (const run of runs) { + const date = run.timestamp.slice(0, 19).replace("T", " "); + const duration = `${(run.totalDuration / 1000).toFixed(1)}s`; + const status = run.status === "complete" ? "✓" : "✗"; + console.log(`${status} ${date} ${duration.padStart(8)} ${run.summary.passed}/${run.summary.stageCount} passed`); + } + } + break; + } + + case "analyze": { + const threshold = args.options.slowThreshold || 30000; + const analysis = analyzePerformance(threshold); + + if (args.options.json) { + console.log(JSON.stringify(analysis, null, 2)); + } else { + console.log("=== Performance Analysis ==="); + console.log(""); + console.log(`Analyzed ${analysis.totalRuns} recent runs`); + console.log(""); + + if (analysis.slowStages.length > 0) { + console.log("Slow Stages (>${threshold / 1000}s average):"); + for (const s of analysis.slowStages) { + console.log(` ⚠ ${s.stage}: ${(s.avgDuration / 1000).toFixed(1)}s avg`); + } + console.log(""); + } + + if (analysis.unreliableStages.length > 0) { + console.log("Unreliable Stages:"); + for (const s of analysis.unreliableStages) { + console.log(` ⚠ ${s.stage}: ${s.recommendation}`); + } + console.log(""); + } + + if (analysis.recommendations.length > 0) { + console.log("Recommendations:"); + for (const r of analysis.recommendations) { + console.log(` → ${r}`); + } + } + + console.log(""); + console.log("Stage Statistics:"); + for (const [stage, stats] of Object.entries(analysis.stages)) { + console.log( + ` 
${stage.padEnd(20)} avg: ${(stats.avgDuration / 1000).toFixed(1)}s ` + + `min: ${(stats.minDuration / 1000).toFixed(1)}s ` + + `max: ${(stats.maxDuration / 1000).toFixed(1)}s ` + + `success: ${stats.successRate}%` + ); + } + } + break; + } + + case "status": { + const run = loadCurrentRun(); + if (args.options.json) { + console.log(JSON.stringify(run, null, 2)); + } else { + console.log("=== Current Run Status ==="); + console.log(`Run ID: ${run.runId}`); + console.log(`Started: ${new Date(run.startTime).toISOString()}`); + console.log(`Stages: ${Object.keys(run.stages).length}`); + console.log(""); + for (const [name, data] of Object.entries(run.stages)) { + const status = data.status || "unknown"; + const duration = data.duration ? `${(data.duration / 1000).toFixed(1)}s` : "running..."; + console.log(` ${name}: ${status} (${duration})`); + } + } + break; + } + + default: + console.log("Stage Profiler — Pipeline timing and performance analysis"); + console.log(""); + console.log("Usage:"); + console.log(" stage-profiler.js start <stage> Start timing a stage"); + console.log(" stage-profiler.js end <stage> End timing a stage"); + console.log(" stage-profiler.js complete Archive current run"); + console.log(" stage-profiler.js report Generate performance report"); + console.log(" stage-profiler.js history Show recent runs"); + console.log(" stage-profiler.js analyze Analyze performance trends"); + console.log(" stage-profiler.js status Show current run status"); + console.log(""); + console.log("Options:"); + console.log(" --json Output as JSON"); + console.log(" --format md|json|ascii Report format (default: ascii)"); + console.log(" --status pass|fail Stage completion status"); + console.log(" --last N Number of historical runs"); + console.log(" --slow-threshold N Threshold in ms for slow stages"); + console.log(" --output <file> Write report to file"); + process.exit(args.command ? 2 : 0); +}