From 6c00a03a9b2a7e6aa0306eb5ec183ee500eef36b Mon Sep 17 00:00:00 2001 From: Davide Mendolia Date: Thu, 12 Feb 2026 21:16:12 +0100 Subject: [PATCH 01/20] feat(benchmarks): add cloud run single-instance k6 benchmark harness --- .gitignore | 4 + benchmarks/k6/README.md | 96 +++++ benchmarks/k6/config.json | 46 +++ benchmarks/k6/configure-cloud-run.js | 107 ++++++ benchmarks/k6/generate-summary.js | 106 ++++++ benchmarks/k6/run-benchmarks.js | 509 +++++++++++++++++++++++++++ benchmarks/k6/scenarios.js | 263 ++++++++++++++ benchmarks/k6/services.json | 50 +++ benchmarks/results/.gitkeep | 0 benchmarks/results/summary.md | 11 + 10 files changed, 1192 insertions(+) create mode 100644 benchmarks/k6/README.md create mode 100644 benchmarks/k6/config.json create mode 100755 benchmarks/k6/configure-cloud-run.js create mode 100755 benchmarks/k6/generate-summary.js create mode 100755 benchmarks/k6/run-benchmarks.js create mode 100644 benchmarks/k6/scenarios.js create mode 100644 benchmarks/k6/services.json create mode 100644 benchmarks/results/.gitkeep create mode 100644 benchmarks/results/summary.md diff --git a/.gitignore b/.gitignore index 100150be..31ec39cc 100644 --- a/.gitignore +++ b/.gitignore @@ -13,3 +13,7 @@ src/kotlin/build/reports/jacoco/* !src/kotlin/build/reports/jacoco/test/ src/kotlin/build/reports/jacoco/test/* !src/kotlin/build/reports/jacoco/test/jacocoTestReport.xml + +# Benchmark artifacts +benchmarks/results/raw/ +benchmarks/results/run-report.json diff --git a/benchmarks/k6/README.md b/benchmarks/k6/README.md new file mode 100644 index 00000000..a6d1e303 --- /dev/null +++ b/benchmarks/k6/README.md @@ -0,0 +1,96 @@ +# Cloud Run Single-Instance Benchmark Harness + +This directory contains a benchmark harness for comparing the six language implementations with Cloud Run autoscaling neutralized (`max instances = 1`). 
+ +## What it implements + +- Two benchmark passes: + - `memory` pass (runtime/framework signal) + - `db` pass (realistic signal) +- Fixed, fairness-first Cloud Run settings for all services +- Main ranking at fixed concurrency pressure with `concurrency=80` +- Optional non-ranking extreme run at concurrency `1000` +- Sequential service execution (no parallel cross-service load) +- Raw k6 result exports and generated markdown summary + +## Files + +- `config.json`: benchmark parameters and Cloud Run parity settings +- `services.json`: service URLs and per-service DB seed/reset hooks +- `scenarios.js`: k6 workload script (read-heavy CRUD mix) +- `run-benchmarks.js`: orchestration script for full memory+db benchmark execution +- `configure-cloud-run.js`: applies identical Cloud Run settings (dry-run by default) +- `generate-summary.js`: regenerates `benchmarks/results/summary.md` from `run-report.json` + +## Prerequisites + +- `k6` installed and in `PATH` +- Node.js 20+ (uses built-in `fetch`) +- Network access from benchmark runner to all Cloud Run URLs +- If running configuration step: authenticated `gcloud` CLI and project access + +## 1) Fill service map + +Edit `benchmarks/k6/services.json`: + +- `memoryUrl`: Cloud Run URL for memory-backed deployment (no `/v1` suffix needed) +- `dbUrl`: Cloud Run URL for DB-backed deployment (no `/v1` suffix needed) +- `cloudRunService`: Cloud Run service name for settings updates +- `cloudRunRegion`: region (default `us-central1`) +- `dbSeedCommand`: optional shell command to reset+seed DB before each DB run + +Example `dbSeedCommand`: + +```bash +./scripts/seed-benchmark-db.sh typescript 10000 +``` + +## 2) Validate or apply Cloud Run parity settings + +Dry run (prints commands): + +```bash +node benchmarks/k6/configure-cloud-run.js --project +``` + +Apply settings: + +```bash +node benchmarks/k6/configure-cloud-run.js --project --execute +``` + +Settings come from `benchmarks/k6/config.json` under `cloudRun`. 
+ +## 3) Run benchmark + +Run both passes (`memory`,`db`) with settings from `config.json`: + +```bash +node benchmarks/k6/run-benchmarks.js +``` + +Run only memory pass: + +```bash +node benchmarks/k6/run-benchmarks.js --passes memory +``` + +Outputs: + +- Raw k6 JSON: `benchmarks/results/raw//...` +- Structured run report: `benchmarks/results/run-report.json` +- Ranked markdown summary: `benchmarks/results/summary.md` + +## 4) Rebuild summary only + +```bash +node benchmarks/k6/generate-summary.js benchmarks/results/run-report.json benchmarks/results/summary.md +``` + +## Notes on fairness and interpretation + +- Keep Cloud Run settings identical across all six languages in the ranking run. +- Concurrency is a major factor even with `max instances=1`; it controls in-container contention. +- Use memory pass ranking to isolate runtime/framework signal. +- Use DB pass ranking to understand production-like behavior and DB bottleneck impact. +- Treat concurrency `1000` as saturation appendix, not primary ranking. 
diff --git a/benchmarks/k6/config.json b/benchmarks/k6/config.json new file mode 100644 index 00000000..b4f25af1 --- /dev/null +++ b/benchmarks/k6/config.json @@ -0,0 +1,46 @@ +{ + "basePath": "/v1", + "passes": ["memory", "db"], + "iterationsPerPass": 2, + "randomizeServiceOrder": true, + "warmup": { + "duration": "60s", + "rps": 20 + }, + "fixed": { + "duration": "180s", + "rps": 80 + }, + "stress": { + "stepDuration": "60s", + "rpsSteps": [80, 120, 160, 200] + }, + "extreme": { + "enabled": true, + "duration": "60s", + "rps": 1000, + "runPerIteration": false + }, + "slo": { + "p95Ms": 300, + "errorRate": 0.01 + }, + "workload": { + "listPercent": 50, + "getPercent": 20, + "createPercent": 20, + "updatePercent": 7, + "deletePercent": 3, + "pageSize": 25, + "seedFetchPages": 10, + "seedPageSize": 100 + }, + "cloudRun": { + "maxInstances": 1, + "minInstances": 1, + "concurrency": 80, + "cpu": "1", + "memory": "512Mi", + "timeout": "60s" + } +} diff --git a/benchmarks/k6/configure-cloud-run.js b/benchmarks/k6/configure-cloud-run.js new file mode 100755 index 00000000..6508bd62 --- /dev/null +++ b/benchmarks/k6/configure-cloud-run.js @@ -0,0 +1,107 @@ +#!/usr/bin/env node +/* eslint-disable no-console */ +const fs = require('fs'); +const path = require('path'); +const { spawnSync } = require('child_process'); + +function parseArgs(argv) { + const args = { + services: path.join('benchmarks', 'k6', 'services.json'), + config: path.join('benchmarks', 'k6', 'config.json'), + project: process.env.GOOGLE_CLOUD_PROJECT || '', + execute: false, + }; + + for (let i = 2; i < argv.length; i += 1) { + const token = argv[i]; + if (token === '--services') { + args.services = argv[++i]; + } else if (token === '--config') { + args.config = argv[++i]; + } else if (token === '--project') { + args.project = argv[++i]; + } else if (token === '--execute') { + args.execute = true; + } else if (token === '--help' || token === '-h') { + printHelp(); + process.exit(0); + } else { + throw new 
Error(`Unknown argument: ${token}`); + } + } + + return args; +} + +function printHelp() { + console.log(`Usage:\n node benchmarks/k6/configure-cloud-run.js [--project my-project] [--execute]\n\nBy default this script prints commands only. Add --execute to run them.`); +} + +function readJson(filePath) { + return JSON.parse(fs.readFileSync(filePath, 'utf8')); +} + +function runCommand(command, args) { + const rendered = `${command} ${args.join(' ')}`; + console.log(`\n$ ${rendered}`); + const result = spawnSync(command, args, { stdio: 'inherit' }); + if (result.status !== 0) { + throw new Error(`Command failed (${result.status}): ${rendered}`); + } +} + +function main() { + const args = parseArgs(process.argv); + if (!args.project) { + throw new Error('Missing --project (or set GOOGLE_CLOUD_PROJECT)'); + } + + const services = readJson(args.services); + const config = readJson(args.config); + const cloudRun = config.cloudRun || {}; + + for (const service of services) { + if (!service.cloudRunService) { + console.log(`Skipping ${service.name}: cloudRunService is empty`); + continue; + } + + const region = service.cloudRunRegion || 'us-central1'; + const cmd = [ + 'run', + 'services', + 'update', + service.cloudRunService, + '--project', + args.project, + '--region', + region, + '--max-instances', + String(cloudRun.maxInstances), + '--min-instances', + String(cloudRun.minInstances), + '--concurrency', + String(cloudRun.concurrency), + '--cpu', + String(cloudRun.cpu), + '--memory', + String(cloudRun.memory), + '--timeout', + String(cloudRun.timeout), + '--cpu-throttling', + ]; + + if (!args.execute) { + console.log(`\n[dry-run] gcloud ${cmd.join(' ')}`); + continue; + } + + runCommand('gcloud', cmd); + } + + if (!args.execute) { + console.log('\nDry run complete. 
Re-run with --execute to apply updates.'); + } +} + +main(); diff --git a/benchmarks/k6/generate-summary.js b/benchmarks/k6/generate-summary.js new file mode 100755 index 00000000..ca1fecbd --- /dev/null +++ b/benchmarks/k6/generate-summary.js @@ -0,0 +1,106 @@ +#!/usr/bin/env node +/* eslint-disable no-console */ +const fs = require('fs'); +const path = require('path'); + +function formatNumber(value, digits = 2) { + if (!Number.isFinite(value)) { + return 'n/a'; + } + return value.toFixed(digits); +} + +function writeSummary(report, outputFile) { + const lines = []; + lines.push('# Benchmark Summary'); + lines.push(''); + lines.push(`Generated: ${report.generatedAt}`); + lines.push(''); + + for (const passName of Object.keys(report.aggregated || {})) { + const rows = Object.entries(report.aggregated[passName] || {}); + rows.sort((a, b) => { + const ap = a[1].fixed?.p95; + const bp = b[1].fixed?.p95; + if (!Number.isFinite(ap) && !Number.isFinite(bp)) return 0; + if (!Number.isFinite(ap)) return 1; + if (!Number.isFinite(bp)) return -1; + return ap - bp; + }); + + lines.push(`## ${passName === 'memory' ? 'Memory Pass Ranking' : 'DB Pass Ranking'}`); + lines.push(''); + lines.push('| Rank | Service | p95 (ms) | p99 (ms) | Avg (ms) | Error Rate | Max Stable RPS |'); + lines.push('|---|---|---:|---:|---:|---:|---:|'); + + rows.forEach(([serviceName, metrics], idx) => { + lines.push( + `| ${idx + 1} | ${serviceName} | ${formatNumber(metrics.fixed?.p95)} | ${formatNumber(metrics.fixed?.p99)} | ${formatNumber(metrics.fixed?.avg)} | ${formatNumber((metrics.fixed?.errorRate ?? 
NaN) * 100, 3)}% | ${formatNumber(metrics.stress?.maxStableRps, 0)} |` + ); + }); + lines.push(''); + } + + const memory = report.aggregated?.memory || {}; + const db = report.aggregated?.db || {}; + const sharedServices = Object.keys(memory).filter((name) => db[name]); + + if (sharedServices.length > 0) { + lines.push('## Memory vs DB Delta'); + lines.push(''); + lines.push('| Service | Memory p95 (ms) | DB p95 (ms) | Delta (DB - Memory) |'); + lines.push('|---|---:|---:|---:|'); + + for (const serviceName of sharedServices) { + const mem = memory[serviceName]?.fixed?.p95; + const dbp = db[serviceName]?.fixed?.p95; + const delta = Number.isFinite(mem) && Number.isFinite(dbp) ? dbp - mem : null; + lines.push(`| ${serviceName} | ${formatNumber(mem)} | ${formatNumber(dbp)} | ${formatNumber(delta)} |`); + } + + lines.push(''); + } + + lines.push('## Extreme Concurrency Appendix (1000)'); + lines.push(''); + lines.push('| Pass | Service | p95 (ms) | p99 (ms) | Avg (ms) | Error Rate |'); + lines.push('|---|---|---:|---:|---:|---:|'); + + for (const [passName, servicesMap] of Object.entries(report.aggregated || {})) { + for (const [serviceName, metrics] of Object.entries(servicesMap || {})) { + if (!metrics.extreme) { + continue; + } + lines.push( + `| ${passName} | ${serviceName} | ${formatNumber(metrics.extreme?.p95)} | ${formatNumber(metrics.extreme?.p99)} | ${formatNumber(metrics.extreme?.avg)} | ${formatNumber((metrics.extreme?.errorRate ?? 
NaN) * 100, 3)}% |` + ); + } + } + + lines.push(''); + lines.push('Raw per-run k6 summaries are under `benchmarks/results/raw/`.'); + + fs.writeFileSync(outputFile, `${lines.join('\n')}\n`, 'utf8'); +} + +function main() { + if (process.argv.includes('--help') || process.argv.includes('-h')) { + console.log( + 'Usage:\n node benchmarks/k6/generate-summary.js [report.json] [summary.md]' + ); + process.exit(0); + } + + const reportPath = process.argv[2] || path.join('benchmarks', 'results', 'run-report.json'); + const outputPath = process.argv[3] || path.join('benchmarks', 'results', 'summary.md'); + + if (!fs.existsSync(reportPath)) { + throw new Error(`Report not found: ${reportPath}`); + } + + const report = JSON.parse(fs.readFileSync(reportPath, 'utf8')); + writeSummary(report, outputPath); + console.log(`Wrote summary: ${outputPath}`); +} + +main(); diff --git a/benchmarks/k6/run-benchmarks.js b/benchmarks/k6/run-benchmarks.js new file mode 100755 index 00000000..15aa89a8 --- /dev/null +++ b/benchmarks/k6/run-benchmarks.js @@ -0,0 +1,509 @@ +#!/usr/bin/env node +/* eslint-disable no-console */ +const fs = require('fs'); +const path = require('path'); +const { spawnSync } = require('child_process'); + +function parseArgs(argv) { + const args = { + config: path.join('benchmarks', 'k6', 'config.json'), + services: path.join('benchmarks', 'k6', 'services.json'), + resultsDir: path.join('benchmarks', 'results'), + passes: null, + }; + + for (let i = 2; i < argv.length; i += 1) { + const token = argv[i]; + if (token === '--config') { + args.config = argv[++i]; + } else if (token === '--services') { + args.services = argv[++i]; + } else if (token === '--results-dir') { + args.resultsDir = argv[++i]; + } else if (token === '--passes') { + args.passes = argv[++i].split(',').map((v) => v.trim()).filter(Boolean); + } else if (token === '--help' || token === '-h') { + printHelp(); + process.exit(0); + } else { + throw new Error(`Unknown argument: ${token}`); + } + } + + 
return args; +} + +function printHelp() { + console.log(`Usage:\n node benchmarks/k6/run-benchmarks.js [--config path] [--services path] [--results-dir path] [--passes memory,db]\n`); +} + +function readJson(filePath) { + return JSON.parse(fs.readFileSync(filePath, 'utf8')); +} + +function ensureDir(dir) { + fs.mkdirSync(dir, { recursive: true }); +} + +function nowStamp() { + return new Date().toISOString().replace(/[:.]/g, '-'); +} + +function runCommand(command, args, options = {}) { + const rendered = `${command} ${args.join(' ')}`; + console.log(`\n$ ${rendered}`); + const result = spawnSync(command, args, { + stdio: 'inherit', + env: options.env || process.env, + shell: false, + }); + + if (result.status !== 0) { + throw new Error(`Command failed (${result.status}): ${rendered}`); + } +} + +function runShell(command, env) { + if (!command || !command.trim()) { + return; + } + + console.log(`\n$ ${command}`); + const result = spawnSync('bash', ['-lc', command], { + stdio: 'inherit', + env: env || process.env, + }); + + if (result.status !== 0) { + throw new Error(`Command failed (${result.status}): ${command}`); + } +} + +async function httpJson(method, url, body, authHeader) { + const headers = { 'Content-Type': 'application/json' }; + if (authHeader) { + headers.Authorization = authHeader; + } + + const response = await fetch(url, { + method, + headers, + body: body ? 
JSON.stringify(body) : undefined, + }); + + let parsed = null; + const text = await response.text(); + if (text) { + try { + parsed = JSON.parse(text); + } catch (_err) { + parsed = null; + } + } + + return { response, body: parsed, text }; +} + +async function precheckCrud(baseUrl, basePath, authHeader) { + const prefix = `${baseUrl.replace(/\/$/, '')}${basePath}`; + + const create = await httpJson('POST', `${prefix}/lamps`, { status: true }, authHeader); + if (create.response.status !== 201 || !create.body || typeof create.body.id !== 'string') { + throw new Error(`Precheck create failed (${create.response.status})`); + } + + const lampId = create.body.id; + + const get = await httpJson('GET', `${prefix}/lamps/${lampId}`, null, authHeader); + if (get.response.status !== 200) { + throw new Error(`Precheck get failed (${get.response.status})`); + } + + const update = await httpJson('PUT', `${prefix}/lamps/${lampId}`, { status: false }, authHeader); + if (update.response.status !== 200) { + throw new Error(`Precheck update failed (${update.response.status})`); + } + + const list = await httpJson('GET', `${prefix}/lamps?pageSize=1`, null, authHeader); + if (list.response.status !== 200 || !list.body || !Array.isArray(list.body.data)) { + throw new Error(`Precheck list failed (${list.response.status})`); + } + + const del = await httpJson('DELETE', `${prefix}/lamps/${lampId}`, null, authHeader); + if (del.response.status !== 204) { + throw new Error(`Precheck delete failed (${del.response.status})`); + } +} + +function parseMetric(summary, name) { + const metric = summary.metrics[name]; + if (!metric || !metric.values) { + return null; + } + return { + avg: metric.values.avg ?? null, + p95: metric.values['p(95)'] ?? null, + p99: metric.values['p(99)'] ?? null, + min: metric.values.min ?? null, + max: metric.values.max ?? 
null, + }; +} + +function parseRate(summary, name) { + const metric = summary.metrics[name]; + if (!metric || !metric.values) { + return null; + } + return metric.values.rate ?? null; +} + +function median(numbers) { + const vals = numbers.filter((n) => Number.isFinite(n)).slice().sort((a, b) => a - b); + if (vals.length === 0) { + return null; + } + const mid = Math.floor(vals.length / 2); + return vals.length % 2 === 0 ? (vals[mid - 1] + vals[mid]) / 2 : vals[mid]; +} + +function maybeShuffle(list, enabled) { + const copy = list.slice(); + if (!enabled) { + return copy; + } + for (let i = copy.length - 1; i > 0; i -= 1) { + const j = Math.floor(Math.random() * (i + 1)); + [copy[i], copy[j]] = [copy[j], copy[i]]; + } + return copy; +} + +function validateService(service) { + if (!service.name) { + throw new Error('Each service must have a name'); + } +} + +function buildK6Env({ config, service, baseUrl, mode, targetRps, duration }) { + const env = { ...process.env }; + env.RUN_MODE = mode; + env.BASE_URL = baseUrl; + env.BASE_PATH = config.basePath; + env.TARGET_RPS = String(targetRps); + env.DURATION = duration; + env.PAGE_SIZE = String(config.workload.pageSize); + env.SEED_FETCH_PAGES = String(config.workload.seedFetchPages); + env.SEED_PAGE_SIZE = String(config.workload.seedPageSize); + env.LIST_WEIGHT = String(config.workload.listPercent); + env.GET_WEIGHT = String(config.workload.getPercent); + env.CREATE_WEIGHT = String(config.workload.createPercent); + env.UPDATE_WEIGHT = String(config.workload.updatePercent); + env.DELETE_WEIGHT = String(config.workload.deletePercent); + if (service.authHeader) { + env.AUTH_HEADER = service.authHeader; + } + return env; +} + +function runK6Phase({ scenarioPath, outputFile, env }) { + runCommand('k6', ['run', scenarioPath, '--summary-export', outputFile], { env }); + return readJson(outputFile); +} + +function aggregatePass(serviceRuns) { + const fixedP95 = median(serviceRuns.map((r) => r.fixed.duration?.p95)); + const 
fixedP99 = median(serviceRuns.map((r) => r.fixed.duration?.p99)); + const fixedAvg = median(serviceRuns.map((r) => r.fixed.duration?.avg)); + const fixedErrorRate = median(serviceRuns.map((r) => r.fixed.errorRate)); + const maxStableRps = median(serviceRuns.map((r) => r.stress.maxStableRps)); + + let extreme = null; + const extremeRuns = serviceRuns.filter((r) => r.extreme); + if (extremeRuns.length > 0) { + extreme = { + p95: median(extremeRuns.map((r) => r.extreme.duration?.p95)), + p99: median(extremeRuns.map((r) => r.extreme.duration?.p99)), + avg: median(extremeRuns.map((r) => r.extreme.duration?.avg)), + errorRate: median(extremeRuns.map((r) => r.extreme.errorRate)), + }; + } + + return { + fixed: { + p95: fixedP95, + p99: fixedP99, + avg: fixedAvg, + errorRate: fixedErrorRate, + }, + stress: { + maxStableRps, + }, + extreme, + }; +} + +function formatNumber(value, digits = 2) { + if (!Number.isFinite(value)) { + return 'n/a'; + } + return value.toFixed(digits); +} + +function writeSummary(report, outputFile) { + const lines = []; + lines.push('# Benchmark Summary'); + lines.push(''); + lines.push(`Generated: ${report.generatedAt}`); + lines.push(''); + + for (const passName of Object.keys(report.aggregated)) { + const rows = Object.entries(report.aggregated[passName]); + rows.sort((a, b) => { + const ap = a[1].fixed.p95; + const bp = b[1].fixed.p95; + if (!Number.isFinite(ap) && !Number.isFinite(bp)) return 0; + if (!Number.isFinite(ap)) return 1; + if (!Number.isFinite(bp)) return -1; + return ap - bp; + }); + + lines.push(`## ${passName === 'memory' ? 
'Memory Pass Ranking' : 'DB Pass Ranking'}`); + lines.push(''); + lines.push('| Rank | Service | p95 (ms) | p99 (ms) | Avg (ms) | Error Rate | Max Stable RPS |'); + lines.push('|---|---|---:|---:|---:|---:|---:|'); + + rows.forEach(([serviceName, metrics], idx) => { + lines.push( + `| ${idx + 1} | ${serviceName} | ${formatNumber(metrics.fixed.p95)} | ${formatNumber(metrics.fixed.p99)} | ${formatNumber(metrics.fixed.avg)} | ${formatNumber(metrics.fixed.errorRate * 100, 3)}% | ${formatNumber(metrics.stress.maxStableRps, 0)} |` + ); + }); + lines.push(''); + } + + const memory = report.aggregated.memory || {}; + const db = report.aggregated.db || {}; + const services = Object.keys(memory).filter((name) => db[name]); + + if (services.length > 0) { + lines.push('## Memory vs DB Delta'); + lines.push(''); + lines.push('| Service | Memory p95 (ms) | DB p95 (ms) | Delta (DB - Memory) |'); + lines.push('|---|---:|---:|---:|'); + + for (const serviceName of services) { + const mem = memory[serviceName].fixed.p95; + const dbp = db[serviceName].fixed.p95; + const delta = Number.isFinite(mem) && Number.isFinite(dbp) ? 
dbp - mem : null; + lines.push(`| ${serviceName} | ${formatNumber(mem)} | ${formatNumber(dbp)} | ${formatNumber(delta)} |`); + } + lines.push(''); + } + + lines.push('## Extreme Concurrency Appendix (1000)'); + lines.push(''); + lines.push('| Pass | Service | p95 (ms) | p99 (ms) | Avg (ms) | Error Rate |'); + lines.push('|---|---|---:|---:|---:|---:|'); + + for (const [passName, servicesMap] of Object.entries(report.aggregated)) { + for (const [serviceName, metrics] of Object.entries(servicesMap)) { + if (!metrics.extreme) { + continue; + } + lines.push( + `| ${passName} | ${serviceName} | ${formatNumber(metrics.extreme.p95)} | ${formatNumber(metrics.extreme.p99)} | ${formatNumber(metrics.extreme.avg)} | ${formatNumber(metrics.extreme.errorRate * 100, 3)}% |` + ); + } + } + + lines.push(''); + lines.push('Raw per-run k6 summaries are under `benchmarks/results/raw/`.'); + + fs.writeFileSync(outputFile, `${lines.join('\n')}\n`, 'utf8'); +} + +async function main() { + const args = parseArgs(process.argv); + + const config = readJson(args.config); + const services = readJson(args.services); + + services.forEach(validateService); + + const configuredPasses = Array.isArray(config.passes) ? config.passes : ['memory', 'db']; + const passes = args.passes && args.passes.length > 0 ? 
args.passes : configuredPasses; + + const stamp = nowStamp(); + const scenarioPath = path.join('benchmarks', 'k6', 'scenarios.js'); + const rawRoot = path.join(args.resultsDir, 'raw', stamp); + + ensureDir(rawRoot); + + runCommand('k6', ['version']); + + const report = { + generatedAt: new Date().toISOString(), + runId: stamp, + config, + passes, + rawRoot, + runs: {}, + aggregated: {}, + }; + + for (const passName of passes) { + if (!['memory', 'db'].includes(passName)) { + throw new Error(`Unsupported pass: ${passName}`); + } + + report.runs[passName] = {}; + const order = maybeShuffle(services, Boolean(config.randomizeServiceOrder)); + + for (const service of order) { + const baseUrl = passName === 'memory' ? service.memoryUrl : service.dbUrl; + if (!baseUrl) { + throw new Error(`Missing ${passName} URL for service ${service.name}`); + } + + const serviceRuns = []; + + for (let iteration = 1; iteration <= Number(config.iterationsPerPass || 1); iteration += 1) { + console.log(`\n=== ${passName.toUpperCase()} :: ${service.name} :: iteration ${iteration} ===`); + + if (passName === 'db' && service.dbSeedCommand) { + runShell(service.dbSeedCommand); + } + + await precheckCrud(baseUrl, config.basePath, service.authHeader || ''); + + const iterDir = path.join(rawRoot, passName, service.name, `iter-${iteration}`); + ensureDir(iterDir); + + const warmupFile = path.join(iterDir, 'warmup.json'); + const warmupSummary = runK6Phase({ + scenarioPath, + outputFile: warmupFile, + env: buildK6Env({ + config, + service, + baseUrl, + mode: 'warmup', + targetRps: config.warmup.rps, + duration: config.warmup.duration, + }), + }); + + const fixedFile = path.join(iterDir, 'fixed.json'); + const fixedSummary = runK6Phase({ + scenarioPath, + outputFile: fixedFile, + env: buildK6Env({ + config, + service, + baseUrl, + mode: 'fixed', + targetRps: config.fixed.rps, + duration: config.fixed.duration, + }), + }); + + let maxStableRps = null; + const stressSteps = []; + for (const rps of 
config.stress.rpsSteps) { + const stressFile = path.join(iterDir, `stress-${rps}.json`); + const stressSummary = runK6Phase({ + scenarioPath, + outputFile: stressFile, + env: buildK6Env({ + config, + service, + baseUrl, + mode: 'stress', + targetRps: rps, + duration: config.stress.stepDuration, + }), + }); + + const dur = parseMetric(stressSummary, 'stress_req_duration'); + const err = parseRate(stressSummary, 'stress_error_rate') || 0; + const passed = + Number.isFinite(dur?.p95) && + dur.p95 <= Number(config.slo.p95Ms) && + err <= Number(config.slo.errorRate); + + stressSteps.push({ rps, duration: dur, errorRate: err, passed }); + + if (passed) { + maxStableRps = rps; + } else { + break; + } + } + + let extreme = null; + const shouldRunExtreme = Boolean(config.extreme?.enabled) && + (Boolean(config.extreme.runPerIteration) || iteration === 1); + + if (shouldRunExtreme) { + const extremeFile = path.join(iterDir, 'extreme-1000.json'); + const extremeSummary = runK6Phase({ + scenarioPath, + outputFile: extremeFile, + env: buildK6Env({ + config, + service, + baseUrl, + mode: 'extreme', + targetRps: config.extreme.rps, + duration: config.extreme.duration, + }), + }); + + extreme = { + duration: parseMetric(extremeSummary, 'extreme_req_duration'), + errorRate: parseRate(extremeSummary, 'extreme_error_rate') || 0, + }; + } + + const serviceRun = { + iteration, + warmup: { + duration: parseMetric(warmupSummary, 'warmup_req_duration'), + errorRate: parseRate(warmupSummary, 'warmup_error_rate') || 0, + }, + fixed: { + duration: parseMetric(fixedSummary, 'fixed_req_duration'), + errorRate: parseRate(fixedSummary, 'fixed_error_rate') || 0, + }, + stress: { + maxStableRps, + steps: stressSteps, + }, + extreme, + }; + + serviceRuns.push(serviceRun); + } + + report.runs[passName][service.name] = serviceRuns; + } + + report.aggregated[passName] = {}; + for (const [serviceName, serviceRuns] of Object.entries(report.runs[passName])) { + report.aggregated[passName][serviceName] = 
aggregatePass(serviceRuns); + } + } + + const reportPath = path.join(args.resultsDir, 'run-report.json'); + fs.writeFileSync(reportPath, JSON.stringify(report, null, 2), 'utf8'); + + const summaryPath = path.join(args.resultsDir, 'summary.md'); + writeSummary(report, summaryPath); + + console.log(`\nWrote report: ${reportPath}`); + console.log(`Wrote summary: ${summaryPath}`); +} + +main().catch((err) => { + console.error(err.message || err); + process.exit(1); +}); diff --git a/benchmarks/k6/scenarios.js b/benchmarks/k6/scenarios.js new file mode 100644 index 00000000..76700b66 --- /dev/null +++ b/benchmarks/k6/scenarios.js @@ -0,0 +1,263 @@ +import http from 'k6/http'; +import { check } from 'k6'; +import { Rate, Trend } from 'k6/metrics'; + +const RUN_MODE = (__ENV.RUN_MODE || 'fixed').trim(); +const BASE_URL = (__ENV.BASE_URL || '').replace(/\/$/, ''); +const BASE_PATH = __ENV.BASE_PATH || '/v1'; +const TARGET_RPS = Number(__ENV.TARGET_RPS || 1); +const DURATION = __ENV.DURATION || '60s'; +const PAGE_SIZE = Number(__ENV.PAGE_SIZE || 25); +const SEED_FETCH_PAGES = Number(__ENV.SEED_FETCH_PAGES || 10); +const SEED_PAGE_SIZE = Number(__ENV.SEED_PAGE_SIZE || 100); +const AUTH_HEADER = __ENV.AUTH_HEADER || ''; + +const LIST_WEIGHT = Number(__ENV.LIST_WEIGHT || 50); +const GET_WEIGHT = Number(__ENV.GET_WEIGHT || 20); +const CREATE_WEIGHT = Number(__ENV.CREATE_WEIGHT || 20); +const UPDATE_WEIGHT = Number(__ENV.UPDATE_WEIGHT || 7); +const DELETE_WEIGHT = Number(__ENV.DELETE_WEIGHT || 3); + +const PRE_ALLOCATED_VUS = Number( + __ENV.PRE_ALLOCATED_VUS || Math.max(10, Math.ceil(TARGET_RPS * 2)) +); +const MAX_VUS = Number(__ENV.MAX_VUS || Math.max(50, Math.ceil(TARGET_RPS * 4))); + +if (!BASE_URL) { + throw new Error('BASE_URL is required'); +} + +const requestDuration = new Trend(`${RUN_MODE}_req_duration`, true); +const errorRate = new Rate(`${RUN_MODE}_error_rate`); + +export const options = { + discardResponseBodies: false, + summaryTrendStats: ['avg', 'min', 'med', 
'max', 'p(90)', 'p(95)', 'p(99)'], + scenarios: { + main: { + executor: 'constant-arrival-rate', + rate: TARGET_RPS, + timeUnit: '1s', + duration: DURATION, + preAllocatedVUs: PRE_ALLOCATED_VUS, + maxVUs: MAX_VUS, + }, + }, +}; + +const headers = { + 'Content-Type': 'application/json', +}; + +if (AUTH_HEADER) { + headers.Authorization = AUTH_HEADER; +} + +let vuOwnedIds = []; + +function url(pathAndQuery) { + return `${BASE_URL}${BASE_PATH}${pathAndQuery}`; +} + +function parseJson(resp) { + try { + return resp.json(); + } catch (_err) { + return null; + } +} + +function track(resp, ok) { + requestDuration.add(resp.timings.duration); + errorRate.add(!ok); +} + +function req(method, endpoint, body, expectedStatuses) { + const response = http.request(method, url(endpoint), body, { headers }); + const ok = check(response, { + [`${method} ${endpoint} status`]: (r) => expectedStatuses.includes(r.status), + }); + track(response, ok); + return { response, ok }; +} + +function randomBool() { + return Math.random() < 0.5; +} + +function randomFrom(list) { + return list[Math.floor(Math.random() * list.length)]; +} + +function listLamps() { + const { response } = req('GET', `/lamps?pageSize=${PAGE_SIZE}`, null, [200]); + const body = parseJson(response); + check(body, { + 'list lamps has data array': (b) => b && Array.isArray(b.data), + 'list lamps has hasMore': (b) => b && typeof b.hasMore === 'boolean', + }); +} + +function createLamp() { + const payload = JSON.stringify({ status: randomBool() }); + const { response, ok } = req('POST', '/lamps', payload, [201]); + if (!ok) { + return; + } + + const body = parseJson(response); + const hasId = check(body, { + 'create lamp has id': (b) => b && typeof b.id === 'string' && b.id.length > 0, + 'create lamp has status': (b) => b && typeof b.status === 'boolean', + }); + + if (hasId) { + vuOwnedIds.push(body.id); + } +} + +function pickLampId(data) { + if (vuOwnedIds.length > 0) { + return randomFrom(vuOwnedIds); + } + if 
(data.seedIds.length > 0) { + return randomFrom(data.seedIds); + } + return null; +} + +function getLamp(data) { + let lampId = pickLampId(data); + if (!lampId) { + createLamp(); + lampId = vuOwnedIds[vuOwnedIds.length - 1] || null; + } + if (!lampId) { + return; + } + + const { response } = req('GET', `/lamps/${lampId}`, null, [200]); + const body = parseJson(response); + check(body, { + 'get lamp has id': (b) => b && typeof b.id === 'string', + 'get lamp has status': (b) => b && typeof b.status === 'boolean', + }); +} + +function updateLamp(data) { + let lampId = pickLampId(data); + if (!lampId) { + createLamp(); + lampId = vuOwnedIds[vuOwnedIds.length - 1] || null; + } + if (!lampId) { + return; + } + + const payload = JSON.stringify({ status: randomBool() }); + const { response } = req('PUT', `/lamps/${lampId}`, payload, [200]); + const body = parseJson(response); + check(body, { + 'update lamp has id': (b) => b && typeof b.id === 'string', + 'update lamp has status': (b) => b && typeof b.status === 'boolean', + }); +} + +function deleteLamp() { + let lampId = null; + + if (vuOwnedIds.length > 0) { + lampId = vuOwnedIds.pop(); + } else { + const payload = JSON.stringify({ status: randomBool() }); + const { response, ok } = req('POST', '/lamps', payload, [201]); + if (!ok) { + return; + } + const body = parseJson(response); + if (!body || typeof body.id !== 'string') { + return; + } + lampId = body.id; + } + + if (!lampId) { + return; + } + + req('DELETE', `/lamps/${lampId}`, null, [204]); +} + +function pickOperation() { + const total = LIST_WEIGHT + GET_WEIGHT + CREATE_WEIGHT + UPDATE_WEIGHT + DELETE_WEIGHT; + const pick = Math.random() * total; + + if (pick < LIST_WEIGHT) { + return 'list'; + } + if (pick < LIST_WEIGHT + GET_WEIGHT) { + return 'get'; + } + if (pick < LIST_WEIGHT + GET_WEIGHT + CREATE_WEIGHT) { + return 'create'; + } + if (pick < LIST_WEIGHT + GET_WEIGHT + CREATE_WEIGHT + UPDATE_WEIGHT) { + return 'update'; + } + return 'delete'; +} + +export 
function setup() { + const seedIds = []; + let cursor = null; + + for (let i = 0; i < SEED_FETCH_PAGES; i += 1) { + const query = cursor + ? `/lamps?pageSize=${SEED_PAGE_SIZE}&cursor=${encodeURIComponent(cursor)}` + : `/lamps?pageSize=${SEED_PAGE_SIZE}`; + + const response = http.get(url(query), { headers }); + const ok = check(response, { + 'seed fetch status 200': (r) => r.status === 200, + }); + + if (!ok) { + break; + } + + const body = parseJson(response); + if (!body || !Array.isArray(body.data)) { + break; + } + + for (const lamp of body.data) { + if (lamp && typeof lamp.id === 'string') { + seedIds.push(lamp.id); + } + } + + if (!body.hasMore || !body.nextCursor) { + break; + } + + cursor = body.nextCursor; + } + + return { seedIds }; +} + +export default function (data) { + const operation = pickOperation(); + + if (operation === 'list') { + listLamps(); + } else if (operation === 'get') { + getLamp(data); + } else if (operation === 'create') { + createLamp(); + } else if (operation === 'update') { + updateLamp(data); + } else { + deleteLamp(); + } +} diff --git a/benchmarks/k6/services.json b/benchmarks/k6/services.json new file mode 100644 index 00000000..eb4be8a5 --- /dev/null +++ b/benchmarks/k6/services.json @@ -0,0 +1,50 @@ +[ + { + "name": "typescript", + "memoryUrl": "", + "dbUrl": "", + "cloudRunService": "", + "cloudRunRegion": "us-central1", + "dbSeedCommand": "" + }, + { + "name": "python", + "memoryUrl": "", + "dbUrl": "", + "cloudRunService": "", + "cloudRunRegion": "us-central1", + "dbSeedCommand": "" + }, + { + "name": "java", + "memoryUrl": "", + "dbUrl": "", + "cloudRunService": "", + "cloudRunRegion": "us-central1", + "dbSeedCommand": "" + }, + { + "name": "csharp", + "memoryUrl": "", + "dbUrl": "", + "cloudRunService": "", + "cloudRunRegion": "us-central1", + "dbSeedCommand": "" + }, + { + "name": "go", + "memoryUrl": "", + "dbUrl": "", + "cloudRunService": "", + "cloudRunRegion": "us-central1", + "dbSeedCommand": "" + }, + { + "name": 
"kotlin", + "memoryUrl": "", + "dbUrl": "", + "cloudRunService": "", + "cloudRunRegion": "us-central1", + "dbSeedCommand": "" + } +] diff --git a/benchmarks/results/.gitkeep b/benchmarks/results/.gitkeep new file mode 100644 index 00000000..e69de29b diff --git a/benchmarks/results/summary.md b/benchmarks/results/summary.md new file mode 100644 index 00000000..c115ab5d --- /dev/null +++ b/benchmarks/results/summary.md @@ -0,0 +1,11 @@ +# Benchmark Summary + +No benchmark runs yet. + +Run: + +```bash +node benchmarks/k6/run-benchmarks.js +``` + +Then this file will be replaced with ranked results. From b4f59cd76aae81be6231d7f84482167ccf3000f3 Mon Sep 17 00:00:00 2001 From: Davide Mendolia Date: Thu, 12 Feb 2026 22:26:02 +0100 Subject: [PATCH 02/20] feat(benchmarks): add cloud run service mapping and mode switch hooks --- benchmarks/k6/README.md | 10 ++++++ benchmarks/k6/run-benchmarks.js | 15 +++++++++ benchmarks/k6/services.json | 60 ++++++++++++++++++++------------- 3 files changed, 61 insertions(+), 24 deletions(-) diff --git a/benchmarks/k6/README.md b/benchmarks/k6/README.md index a6d1e303..22e8a161 100644 --- a/benchmarks/k6/README.md +++ b/benchmarks/k6/README.md @@ -37,6 +37,8 @@ Edit `benchmarks/k6/services.json`: - `dbUrl`: Cloud Run URL for DB-backed deployment (no `/v1` suffix needed) - `cloudRunService`: Cloud Run service name for settings updates - `cloudRunRegion`: region (default `us-central1`) +- `memorySetupCommand`: optional command run before memory pass for this service +- `dbSetupCommand`: optional command run before DB pass for this service - `dbSeedCommand`: optional shell command to reset+seed DB before each DB run Example `dbSeedCommand`: @@ -45,6 +47,14 @@ Example `dbSeedCommand`: ./scripts/seed-benchmark-db.sh typescript 10000 ``` +If memory and DB use the same URL with an env-var mode switch, set both URLs equal and use setup commands. 
+Example: + +```bash +gcloud run services update typescript-lamp-control-api --region europe-west1 --update-env-vars STORAGE_MODE=memory +gcloud run services update typescript-lamp-control-api --region europe-west1 --update-env-vars STORAGE_MODE=db +``` + ## 2) Validate or apply Cloud Run parity settings Dry run (prints commands): diff --git a/benchmarks/k6/run-benchmarks.js b/benchmarks/k6/run-benchmarks.js index 15aa89a8..96441d5a 100755 --- a/benchmarks/k6/run-benchmarks.js +++ b/benchmarks/k6/run-benchmarks.js @@ -184,6 +184,16 @@ function validateService(service) { } } +function getPassSetupCommand(service, passName) { + if (passName === 'memory') { + return service.memorySetupCommand || ''; + } + if (passName === 'db') { + return service.dbSetupCommand || ''; + } + return ''; +} + function buildK6Env({ config, service, baseUrl, mode, targetRps, duration }) { const env = { ...process.env }; env.RUN_MODE = mode; @@ -364,6 +374,11 @@ async function main() { throw new Error(`Missing ${passName} URL for service ${service.name}`); } + const passSetupCommand = getPassSetupCommand(service, passName); + if (passSetupCommand) { + runShell(passSetupCommand); + } + const serviceRuns = []; for (let iteration = 1; iteration <= Number(config.iterationsPerPass || 1); iteration += 1) { diff --git a/benchmarks/k6/services.json b/benchmarks/k6/services.json index eb4be8a5..761fbc04 100644 --- a/benchmarks/k6/services.json +++ b/benchmarks/k6/services.json @@ -1,50 +1,62 @@ [ { "name": "typescript", - "memoryUrl": "", - "dbUrl": "", - "cloudRunService": "", - "cloudRunRegion": "us-central1", + "memoryUrl": "https://typescript-lamp-control-api-827868544165.europe-west1.run.app", + "dbUrl": "https://typescript-lamp-control-api-827868544165.europe-west1.run.app", + "cloudRunService": "typescript-lamp-control-api", + "cloudRunRegion": "europe-west1", + "memorySetupCommand": "", + "dbSetupCommand": "", "dbSeedCommand": "" }, { "name": "python", - "memoryUrl": "", - "dbUrl": "", - 
"cloudRunService": "", - "cloudRunRegion": "us-central1", + "memoryUrl": "https://python-lamp-control-api-827868544165.europe-west1.run.app", + "dbUrl": "https://python-lamp-control-api-827868544165.europe-west1.run.app", + "cloudRunService": "python-lamp-control-api", + "cloudRunRegion": "europe-west1", + "memorySetupCommand": "", + "dbSetupCommand": "", "dbSeedCommand": "" }, { "name": "java", - "memoryUrl": "", - "dbUrl": "", - "cloudRunService": "", - "cloudRunRegion": "us-central1", + "memoryUrl": "https://java-lamp-control-api-827868544165.europe-west1.run.app", + "dbUrl": "https://java-lamp-control-api-827868544165.europe-west1.run.app", + "cloudRunService": "java-lamp-control-api", + "cloudRunRegion": "europe-west1", + "memorySetupCommand": "", + "dbSetupCommand": "", "dbSeedCommand": "" }, { "name": "csharp", - "memoryUrl": "", - "dbUrl": "", - "cloudRunService": "", - "cloudRunRegion": "us-central1", + "memoryUrl": "https://csharp-lamp-control-api-827868544165.europe-west1.run.app", + "dbUrl": "https://csharp-lamp-control-api-827868544165.europe-west1.run.app", + "cloudRunService": "csharp-lamp-control-api", + "cloudRunRegion": "europe-west1", + "memorySetupCommand": "", + "dbSetupCommand": "", "dbSeedCommand": "" }, { "name": "go", - "memoryUrl": "", - "dbUrl": "", - "cloudRunService": "", - "cloudRunRegion": "us-central1", + "memoryUrl": "https://go-lamp-control-api-827868544165.europe-west1.run.app", + "dbUrl": "https://go-lamp-control-api-827868544165.europe-west1.run.app", + "cloudRunService": "go-lamp-control-api", + "cloudRunRegion": "europe-west1", + "memorySetupCommand": "", + "dbSetupCommand": "", "dbSeedCommand": "" }, { "name": "kotlin", - "memoryUrl": "", - "dbUrl": "", - "cloudRunService": "", - "cloudRunRegion": "us-central1", + "memoryUrl": "https://kotlin-lamp-control-api-827868544165.europe-west1.run.app", + "dbUrl": "https://kotlin-lamp-control-api-827868544165.europe-west1.run.app", + "cloudRunService": "kotlin-lamp-control-api", + 
"cloudRunRegion": "europe-west1", + "memorySetupCommand": "", + "dbSetupCommand": "", "dbSeedCommand": "" } ] From b588431213dad9dd4a0dfbea54025b4b06e522a9 Mon Sep 17 00:00:00 2001 From: Davide Mendolia Date: Thu, 12 Feb 2026 22:35:25 +0100 Subject: [PATCH 03/20] docs(benchmarks): document db seeding and gcp runner setup --- benchmarks/k6/README.md | 56 +++++++++++++++++++++++++++++++++---- benchmarks/k6/services.json | 12 ++++---- 2 files changed, 57 insertions(+), 11 deletions(-) diff --git a/benchmarks/k6/README.md b/benchmarks/k6/README.md index 22e8a161..ec5c4cf5 100644 --- a/benchmarks/k6/README.md +++ b/benchmarks/k6/README.md @@ -41,10 +41,10 @@ Edit `benchmarks/k6/services.json`: - `dbSetupCommand`: optional command run before DB pass for this service - `dbSeedCommand`: optional shell command to reset+seed DB before each DB run -Example `dbSeedCommand`: +`dbSeedCommand` can use a shared `BENCHMARK_DATABASE_URL` env var. The current `services.json` is already configured with: ```bash -./scripts/seed-benchmark-db.sh typescript 10000 +psql "$BENCHMARK_DATABASE_URL" -v ON_ERROR_STOP=1 -c "TRUNCATE TABLE lamps RESTART IDENTITY CASCADE; INSERT INTO lamps (id, is_on, created_at, updated_at, deleted_at) SELECT uuid_generate_v5('6ba7b810-9dad-11d1-80b4-00c04fd430c8', 'lamp-' || g), (g % 2 = 0), NOW() - ((10001 - g) * INTERVAL '1 second'), NOW() - ((10001 - g) * INTERVAL '1 second'), NULL FROM generate_series(1, 10000) AS g;" ``` If memory and DB use the same URL with an env-var mode switch, set both URLs equal and use setup commands. 
@@ -55,7 +55,53 @@ gcloud run services update typescript-lamp-control-api --region europe-west1 --u
 gcloud run services update typescript-lamp-control-api --region europe-west1 --update-env-vars STORAGE_MODE=db
 ```
 
-## 2) Validate or apply Cloud Run parity settings
+Before running benchmarks, export your DB URL:
+
+```bash
+export BENCHMARK_DATABASE_URL='postgresql://<USER>:<PASSWORD>@<HOST>:5432/<DATABASE>?sslmode=require'
+```
+
+## 2) Run from a GCP VM (recommended)
+
+Create a runner VM in the same region and install required tools:
+
+```bash
+gcloud compute instances create lamp-bench-runner \
+  --project=<PROJECT_ID> \
+  --zone=europe-west1-b \
+  --machine-type=e2-standard-4 \
+  --image-family=ubuntu-2204-lts \
+  --image-project=ubuntu-os-cloud \
+  --boot-disk-size=30GB \
+  --metadata=startup-script='#!/usr/bin/env bash
+set -euxo pipefail
+export DEBIAN_FRONTEND=noninteractive
+
+apt-get update
+apt-get install -y ca-certificates curl gnupg git jq postgresql-client
+
+curl -fsSL https://deb.nodesource.com/setup_20.x | bash -
+apt-get install -y nodejs
+
+curl -fsSL https://dl.k6.io/key.gpg | gpg --dearmor -o /usr/share/keyrings/k6-archive-keyring.gpg
+echo "deb [signed-by=/usr/share/keyrings/k6-archive-keyring.gpg] https://dl.k6.io/deb stable main" > /etc/apt/sources.list.d/k6.list
+apt-get update
+apt-get install -y k6
+
+node --version
+npm --version
+k6 version
+psql --version
+'
+```
+
+Then connect:
+
+```bash
+gcloud compute ssh lamp-bench-runner --project=<PROJECT_ID> --zone=europe-west1-b
+```
+
+## 3) Validate or apply Cloud Run parity settings
 
 Dry run (prints commands):
 
@@ -71,7 +117,7 @@ node benchmarks/k6/configure-cloud-run.js --project --execute
 
 Settings come from `benchmarks/k6/config.json` under `cloudRun`. 
 
-## 3) Run benchmark +## 4) Run benchmark Run both passes (`memory`,`db`) with settings from `config.json`: @@ -91,7 +137,7 @@ Outputs: - Structured run report: `benchmarks/results/run-report.json` - Ranked markdown summary: `benchmarks/results/summary.md` -## 4) Rebuild summary only +## 5) Rebuild summary only ```bash node benchmarks/k6/generate-summary.js benchmarks/results/run-report.json benchmarks/results/summary.md diff --git a/benchmarks/k6/services.json b/benchmarks/k6/services.json index 761fbc04..bf3fcf0d 100644 --- a/benchmarks/k6/services.json +++ b/benchmarks/k6/services.json @@ -7,7 +7,7 @@ "cloudRunRegion": "europe-west1", "memorySetupCommand": "", "dbSetupCommand": "", - "dbSeedCommand": "" + "dbSeedCommand": "psql \"$BENCHMARK_DATABASE_URL\" -v ON_ERROR_STOP=1 -c \"TRUNCATE TABLE lamps RESTART IDENTITY CASCADE; INSERT INTO lamps (id, is_on, created_at, updated_at, deleted_at) SELECT uuid_generate_v5('6ba7b810-9dad-11d1-80b4-00c04fd430c8', 'lamp-' || g), (g % 2 = 0), NOW() - ((10001 - g) * INTERVAL '1 second'), NOW() - ((10001 - g) * INTERVAL '1 second'), NULL FROM generate_series(1, 10000) AS g;\"" }, { "name": "python", @@ -17,7 +17,7 @@ "cloudRunRegion": "europe-west1", "memorySetupCommand": "", "dbSetupCommand": "", - "dbSeedCommand": "" + "dbSeedCommand": "psql \"$BENCHMARK_DATABASE_URL\" -v ON_ERROR_STOP=1 -c \"TRUNCATE TABLE lamps RESTART IDENTITY CASCADE; INSERT INTO lamps (id, is_on, created_at, updated_at, deleted_at) SELECT uuid_generate_v5('6ba7b810-9dad-11d1-80b4-00c04fd430c8', 'lamp-' || g), (g % 2 = 0), NOW() - ((10001 - g) * INTERVAL '1 second'), NOW() - ((10001 - g) * INTERVAL '1 second'), NULL FROM generate_series(1, 10000) AS g;\"" }, { "name": "java", @@ -27,7 +27,7 @@ "cloudRunRegion": "europe-west1", "memorySetupCommand": "", "dbSetupCommand": "", - "dbSeedCommand": "" + "dbSeedCommand": "psql \"$BENCHMARK_DATABASE_URL\" -v ON_ERROR_STOP=1 -c \"TRUNCATE TABLE lamps RESTART IDENTITY CASCADE; INSERT INTO lamps (id, is_on, 
created_at, updated_at, deleted_at) SELECT uuid_generate_v5('6ba7b810-9dad-11d1-80b4-00c04fd430c8', 'lamp-' || g), (g % 2 = 0), NOW() - ((10001 - g) * INTERVAL '1 second'), NOW() - ((10001 - g) * INTERVAL '1 second'), NULL FROM generate_series(1, 10000) AS g;\"" }, { "name": "csharp", @@ -37,7 +37,7 @@ "cloudRunRegion": "europe-west1", "memorySetupCommand": "", "dbSetupCommand": "", - "dbSeedCommand": "" + "dbSeedCommand": "psql \"$BENCHMARK_DATABASE_URL\" -v ON_ERROR_STOP=1 -c \"TRUNCATE TABLE lamps RESTART IDENTITY CASCADE; INSERT INTO lamps (id, is_on, created_at, updated_at, deleted_at) SELECT uuid_generate_v5('6ba7b810-9dad-11d1-80b4-00c04fd430c8', 'lamp-' || g), (g % 2 = 0), NOW() - ((10001 - g) * INTERVAL '1 second'), NOW() - ((10001 - g) * INTERVAL '1 second'), NULL FROM generate_series(1, 10000) AS g;\"" }, { "name": "go", @@ -47,7 +47,7 @@ "cloudRunRegion": "europe-west1", "memorySetupCommand": "", "dbSetupCommand": "", - "dbSeedCommand": "" + "dbSeedCommand": "psql \"$BENCHMARK_DATABASE_URL\" -v ON_ERROR_STOP=1 -c \"TRUNCATE TABLE lamps RESTART IDENTITY CASCADE; INSERT INTO lamps (id, is_on, created_at, updated_at, deleted_at) SELECT uuid_generate_v5('6ba7b810-9dad-11d1-80b4-00c04fd430c8', 'lamp-' || g), (g % 2 = 0), NOW() - ((10001 - g) * INTERVAL '1 second'), NOW() - ((10001 - g) * INTERVAL '1 second'), NULL FROM generate_series(1, 10000) AS g;\"" }, { "name": "kotlin", @@ -57,6 +57,6 @@ "cloudRunRegion": "europe-west1", "memorySetupCommand": "", "dbSetupCommand": "", - "dbSeedCommand": "" + "dbSeedCommand": "psql \"$BENCHMARK_DATABASE_URL\" -v ON_ERROR_STOP=1 -c \"TRUNCATE TABLE lamps RESTART IDENTITY CASCADE; INSERT INTO lamps (id, is_on, created_at, updated_at, deleted_at) SELECT uuid_generate_v5('6ba7b810-9dad-11d1-80b4-00c04fd430c8', 'lamp-' || g), (g % 2 = 0), NOW() - ((10001 - g) * INTERVAL '1 second'), NOW() - ((10001 - g) * INTERVAL '1 second'), NULL FROM generate_series(1, 10000) AS g;\"" } ] From fa2a704c9870de690eec5a775188a2d39f050aca Mon 
Sep 17 00:00:00 2001 From: Davide Mendolia Date: Thu, 12 Feb 2026 23:00:24 +0100 Subject: [PATCH 04/20] docs: add postgres startup variable matrix by implementation --- docs/POSTGRES_STARTUP_VARIABLES.md | 166 +++++++++++++++++++++++++++++ 1 file changed, 166 insertions(+) create mode 100644 docs/POSTGRES_STARTUP_VARIABLES.md diff --git a/docs/POSTGRES_STARTUP_VARIABLES.md b/docs/POSTGRES_STARTUP_VARIABLES.md new file mode 100644 index 00000000..d11e7588 --- /dev/null +++ b/docs/POSTGRES_STARTUP_VARIABLES.md @@ -0,0 +1,166 @@ +# PostgreSQL Startup Variables by Implementation + +This document summarizes how each implementation under `src/` decides whether to use PostgreSQL and which environment variable formats it expects. + +## TypeScript (`src/typescript`) + +Source of truth: +- `src/typescript/src/infrastructure/app.ts` +- `src/typescript/src/infrastructure/database/client.ts` +- `src/typescript/src/cli.ts` + +PostgreSQL is enabled when: +- `DATABASE_URL` is set and non-empty. + +Required variables: +- `DATABASE_URL` + +Expected format: +- Prisma PostgreSQL URL, for example: + - `postgresql://user:password@host:5432/database` + - Optional query params are supported by Prisma (example: `?schema=public`). + +Notes: +- `USE_POSTGRES` is not read by runtime code in `src/typescript/src/*`. +- If `DATABASE_URL` is missing, app uses in-memory repository. +- In `--mode=serve` and `--mode=migrate`, migrations run only if `DATABASE_URL` is present. + +## Python (`src/python`) + +Source of truth: +- `src/python/src/openapi_server/infrastructure/config.py` +- `src/python/src/openapi_server/dependencies.py` +- `src/python/src/openapi_server/cli.py` + +PostgreSQL is enabled when: +- `DATABASE_URL` is set and not blank. 
+ +Required variables: +- `DATABASE_URL` + +Expected format: +- Preferred input: `postgresql://user:password@host:5432/database` +- Runtime converts it to async driver format: `postgresql+asyncpg://...` +- If already provided as `postgresql+asyncpg://...`, it is used directly. +- `sslmode` query parameter is removed automatically from `DATABASE_URL`. + +Optional variables: +- `DB_HOST`, `DB_PORT`, `DB_NAME`, `DB_USER`, `DB_PASSWORD` +- `DB_POOL_MIN_SIZE`, `DB_POOL_MAX_SIZE` + +Important caveat: +- Individual `DB_*` variables alone do **not** switch to PostgreSQL mode. +- PostgreSQL mode switch depends specifically on `DATABASE_URL` presence. + +## Java (`src/java`) + +Source of truth: +- `src/java/src/main/resources/application.properties` +- `src/java/src/main/java/org/openapitools/config/OnDatabaseUrlCondition.java` +- `src/java/src/main/java/org/openapitools/config/DataSourceConfig.java` + +PostgreSQL is enabled when: +- `spring.datasource.url` resolves to a non-empty value. +- Resolution order in properties: + - `SPRING_DATASOURCE_URL` + - then `DATABASE_URL` + +Required variable: +- `SPRING_DATASOURCE_URL` or `DATABASE_URL` + +Expected format: +- Must be JDBC URL format: + - `jdbc:postgresql://host:5432/database` + +Optional variables: +- `DB_USER` (default: `lampuser`) +- `DB_PASSWORD` (default: `lamppass`) +- `DB_POOL_MAX_SIZE`, `DB_POOL_MIN_SIZE` +- `FLYWAY_ENABLED` (used for migration behavior) + +Important caveat: +- A non-JDBC URL like `postgresql://...` is not valid for `spring.datasource.url`. + +## C# (`src/csharp`) + +Source of truth: +- `src/csharp/LampControlApi/Extensions/ServiceCollectionExtensions.cs` +- `src/csharp/LampControlApi/Extensions/MigrationRunner.cs` +- `src/csharp/LampControlApi/appsettings.Development.example.json` + +PostgreSQL is enabled when: +- Connection string `ConnectionStrings:LampControl` is non-empty. 
+- Resolution order: + - config key `ConnectionStrings:LampControl` + - fallback env var `ConnectionStrings__LampControl` + +Required variable (if using env): +- `ConnectionStrings__LampControl` + +Expected format: +- Npgsql connection string, for example: + - `Host=localhost;Port=5432;Database=lampcontrol;Username=lampuser;Password=lamppass` + +Notes: +- `DATABASE_URL` is not used by the C# implementation. + +## Go (`src/go`) + +Source of truth: +- `src/go/api/config.go` +- `src/go/cmd/lamp-control-api/main.go` + +PostgreSQL is enabled when **any** of these is true: +- `DATABASE_URL` is set, or +- `DB_NAME` is set, or +- both `DB_HOST` and `DB_USER` are set. + +Primary variable: +- `DATABASE_URL` (takes precedence if set) + +Expected format: +- Recommended: URL form, e.g. `postgres://user:password@host:5432/database?sslmode=disable` +- Also supports pgx key-value DSN internally (built when `DATABASE_URL` is not set): + - `host=... port=... dbname=... user=... password=...` + +Optional variables: +- `DB_HOST`, `DB_PORT`, `DB_NAME`, `DB_USER`, `DB_PASSWORD` +- `DB_POOL_MIN_SIZE`, `DB_POOL_MAX_SIZE` + +## Kotlin (`src/kotlin`) + +Source of truth: +- `src/kotlin/src/main/kotlin/com/lampcontrol/database/DatabaseFactory.kt` +- `src/kotlin/src/main/kotlin/com/lampcontrol/Application.kt` + +PostgreSQL is enabled when **any** of these is true: +- `DATABASE_URL` is set, or +- `DB_NAME` is set, or +- both `DB_HOST` and `DB_USER` are set. + +Primary variable: +- `DATABASE_URL` (preferred when available) + +Expected `DATABASE_URL` format: +- Strictly parsed by regex: + - `postgresql://user:password@host:5432/database` + - or `postgres://user:password@host:5432/database` + +Important caveat: +- Query parameters (for example `?sslmode=...`) are not handled by the current parser and can fail parsing. 
+ +Optional variables: +- `DB_HOST`, `DB_PORT`, `DB_NAME`, `DB_USER`, `DB_PASSWORD` +- `DB_POOL_MIN_SIZE`, `DB_POOL_MAX_SIZE` +- `DB_MAX_LIFETIME_MS`, `DB_IDLE_TIMEOUT_MS`, `DB_CONNECTION_TIMEOUT_MS` + +## Quick Reference Matrix + +| Language | Switch to PostgreSQL | Required variable(s) | Connection string format | +|---|---|---|---| +| TypeScript | `DATABASE_URL` non-empty | `DATABASE_URL` | `postgresql://...` (Prisma URL) | +| Python | `DATABASE_URL` non-empty | `DATABASE_URL` | `postgresql://...` or `postgresql+asyncpg://...` | +| Java | `spring.datasource.url` non-empty | `SPRING_DATASOURCE_URL` or `DATABASE_URL` | `jdbc:postgresql://...` | +| C# | `ConnectionStrings:LampControl` non-empty | `ConnectionStrings__LampControl` (env) | `Host=...;Port=...;Database=...;Username=...;Password=...` | +| Go | `DATABASE_URL` or `DB_NAME` or (`DB_HOST`+`DB_USER`) | `DATABASE_URL` recommended | `postgres://...` recommended; key-value DSN supported | +| Kotlin | `DATABASE_URL` or `DB_NAME` or (`DB_HOST`+`DB_USER`) | `DATABASE_URL` recommended | `postgresql://...` or `postgres://...` (strict parser) | From 2764dda2a2c2a7619cb22ac2120f40064691ca8a Mon Sep 17 00:00:00 2001 From: Davide Mendolia Date: Fri, 13 Feb 2026 10:44:39 +0100 Subject: [PATCH 05/20] docs: refresh postgres startup variables after syncing main --- docs/POSTGRES_STARTUP_VARIABLES.md | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/docs/POSTGRES_STARTUP_VARIABLES.md b/docs/POSTGRES_STARTUP_VARIABLES.md index d11e7588..9b230362 100644 --- a/docs/POSTGRES_STARTUP_VARIABLES.md +++ b/docs/POSTGRES_STARTUP_VARIABLES.md @@ -2,6 +2,12 @@ This document summarizes how each implementation under `src/` decides whether to use PostgreSQL and which environment variable formats it expects. +Synced with `main` and re-validated against current sources on 2026-02-13. + +## Cross-Implementation Note + +- `USE_POSTGRES` should not be used as a PostgreSQL mode switch. 
Current runtime code paths in `src/` use connection-related variables (for example `DATABASE_URL`, JDBC URL, or `ConnectionStrings__LampControl`) to decide storage backend. + ## TypeScript (`src/typescript`) Source of truth: From 050624a36d2610376f9771eb5d7d55ce42a82bf7 Mon Sep 17 00:00:00 2001 From: Davide Mendolia Date: Fri, 13 Feb 2026 10:57:04 +0100 Subject: [PATCH 06/20] feat(csharp): support DATABASE_URL fallback for connection string (#367) * feat(csharp): add DATABASE_URL fallback for postgres connection * test(csharp): fix file provider in connection resolution tests * chore(csharp): address PR review feedback --- .../ServiceCollectionExtensionsTests.cs | 254 ++++++++++++++++++ .../Extensions/ServiceCollectionExtensions.cs | 215 +++++++++++++++ .../LampControlApi/Properties/AssemblyInfo.cs | 3 + src/csharp/README.md | 18 +- 4 files changed, 489 insertions(+), 1 deletion(-) create mode 100644 src/csharp/LampControlApi.Tests/Extensions/ServiceCollectionExtensionsTests.cs create mode 100644 src/csharp/LampControlApi/Properties/AssemblyInfo.cs diff --git a/src/csharp/LampControlApi.Tests/Extensions/ServiceCollectionExtensionsTests.cs b/src/csharp/LampControlApi.Tests/Extensions/ServiceCollectionExtensionsTests.cs new file mode 100644 index 00000000..4b757b2c --- /dev/null +++ b/src/csharp/LampControlApi.Tests/Extensions/ServiceCollectionExtensionsTests.cs @@ -0,0 +1,254 @@ +using LampControlApi.Domain.Repositories; +using LampControlApi.Extensions; +using LampControlApi.Services; +using Microsoft.AspNetCore.Hosting; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.FileProviders; +using Microsoft.VisualStudio.TestTools.UnitTesting; +using Npgsql; + +namespace LampControlApi.Tests.Extensions +{ + [TestClass] + [DoNotParallelize] + public class ServiceCollectionExtensionsTests + { + private const string ConnectionStringEnvVar = "ConnectionStrings__LampControl"; + private const string 
DatabaseUrlEnvVar = "DATABASE_URL"; + + private string? originalConnectionStringEnvVar; + private string? originalDatabaseUrlEnvVar; + + [TestInitialize] + public void Setup() + { + this.originalConnectionStringEnvVar = Environment.GetEnvironmentVariable(ConnectionStringEnvVar); + this.originalDatabaseUrlEnvVar = Environment.GetEnvironmentVariable(DatabaseUrlEnvVar); + Environment.SetEnvironmentVariable(ConnectionStringEnvVar, null); + Environment.SetEnvironmentVariable(DatabaseUrlEnvVar, null); + } + + [TestCleanup] + public void Cleanup() + { + Environment.SetEnvironmentVariable(ConnectionStringEnvVar, this.originalConnectionStringEnvVar); + Environment.SetEnvironmentVariable(DatabaseUrlEnvVar, this.originalDatabaseUrlEnvVar); + } + + [TestMethod] + public void ResolveConnectionString_ConfigConnectionStringShouldTakePrecedence() + { + var configuration = BuildConfiguration(new Dictionary + { + ["ConnectionStrings:LampControl"] = "Host=config-host;Port=5432;Database=config-db;Username=config-user;Password=config-pass", + }); + + Environment.SetEnvironmentVariable( + ConnectionStringEnvVar, + "Host=env-host;Port=5432;Database=env-db;Username=env-user;Password=env-pass"); + Environment.SetEnvironmentVariable( + DatabaseUrlEnvVar, + "postgresql://url-user:url-pass@url-host:5432/url-db"); + + var result = ServiceCollectionExtensions.ResolveConnectionString(configuration); + + Assert.AreEqual( + "Host=config-host;Port=5432;Database=config-db;Username=config-user;Password=config-pass", + result); + } + + [TestMethod] + public void ResolveConnectionString_ConnectionStringsEnvironmentVariableShouldTakePrecedenceOverDatabaseUrl() + { + var configuration = BuildConfiguration(); + Environment.SetEnvironmentVariable( + ConnectionStringEnvVar, + "Host=env-host;Port=5432;Database=env-db;Username=env-user;Password=env-pass"); + Environment.SetEnvironmentVariable( + DatabaseUrlEnvVar, + "postgresql://url-user:url-pass@url-host:5432/url-db"); + + var result = 
ServiceCollectionExtensions.ResolveConnectionString(configuration); + + Assert.AreEqual( + "Host=env-host;Port=5432;Database=env-db;Username=env-user;Password=env-pass", + result); + } + + [TestMethod] + public void ResolveConnectionString_ShouldUseDatabaseUrlWhenOtherSourcesAreMissing() + { + var configuration = BuildConfiguration(); + Environment.SetEnvironmentVariable( + DatabaseUrlEnvVar, + "postgresql://lampuser:lamppass@localhost:5432/lampcontrol?sslmode=Disable&trust_server_certificate=true"); + + var result = ServiceCollectionExtensions.ResolveConnectionString(configuration); + var parsedConnectionString = new NpgsqlConnectionStringBuilder(result); + + Assert.AreEqual("localhost", parsedConnectionString.Host); + Assert.AreEqual(5432, parsedConnectionString.Port); + Assert.AreEqual("lampcontrol", parsedConnectionString.Database); + Assert.AreEqual("lampuser", parsedConnectionString.Username); + Assert.AreEqual("lamppass", parsedConnectionString.Password); + Assert.AreEqual(SslMode.Disable, parsedConnectionString.SslMode); + } + + [TestMethod] + public void ResolveConnectionString_ShouldAcceptPostgresScheme() + { + var configuration = BuildConfiguration(); + Environment.SetEnvironmentVariable( + DatabaseUrlEnvVar, + "postgres://lampuser:lamppass@localhost:5433/lampcontrol"); + + var result = ServiceCollectionExtensions.ResolveConnectionString(configuration); + var parsedConnectionString = new NpgsqlConnectionStringBuilder(result); + + Assert.AreEqual("localhost", parsedConnectionString.Host); + Assert.AreEqual(5433, parsedConnectionString.Port); + Assert.AreEqual("lampcontrol", parsedConnectionString.Database); + } + + [TestMethod] + public void ResolveConnectionString_ShouldDecodeCredentialsFromDatabaseUrl() + { + var configuration = BuildConfiguration(); + Environment.SetEnvironmentVariable( + DatabaseUrlEnvVar, + "postgresql://lamp%2Buser:p%40ss%3Aword@localhost:5432/lampcontrol"); + + var result = 
ServiceCollectionExtensions.ResolveConnectionString(configuration); + var parsedConnectionString = new NpgsqlConnectionStringBuilder(result); + + Assert.AreEqual("lamp+user", parsedConnectionString.Username); + Assert.AreEqual("p@ss:word", parsedConnectionString.Password); + } + + [TestMethod] + public void ResolveConnectionString_ShouldThrowWhenDatabaseUrlHasUnsupportedQueryParameter() + { + var configuration = BuildConfiguration(); + Environment.SetEnvironmentVariable( + DatabaseUrlEnvVar, + "postgresql://lampuser:lamppass@localhost:5432/lampcontrol?unknown_param=42"); + + var exception = Assert.ThrowsException( + () => ServiceCollectionExtensions.ResolveConnectionString(configuration)); + + StringAssert.Contains(exception.Message, "Unsupported DATABASE_URL query parameter"); + } + + [TestMethod] + public void ResolveConnectionString_ShouldThrowWhenDatabaseUrlHasInvalidScheme() + { + var configuration = BuildConfiguration(); + Environment.SetEnvironmentVariable( + DatabaseUrlEnvVar, + "mysql://lampuser:lamppass@localhost:5432/lampcontrol"); + + var exception = Assert.ThrowsException( + () => ServiceCollectionExtensions.ResolveConnectionString(configuration)); + + StringAssert.Contains(exception.Message, "Invalid DATABASE_URL scheme"); + } + + [TestMethod] + public void ResolveConnectionString_ShouldThrowWhenDatabaseUrlHasMissingHost() + { + var configuration = BuildConfiguration(); + Environment.SetEnvironmentVariable( + DatabaseUrlEnvVar, + "postgresql://lampuser:lamppass@/lampcontrol"); + + var exception = Assert.ThrowsException( + () => ServiceCollectionExtensions.ResolveConnectionString(configuration)); + + StringAssert.Contains(exception.Message, "Invalid DATABASE_URL value"); + } + + [TestMethod] + public void ResolveConnectionString_ShouldThrowWhenDatabaseUrlHasMissingDatabaseName() + { + var configuration = BuildConfiguration(); + Environment.SetEnvironmentVariable( + DatabaseUrlEnvVar, + "postgresql://lampuser:lamppass@localhost:5432"); + + var exception 
= Assert.ThrowsException( + () => ServiceCollectionExtensions.ResolveConnectionString(configuration)); + + StringAssert.Contains(exception.Message, "Database name is required"); + } + + [TestMethod] + public void ResolveConnectionString_ShouldThrowWhenDatabaseUrlIsMalformed() + { + var configuration = BuildConfiguration(); + Environment.SetEnvironmentVariable( + DatabaseUrlEnvVar, + "this-is-not-a-url"); + + var exception = Assert.ThrowsException( + () => ServiceCollectionExtensions.ResolveConnectionString(configuration)); + + StringAssert.Contains(exception.Message, "Invalid DATABASE_URL value"); + } + + [TestMethod] + public void ResolveConnectionString_ShouldThrowWhenPasswordIsMissing() + { + var configuration = BuildConfiguration(); + Environment.SetEnvironmentVariable( + DatabaseUrlEnvVar, + "postgresql://lampuser@localhost:5432/lampcontrol"); + + var exception = Assert.ThrowsException( + () => ServiceCollectionExtensions.ResolveConnectionString(configuration)); + + StringAssert.Contains(exception.Message, "Password is required"); + } + + [TestMethod] + public void AddLampControlServices_ShouldUsePostgresRepositoryWhenOnlyDatabaseUrlIsConfigured() + { + var services = new ServiceCollection(); + var configuration = BuildConfiguration(); + var environment = new TestWebHostEnvironment(); + Environment.SetEnvironmentVariable( + DatabaseUrlEnvVar, + "postgresql://lampuser:lamppass@localhost:5432/lampcontrol"); + + var usePostgres = services.AddLampControlServices(configuration, environment); + + Assert.IsTrue(usePostgres); + Assert.IsTrue( + services.Any(descriptor => + descriptor.ServiceType == typeof(ILampRepository) && + descriptor.ImplementationType == typeof(PostgresLampRepository))); + } + + private static IConfiguration BuildConfiguration(Dictionary? values = null) + { + return new ConfigurationBuilder() + .AddInMemoryCollection(values ?? 
new Dictionary()) + .Build(); + } + + private sealed class TestWebHostEnvironment : IWebHostEnvironment + { + public string ApplicationName { get; set; } = "LampControlApi.Tests"; + + public IFileProvider ContentRootFileProvider { get; set; } = new NullFileProvider(); + + public string ContentRootPath { get; set; } = Environment.CurrentDirectory; + + public string EnvironmentName { get; set; } = "Production"; + + public IFileProvider WebRootFileProvider { get; set; } = new NullFileProvider(); + + public string WebRootPath { get; set; } = Environment.CurrentDirectory; + } + } +} diff --git a/src/csharp/LampControlApi/Extensions/ServiceCollectionExtensions.cs b/src/csharp/LampControlApi/Extensions/ServiceCollectionExtensions.cs index 33cecb8a..eb06e866 100644 --- a/src/csharp/LampControlApi/Extensions/ServiceCollectionExtensions.cs +++ b/src/csharp/LampControlApi/Extensions/ServiceCollectionExtensions.cs @@ -3,6 +3,7 @@ using LampControlApi.Infrastructure.Database; using LampControlApi.Services; using Microsoft.EntityFrameworkCore; +using Npgsql; namespace LampControlApi.Extensions { @@ -81,7 +82,221 @@ public static bool AddLampControlServices( connectionString = Environment.GetEnvironmentVariable("ConnectionStrings__LampControl"); } + if (string.IsNullOrWhiteSpace(connectionString)) + { + var databaseUrl = Environment.GetEnvironmentVariable("DATABASE_URL"); + if (!string.IsNullOrWhiteSpace(databaseUrl)) + { + connectionString = ConvertDatabaseUrlToNpgsqlConnectionString(databaseUrl); + } + } + return connectionString; } + + private static string ConvertDatabaseUrlToNpgsqlConnectionString(string databaseUrl) + { + if (!Uri.TryCreate(databaseUrl, UriKind.Absolute, out var uri)) + { + throw new InvalidOperationException( + "Invalid DATABASE_URL value. 
Expected a valid absolute URI like " + + "'postgresql://user:password@host:5432/database'."); + } + + if (!string.Equals(uri.Scheme, "postgresql", StringComparison.OrdinalIgnoreCase) && + !string.Equals(uri.Scheme, "postgres", StringComparison.OrdinalIgnoreCase)) + { + throw new InvalidOperationException( + $"Invalid DATABASE_URL scheme '{uri.Scheme}'. Supported schemes are 'postgresql' and 'postgres'."); + } + + if (string.IsNullOrWhiteSpace(uri.Host)) + { + throw new InvalidOperationException( + "Invalid DATABASE_URL value. Host is required, for example: " + + "'postgresql://user:password@localhost:5432/lampcontrol'."); + } + + var databaseName = uri.AbsolutePath.Trim('/'); + if (string.IsNullOrWhiteSpace(databaseName)) + { + throw new InvalidOperationException( + "Invalid DATABASE_URL value. Database name is required in the path, for example: " + + "'postgresql://user:password@localhost:5432/lampcontrol'."); + } + + var (username, password) = ParseUserInfo(uri.UserInfo); + if (!string.IsNullOrWhiteSpace(username) && string.IsNullOrWhiteSpace(password)) + { + throw new InvalidOperationException( + "Invalid DATABASE_URL value. Password is required when a username is provided."); + } + + var builder = new NpgsqlConnectionStringBuilder + { + Host = uri.Host, + Port = uri.Port > 0 ? uri.Port : 5432, + Database = Uri.UnescapeDataString(databaseName), + }; + + if (!string.IsNullOrWhiteSpace(username)) + { + builder.Username = username; + } + + if (!string.IsNullOrWhiteSpace(password)) + { + builder.Password = password; + } + + ApplySupportedDatabaseUrlQueryParameters(builder, uri.Query); + return builder.ConnectionString; + } + + private static (string? Username, string? Password) ParseUserInfo(string? 
userInfo) + { + if (string.IsNullOrWhiteSpace(userInfo)) + { + return (null, null); + } + + var separatorIndex = userInfo.IndexOf(':'); + if (separatorIndex < 0) + { + return (Uri.UnescapeDataString(userInfo), null); + } + + var username = userInfo[..separatorIndex]; + var password = userInfo.Substring(separatorIndex + 1); + return (Uri.UnescapeDataString(username), Uri.UnescapeDataString(password)); + } + + private static void ApplySupportedDatabaseUrlQueryParameters( + NpgsqlConnectionStringBuilder builder, + string queryString) + { + if (string.IsNullOrWhiteSpace(queryString)) + { + return; + } + + var query = queryString.StartsWith('?') ? queryString[1..] : queryString; + if (string.IsNullOrWhiteSpace(query)) + { + return; + } + + foreach (var pair in query.Split('&', StringSplitOptions.RemoveEmptyEntries)) + { + var separatorIndex = pair.IndexOf('='); + if (separatorIndex <= 0 || separatorIndex == pair.Length - 1) + { + throw new InvalidOperationException( + $"Invalid DATABASE_URL query parameter '{pair}'. Expected key=value format."); + } + + var rawKey = pair[..separatorIndex]; + var rawValue = pair.Substring(separatorIndex + 1); + var key = Uri.UnescapeDataString(rawKey); + var value = Uri.UnescapeDataString(rawValue); + + switch (key.ToLowerInvariant()) + { + case "sslmode": + builder.SslMode = ParseSslMode(value); + break; + case "trust server certificate": + case "trust_server_certificate": + case "trustservercertificate": + // Npgsql 8+ treats this as obsolete/no-op, but keep validation for compatibility. 
+ _ = ParseBoolean(value, key); + break; + case "pooling": + builder.Pooling = ParseBoolean(value, key); + break; + case "maximum pool size": + case "max pool size": + case "maxpoolsize": + case "pool_max_conns": + builder.MaxPoolSize = ParsePositiveInteger(value, key); + break; + case "minimum pool size": + case "min pool size": + case "minpoolsize": + case "pool_min_conns": + builder.MinPoolSize = ParseNonNegativeInteger(value, key); + break; + case "timeout": + case "connect_timeout": + builder.Timeout = ParsePositiveInteger(value, key); + break; + case "command timeout": + case "command_timeout": + case "commandtimeout": + builder.CommandTimeout = ParsePositiveInteger(value, key); + break; + case "keepalive": + builder.KeepAlive = ParseNonNegativeInteger(value, key); + break; + case "search_path": + case "search path": + builder.SearchPath = value; + break; + case "application_name": + case "application name": + builder.ApplicationName = value; + break; + default: + throw new InvalidOperationException( + $"Unsupported DATABASE_URL query parameter '{key}'. Supported parameters: " + + "sslmode, trust_server_certificate, pooling, pool_max_conns, pool_min_conns, " + + "connect_timeout, command_timeout, keepalive, search_path, application_name."); + } + } + } + + private static SslMode ParseSslMode(string value) + { + if (Enum.TryParse(value, true, out var sslMode)) + { + return sslMode; + } + + throw new InvalidOperationException( + $"Invalid DATABASE_URL sslmode value '{value}'."); + } + + private static bool ParseBoolean(string value, string key) + { + if (bool.TryParse(value, out var parsedValue)) + { + return parsedValue; + } + + throw new InvalidOperationException( + $"Invalid DATABASE_URL value for '{key}': '{value}'. 
Expected 'true' or 'false'."); + } + + private static int ParsePositiveInteger(string value, string key) + { + if (int.TryParse(value, out var parsedValue) && parsedValue > 0) + { + return parsedValue; + } + + throw new InvalidOperationException( + $"Invalid DATABASE_URL value for '{key}': '{value}'. Expected a positive integer."); + } + + private static int ParseNonNegativeInteger(string value, string key) + { + if (int.TryParse(value, out var parsedValue) && parsedValue >= 0) + { + return parsedValue; + } + + throw new InvalidOperationException( + $"Invalid DATABASE_URL value for '{key}': '{value}'. Expected a non-negative integer."); + } } } diff --git a/src/csharp/LampControlApi/Properties/AssemblyInfo.cs b/src/csharp/LampControlApi/Properties/AssemblyInfo.cs new file mode 100644 index 00000000..3827f725 --- /dev/null +++ b/src/csharp/LampControlApi/Properties/AssemblyInfo.cs @@ -0,0 +1,3 @@ +using System.Runtime.CompilerServices; + +[assembly: InternalsVisibleTo("LampControlApi.Tests")] diff --git a/src/csharp/README.md b/src/csharp/README.md index 68299c95..74024c0a 100644 --- a/src/csharp/README.md +++ b/src/csharp/README.md @@ -91,6 +91,12 @@ docker exec -i lamp-control-api-reference-postgres-1 psql -U lampuser -d lampcon #### Configuration The application automatically uses PostgreSQL when a connection string is configured. +Configuration precedence is: +1. `ConnectionStrings:LampControl` (appsettings/user-secrets/environment binding) +2. `ConnectionStrings__LampControl` (explicit environment variable) +3. `DATABASE_URL` (`postgresql://...` or `postgres://...` fallback) + +`DATABASE_URL` parsing is strict and fails fast with a clear startup error if the value is invalid or unsupported. 
**Option 1: appsettings.json** (Development) @@ -113,7 +119,17 @@ export ConnectionStrings__LampControl="Host=db.production.com;Port=5432;Database dotnet run ``` -**Option 3: User Secrets** (Development) +**Option 3: DATABASE_URL fallback** (Production/Platform-provided URLs) + +```bash +# PostgreSQL URL format (supported schemes: postgresql:// and postgres://) +export DATABASE_URL="postgresql://lampuser:lamppass@localhost:5432/lampcontrol?sslmode=disable" + +# Run application +dotnet run +``` + +**Option 4: User Secrets** (Development) ```bash # Set connection string in user secrets From cf4adcdb04dd97ad98de7c013d2d75690fbc6780 Mon Sep 17 00:00:00 2001 From: Davide Mendolia Date: Fri, 13 Feb 2026 11:03:05 +0100 Subject: [PATCH 07/20] feat(java): support postgres URI forms for DATABASE_URL (#369) --- src/java/README.md | 8 +- .../openapitools/config/DataSourceConfig.java | 36 ++++++++- .../src/main/resources/application.properties | 4 +- .../config/DataSourceConfigTest.java | 73 ++++++++++++++++++- 4 files changed, 114 insertions(+), 7 deletions(-) diff --git a/src/java/README.md b/src/java/README.md index 4eb0d61b..8d9bc299 100644 --- a/src/java/README.md +++ b/src/java/README.md @@ -168,7 +168,11 @@ Configure database connection using environment variables to enable PostgreSQL m ```bash # Required to enable PostgreSQL mode -export DATABASE_URL=jdbc:postgresql://localhost:5432/lampcontrol +# Accepted formats: jdbc:postgresql://..., postgresql://..., postgres://... 
+export DATABASE_URL=postgresql://localhost:5432/lampcontrol + +# Optional override (takes precedence and is used as-is without normalization) +# export SPRING_DATASOURCE_URL=jdbc:postgresql://localhost:5432/lampcontrol # Database credentials export DB_USER=lampuser @@ -200,7 +204,7 @@ spring.datasource.hikari.minimum-idle=5 mvn spring-boot:run # Run with PostgreSQL (requires DATABASE_URL) -DATABASE_URL=jdbc:postgresql://localhost:5432/lampcontrol \ +DATABASE_URL=postgresql://localhost:5432/lampcontrol \ FLYWAY_ENABLED=true \ DB_USER=lampuser \ DB_PASSWORD=lamppass \ diff --git a/src/java/src/main/java/org/openapitools/config/DataSourceConfig.java b/src/java/src/main/java/org/openapitools/config/DataSourceConfig.java index 17dda645..632339b6 100644 --- a/src/java/src/main/java/org/openapitools/config/DataSourceConfig.java +++ b/src/java/src/main/java/org/openapitools/config/DataSourceConfig.java @@ -22,8 +22,14 @@ @Conditional(OnDatabaseUrlCondition.class) public class DataSourceConfig { + @Value("${SPRING_DATASOURCE_URL:}") + private String springDatasourceUrl; + + @Value("${DATABASE_URL:}") + private String databaseUrl; + @Value("${spring.datasource.url}") - private String jdbcUrl; + private String fallbackJdbcUrl; @Value("${spring.datasource.username:lampuser}") private String username; @@ -45,7 +51,7 @@ public HikariConfig hikariConfig() { HikariConfig config = new HikariConfig(); // Set core JDBC properties - config.setJdbcUrl(jdbcUrl); + config.setJdbcUrl(resolveJdbcUrl()); config.setUsername(username); config.setPassword(password); config.setDriverClassName(driverClassName); @@ -53,6 +59,32 @@ public HikariConfig hikariConfig() { return config; } + private String resolveJdbcUrl() { + if (isNotBlank(springDatasourceUrl)) { + return springDatasourceUrl; + } + + if (isNotBlank(databaseUrl)) { + return normalizeDatabaseUrl(databaseUrl); + } + + return fallbackJdbcUrl; + } + + private String normalizeDatabaseUrl(String url) { + if 
(url.startsWith("postgresql://")) { + return "jdbc:" + url; + } + if (url.startsWith("postgres://")) { + return "jdbc:postgresql://" + url.substring("postgres://".length()); + } + return url; + } + + private boolean isNotBlank(String value) { + return value != null && !value.isBlank(); + } + /** * Creates a HikariCP DataSource bean from the configured HikariConfig. * diff --git a/src/java/src/main/resources/application.properties b/src/java/src/main/resources/application.properties index a388e4e2..e7aebdb7 100644 --- a/src/java/src/main/resources/application.properties +++ b/src/java/src/main/resources/application.properties @@ -6,7 +6,9 @@ spring.jackson.serialization.WRITE_DATES_AS_TIMESTAMPS=false # Database Configuration (PostgreSQL) # By default, the application uses an in-memory repository # To enable PostgreSQL, set DATABASE_URL environment variable -# Example: DATABASE_URL=jdbc:postgresql://localhost:5432/lampcontrol +# DATABASE_URL supports: jdbc:postgresql://..., postgresql://..., postgres://... 
+# SPRING_DATASOURCE_URL is used as-is when set (no normalization) +# Example: DATABASE_URL=postgresql://localhost:5432/lampcontrol spring.datasource.url=${SPRING_DATASOURCE_URL:${DATABASE_URL:}} spring.datasource.username=${DB_USER:lampuser} spring.datasource.password=${DB_PASSWORD:lamppass} diff --git a/src/java/src/test/java/org/openapitools/config/DataSourceConfigTest.java b/src/java/src/test/java/org/openapitools/config/DataSourceConfigTest.java index 0ac8dc07..182349be 100644 --- a/src/java/src/test/java/org/openapitools/config/DataSourceConfigTest.java +++ b/src/java/src/test/java/org/openapitools/config/DataSourceConfigTest.java @@ -10,10 +10,31 @@ class DataSourceConfigTest { @Test - void hikariConfig_ShouldReturnConfiguredHikariConfig() throws Exception { + void hikariConfig_ShouldUseSpringDatasourceUrlAsIs() throws Exception { DataSourceConfig config = new DataSourceConfig(); - setField(config, "jdbcUrl", "jdbc:postgresql://localhost:5432/lamp"); + setField(config, "springDatasourceUrl", "postgresql://localhost:5432/lamp"); + setField(config, "databaseUrl", "postgres://ignored:5432/ignored"); + setField(config, "fallbackJdbcUrl", "jdbc:postgresql://localhost:5432/fallback"); + setField(config, "username", "user"); + setField(config, "password", "pass"); + setField(config, "driverClassName", "org.postgresql.Driver"); + + HikariConfig result = config.hikariConfig(); + + assertThat(result.getJdbcUrl()).isEqualTo("postgresql://localhost:5432/lamp"); + assertThat(result.getUsername()).isEqualTo("user"); + assertThat(result.getPassword()).isEqualTo("pass"); + assertThat(result.getDriverClassName()).isEqualTo("org.postgresql.Driver"); + } + + @Test + void hikariConfig_ShouldNormalizeDatabaseUrlPostgresqlScheme() throws Exception { + DataSourceConfig config = new DataSourceConfig(); + + setField(config, "springDatasourceUrl", ""); + setField(config, "databaseUrl", "postgresql://localhost:5432/lamp"); + setField(config, "fallbackJdbcUrl", 
"jdbc:postgresql://localhost:5432/fallback"); setField(config, "username", "user"); setField(config, "password", "pass"); setField(config, "driverClassName", "org.postgresql.Driver"); @@ -21,6 +42,54 @@ void hikariConfig_ShouldReturnConfiguredHikariConfig() throws Exception { HikariConfig result = config.hikariConfig(); assertThat(result.getJdbcUrl()).isEqualTo("jdbc:postgresql://localhost:5432/lamp"); + } + + @Test + void hikariConfig_ShouldNormalizeDatabaseUrlPostgresScheme() throws Exception { + DataSourceConfig config = new DataSourceConfig(); + + setField(config, "springDatasourceUrl", ""); + setField(config, "databaseUrl", "postgres://localhost:5432/lamp"); + setField(config, "fallbackJdbcUrl", "jdbc:postgresql://localhost:5432/fallback"); + setField(config, "username", "user"); + setField(config, "password", "pass"); + setField(config, "driverClassName", "org.postgresql.Driver"); + + HikariConfig result = config.hikariConfig(); + + assertThat(result.getJdbcUrl()).isEqualTo("jdbc:postgresql://localhost:5432/lamp"); + } + + @Test + void hikariConfig_ShouldKeepJdbcDatabaseUrlUnchanged() throws Exception { + DataSourceConfig config = new DataSourceConfig(); + + setField(config, "springDatasourceUrl", ""); + setField(config, "databaseUrl", "jdbc:postgresql://localhost:5432/lamp"); + setField(config, "fallbackJdbcUrl", "jdbc:postgresql://localhost:5432/fallback"); + setField(config, "username", "user"); + setField(config, "password", "pass"); + setField(config, "driverClassName", "org.postgresql.Driver"); + + HikariConfig result = config.hikariConfig(); + + assertThat(result.getJdbcUrl()).isEqualTo("jdbc:postgresql://localhost:5432/lamp"); + } + + @Test + void hikariConfig_ShouldFallbackToSpringDatasourceProperty() throws Exception { + DataSourceConfig config = new DataSourceConfig(); + + setField(config, "springDatasourceUrl", ""); + setField(config, "databaseUrl", ""); + setField(config, "fallbackJdbcUrl", "jdbc:postgresql://localhost:5432/fallback"); + 
setField(config, "username", "user"); + setField(config, "password", "pass"); + setField(config, "driverClassName", "org.postgresql.Driver"); + + HikariConfig result = config.hikariConfig(); + + assertThat(result.getJdbcUrl()).isEqualTo("jdbc:postgresql://localhost:5432/fallback"); assertThat(result.getUsername()).isEqualTo("user"); assertThat(result.getPassword()).isEqualTo("pass"); assertThat(result.getDriverClassName()).isEqualTo("org.postgresql.Driver"); From ae2ea7d9e5a8eb7f5fc4c6e4b5e2c6b86ed0e841 Mon Sep 17 00:00:00 2001 From: Davide Mendolia Date: Fri, 13 Feb 2026 11:07:02 +0100 Subject: [PATCH 08/20] docs(go): clarify DATABASE_URL accepts postgres and postgresql schemes (#368) * docs(go): document DATABASE_URL support for postgresql scheme * docs(go): address PR review comments on DB URL schemes --- src/go/MIGRATIONS.md | 3 ++- src/go/README.md | 6 +++++- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/src/go/MIGRATIONS.md b/src/go/MIGRATIONS.md index 60414f19..efe0a8ee 100644 --- a/src/go/MIGRATIONS.md +++ b/src/go/MIGRATIONS.md @@ -123,9 +123,10 @@ A "dirty" migration means golang-migrate detected that a migration started but d ### Connection String Format -golang-migrate requires PostgreSQL connection strings in URL format: +golang-migrate requires PostgreSQL connection strings in URL format. The `database/postgres` driver used by this service registers both `postgres://...` and `postgresql://...` schemes: ``` postgres://user:password@host:port/database?sslmode=disable +postgresql://user:password@host:port/database?sslmode=disable ``` The application automatically converts component-based connection strings to this format. diff --git a/src/go/README.md b/src/go/README.md index 4fe34423..9b1a8cc9 100644 --- a/src/go/README.md +++ b/src/go/README.md @@ -90,7 +90,7 @@ The application supports PostgreSQL storage through environment variables. 
If an #### Environment Variables -- `DATABASE_URL` - Full PostgreSQL connection string (takes precedence over individual parameters) +- `DATABASE_URL` - Full PostgreSQL connection string using either `postgres://...` or `postgresql://...` (takes precedence over individual parameters) - `DB_HOST` - PostgreSQL host (default: `localhost`) - `DB_PORT` - PostgreSQL port (default: `5432`) - `DB_NAME` - Database name (default: `postgres`) @@ -140,7 +140,11 @@ export DB_PASSWORD=lamp_password Using DATABASE_URL: ```bash # Development only - disables SSL. For production, use sslmode=require or sslmode=verify-full +# Choose one of the following equivalent URL schemes: +# Option 1: postgres scheme export DATABASE_URL="postgres://lamp_user:lamp_password@localhost:5432/lamp_control?sslmode=disable" +# Option 2: postgresql scheme +export DATABASE_URL="postgresql://lamp_user:lamp_password@localhost:5432/lamp_control?sslmode=disable" ``` ### Building and Running From 2a6b86f3795d2de89777bb56711a16e29e12ce6d Mon Sep 17 00:00:00 2001 From: Davide Mendolia Date: Fri, 13 Feb 2026 11:32:17 +0100 Subject: [PATCH 09/20] refactor(benchmarks): address PR review comments --- benchmarks/k6/README.md | 17 +++-- benchmarks/k6/config.json | 3 +- benchmarks/k6/configure-cloud-run.js | 21 ++++++ benchmarks/k6/generate-summary.js | 81 +---------------------- benchmarks/k6/run-benchmarks.js | 97 +++++----------------------- benchmarks/k6/services.json | 12 ++-- benchmarks/k6/summary.js | 90 ++++++++++++++++++++++++++ 7 files changed, 149 insertions(+), 172 deletions(-) create mode 100644 benchmarks/k6/summary.js diff --git a/benchmarks/k6/README.md b/benchmarks/k6/README.md index ec5c4cf5..f1cb938b 100644 --- a/benchmarks/k6/README.md +++ b/benchmarks/k6/README.md @@ -9,7 +9,7 @@ This directory contains a benchmark harness for comparing the six language imple - `db` pass (realistic signal) - Fixed, fairness-first Cloud Run settings for all services - Main ranking at fixed concurrency pressure with 
`concurrency=80` -- Optional non-ranking extreme run at concurrency `1000` +- Optional non-ranking extreme run at `1000 RPS` (disabled by default) - Sequential service execution (no parallel cross-service load) - Raw k6 result exports and generated markdown summary @@ -21,6 +21,7 @@ This directory contains a benchmark harness for comparing the six language imple - `run-benchmarks.js`: orchestration script for full memory+db benchmark execution - `configure-cloud-run.js`: applies identical Cloud Run settings (dry-run by default) - `generate-summary.js`: regenerates `benchmarks/results/summary.md` from `run-report.json` +- `summary.js`: shared summary rendering used by benchmark scripts ## Prerequisites @@ -39,14 +40,16 @@ Edit `benchmarks/k6/services.json`: - `cloudRunRegion`: region (default `us-central1`) - `memorySetupCommand`: optional command run before memory pass for this service - `dbSetupCommand`: optional command run before DB pass for this service -- `dbSeedCommand`: optional shell command to reset+seed DB before each DB run +- `dbSeedCommand`: optional per-service override shell command to reset+seed DB before each DB run -`dbSeedCommand` can use a shared `BENCHMARK_DATABASE_URL` env var. The current `services.json` is already configured with: +Default seeding is configured once in `benchmarks/k6/config.json` as `defaultDbSeedCommand`: ```bash psql "$BENCHMARK_DATABASE_URL" -v ON_ERROR_STOP=1 -c "TRUNCATE TABLE lamps RESTART IDENTITY CASCADE; INSERT INTO lamps (id, is_on, created_at, updated_at, deleted_at) SELECT uuid_generate_v5('6ba7b810-9dad-11d1-80b4-00c04fd430c8', 'lamp-' || g), (g % 2 = 0), NOW() - ((10001 - g) * INTERVAL '1 second'), NOW() - ((10001 - g) * INTERVAL '1 second'), NULL FROM generate_series(1, 10000) AS g;" ``` +Set `dbSeedCommand` in a service entry only when that service needs a custom seed/reset flow. + If memory and DB use the same URL with an env-var mode switch, set both URLs equal and use setup commands. 
Example: @@ -131,6 +134,12 @@ Run only memory pass: node benchmarks/k6/run-benchmarks.js --passes memory ``` +Enable the extreme appendix run: + +```bash +# Set benchmarks/k6/config.json -> extreme.enabled to true +``` + Outputs: - Raw k6 JSON: `benchmarks/results/raw//...` @@ -149,4 +158,4 @@ node benchmarks/k6/generate-summary.js benchmarks/results/run-report.json benchm - Concurrency is a major factor even with `max instances=1`; it controls in-container contention. - Use memory pass ranking to isolate runtime/framework signal. - Use DB pass ranking to understand production-like behavior and DB bottleneck impact. -- Treat concurrency `1000` as saturation appendix, not primary ranking. +- Treat extreme `1000 RPS` run as saturation appendix, not primary ranking. diff --git a/benchmarks/k6/config.json b/benchmarks/k6/config.json index b4f25af1..fafe6de1 100644 --- a/benchmarks/k6/config.json +++ b/benchmarks/k6/config.json @@ -16,7 +16,7 @@ "rpsSteps": [80, 120, 160, 200] }, "extreme": { - "enabled": true, + "enabled": false, "duration": "60s", "rps": 1000, "runPerIteration": false @@ -35,6 +35,7 @@ "seedFetchPages": 10, "seedPageSize": 100 }, + "defaultDbSeedCommand": "psql \"$BENCHMARK_DATABASE_URL\" -v ON_ERROR_STOP=1 -c \"TRUNCATE TABLE lamps RESTART IDENTITY CASCADE; INSERT INTO lamps (id, is_on, created_at, updated_at, deleted_at) SELECT uuid_generate_v5('6ba7b810-9dad-11d1-80b4-00c04fd430c8', 'lamp-' || g), (g % 2 = 0), NOW() - ((10001 - g) * INTERVAL '1 second'), NOW() - ((10001 - g) * INTERVAL '1 second'), NULL FROM generate_series(1, 10000) AS g;\"", "cloudRun": { "maxInstances": 1, "minInstances": 1, diff --git a/benchmarks/k6/configure-cloud-run.js b/benchmarks/k6/configure-cloud-run.js index 6508bd62..dcdbf0bb 100755 --- a/benchmarks/k6/configure-cloud-run.js +++ b/benchmarks/k6/configure-cloud-run.js @@ -50,6 +50,26 @@ function runCommand(command, args) { } } +function validateCloudRunConfig(cloudRun) { + const requiredKeys = [ + 'maxInstances', + 
'minInstances', + 'concurrency', + 'cpu', + 'memory', + 'timeout', + ]; + + for (const key of requiredKeys) { + const value = cloudRun[key]; + if (value === undefined || value === null || String(value).trim() === '') { + throw new Error( + `Invalid cloudRun config: '${key}' is required in config.json (cloudRun.${key})` + ); + } + } +} + function main() { const args = parseArgs(process.argv); if (!args.project) { @@ -59,6 +79,7 @@ function main() { const services = readJson(args.services); const config = readJson(args.config); const cloudRun = config.cloudRun || {}; + validateCloudRunConfig(cloudRun); for (const service of services) { if (!service.cloudRunService) { diff --git a/benchmarks/k6/generate-summary.js b/benchmarks/k6/generate-summary.js index ca1fecbd..887bcdf3 100755 --- a/benchmarks/k6/generate-summary.js +++ b/benchmarks/k6/generate-summary.js @@ -2,86 +2,7 @@ /* eslint-disable no-console */ const fs = require('fs'); const path = require('path'); - -function formatNumber(value, digits = 2) { - if (!Number.isFinite(value)) { - return 'n/a'; - } - return value.toFixed(digits); -} - -function writeSummary(report, outputFile) { - const lines = []; - lines.push('# Benchmark Summary'); - lines.push(''); - lines.push(`Generated: ${report.generatedAt}`); - lines.push(''); - - for (const passName of Object.keys(report.aggregated || {})) { - const rows = Object.entries(report.aggregated[passName] || {}); - rows.sort((a, b) => { - const ap = a[1].fixed?.p95; - const bp = b[1].fixed?.p95; - if (!Number.isFinite(ap) && !Number.isFinite(bp)) return 0; - if (!Number.isFinite(ap)) return 1; - if (!Number.isFinite(bp)) return -1; - return ap - bp; - }); - - lines.push(`## ${passName === 'memory' ? 
'Memory Pass Ranking' : 'DB Pass Ranking'}`); - lines.push(''); - lines.push('| Rank | Service | p95 (ms) | p99 (ms) | Avg (ms) | Error Rate | Max Stable RPS |'); - lines.push('|---|---|---:|---:|---:|---:|---:|'); - - rows.forEach(([serviceName, metrics], idx) => { - lines.push( - `| ${idx + 1} | ${serviceName} | ${formatNumber(metrics.fixed?.p95)} | ${formatNumber(metrics.fixed?.p99)} | ${formatNumber(metrics.fixed?.avg)} | ${formatNumber((metrics.fixed?.errorRate ?? NaN) * 100, 3)}% | ${formatNumber(metrics.stress?.maxStableRps, 0)} |` - ); - }); - lines.push(''); - } - - const memory = report.aggregated?.memory || {}; - const db = report.aggregated?.db || {}; - const sharedServices = Object.keys(memory).filter((name) => db[name]); - - if (sharedServices.length > 0) { - lines.push('## Memory vs DB Delta'); - lines.push(''); - lines.push('| Service | Memory p95 (ms) | DB p95 (ms) | Delta (DB - Memory) |'); - lines.push('|---|---:|---:|---:|'); - - for (const serviceName of sharedServices) { - const mem = memory[serviceName]?.fixed?.p95; - const dbp = db[serviceName]?.fixed?.p95; - const delta = Number.isFinite(mem) && Number.isFinite(dbp) ? dbp - mem : null; - lines.push(`| ${serviceName} | ${formatNumber(mem)} | ${formatNumber(dbp)} | ${formatNumber(delta)} |`); - } - - lines.push(''); - } - - lines.push('## Extreme Concurrency Appendix (1000)'); - lines.push(''); - lines.push('| Pass | Service | p95 (ms) | p99 (ms) | Avg (ms) | Error Rate |'); - lines.push('|---|---|---:|---:|---:|---:|'); - - for (const [passName, servicesMap] of Object.entries(report.aggregated || {})) { - for (const [serviceName, metrics] of Object.entries(servicesMap || {})) { - if (!metrics.extreme) { - continue; - } - lines.push( - `| ${passName} | ${serviceName} | ${formatNumber(metrics.extreme?.p95)} | ${formatNumber(metrics.extreme?.p99)} | ${formatNumber(metrics.extreme?.avg)} | ${formatNumber((metrics.extreme?.errorRate ?? 
NaN) * 100, 3)}% |` - ); - } - } - - lines.push(''); - lines.push('Raw per-run k6 summaries are under `benchmarks/results/raw/`.'); - - fs.writeFileSync(outputFile, `${lines.join('\n')}\n`, 'utf8'); -} +const { writeSummary } = require('./summary'); function main() { if (process.argv.includes('--help') || process.argv.includes('-h')) { diff --git a/benchmarks/k6/run-benchmarks.js b/benchmarks/k6/run-benchmarks.js index 96441d5a..df594d3a 100755 --- a/benchmarks/k6/run-benchmarks.js +++ b/benchmarks/k6/run-benchmarks.js @@ -3,6 +3,7 @@ const fs = require('fs'); const path = require('path'); const { spawnSync } = require('child_process'); +const { writeSummary } = require('./summary'); function parseArgs(argv) { const args = { @@ -194,6 +195,16 @@ function getPassSetupCommand(service, passName) { return ''; } +function getDbSeedCommand(config, service) { + if (service.dbSeedCommand && service.dbSeedCommand.trim()) { + return service.dbSeedCommand; + } + if (config.defaultDbSeedCommand && config.defaultDbSeedCommand.trim()) { + return config.defaultDbSeedCommand; + } + return ''; +} + function buildK6Env({ config, service, baseUrl, mode, targetRps, duration }) { const env = { ...process.env }; env.RUN_MODE = mode; @@ -252,85 +263,6 @@ function aggregatePass(serviceRuns) { }; } -function formatNumber(value, digits = 2) { - if (!Number.isFinite(value)) { - return 'n/a'; - } - return value.toFixed(digits); -} - -function writeSummary(report, outputFile) { - const lines = []; - lines.push('# Benchmark Summary'); - lines.push(''); - lines.push(`Generated: ${report.generatedAt}`); - lines.push(''); - - for (const passName of Object.keys(report.aggregated)) { - const rows = Object.entries(report.aggregated[passName]); - rows.sort((a, b) => { - const ap = a[1].fixed.p95; - const bp = b[1].fixed.p95; - if (!Number.isFinite(ap) && !Number.isFinite(bp)) return 0; - if (!Number.isFinite(ap)) return 1; - if (!Number.isFinite(bp)) return -1; - return ap - bp; - }); - - 
lines.push(`## ${passName === 'memory' ? 'Memory Pass Ranking' : 'DB Pass Ranking'}`); - lines.push(''); - lines.push('| Rank | Service | p95 (ms) | p99 (ms) | Avg (ms) | Error Rate | Max Stable RPS |'); - lines.push('|---|---|---:|---:|---:|---:|---:|'); - - rows.forEach(([serviceName, metrics], idx) => { - lines.push( - `| ${idx + 1} | ${serviceName} | ${formatNumber(metrics.fixed.p95)} | ${formatNumber(metrics.fixed.p99)} | ${formatNumber(metrics.fixed.avg)} | ${formatNumber(metrics.fixed.errorRate * 100, 3)}% | ${formatNumber(metrics.stress.maxStableRps, 0)} |` - ); - }); - lines.push(''); - } - - const memory = report.aggregated.memory || {}; - const db = report.aggregated.db || {}; - const services = Object.keys(memory).filter((name) => db[name]); - - if (services.length > 0) { - lines.push('## Memory vs DB Delta'); - lines.push(''); - lines.push('| Service | Memory p95 (ms) | DB p95 (ms) | Delta (DB - Memory) |'); - lines.push('|---|---:|---:|---:|'); - - for (const serviceName of services) { - const mem = memory[serviceName].fixed.p95; - const dbp = db[serviceName].fixed.p95; - const delta = Number.isFinite(mem) && Number.isFinite(dbp) ? 
dbp - mem : null; - lines.push(`| ${serviceName} | ${formatNumber(mem)} | ${formatNumber(dbp)} | ${formatNumber(delta)} |`); - } - lines.push(''); - } - - lines.push('## Extreme Concurrency Appendix (1000)'); - lines.push(''); - lines.push('| Pass | Service | p95 (ms) | p99 (ms) | Avg (ms) | Error Rate |'); - lines.push('|---|---|---:|---:|---:|---:|'); - - for (const [passName, servicesMap] of Object.entries(report.aggregated)) { - for (const [serviceName, metrics] of Object.entries(servicesMap)) { - if (!metrics.extreme) { - continue; - } - lines.push( - `| ${passName} | ${serviceName} | ${formatNumber(metrics.extreme.p95)} | ${formatNumber(metrics.extreme.p99)} | ${formatNumber(metrics.extreme.avg)} | ${formatNumber(metrics.extreme.errorRate * 100, 3)}% |` - ); - } - } - - lines.push(''); - lines.push('Raw per-run k6 summaries are under `benchmarks/results/raw/`.'); - - fs.writeFileSync(outputFile, `${lines.join('\n')}\n`, 'utf8'); -} - async function main() { const args = parseArgs(process.argv); @@ -384,8 +316,11 @@ async function main() { for (let iteration = 1; iteration <= Number(config.iterationsPerPass || 1); iteration += 1) { console.log(`\n=== ${passName.toUpperCase()} :: ${service.name} :: iteration ${iteration} ===`); - if (passName === 'db' && service.dbSeedCommand) { - runShell(service.dbSeedCommand); + if (passName === 'db') { + const dbSeedCommand = getDbSeedCommand(config, service); + if (dbSeedCommand) { + runShell(dbSeedCommand); + } } await precheckCrud(baseUrl, config.basePath, service.authHeader || ''); diff --git a/benchmarks/k6/services.json b/benchmarks/k6/services.json index bf3fcf0d..761fbc04 100644 --- a/benchmarks/k6/services.json +++ b/benchmarks/k6/services.json @@ -7,7 +7,7 @@ "cloudRunRegion": "europe-west1", "memorySetupCommand": "", "dbSetupCommand": "", - "dbSeedCommand": "psql \"$BENCHMARK_DATABASE_URL\" -v ON_ERROR_STOP=1 -c \"TRUNCATE TABLE lamps RESTART IDENTITY CASCADE; INSERT INTO lamps (id, is_on, created_at, updated_at, 
deleted_at) SELECT uuid_generate_v5('6ba7b810-9dad-11d1-80b4-00c04fd430c8', 'lamp-' || g), (g % 2 = 0), NOW() - ((10001 - g) * INTERVAL '1 second'), NOW() - ((10001 - g) * INTERVAL '1 second'), NULL FROM generate_series(1, 10000) AS g;\"" + "dbSeedCommand": "" }, { "name": "python", @@ -17,7 +17,7 @@ "cloudRunRegion": "europe-west1", "memorySetupCommand": "", "dbSetupCommand": "", - "dbSeedCommand": "psql \"$BENCHMARK_DATABASE_URL\" -v ON_ERROR_STOP=1 -c \"TRUNCATE TABLE lamps RESTART IDENTITY CASCADE; INSERT INTO lamps (id, is_on, created_at, updated_at, deleted_at) SELECT uuid_generate_v5('6ba7b810-9dad-11d1-80b4-00c04fd430c8', 'lamp-' || g), (g % 2 = 0), NOW() - ((10001 - g) * INTERVAL '1 second'), NOW() - ((10001 - g) * INTERVAL '1 second'), NULL FROM generate_series(1, 10000) AS g;\"" + "dbSeedCommand": "" }, { "name": "java", @@ -27,7 +27,7 @@ "cloudRunRegion": "europe-west1", "memorySetupCommand": "", "dbSetupCommand": "", - "dbSeedCommand": "psql \"$BENCHMARK_DATABASE_URL\" -v ON_ERROR_STOP=1 -c \"TRUNCATE TABLE lamps RESTART IDENTITY CASCADE; INSERT INTO lamps (id, is_on, created_at, updated_at, deleted_at) SELECT uuid_generate_v5('6ba7b810-9dad-11d1-80b4-00c04fd430c8', 'lamp-' || g), (g % 2 = 0), NOW() - ((10001 - g) * INTERVAL '1 second'), NOW() - ((10001 - g) * INTERVAL '1 second'), NULL FROM generate_series(1, 10000) AS g;\"" + "dbSeedCommand": "" }, { "name": "csharp", @@ -37,7 +37,7 @@ "cloudRunRegion": "europe-west1", "memorySetupCommand": "", "dbSetupCommand": "", - "dbSeedCommand": "psql \"$BENCHMARK_DATABASE_URL\" -v ON_ERROR_STOP=1 -c \"TRUNCATE TABLE lamps RESTART IDENTITY CASCADE; INSERT INTO lamps (id, is_on, created_at, updated_at, deleted_at) SELECT uuid_generate_v5('6ba7b810-9dad-11d1-80b4-00c04fd430c8', 'lamp-' || g), (g % 2 = 0), NOW() - ((10001 - g) * INTERVAL '1 second'), NOW() - ((10001 - g) * INTERVAL '1 second'), NULL FROM generate_series(1, 10000) AS g;\"" + "dbSeedCommand": "" }, { "name": "go", @@ -47,7 +47,7 @@ "cloudRunRegion": 
"europe-west1", "memorySetupCommand": "", "dbSetupCommand": "", - "dbSeedCommand": "psql \"$BENCHMARK_DATABASE_URL\" -v ON_ERROR_STOP=1 -c \"TRUNCATE TABLE lamps RESTART IDENTITY CASCADE; INSERT INTO lamps (id, is_on, created_at, updated_at, deleted_at) SELECT uuid_generate_v5('6ba7b810-9dad-11d1-80b4-00c04fd430c8', 'lamp-' || g), (g % 2 = 0), NOW() - ((10001 - g) * INTERVAL '1 second'), NOW() - ((10001 - g) * INTERVAL '1 second'), NULL FROM generate_series(1, 10000) AS g;\"" + "dbSeedCommand": "" }, { "name": "kotlin", @@ -57,6 +57,6 @@ "cloudRunRegion": "europe-west1", "memorySetupCommand": "", "dbSetupCommand": "", - "dbSeedCommand": "psql \"$BENCHMARK_DATABASE_URL\" -v ON_ERROR_STOP=1 -c \"TRUNCATE TABLE lamps RESTART IDENTITY CASCADE; INSERT INTO lamps (id, is_on, created_at, updated_at, deleted_at) SELECT uuid_generate_v5('6ba7b810-9dad-11d1-80b4-00c04fd430c8', 'lamp-' || g), (g % 2 = 0), NOW() - ((10001 - g) * INTERVAL '1 second'), NOW() - ((10001 - g) * INTERVAL '1 second'), NULL FROM generate_series(1, 10000) AS g;\"" + "dbSeedCommand": "" } ] diff --git a/benchmarks/k6/summary.js b/benchmarks/k6/summary.js new file mode 100644 index 00000000..4fc73079 --- /dev/null +++ b/benchmarks/k6/summary.js @@ -0,0 +1,90 @@ +#!/usr/bin/env node +const fs = require('fs'); + +function formatNumber(value, digits = 2) { + if (!Number.isFinite(value)) { + return 'n/a'; + } + return value.toFixed(digits); +} + +function writeSummary(report, outputFile) { + const lines = []; + lines.push('# Benchmark Summary'); + lines.push(''); + lines.push(`Generated: ${report.generatedAt}`); + lines.push(''); + + for (const passName of Object.keys(report.aggregated || {})) { + const rows = Object.entries(report.aggregated[passName] || {}); + rows.sort((a, b) => { + const ap = a[1].fixed?.p95; + const bp = b[1].fixed?.p95; + if (!Number.isFinite(ap) && !Number.isFinite(bp)) return 0; + if (!Number.isFinite(ap)) return 1; + if (!Number.isFinite(bp)) return -1; + return ap - bp; + }); + + 
lines.push(`## ${passName === 'memory' ? 'Memory Pass Ranking' : 'DB Pass Ranking'}`); + lines.push(''); + lines.push('| Rank | Service | p95 (ms) | p99 (ms) | Avg (ms) | Error Rate | Max Stable RPS |'); + lines.push('|---|---|---:|---:|---:|---:|---:|'); + + rows.forEach(([serviceName, metrics], idx) => { + lines.push( + `| ${idx + 1} | ${serviceName} | ${formatNumber(metrics.fixed?.p95)} | ${formatNumber(metrics.fixed?.p99)} | ${formatNumber(metrics.fixed?.avg)} | ${formatNumber((metrics.fixed?.errorRate ?? NaN) * 100, 3)}% | ${formatNumber(metrics.stress?.maxStableRps, 0)} |` + ); + }); + lines.push(''); + } + + const memory = report.aggregated?.memory || {}; + const db = report.aggregated?.db || {}; + const sharedServices = Object.keys(memory).filter((name) => db[name]); + + if (sharedServices.length > 0) { + lines.push('## Memory vs DB Delta'); + lines.push(''); + lines.push('| Service | Memory p95 (ms) | DB p95 (ms) | Delta (DB - Memory) |'); + lines.push('|---|---:|---:|---:|'); + + for (const serviceName of sharedServices) { + const mem = memory[serviceName]?.fixed?.p95; + const dbp = db[serviceName]?.fixed?.p95; + const delta = Number.isFinite(mem) && Number.isFinite(dbp) ? dbp - mem : null; + lines.push(`| ${serviceName} | ${formatNumber(mem)} | ${formatNumber(dbp)} | ${formatNumber(delta)} |`); + } + + lines.push(''); + } + + const extremeRps = report.config?.extreme?.rps; + const extremeLabel = Number.isFinite(extremeRps) + ? 
`Extreme Load Appendix (${extremeRps} RPS)` + : 'Extreme Load Appendix'; + lines.push(`## ${extremeLabel}`); + lines.push(''); + lines.push('| Pass | Service | p95 (ms) | p99 (ms) | Avg (ms) | Error Rate |'); + lines.push('|---|---|---:|---:|---:|---:|'); + + for (const [passName, servicesMap] of Object.entries(report.aggregated || {})) { + for (const [serviceName, metrics] of Object.entries(servicesMap || {})) { + if (!metrics.extreme) { + continue; + } + lines.push( + `| ${passName} | ${serviceName} | ${formatNumber(metrics.extreme?.p95)} | ${formatNumber(metrics.extreme?.p99)} | ${formatNumber(metrics.extreme?.avg)} | ${formatNumber((metrics.extreme?.errorRate ?? NaN) * 100, 3)}% |` + ); + } + } + + lines.push(''); + lines.push('Raw per-run k6 summaries are under `benchmarks/results/raw/`.'); + + fs.writeFileSync(outputFile, `${lines.join('\n')}\n`, 'utf8'); +} + +module.exports = { + writeSummary, +}; From 7bb0ff6d39aec815d8c29a9a3bc592d4b62e5c46 Mon Sep 17 00:00:00 2001 From: Davide Mendolia Date: Fri, 13 Feb 2026 11:44:32 +0100 Subject: [PATCH 10/20] docs(benchmarks): align pass setup with database-url mode switching --- benchmarks/k6/README.md | 16 +++++++++++----- benchmarks/k6/services.json | 24 ++++++++++++------------ 2 files changed, 23 insertions(+), 17 deletions(-) diff --git a/benchmarks/k6/README.md b/benchmarks/k6/README.md index f1cb938b..44162122 100644 --- a/benchmarks/k6/README.md +++ b/benchmarks/k6/README.md @@ -50,18 +50,24 @@ psql "$BENCHMARK_DATABASE_URL" -v ON_ERROR_STOP=1 -c "TRUNCATE TABLE lamps RESTA Set `dbSeedCommand` in a service entry only when that service needs a custom seed/reset flow. -If memory and DB use the same URL with an env-var mode switch, set both URLs equal and use setup commands. -Example: +If memory and DB use the same URL, run passes sequentially and toggle env vars via setup commands. +For this repository, DB mode is enabled by connection settings (for example `DATABASE_URL` or language-specific equivalents). 
+Example (TypeScript/Go/Python/Kotlin): ```bash -gcloud run services update typescript-lamp-control-api --region europe-west1 --update-env-vars STORAGE_MODE=memory -gcloud run services update typescript-lamp-control-api --region europe-west1 --update-env-vars STORAGE_MODE=db +gcloud run services update typescript-lamp-control-api --region europe-west1 --remove-env-vars DATABASE_URL +gcloud run services update typescript-lamp-control-api --region europe-west1 --update-env-vars DATABASE_URL="$BENCHMARK_DATABASE_URL" ``` -Before running benchmarks, export your DB URL: +Before running benchmarks, export required variables: ```bash +export GOOGLE_CLOUD_PROJECT='' export BENCHMARK_DATABASE_URL='postgresql://:@:5432/?sslmode=require' +export BENCHMARK_JDBC_DATABASE_URL='jdbc:postgresql://:5432/' +export BENCHMARK_DB_USER='' +export BENCHMARK_DB_PASSWORD='' +export BENCHMARK_CSHARP_CONNECTION_STRING='Host=;Port=5432;Database=;Username=;Password=' ``` ## 2) Run from a GCP VM (recommended) diff --git a/benchmarks/k6/services.json b/benchmarks/k6/services.json index 761fbc04..1d3633dc 100644 --- a/benchmarks/k6/services.json +++ b/benchmarks/k6/services.json @@ -5,8 +5,8 @@ "dbUrl": "https://typescript-lamp-control-api-827868544165.europe-west1.run.app", "cloudRunService": "typescript-lamp-control-api", "cloudRunRegion": "europe-west1", - "memorySetupCommand": "", - "dbSetupCommand": "", + "memorySetupCommand": "gcloud run services update typescript-lamp-control-api --project \"$GOOGLE_CLOUD_PROJECT\" --region europe-west1 --remove-env-vars DATABASE_URL", + "dbSetupCommand": "gcloud run services update typescript-lamp-control-api --project \"$GOOGLE_CLOUD_PROJECT\" --region europe-west1 --update-env-vars DATABASE_URL=\"$BENCHMARK_DATABASE_URL\"", "dbSeedCommand": "" }, { @@ -15,8 +15,8 @@ "dbUrl": "https://python-lamp-control-api-827868544165.europe-west1.run.app", "cloudRunService": "python-lamp-control-api", "cloudRunRegion": "europe-west1", - "memorySetupCommand": "", - 
"dbSetupCommand": "", + "memorySetupCommand": "gcloud run services update python-lamp-control-api --project \"$GOOGLE_CLOUD_PROJECT\" --region europe-west1 --remove-env-vars DATABASE_URL", + "dbSetupCommand": "gcloud run services update python-lamp-control-api --project \"$GOOGLE_CLOUD_PROJECT\" --region europe-west1 --update-env-vars DATABASE_URL=\"$BENCHMARK_DATABASE_URL\"", "dbSeedCommand": "" }, { @@ -25,8 +25,8 @@ "dbUrl": "https://java-lamp-control-api-827868544165.europe-west1.run.app", "cloudRunService": "java-lamp-control-api", "cloudRunRegion": "europe-west1", - "memorySetupCommand": "", - "dbSetupCommand": "", + "memorySetupCommand": "gcloud run services update java-lamp-control-api --project \"$GOOGLE_CLOUD_PROJECT\" --region europe-west1 --remove-env-vars DATABASE_URL", + "dbSetupCommand": "gcloud run services update java-lamp-control-api --project \"$GOOGLE_CLOUD_PROJECT\" --region europe-west1 --update-env-vars SPRING_DATASOURCE_URL=\"$BENCHMARK_JDBC_DATABASE_URL\" --update-env-vars DB_USER=\"$BENCHMARK_DB_USER\" --update-env-vars DB_PASSWORD=\"$BENCHMARK_DB_PASSWORD\"", "dbSeedCommand": "" }, { @@ -35,8 +35,8 @@ "dbUrl": "https://csharp-lamp-control-api-827868544165.europe-west1.run.app", "cloudRunService": "csharp-lamp-control-api", "cloudRunRegion": "europe-west1", - "memorySetupCommand": "", - "dbSetupCommand": "", + "memorySetupCommand": "gcloud run services update csharp-lamp-control-api --project \"$GOOGLE_CLOUD_PROJECT\" --region europe-west1 --remove-env-vars ConnectionStrings__LampControl", + "dbSetupCommand": "gcloud run services update csharp-lamp-control-api --project \"$GOOGLE_CLOUD_PROJECT\" --region europe-west1 --update-env-vars ConnectionStrings__LampControl=\"$BENCHMARK_CSHARP_CONNECTION_STRING\"", "dbSeedCommand": "" }, { @@ -45,8 +45,8 @@ "dbUrl": "https://go-lamp-control-api-827868544165.europe-west1.run.app", "cloudRunService": "go-lamp-control-api", "cloudRunRegion": "europe-west1", - "memorySetupCommand": "", - 
"dbSetupCommand": "", + "memorySetupCommand": "gcloud run services update go-lamp-control-api --project \"$GOOGLE_CLOUD_PROJECT\" --region europe-west1 --remove-env-vars DATABASE_URL", + "dbSetupCommand": "gcloud run services update go-lamp-control-api --project \"$GOOGLE_CLOUD_PROJECT\" --region europe-west1 --update-env-vars DATABASE_URL=\"$BENCHMARK_DATABASE_URL\"", "dbSeedCommand": "" }, { @@ -55,8 +55,8 @@ "dbUrl": "https://kotlin-lamp-control-api-827868544165.europe-west1.run.app", "cloudRunService": "kotlin-lamp-control-api", "cloudRunRegion": "europe-west1", - "memorySetupCommand": "", - "dbSetupCommand": "", + "memorySetupCommand": "gcloud run services update kotlin-lamp-control-api --project \"$GOOGLE_CLOUD_PROJECT\" --region europe-west1 --remove-env-vars DATABASE_URL", + "dbSetupCommand": "gcloud run services update kotlin-lamp-control-api --project \"$GOOGLE_CLOUD_PROJECT\" --region europe-west1 --update-env-vars DATABASE_URL=\"$BENCHMARK_DATABASE_URL\"", "dbSeedCommand": "" } ] From b9b6b736a6f18929c976ca4b6635939ffc888a7f Mon Sep 17 00:00:00 2001 From: Davide Mendolia Date: Fri, 13 Feb 2026 11:49:39 +0100 Subject: [PATCH 11/20] docs: rewrite postgres setup guide for end users --- docs/POSTGRES_STARTUP_VARIABLES.md | 233 +++++++++++++---------------- 1 file changed, 100 insertions(+), 133 deletions(-) diff --git a/docs/POSTGRES_STARTUP_VARIABLES.md b/docs/POSTGRES_STARTUP_VARIABLES.md index 9b230362..38867806 100644 --- a/docs/POSTGRES_STARTUP_VARIABLES.md +++ b/docs/POSTGRES_STARTUP_VARIABLES.md @@ -1,172 +1,139 @@ -# PostgreSQL Startup Variables by Implementation +# PostgreSQL Setup Guide (Per Language) -This document summarizes how each implementation under `src/` decides whether to use PostgreSQL and which environment variable formats it expects. +Use this as a quick reference when you want each implementation to run with PostgreSQL instead of in-memory storage. -Synced with `main` and re-validated against current sources on 2026-02-13. 
+## Quick Start -## Cross-Implementation Note - -- `USE_POSTGRES` should not be used as a PostgreSQL mode switch. Current runtime code paths in `src/` use connection-related variables (for example `DATABASE_URL`, JDBC URL, or `ConnectionStrings__LampControl`) to decide storage backend. +1. Pick the language implementation you want to run. +2. Export the variables listed for that language. +3. Start the app in your usual mode (`serve-only`, `serve`, or `migrate` where supported). ## TypeScript (`src/typescript`) -Source of truth: -- `src/typescript/src/infrastructure/app.ts` -- `src/typescript/src/infrastructure/database/client.ts` -- `src/typescript/src/cli.ts` - -PostgreSQL is enabled when: -- `DATABASE_URL` is set and non-empty. - -Required variables: -- `DATABASE_URL` +Set: -Expected format: -- Prisma PostgreSQL URL, for example: - - `postgresql://user:password@host:5432/database` - - Optional query params are supported by Prisma (example: `?schema=public`). +```bash +export DATABASE_URL='postgresql://:@:5432/?schema=public' +``` Notes: -- `USE_POSTGRES` is not read by runtime code in `src/typescript/src/*`. -- If `DATABASE_URL` is missing, app uses in-memory repository. -- In `--mode=serve` and `--mode=migrate`, migrations run only if `DATABASE_URL` is present. +- PostgreSQL is enabled when `DATABASE_URL` is set. +- If `DATABASE_URL` is unset, TypeScript uses in-memory storage. ## Python (`src/python`) -Source of truth: -- `src/python/src/openapi_server/infrastructure/config.py` -- `src/python/src/openapi_server/dependencies.py` -- `src/python/src/openapi_server/cli.py` +Set: -PostgreSQL is enabled when: -- `DATABASE_URL` is set and not blank. 
+```bash +export DATABASE_URL='postgresql://:@:5432/' +``` -Required variables: -- `DATABASE_URL` +Optional pool tuning: -Expected format: -- Preferred input: `postgresql://user:password@host:5432/database` -- Runtime converts it to async driver format: `postgresql+asyncpg://...` -- If already provided as `postgresql+asyncpg://...`, it is used directly. -- `sslmode` query parameter is removed automatically from `DATABASE_URL`. +```bash +export DB_POOL_MIN_SIZE='5' +export DB_POOL_MAX_SIZE='20' +``` -Optional variables: -- `DB_HOST`, `DB_PORT`, `DB_NAME`, `DB_USER`, `DB_PASSWORD` -- `DB_POOL_MIN_SIZE`, `DB_POOL_MAX_SIZE` - -Important caveat: -- Individual `DB_*` variables alone do **not** switch to PostgreSQL mode. -- PostgreSQL mode switch depends specifically on `DATABASE_URL` presence. +Notes: +- PostgreSQL is enabled when `DATABASE_URL` is set. +- `DB_*` variables alone do not switch Python to PostgreSQL mode. ## Java (`src/java`) -Source of truth: -- `src/java/src/main/resources/application.properties` -- `src/java/src/main/java/org/openapitools/config/OnDatabaseUrlCondition.java` -- `src/java/src/main/java/org/openapitools/config/DataSourceConfig.java` - -PostgreSQL is enabled when: -- `spring.datasource.url` resolves to a non-empty value. 
-- Resolution order in properties: - - `SPRING_DATASOURCE_URL` - - then `DATABASE_URL` +Set: -Required variable: -- `SPRING_DATASOURCE_URL` or `DATABASE_URL` +```bash +export SPRING_DATASOURCE_URL='jdbc:postgresql://:5432/' +export DB_USER='' +export DB_PASSWORD='' +``` -Expected format: -- Must be JDBC URL format: - - `jdbc:postgresql://host:5432/database` +Alternative: -Optional variables: -- `DB_USER` (default: `lampuser`) -- `DB_PASSWORD` (default: `lamppass`) -- `DB_POOL_MAX_SIZE`, `DB_POOL_MIN_SIZE` -- `FLYWAY_ENABLED` (used for migration behavior) +```bash +export DATABASE_URL='jdbc:postgresql://:5432/' +export DB_USER='' +export DB_PASSWORD='' +``` -Important caveat: -- A non-JDBC URL like `postgresql://...` is not valid for `spring.datasource.url`. +Notes: +- URL must be JDBC format (`jdbc:postgresql://...`). +- A plain `postgresql://...` URL will not work for Java datasource config. ## C# (`src/csharp`) -Source of truth: -- `src/csharp/LampControlApi/Extensions/ServiceCollectionExtensions.cs` -- `src/csharp/LampControlApi/Extensions/MigrationRunner.cs` -- `src/csharp/LampControlApi/appsettings.Development.example.json` - -PostgreSQL is enabled when: -- Connection string `ConnectionStrings:LampControl` is non-empty. -- Resolution order: - - config key `ConnectionStrings:LampControl` - - fallback env var `ConnectionStrings__LampControl` - -Required variable (if using env): -- `ConnectionStrings__LampControl` +Set: -Expected format: -- Npgsql connection string, for example: - - `Host=localhost;Port=5432;Database=lampcontrol;Username=lampuser;Password=lamppass` +```bash +export ConnectionStrings__LampControl='Host=;Port=5432;Database=;Username=;Password=' +``` Notes: -- `DATABASE_URL` is not used by the C# implementation. +- PostgreSQL is enabled when `ConnectionStrings__LampControl` is set. +- `DATABASE_URL` is not used by C# runtime config. 
## Go (`src/go`) -Source of truth: -- `src/go/api/config.go` -- `src/go/cmd/lamp-control-api/main.go` +Recommended: -PostgreSQL is enabled when **any** of these is true: -- `DATABASE_URL` is set, or -- `DB_NAME` is set, or -- both `DB_HOST` and `DB_USER` are set. +```bash +export DATABASE_URL='postgres://:@:5432/?sslmode=disable' +``` -Primary variable: -- `DATABASE_URL` (takes precedence if set) +Alternative (component vars): -Expected format: -- Recommended: URL form, e.g. `postgres://user:password@host:5432/database?sslmode=disable` -- Also supports pgx key-value DSN internally (built when `DATABASE_URL` is not set): - - `host=... port=... dbname=... user=... password=...` +```bash +export DB_HOST='' +export DB_PORT='5432' +export DB_NAME='' +export DB_USER='' +export DB_PASSWORD='' +``` -Optional variables: -- `DB_HOST`, `DB_PORT`, `DB_NAME`, `DB_USER`, `DB_PASSWORD` -- `DB_POOL_MIN_SIZE`, `DB_POOL_MAX_SIZE` +Optional pool tuning: + +```bash +export DB_POOL_MIN_SIZE='0' +export DB_POOL_MAX_SIZE='4' +``` + +Notes: +- `DATABASE_URL` takes precedence over component vars. ## Kotlin (`src/kotlin`) -Source of truth: -- `src/kotlin/src/main/kotlin/com/lampcontrol/database/DatabaseFactory.kt` -- `src/kotlin/src/main/kotlin/com/lampcontrol/Application.kt` - -PostgreSQL is enabled when **any** of these is true: -- `DATABASE_URL` is set, or -- `DB_NAME` is set, or -- both `DB_HOST` and `DB_USER` are set. - -Primary variable: -- `DATABASE_URL` (preferred when available) - -Expected `DATABASE_URL` format: -- Strictly parsed by regex: - - `postgresql://user:password@host:5432/database` - - or `postgres://user:password@host:5432/database` - -Important caveat: -- Query parameters (for example `?sslmode=...`) are not handled by the current parser and can fail parsing. 
- -Optional variables: -- `DB_HOST`, `DB_PORT`, `DB_NAME`, `DB_USER`, `DB_PASSWORD` -- `DB_POOL_MIN_SIZE`, `DB_POOL_MAX_SIZE` -- `DB_MAX_LIFETIME_MS`, `DB_IDLE_TIMEOUT_MS`, `DB_CONNECTION_TIMEOUT_MS` - -## Quick Reference Matrix - -| Language | Switch to PostgreSQL | Required variable(s) | Connection string format | -|---|---|---|---| -| TypeScript | `DATABASE_URL` non-empty | `DATABASE_URL` | `postgresql://...` (Prisma URL) | -| Python | `DATABASE_URL` non-empty | `DATABASE_URL` | `postgresql://...` or `postgresql+asyncpg://...` | -| Java | `spring.datasource.url` non-empty | `SPRING_DATASOURCE_URL` or `DATABASE_URL` | `jdbc:postgresql://...` | -| C# | `ConnectionStrings:LampControl` non-empty | `ConnectionStrings__LampControl` (env) | `Host=...;Port=...;Database=...;Username=...;Password=...` | -| Go | `DATABASE_URL` or `DB_NAME` or (`DB_HOST`+`DB_USER`) | `DATABASE_URL` recommended | `postgres://...` recommended; key-value DSN supported | -| Kotlin | `DATABASE_URL` or `DB_NAME` or (`DB_HOST`+`DB_USER`) | `DATABASE_URL` recommended | `postgresql://...` or `postgres://...` (strict parser) | +Recommended: + +```bash +export DATABASE_URL='postgresql://:@:5432/' +``` + +Alternative (component vars): + +```bash +export DB_HOST='' +export DB_PORT='5432' +export DB_NAME='' +export DB_USER='' +export DB_PASSWORD='' +``` + +Optional pool/timeout tuning: + +```bash +export DB_POOL_MIN_SIZE='0' +export DB_POOL_MAX_SIZE='4' +export DB_MAX_LIFETIME_MS='3600000' +export DB_IDLE_TIMEOUT_MS='1800000' +export DB_CONNECTION_TIMEOUT_MS='30000' +``` + +Notes: +- If you use `DATABASE_URL`, keep it in standard `postgresql://...` or `postgres://...` form. + +## Common Gotchas + +- Java needs `jdbc:postgresql://...`; others usually use `postgresql://...` or `postgres://...`. +- C# uses `ConnectionStrings__LampControl`, not `DATABASE_URL`. +- If an app still runs in memory mode, first verify the exact variable name is exported in the same shell/session used to start the app. 
From 385d989d5d5128a56745a36ddb9ccec6b683464f Mon Sep 17 00:00:00 2001 From: Davide Mendolia Date: Fri, 13 Feb 2026 11:57:39 +0100 Subject: [PATCH 12/20] feat(benchmarks): resolve cloud run project from config by default --- benchmarks/k6/README.md | 5 +++-- benchmarks/k6/config.json | 2 ++ benchmarks/k6/configure-cloud-run.js | 29 +++++++++++++++++++++------- 3 files changed, 27 insertions(+), 9 deletions(-) diff --git a/benchmarks/k6/README.md b/benchmarks/k6/README.md index 44162122..294c3786 100644 --- a/benchmarks/k6/README.md +++ b/benchmarks/k6/README.md @@ -115,16 +115,17 @@ gcloud compute ssh lamp-bench-runner --project= --zone=europe-w Dry run (prints commands): ```bash -node benchmarks/k6/configure-cloud-run.js --project +node benchmarks/k6/configure-cloud-run.js ``` Apply settings: ```bash -node benchmarks/k6/configure-cloud-run.js --project --execute +node benchmarks/k6/configure-cloud-run.js --execute ``` Settings come from `benchmarks/k6/config.json` under `cloudRun`. +Project resolution order is: `--project`, `cloudRun.projectId`, `cloudRun.projectNumber`, `GOOGLE_CLOUD_PROJECT`. 
## 4) Run benchmark diff --git a/benchmarks/k6/config.json b/benchmarks/k6/config.json index fafe6de1..a06d5a8f 100644 --- a/benchmarks/k6/config.json +++ b/benchmarks/k6/config.json @@ -37,6 +37,8 @@ }, "defaultDbSeedCommand": "psql \"$BENCHMARK_DATABASE_URL\" -v ON_ERROR_STOP=1 -c \"TRUNCATE TABLE lamps RESTART IDENTITY CASCADE; INSERT INTO lamps (id, is_on, created_at, updated_at, deleted_at) SELECT uuid_generate_v5('6ba7b810-9dad-11d1-80b4-00c04fd430c8', 'lamp-' || g), (g % 2 = 0), NOW() - ((10001 - g) * INTERVAL '1 second'), NOW() - ((10001 - g) * INTERVAL '1 second'), NULL FROM generate_series(1, 10000) AS g;\"", "cloudRun": { + "projectId": "lamp-control-469416", + "projectNumber": "827868544165", "maxInstances": 1, "minInstances": 1, "concurrency": 80, diff --git a/benchmarks/k6/configure-cloud-run.js b/benchmarks/k6/configure-cloud-run.js index dcdbf0bb..aacf0b7f 100755 --- a/benchmarks/k6/configure-cloud-run.js +++ b/benchmarks/k6/configure-cloud-run.js @@ -8,7 +8,7 @@ function parseArgs(argv) { const args = { services: path.join('benchmarks', 'k6', 'services.json'), config: path.join('benchmarks', 'k6', 'config.json'), - project: process.env.GOOGLE_CLOUD_PROJECT || '', + project: '', execute: false, }; @@ -34,7 +34,7 @@ function parseArgs(argv) { } function printHelp() { - console.log(`Usage:\n node benchmarks/k6/configure-cloud-run.js [--project my-project] [--execute]\n\nBy default this script prints commands only. Add --execute to run them.`); + console.log(`Usage:\n node benchmarks/k6/configure-cloud-run.js [--project my-project] [--execute]\n\nProject is resolved in this order: --project, cloudRun.projectId, cloudRun.projectNumber, GOOGLE_CLOUD_PROJECT.\nBy default this script prints commands only. 
Add --execute to run them.`); } function readJson(filePath) { @@ -70,16 +70,31 @@ function validateCloudRunConfig(cloudRun) { } } -function main() { - const args = parseArgs(process.argv); - if (!args.project) { - throw new Error('Missing --project (or set GOOGLE_CLOUD_PROJECT)'); +function resolveProject(argsProject, cloudRun) { + if (argsProject && argsProject.trim()) { + return argsProject.trim(); + } + if (cloudRun.projectId && String(cloudRun.projectId).trim()) { + return String(cloudRun.projectId).trim(); + } + if (cloudRun.projectNumber && String(cloudRun.projectNumber).trim()) { + return String(cloudRun.projectNumber).trim(); } + if (process.env.GOOGLE_CLOUD_PROJECT && process.env.GOOGLE_CLOUD_PROJECT.trim()) { + return process.env.GOOGLE_CLOUD_PROJECT.trim(); + } + throw new Error( + "Missing project configuration. Set cloudRun.projectId (or projectNumber) in config.json, or pass --project, or set GOOGLE_CLOUD_PROJECT." + ); +} +function main() { + const args = parseArgs(process.argv); const services = readJson(args.services); const config = readJson(args.config); const cloudRun = config.cloudRun || {}; validateCloudRunConfig(cloudRun); + const project = resolveProject(args.project, cloudRun); for (const service of services) { if (!service.cloudRunService) { @@ -94,7 +109,7 @@ function main() { 'update', service.cloudRunService, '--project', - args.project, + project, '--region', region, '--max-instances', From f3905d2966819eea5062b5c99fe3bbf57eb6f29e Mon Sep 17 00:00:00 2001 From: Davide Mendolia Date: Fri, 13 Feb 2026 12:25:44 +0100 Subject: [PATCH 13/20] feat(benchmarks): support skip-setup and config-based project env --- benchmarks/k6/README.md | 9 ++++++++- benchmarks/k6/run-benchmarks.js | 21 +++++++++++++++++---- 2 files changed, 25 insertions(+), 5 deletions(-) diff --git a/benchmarks/k6/README.md b/benchmarks/k6/README.md index 294c3786..9cf6cf5e 100644 --- a/benchmarks/k6/README.md +++ b/benchmarks/k6/README.md @@ -62,7 +62,6 @@ gcloud run 
services update typescript-lamp-control-api --region europe-west1 --u Before running benchmarks, export required variables: ```bash -export GOOGLE_CLOUD_PROJECT='' export BENCHMARK_DATABASE_URL='postgresql://:@:5432/?sslmode=require' export BENCHMARK_JDBC_DATABASE_URL='jdbc:postgresql://:5432/' export BENCHMARK_DB_USER='' @@ -70,6 +69,8 @@ export BENCHMARK_DB_PASSWORD='' export BENCHMARK_CSHARP_CONNECTION_STRING='Host=;Port=5432;Database=;Username=;Password=' ``` +`GOOGLE_CLOUD_PROJECT` is optional for `run-benchmarks.js`; if unset, it uses `cloudRun.projectId` from `benchmarks/k6/config.json`. + ## 2) Run from a GCP VM (recommended) Create a runner VM in the same region and install required tools: @@ -141,6 +142,12 @@ Run only memory pass: node benchmarks/k6/run-benchmarks.js --passes memory ``` +Run benchmark without running setup commands (`memorySetupCommand` / `dbSetupCommand`): + +```bash +node benchmarks/k6/run-benchmarks.js --passes memory --skip-setup +``` + Enable the extreme appendix run: ```bash diff --git a/benchmarks/k6/run-benchmarks.js b/benchmarks/k6/run-benchmarks.js index df594d3a..cdef0701 100755 --- a/benchmarks/k6/run-benchmarks.js +++ b/benchmarks/k6/run-benchmarks.js @@ -11,6 +11,7 @@ function parseArgs(argv) { services: path.join('benchmarks', 'k6', 'services.json'), resultsDir: path.join('benchmarks', 'results'), passes: null, + skipSetup: false, }; for (let i = 2; i < argv.length; i += 1) { @@ -23,6 +24,8 @@ function parseArgs(argv) { args.resultsDir = argv[++i]; } else if (token === '--passes') { args.passes = argv[++i].split(',').map((v) => v.trim()).filter(Boolean); + } else if (token === '--skip-setup') { + args.skipSetup = true; } else if (token === '--help' || token === '-h') { printHelp(); process.exit(0); @@ -35,7 +38,7 @@ function parseArgs(argv) { } function printHelp() { - console.log(`Usage:\n node benchmarks/k6/run-benchmarks.js [--config path] [--services path] [--results-dir path] [--passes memory,db]\n`); + 
console.log(`Usage:\n node benchmarks/k6/run-benchmarks.js [--config path] [--services path] [--results-dir path] [--passes memory,db] [--skip-setup]\n`); } function readJson(filePath) { @@ -205,6 +208,15 @@ function getDbSeedCommand(config, service) { return ''; } +function buildSetupEnv(config) { + const env = { ...process.env }; + const projectId = config?.cloudRun?.projectId; + if ((!env.GOOGLE_CLOUD_PROJECT || !env.GOOGLE_CLOUD_PROJECT.trim()) && projectId) { + env.GOOGLE_CLOUD_PROJECT = String(projectId).trim(); + } + return env; +} + function buildK6Env({ config, service, baseUrl, mode, targetRps, duration }) { const env = { ...process.env }; env.RUN_MODE = mode; @@ -277,6 +289,7 @@ async function main() { const stamp = nowStamp(); const scenarioPath = path.join('benchmarks', 'k6', 'scenarios.js'); const rawRoot = path.join(args.resultsDir, 'raw', stamp); + const setupEnv = buildSetupEnv(config); ensureDir(rawRoot); @@ -307,8 +320,8 @@ async function main() { } const passSetupCommand = getPassSetupCommand(service, passName); - if (passSetupCommand) { - runShell(passSetupCommand); + if (!args.skipSetup && passSetupCommand) { + runShell(passSetupCommand, setupEnv); } const serviceRuns = []; @@ -319,7 +332,7 @@ async function main() { if (passName === 'db') { const dbSeedCommand = getDbSeedCommand(config, service); if (dbSeedCommand) { - runShell(dbSeedCommand); + runShell(dbSeedCommand, setupEnv); } } From 27d28bfc84dfd19fc2a2853889638747e1ecb69b Mon Sep 17 00:00:00 2001 From: Davide Mendolia Date: Fri, 13 Feb 2026 12:45:56 +0100 Subject: [PATCH 14/20] fix(benchmarks): retry precheck and continue on iteration failures --- benchmarks/k6/README.md | 4 + benchmarks/k6/run-benchmarks.js | 287 ++++++++++++++++++++------------ 2 files changed, 181 insertions(+), 110 deletions(-) diff --git a/benchmarks/k6/README.md b/benchmarks/k6/README.md index 9cf6cf5e..a21628a2 100644 --- a/benchmarks/k6/README.md +++ b/benchmarks/k6/README.md @@ -148,6 +148,10 @@ Run benchmark 
without running setup commands (`memorySetupCommand` / `dbSetupCom node benchmarks/k6/run-benchmarks.js --passes memory --skip-setup ``` +Runtime behavior: +- Precheck CRUD uses retry with exponential backoff (up to 4 attempts total). +- If an iteration still fails (precheck, k6, or setup error), the runner logs the error, records the failed iteration in `run-report.json`, and continues with the next iteration/service. + Enable the extreme appendix run: ```bash diff --git a/benchmarks/k6/run-benchmarks.js b/benchmarks/k6/run-benchmarks.js index cdef0701..ba57322f 100755 --- a/benchmarks/k6/run-benchmarks.js +++ b/benchmarks/k6/run-benchmarks.js @@ -83,6 +83,12 @@ function runShell(command, env) { } } +function sleep(ms) { + return new Promise((resolve) => { + setTimeout(resolve, ms); + }); +} + async function httpJson(method, url, body, authHeader) { const headers = { 'Content-Type': 'application/json' }; if (authHeader) { @@ -108,6 +114,34 @@ async function httpJson(method, url, body, authHeader) { return { response, body: parsed, text }; } +async function runPrecheckWithRetry(baseUrl, basePath, authHeader, options = {}) { + const retries = Number.isFinite(options.retries) ? options.retries : 3; + const initialDelayMs = Number.isFinite(options.initialDelayMs) ? options.initialDelayMs : 1000; + const maxDelayMs = Number.isFinite(options.maxDelayMs) ? options.maxDelayMs : 5000; + let attempt = 0; + let delayMs = initialDelayMs; + let lastError = null; + + while (attempt <= retries) { + attempt += 1; + try { + await precheckCrud(baseUrl, basePath, authHeader); + return; + } catch (error) { + lastError = error; + const message = error && error.message ? error.message : String(error); + if (attempt > retries) { + break; + } + console.warn(`[precheck] attempt ${attempt}/${retries + 1} failed: ${message}. 
Retrying in ${delayMs}ms...`); + await sleep(delayMs); + delayMs = Math.min(delayMs * 2, maxDelayMs); + } + } + + throw lastError || new Error('Precheck failed after retries'); +} + async function precheckCrud(baseUrl, basePath, authHeader) { const prefix = `${baseUrl.replace(/\/$/, '')}${basePath}`; @@ -139,26 +173,41 @@ async function precheckCrud(baseUrl, basePath, authHeader) { } } +function metricValues(metric) { + if (!metric) { + return null; + } + if (metric.values && typeof metric.values === 'object') { + return metric.values; + } + return metric; +} + function parseMetric(summary, name) { const metric = summary.metrics[name]; - if (!metric || !metric.values) { + const values = metricValues(metric); + if (!values) { return null; } return { - avg: metric.values.avg ?? null, - p95: metric.values['p(95)'] ?? null, - p99: metric.values['p(99)'] ?? null, - min: metric.values.min ?? null, - max: metric.values.max ?? null, + avg: values.avg ?? null, + p95: values['p(95)'] ?? null, + p99: values['p(99)'] ?? null, + min: values.min ?? null, + max: values.max ?? null, }; } function parseRate(summary, name) { const metric = summary.metrics[name]; - if (!metric || !metric.values) { + const values = metricValues(metric); + if (!values) { return null; } - return metric.values.rate ?? null; + if (values.rate != null) { + return values.rate; + } + return values.value ?? 
null; } function median(numbers) { @@ -244,14 +293,15 @@ function runK6Phase({ scenarioPath, outputFile, env }) { } function aggregatePass(serviceRuns) { - const fixedP95 = median(serviceRuns.map((r) => r.fixed.duration?.p95)); - const fixedP99 = median(serviceRuns.map((r) => r.fixed.duration?.p99)); - const fixedAvg = median(serviceRuns.map((r) => r.fixed.duration?.avg)); - const fixedErrorRate = median(serviceRuns.map((r) => r.fixed.errorRate)); - const maxStableRps = median(serviceRuns.map((r) => r.stress.maxStableRps)); + const successfulRuns = serviceRuns.filter((r) => !r.failed); + const fixedP95 = median(successfulRuns.map((r) => r.fixed.duration?.p95)); + const fixedP99 = median(successfulRuns.map((r) => r.fixed.duration?.p99)); + const fixedAvg = median(successfulRuns.map((r) => r.fixed.duration?.avg)); + const fixedErrorRate = median(successfulRuns.map((r) => r.fixed.errorRate)); + const maxStableRps = median(successfulRuns.map((r) => r.stress.maxStableRps)); let extreme = null; - const extremeRuns = serviceRuns.filter((r) => r.extreme); + const extremeRuns = successfulRuns.filter((r) => r.extreme); if (extremeRuns.length > 0) { extreme = { p95: median(extremeRuns.map((r) => r.extreme.duration?.p95)), @@ -262,6 +312,8 @@ function aggregatePass(serviceRuns) { } return { + successfulIterations: successfulRuns.length, + failedIterations: serviceRuns.length - successfulRuns.length, fixed: { p95: fixedP95, p99: fixedP99, @@ -329,122 +381,137 @@ async function main() { for (let iteration = 1; iteration <= Number(config.iterationsPerPass || 1); iteration += 1) { console.log(`\n=== ${passName.toUpperCase()} :: ${service.name} :: iteration ${iteration} ===`); - if (passName === 'db') { - const dbSeedCommand = getDbSeedCommand(config, service); - if (dbSeedCommand) { - runShell(dbSeedCommand, setupEnv); + try { + if (passName === 'db') { + const dbSeedCommand = getDbSeedCommand(config, service); + if (dbSeedCommand) { + runShell(dbSeedCommand, setupEnv); + } } - } 
- await precheckCrud(baseUrl, config.basePath, service.authHeader || ''); - - const iterDir = path.join(rawRoot, passName, service.name, `iter-${iteration}`); - ensureDir(iterDir); - - const warmupFile = path.join(iterDir, 'warmup.json'); - const warmupSummary = runK6Phase({ - scenarioPath, - outputFile: warmupFile, - env: buildK6Env({ - config, - service, - baseUrl, - mode: 'warmup', - targetRps: config.warmup.rps, - duration: config.warmup.duration, - }), - }); - - const fixedFile = path.join(iterDir, 'fixed.json'); - const fixedSummary = runK6Phase({ - scenarioPath, - outputFile: fixedFile, - env: buildK6Env({ - config, - service, - baseUrl, - mode: 'fixed', - targetRps: config.fixed.rps, - duration: config.fixed.duration, - }), - }); - - let maxStableRps = null; - const stressSteps = []; - for (const rps of config.stress.rpsSteps) { - const stressFile = path.join(iterDir, `stress-${rps}.json`); - const stressSummary = runK6Phase({ + await runPrecheckWithRetry(baseUrl, config.basePath, service.authHeader || '', { + retries: 3, + initialDelayMs: 1000, + maxDelayMs: 5000, + }); + + const iterDir = path.join(rawRoot, passName, service.name, `iter-${iteration}`); + ensureDir(iterDir); + + const warmupFile = path.join(iterDir, 'warmup.json'); + const warmupSummary = runK6Phase({ scenarioPath, - outputFile: stressFile, + outputFile: warmupFile, env: buildK6Env({ config, service, baseUrl, - mode: 'stress', - targetRps: rps, - duration: config.stress.stepDuration, + mode: 'warmup', + targetRps: config.warmup.rps, + duration: config.warmup.duration, }), }); - const dur = parseMetric(stressSummary, 'stress_req_duration'); - const err = parseRate(stressSummary, 'stress_error_rate') || 0; - const passed = - Number.isFinite(dur?.p95) && - dur.p95 <= Number(config.slo.p95Ms) && - err <= Number(config.slo.errorRate); - - stressSteps.push({ rps, duration: dur, errorRate: err, passed }); - - if (passed) { - maxStableRps = rps; - } else { - break; - } - } - - let extreme = null; 
- const shouldRunExtreme = Boolean(config.extreme?.enabled) && - (Boolean(config.extreme.runPerIteration) || iteration === 1); - - if (shouldRunExtreme) { - const extremeFile = path.join(iterDir, 'extreme-1000.json'); - const extremeSummary = runK6Phase({ + const fixedFile = path.join(iterDir, 'fixed.json'); + const fixedSummary = runK6Phase({ scenarioPath, - outputFile: extremeFile, + outputFile: fixedFile, env: buildK6Env({ config, service, baseUrl, - mode: 'extreme', - targetRps: config.extreme.rps, - duration: config.extreme.duration, + mode: 'fixed', + targetRps: config.fixed.rps, + duration: config.fixed.duration, }), }); - extreme = { - duration: parseMetric(extremeSummary, 'extreme_req_duration'), - errorRate: parseRate(extremeSummary, 'extreme_error_rate') || 0, + let maxStableRps = null; + const stressSteps = []; + for (const rps of config.stress.rpsSteps) { + const stressFile = path.join(iterDir, `stress-${rps}.json`); + const stressSummary = runK6Phase({ + scenarioPath, + outputFile: stressFile, + env: buildK6Env({ + config, + service, + baseUrl, + mode: 'stress', + targetRps: rps, + duration: config.stress.stepDuration, + }), + }); + + const dur = parseMetric(stressSummary, 'stress_req_duration'); + const err = parseRate(stressSummary, 'stress_error_rate') || 0; + const passed = + Number.isFinite(dur?.p95) && + dur.p95 <= Number(config.slo.p95Ms) && + err <= Number(config.slo.errorRate); + + stressSteps.push({ rps, duration: dur, errorRate: err, passed }); + + if (passed) { + maxStableRps = rps; + } else { + break; + } + } + + let extreme = null; + const shouldRunExtreme = Boolean(config.extreme?.enabled) && + (Boolean(config.extreme.runPerIteration) || iteration === 1); + + if (shouldRunExtreme) { + const extremeFile = path.join(iterDir, 'extreme-1000.json'); + const extremeSummary = runK6Phase({ + scenarioPath, + outputFile: extremeFile, + env: buildK6Env({ + config, + service, + baseUrl, + mode: 'extreme', + targetRps: config.extreme.rps, + 
duration: config.extreme.duration, + }), + }); + + extreme = { + duration: parseMetric(extremeSummary, 'extreme_req_duration'), + errorRate: parseRate(extremeSummary, 'extreme_error_rate') || 0, + }; + } + + const serviceRun = { + iteration, + failed: false, + warmup: { + duration: parseMetric(warmupSummary, 'warmup_req_duration'), + errorRate: parseRate(warmupSummary, 'warmup_error_rate') || 0, + }, + fixed: { + duration: parseMetric(fixedSummary, 'fixed_req_duration'), + errorRate: parseRate(fixedSummary, 'fixed_error_rate') || 0, + }, + stress: { + maxStableRps, + steps: stressSteps, + }, + extreme, }; - } - const serviceRun = { - iteration, - warmup: { - duration: parseMetric(warmupSummary, 'warmup_req_duration'), - errorRate: parseRate(warmupSummary, 'warmup_error_rate') || 0, - }, - fixed: { - duration: parseMetric(fixedSummary, 'fixed_req_duration'), - errorRate: parseRate(fixedSummary, 'fixed_error_rate') || 0, - }, - stress: { - maxStableRps, - steps: stressSteps, - }, - extreme, - }; - - serviceRuns.push(serviceRun); + serviceRuns.push(serviceRun); + } catch (error) { + const message = error && error.message ? error.message : String(error); + console.warn(`[iteration failed] ${passName}/${service.name}/iter-${iteration}: ${message}. 
Continuing...`); + serviceRuns.push({ + iteration, + failed: true, + error: message, + }); + } } report.runs[passName][service.name] = serviceRuns; From d7f8c8d44daeced080d299a09d9a232604e7fcc5 Mon Sep 17 00:00:00 2001 From: Davide Mendolia Date: Fri, 13 Feb 2026 13:24:45 +0100 Subject: [PATCH 15/20] feat(benchmarks): add fast benchmark config profile --- benchmarks/k6/README.md | 8 +++++ benchmarks/k6/config.fast.json | 56 ++++++++++++++++++++++++++++++++++ 2 files changed, 64 insertions(+) create mode 100644 benchmarks/k6/config.fast.json diff --git a/benchmarks/k6/README.md b/benchmarks/k6/README.md index a21628a2..58c7e4e4 100644 --- a/benchmarks/k6/README.md +++ b/benchmarks/k6/README.md @@ -16,6 +16,7 @@ This directory contains a benchmark harness for comparing the six language imple ## Files - `config.json`: benchmark parameters and Cloud Run parity settings +- `config.fast.json`: shorter benchmark profile for quicker p95-focused comparisons - `services.json`: service URLs and per-service DB seed/reset hooks - `scenarios.js`: k6 workload script (read-heavy CRUD mix) - `run-benchmarks.js`: orchestration script for full memory+db benchmark execution @@ -136,6 +137,12 @@ Run both passes (`memory`,`db`) with settings from `config.json`: node benchmarks/k6/run-benchmarks.js ``` +Run both passes with the faster profile: + +```bash +node benchmarks/k6/run-benchmarks.js --config benchmarks/k6/config.fast.json +``` + Run only memory pass: ```bash @@ -177,3 +184,4 @@ node benchmarks/k6/generate-summary.js benchmarks/results/run-report.json benchm - Use memory pass ranking to isolate runtime/framework signal. - Use DB pass ranking to understand production-like behavior and DB bottleneck impact. - Treat extreme `1000 RPS` run as saturation appendix, not primary ranking. +- Use `config.fast.json` for iterative checks and `config.json` for final publication-quality runs. 
diff --git a/benchmarks/k6/config.fast.json b/benchmarks/k6/config.fast.json new file mode 100644 index 00000000..0ae2dc9d --- /dev/null +++ b/benchmarks/k6/config.fast.json @@ -0,0 +1,56 @@ +{ + "basePath": "/v1", + "passes": [ + "memory", + "db" + ], + "iterationsPerPass": 3, + "randomizeServiceOrder": true, + "warmup": { + "duration": "30s", + "rps": 20 + }, + "fixed": { + "duration": "90s", + "rps": 80 + }, + "stress": { + "stepDuration": "45s", + "rpsSteps": [ + 80, + 120, + 160 + ] + }, + "extreme": { + "enabled": false, + "duration": "60s", + "rps": 1000, + "runPerIteration": false + }, + "slo": { + "p95Ms": 300, + "errorRate": 0.01 + }, + "workload": { + "listPercent": 50, + "getPercent": 20, + "createPercent": 20, + "updatePercent": 7, + "deletePercent": 3, + "pageSize": 25, + "seedFetchPages": 10, + "seedPageSize": 100 + }, + "defaultDbSeedCommand": "psql \"$BENCHMARK_DATABASE_URL\" -v ON_ERROR_STOP=1 -c \"TRUNCATE TABLE lamps RESTART IDENTITY CASCADE; INSERT INTO lamps (id, is_on, created_at, updated_at, deleted_at) SELECT uuid_generate_v5('6ba7b810-9dad-11d1-80b4-00c04fd430c8', 'lamp-' || g), (g % 2 = 0), NOW() - ((10001 - g) * INTERVAL '1 second'), NOW() - ((10001 - g) * INTERVAL '1 second'), NULL FROM generate_series(1, 10000) AS g;\"", + "cloudRun": { + "projectId": "lamp-control-469416", + "projectNumber": "827868544165", + "maxInstances": 1, + "minInstances": 1, + "concurrency": 80, + "cpu": "1", + "memory": "512Mi", + "timeout": "60s" + } +} From 8af85b21d2ebf6b21c06be74bb69b5b610f1f872 Mon Sep 17 00:00:00 2001 From: Davide Mendolia Date: Sun, 15 Feb 2026 12:20:22 +0100 Subject: [PATCH 16/20] chore(benchmarks): enforce Cloud Run startup probe settings --- benchmarks/k6/README.md | 10 ++++++++++ benchmarks/k6/config.fast.json | 8 +++++++- benchmarks/k6/config.json | 8 +++++++- benchmarks/k6/configure-cloud-run.js | 19 +++++++++++++++++++ 4 files changed, 43 insertions(+), 2 deletions(-) diff --git a/benchmarks/k6/README.md b/benchmarks/k6/README.md 
index 58c7e4e4..d83386cd 100644 --- a/benchmarks/k6/README.md +++ b/benchmarks/k6/README.md @@ -128,6 +128,16 @@ node benchmarks/k6/configure-cloud-run.js --execute Settings come from `benchmarks/k6/config.json` under `cloudRun`. Project resolution order is: `--project`, `cloudRun.projectId`, `cloudRun.projectNumber`, `GOOGLE_CLOUD_PROJECT`. +Startup probe applied to all services: + +```yaml +startupProbe: + timeoutSeconds: 1 + periodSeconds: 10 + failureThreshold: 3 + tcpSocket: + port: 8080 +``` ## 4) Run benchmark diff --git a/benchmarks/k6/config.fast.json b/benchmarks/k6/config.fast.json index 0ae2dc9d..8b55fc07 100644 --- a/benchmarks/k6/config.fast.json +++ b/benchmarks/k6/config.fast.json @@ -51,6 +51,12 @@ "concurrency": 80, "cpu": "1", "memory": "512Mi", - "timeout": "60s" + "timeout": "60s", + "startupProbe": { + "timeoutSeconds": 1, + "periodSeconds": 10, + "failureThreshold": 3, + "tcpPort": 8080 + } } } diff --git a/benchmarks/k6/config.json b/benchmarks/k6/config.json index a06d5a8f..03f2275f 100644 --- a/benchmarks/k6/config.json +++ b/benchmarks/k6/config.json @@ -44,6 +44,12 @@ "concurrency": 80, "cpu": "1", "memory": "512Mi", - "timeout": "60s" + "timeout": "60s", + "startupProbe": { + "timeoutSeconds": 1, + "periodSeconds": 10, + "failureThreshold": 3, + "tcpPort": 8080 + } } } diff --git a/benchmarks/k6/configure-cloud-run.js b/benchmarks/k6/configure-cloud-run.js index aacf0b7f..d9ee1f5b 100755 --- a/benchmarks/k6/configure-cloud-run.js +++ b/benchmarks/k6/configure-cloud-run.js @@ -68,6 +68,22 @@ function validateCloudRunConfig(cloudRun) { ); } } + + const startupProbe = cloudRun.startupProbe || {}; + const requiredStartupProbeKeys = [ + 'timeoutSeconds', + 'periodSeconds', + 'failureThreshold', + 'tcpPort', + ]; + for (const key of requiredStartupProbeKeys) { + const value = startupProbe[key]; + if (value === undefined || value === null || String(value).trim() === '') { + throw new Error( + `Invalid cloudRun config: 'startupProbe.${key}' is 
required in config.json` + ); + } + } } function resolveProject(argsProject, cloudRun) { @@ -94,6 +110,7 @@ function main() { const config = readJson(args.config); const cloudRun = config.cloudRun || {}; validateCloudRunConfig(cloudRun); + const startupProbe = cloudRun.startupProbe; const project = resolveProject(args.project, cloudRun); for (const service of services) { @@ -125,6 +142,8 @@ function main() { '--timeout', String(cloudRun.timeout), '--cpu-throttling', + '--startup-probe', + `timeoutSeconds=${startupProbe.timeoutSeconds},periodSeconds=${startupProbe.periodSeconds},failureThreshold=${startupProbe.failureThreshold},tcpSocket.port=${startupProbe.tcpPort}`, ]; if (!args.execute) { From f3b610ed9dcc8925903e1eff913b6b631cb0cf86 Mon Sep 17 00:00:00 2001 From: Davide Mendolia Date: Sun, 15 Feb 2026 12:28:48 +0100 Subject: [PATCH 17/20] chore(benchmarks): set Cloud Run min instances to 0 --- benchmarks/k6/config.fast.json | 2 +- benchmarks/k6/config.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/benchmarks/k6/config.fast.json b/benchmarks/k6/config.fast.json index 8b55fc07..e11e68e8 100644 --- a/benchmarks/k6/config.fast.json +++ b/benchmarks/k6/config.fast.json @@ -47,7 +47,7 @@ "projectId": "lamp-control-469416", "projectNumber": "827868544165", "maxInstances": 1, - "minInstances": 1, + "minInstances": 0, "concurrency": 80, "cpu": "1", "memory": "512Mi", diff --git a/benchmarks/k6/config.json b/benchmarks/k6/config.json index 03f2275f..71b6aa39 100644 --- a/benchmarks/k6/config.json +++ b/benchmarks/k6/config.json @@ -40,7 +40,7 @@ "projectId": "lamp-control-469416", "projectNumber": "827868544165", "maxInstances": 1, - "minInstances": 1, + "minInstances": 0, "concurrency": 80, "cpu": "1", "memory": "512Mi", From f21935d51131cac5876b370963efd25511b1a2b3 Mon Sep 17 00:00:00 2001 From: Davide Mendolia Date: Sun, 15 Feb 2026 13:29:17 +0100 Subject: [PATCH 18/20] docs(benchmarks): add macOS caffeinate run commands --- 
benchmarks/k6/README.md | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/benchmarks/k6/README.md b/benchmarks/k6/README.md index d83386cd..2a8d50ec 100644 --- a/benchmarks/k6/README.md +++ b/benchmarks/k6/README.md @@ -165,6 +165,18 @@ Run benchmark without running setup commands (`memorySetupCommand` / `dbSetupCom node benchmarks/k6/run-benchmarks.js --passes memory --skip-setup ``` +Run from macOS without sleep interruptions: + +```bash +caffeinate -i node benchmarks/k6/run-benchmarks.js +``` + +Fast profile on macOS: + +```bash +caffeinate -i node benchmarks/k6/run-benchmarks.js --config benchmarks/k6/config.fast.json +``` + Runtime behavior: - Precheck CRUD uses retry with exponential backoff (up to 4 attempts total). - If an iteration still fails (precheck, k6, or setup error), the runner logs the error, records the failed iteration in `run-report.json`, and continues with the next iteration/service. From ba276776b69d0ff0b31c9acfec365f7f549db72c Mon Sep 17 00:00:00 2001 From: Davide Mendolia Date: Sun, 15 Feb 2026 16:05:55 +0100 Subject: [PATCH 19/20] feat(benchmarks): add cold-start appendix measurement --- benchmarks/k6/README.md | 15 +++++- benchmarks/k6/config.fast.json | 9 ++++ benchmarks/k6/config.json | 9 ++++ benchmarks/k6/run-benchmarks.js | 91 +++++++++++++++++++++++++++++++-- benchmarks/k6/scenarios.js | 74 +++++++++++++++++++++++---- benchmarks/k6/summary.js | 16 ++++++ 6 files changed, 198 insertions(+), 16 deletions(-) diff --git a/benchmarks/k6/README.md b/benchmarks/k6/README.md index 2a8d50ec..593e79cd 100644 --- a/benchmarks/k6/README.md +++ b/benchmarks/k6/README.md @@ -9,6 +9,7 @@ This directory contains a benchmark harness for comparing the six language imple - `db` pass (realistic signal) - Fixed, fairness-first Cloud Run settings for all services - Main ranking at fixed concurrency pressure with `concurrency=80` +- Separate cold-start appendix (not used in primary ranking) - Optional non-ranking extreme run at `1000 RPS` 
(disabled by default) - Sequential service execution (no parallel cross-service load) - Raw k6 result exports and generated markdown summary @@ -139,6 +140,8 @@ startupProbe: port: 8080 ``` +For meaningful cold-start sampling, keep `cloudRun.minInstances=0`. + ## 4) Run benchmark Run both passes (`memory`,`db`) with settings from `config.json`: @@ -177,8 +180,17 @@ Fast profile on macOS: caffeinate -i node benchmarks/k6/run-benchmarks.js --config benchmarks/k6/config.fast.json ``` +Disable cold-start appendix for quick local iterations: + +```bash +node benchmarks/k6/run-benchmarks.js --config benchmarks/k6/config.fast.json +# then set coldStart.enabled=false in the selected config +``` + Runtime behavior: -- Precheck CRUD uses retry with exponential backoff (up to 4 attempts total). +- Cold-start probe runs before warmup/fixed/stress and is reported separately. +- Cold-start probe waits optional cooldown (`coldStart.cooldownSeconds`) to improve scale-to-zero likelihood. +- Precheck CRUD uses retry with exponential backoff (up to 7 attempts total). - If an iteration still fails (precheck, k6, or setup error), the runner logs the error, records the failed iteration in `run-report.json`, and continues with the next iteration/service. 
Enable the extreme appendix run: @@ -192,6 +204,7 @@ Outputs: - Raw k6 JSON: `benchmarks/results/raw//...` - Structured run report: `benchmarks/results/run-report.json` - Ranked markdown summary: `benchmarks/results/summary.md` +- Cold-start artifact per sampled iteration: `benchmarks/results/raw////iter-*/cold-start.json` ## 5) Rebuild summary only diff --git a/benchmarks/k6/config.fast.json b/benchmarks/k6/config.fast.json index e11e68e8..013d01ab 100644 --- a/benchmarks/k6/config.fast.json +++ b/benchmarks/k6/config.fast.json @@ -28,6 +28,15 @@ "rps": 1000, "runPerIteration": false }, + "coldStart": { + "enabled": true, + "runPerIteration": false, + "cooldownSeconds": 900, + "maxWaitSeconds": 60, + "probeIntervalMs": 500, + "endpoint": "/lamps?pageSize=1", + "successStatus": 200 + }, "slo": { "p95Ms": 300, "errorRate": 0.01 diff --git a/benchmarks/k6/config.json b/benchmarks/k6/config.json index 71b6aa39..b4080897 100644 --- a/benchmarks/k6/config.json +++ b/benchmarks/k6/config.json @@ -21,6 +21,15 @@ "rps": 1000, "runPerIteration": false }, + "coldStart": { + "enabled": true, + "runPerIteration": false, + "cooldownSeconds": 900, + "maxWaitSeconds": 60, + "probeIntervalMs": 500, + "endpoint": "/lamps?pageSize=1", + "successStatus": 200 + }, "slo": { "p95Ms": 300, "errorRate": 0.01 diff --git a/benchmarks/k6/run-benchmarks.js b/benchmarks/k6/run-benchmarks.js index ba57322f..6c55b2e5 100755 --- a/benchmarks/k6/run-benchmarks.js +++ b/benchmarks/k6/run-benchmarks.js @@ -210,6 +210,24 @@ function parseRate(summary, name) { return values.value ?? 
null; } +function parseScalar(summary, name) { + const metric = summary.metrics[name]; + const values = metricValues(metric); + if (!values) { + return null; + } + if (Number.isFinite(values.value)) { + return values.value; + } + if (Number.isFinite(values.avg)) { + return values.avg; + } + if (Number.isFinite(values.max)) { + return values.max; + } + return null; +} + function median(numbers) { const vals = numbers.filter((n) => Number.isFinite(n)).slice().sort((a, b) => a - b); if (vals.length === 0) { @@ -281,6 +299,10 @@ function buildK6Env({ config, service, baseUrl, mode, targetRps, duration }) { env.CREATE_WEIGHT = String(config.workload.createPercent); env.UPDATE_WEIGHT = String(config.workload.updatePercent); env.DELETE_WEIGHT = String(config.workload.deletePercent); + env.COLD_START_MAX_WAIT_SECONDS = String(config.coldStart?.maxWaitSeconds ?? 60); + env.COLD_START_PROBE_INTERVAL_MS = String(config.coldStart?.probeIntervalMs ?? 500); + env.COLD_START_ENDPOINT = String(config.coldStart?.endpoint || '/lamps?pageSize=1'); + env.COLD_START_SUCCESS_STATUS = String(config.coldStart?.successStatus ?? 
200); if (service.authHeader) { env.AUTH_HEADER = service.authHeader; } @@ -311,9 +333,24 @@ function aggregatePass(serviceRuns) { }; } + const coldSuccessfulRuns = successfulRuns.filter((r) => + r.coldStart && + !r.coldStart.failed && + Number.isFinite(r.coldStart.readyMs) + ); + const coldFailedRuns = successfulRuns.filter((r) => r.coldStart && r.coldStart.failed); + const coldStart = { + readyMs: median(coldSuccessfulRuns.map((r) => r.coldStart.readyMs)), + attempts: median(coldSuccessfulRuns.map((r) => r.coldStart.attempts)), + errorRate: median(coldSuccessfulRuns.map((r) => r.coldStart.errorRate)), + successfulColdSamples: coldSuccessfulRuns.length, + failedColdSamples: coldFailedRuns.length, + }; + return { successfulIterations: successfulRuns.length, failedIterations: serviceRuns.length - successfulRuns.length, + coldStart, fixed: { p95: fixedP95, p99: fixedP99, @@ -380,6 +417,8 @@ async function main() { for (let iteration = 1; iteration <= Number(config.iterationsPerPass || 1); iteration += 1) { console.log(`\n=== ${passName.toUpperCase()} :: ${service.name} :: iteration ${iteration} ===`); + const iterDir = path.join(rawRoot, passName, service.name, `iter-${iteration}`); + ensureDir(iterDir); try { if (passName === 'db') { @@ -389,15 +428,55 @@ async function main() { } } + let coldStart = null; + const shouldRunColdStart = Boolean(config.coldStart?.enabled) && + (Boolean(config.coldStart.runPerIteration) || iteration === 1); + + if (shouldRunColdStart) { + const cooldownSeconds = Number(config.coldStart?.cooldownSeconds || 0); + if (cooldownSeconds > 0) { + console.log(`Waiting ${cooldownSeconds}s cooldown before cold-start probe...`); + await sleep(cooldownSeconds * 1000); + } + + const coldStartFile = path.join(iterDir, 'cold-start.json'); + const coldStartSummary = runK6Phase({ + scenarioPath, + outputFile: coldStartFile, + env: buildK6Env({ + config, + service, + baseUrl, + mode: 'cold_start', + targetRps: 1, + duration: 
`${Number(config.coldStart?.maxWaitSeconds || 60)}s`, + }), + }); + + const readyMetric = parseMetric(coldStartSummary, 'cold_start_ready_ms'); + const readyMs = readyMetric?.avg ?? null; + coldStart = { + readyMs, + attempts: parseScalar(coldStartSummary, 'cold_start_attempts'), + firstSuccessStatus: parseScalar(coldStartSummary, 'cold_start_first_success_status'), + errorRate: parseRate(coldStartSummary, 'cold_start_error_rate') || 0, + duration: parseMetric(coldStartSummary, 'cold_start_req_duration'), + failed: !Number.isFinite(readyMs), + }; + + if (coldStart.failed) { + console.warn( + `[cold-start] ${passName}/${service.name}/iter-${iteration}: no successful response within max wait`, + ); + } + } + await runPrecheckWithRetry(baseUrl, config.basePath, service.authHeader || '', { - retries: 3, + retries: 6, initialDelayMs: 1000, - maxDelayMs: 5000, + maxDelayMs: 10000, }); - const iterDir = path.join(rawRoot, passName, service.name, `iter-${iteration}`); - ensureDir(iterDir); - const warmupFile = path.join(iterDir, 'warmup.json'); const warmupSummary = runK6Phase({ scenarioPath, @@ -487,6 +566,7 @@ async function main() { const serviceRun = { iteration, failed: false, + coldStart, warmup: { duration: parseMetric(warmupSummary, 'warmup_req_duration'), errorRate: parseRate(warmupSummary, 'warmup_error_rate') || 0, @@ -510,6 +590,7 @@ async function main() { iteration, failed: true, error: message, + coldStart: null, }); } } diff --git a/benchmarks/k6/scenarios.js b/benchmarks/k6/scenarios.js index 76700b66..1afe6942 100644 --- a/benchmarks/k6/scenarios.js +++ b/benchmarks/k6/scenarios.js @@ -1,6 +1,7 @@ import http from 'k6/http'; import { check } from 'k6'; -import { Rate, Trend } from 'k6/metrics'; +import { sleep } from 'k6'; +import { Gauge, Rate, Trend } from 'k6/metrics'; const RUN_MODE = (__ENV.RUN_MODE || 'fixed').trim(); const BASE_URL = (__ENV.BASE_URL || '').replace(/\/$/, ''); @@ -11,6 +12,10 @@ const PAGE_SIZE = Number(__ENV.PAGE_SIZE || 25); const 
SEED_FETCH_PAGES = Number(__ENV.SEED_FETCH_PAGES || 10); const SEED_PAGE_SIZE = Number(__ENV.SEED_PAGE_SIZE || 100); const AUTH_HEADER = __ENV.AUTH_HEADER || ''; +const COLD_START_ENDPOINT = __ENV.COLD_START_ENDPOINT || '/lamps?pageSize=1'; +const COLD_START_SUCCESS_STATUS = Number(__ENV.COLD_START_SUCCESS_STATUS || 200); +const COLD_START_MAX_WAIT_SECONDS = Number(__ENV.COLD_START_MAX_WAIT_SECONDS || 60); +const COLD_START_PROBE_INTERVAL_MS = Number(__ENV.COLD_START_PROBE_INTERVAL_MS || 500); const LIST_WEIGHT = Number(__ENV.LIST_WEIGHT || 50); const GET_WEIGHT = Number(__ENV.GET_WEIGHT || 20); @@ -29,20 +34,32 @@ if (!BASE_URL) { const requestDuration = new Trend(`${RUN_MODE}_req_duration`, true); const errorRate = new Rate(`${RUN_MODE}_error_rate`); +const coldStartReadyMs = new Trend('cold_start_ready_ms', true); +const coldStartAttempts = new Gauge('cold_start_attempts'); +const coldStartFirstSuccessStatus = new Gauge('cold_start_first_success_status'); export const options = { discardResponseBodies: false, summaryTrendStats: ['avg', 'min', 'med', 'max', 'p(90)', 'p(95)', 'p(99)'], - scenarios: { - main: { - executor: 'constant-arrival-rate', - rate: TARGET_RPS, - timeUnit: '1s', - duration: DURATION, - preAllocatedVUs: PRE_ALLOCATED_VUS, - maxVUs: MAX_VUS, + scenarios: RUN_MODE === 'cold_start' + ? 
{ + main: { + executor: 'per-vu-iterations', + vus: 1, + iterations: 1, + maxDuration: `${Math.max(10, COLD_START_MAX_WAIT_SECONDS + 10)}s`, + }, + } + : { + main: { + executor: 'constant-arrival-rate', + rate: TARGET_RPS, + timeUnit: '1s', + duration: DURATION, + preAllocatedVUs: PRE_ALLOCATED_VUS, + maxVUs: MAX_VUS, + }, }, - }, }; const headers = { @@ -208,6 +225,10 @@ function pickOperation() { } export function setup() { + if (RUN_MODE === 'cold_start') { + return { seedIds: [] }; + } + const seedIds = []; let cursor = null; @@ -246,7 +267,40 @@ export function setup() { return { seedIds }; } +function runColdStartProbe() { + const startedAtMs = Date.now(); + const deadlineMs = startedAtMs + Math.max(1000, COLD_START_MAX_WAIT_SECONDS * 1000); + let attempts = 0; + + while (Date.now() <= deadlineMs) { + const response = http.get(url(COLD_START_ENDPOINT), { headers }); + attempts += 1; + + const ok = response.status === COLD_START_SUCCESS_STATUS; + requestDuration.add(response.timings.duration); + errorRate.add(!ok); + + if (ok) { + const readyMs = Date.now() - startedAtMs; + coldStartReadyMs.add(readyMs); + coldStartAttempts.add(attempts); + coldStartFirstSuccessStatus.add(response.status); + return; + } + + sleep(Math.max(10, COLD_START_PROBE_INTERVAL_MS) / 1000); + } + + coldStartAttempts.add(attempts); + coldStartFirstSuccessStatus.add(0); +} + export default function (data) { + if (RUN_MODE === 'cold_start') { + runColdStartProbe(); + return; + } + const operation = pickOperation(); if (operation === 'list') { diff --git a/benchmarks/k6/summary.js b/benchmarks/k6/summary.js index 4fc73079..ae0cee5d 100644 --- a/benchmarks/k6/summary.js +++ b/benchmarks/k6/summary.js @@ -59,6 +59,22 @@ function writeSummary(report, outputFile) { lines.push(''); } + lines.push('## Cold Start Appendix'); + lines.push(''); + lines.push('| Pass | Service | Ready Time (ms) | Attempts | Error Rate | Samples (ok/failed) |'); + lines.push('|---|---|---:|---:|---:|---:|'); + for 
(const [passName, servicesMap] of Object.entries(report.aggregated || {})) { + for (const [serviceName, metrics] of Object.entries(servicesMap || {})) { + const cold = metrics.coldStart || {}; + const okSamples = Number.isFinite(cold.successfulColdSamples) ? cold.successfulColdSamples : 0; + const failedSamples = Number.isFinite(cold.failedColdSamples) ? cold.failedColdSamples : 0; + lines.push( + `| ${passName} | ${serviceName} | ${formatNumber(cold.readyMs)} | ${formatNumber(cold.attempts, 0)} | ${formatNumber((cold.errorRate ?? NaN) * 100, 3)}% | ${okSamples}/${failedSamples} |` + ); + } + } + lines.push(''); + const extremeRps = report.config?.extreme?.rps; const extremeLabel = Number.isFinite(extremeRps) ? `Extreme Load Appendix (${extremeRps} RPS)` From 1a54431a9f8e1e1d9d2a0c72e8c11f024f94831c Mon Sep 17 00:00:00 2001 From: Davide Mendolia Date: Sun, 15 Feb 2026 21:03:22 +0100 Subject: [PATCH 20/20] fix(benchmarks): avoid content-type on bodyless requests (#370) --- benchmarks/k6/run-benchmarks.js | 5 ++++- benchmarks/k6/scenarios.js | 10 ++++++---- 2 files changed, 10 insertions(+), 5 deletions(-) diff --git a/benchmarks/k6/run-benchmarks.js b/benchmarks/k6/run-benchmarks.js index 6c55b2e5..8d17adfc 100755 --- a/benchmarks/k6/run-benchmarks.js +++ b/benchmarks/k6/run-benchmarks.js @@ -90,10 +90,13 @@ function sleep(ms) { } async function httpJson(method, url, body, authHeader) { - const headers = { 'Content-Type': 'application/json' }; + const headers = {}; if (authHeader) { headers.Authorization = authHeader; } + if (body !== undefined && body !== null) { + headers['Content-Type'] = 'application/json'; + } const response = await fetch(url, { method, diff --git a/benchmarks/k6/scenarios.js b/benchmarks/k6/scenarios.js index 1afe6942..e8a4fa02 100644 --- a/benchmarks/k6/scenarios.js +++ b/benchmarks/k6/scenarios.js @@ -62,9 +62,7 @@ export const options = { }, }; -const headers = { - 'Content-Type': 'application/json', -}; +const headers = {}; if 
(AUTH_HEADER) { headers.Authorization = AUTH_HEADER; @@ -90,7 +88,11 @@ function track(resp, ok) { } function req(method, endpoint, body, expectedStatuses) { - const response = http.request(method, url(endpoint), body, { headers }); + const requestHeaders = { ...headers }; + if (body !== undefined && body !== null) { + requestHeaders['Content-Type'] = 'application/json'; + } + const response = http.request(method, url(endpoint), body, { headers: requestHeaders }); const ok = check(response, { [`${method} ${endpoint} status`]: (r) => expectedStatuses.includes(r.status), });