# GitHub Actions workflow (captured from the run view of
# "feat: add scheduled keep active step", PR #33).

---
name: Tests

# NOTE: generic YAML 1.1 parsers read the bare key `on` as boolean true;
# GitHub's loader handles it, so yamllint's `truthy` rule should be
# suppressed for this key.
on:
  push:
    tags:
      - "*"
    branches:
      - main
  pull_request:
    branches:
      - main
  workflow_dispatch:

permissions:
  contents: read

env:
  # Shared scratch file each validation step appends pass/fail lines to.
  RESULTS_FILE: /tmp/test_results.txt
jobs:
  # ============================================
  # INPUT VALIDATION - ENV VARIABLES (dry-run)
  # ============================================
  validation-env:
    name: "Validation: ENV"
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      # Shared shell helpers sourced by every test step below.
      # run_valid: command must exit 0; run_invalid: command must exit non-zero.
      # Each step calls check_step_result to propagate its own failures while
      # `if: always()` lets later steps still run.
      - name: Setup test helpers
        run: |
          cat > /tmp/test_helpers.sh << 'EOF'
          GREEN='\033[32m'
          RED='\033[31m'
          RESET='\033[0m'
          STEP_FAILED=0
          run_valid() {
            local name="$1"
            shift
            if "$@" > /dev/null 2>&1; then
              echo -e "${GREEN}✓ Passed${RESET}: $name"
              echo "pass:$name" >> "$RESULTS_FILE"
            else
              echo -e "${RED}✗ Failed${RESET}: $name"
              echo "fail:$name" >> "$RESULTS_FILE"
              STEP_FAILED=1
            fi
          }
          run_invalid() {
            local name="$1"
            shift
            if "$@" > /dev/null 2>&1; then
              echo -e "${RED}✗ Failed (should have been rejected)${RESET}: $name"
              echo "fail:$name" >> "$RESULTS_FILE"
              STEP_FAILED=1
            else
              echo -e "${GREEN}✓ Passed (correctly rejected)${RESET}: $name"
              echo "pass:$name" >> "$RESULTS_FILE"
            fi
          }
          check_step_result() {
            exit $STEP_FAILED
          }
          EOF
          touch "$RESULTS_FILE"
      - name: Build test image
        run: docker build -t diskmark:test .
      - name: "Default: configuration"
        if: always()
        run: |
          source /tmp/test_helpers.sh
          run_valid "Default configuration" docker run --rm -e DRY_RUN=1 diskmark:test
          check_step_result
      - name: "SIZE: valid values"
        if: always()
        run: |
          source /tmp/test_helpers.sh
          run_valid "SIZE=512K" docker run --rm -e DRY_RUN=1 -e SIZE=512K diskmark:test
          run_valid "SIZE=100M" docker run --rm -e DRY_RUN=1 -e SIZE=100M diskmark:test
          run_valid "SIZE=1G" docker run --rm -e DRY_RUN=1 -e SIZE=1G diskmark:test
          run_valid "SIZE=2T" docker run --rm -e DRY_RUN=1 -e SIZE=2T diskmark:test
          run_valid "SIZE=1P" docker run --rm -e DRY_RUN=1 -e SIZE=1P diskmark:test
          run_valid "SIZE=100m (lowercase)" docker run --rm -e DRY_RUN=1 -e SIZE=100m diskmark:test
          run_valid "SIZE=1g (lowercase)" docker run --rm -e DRY_RUN=1 -e SIZE=1g diskmark:test
          run_valid "SIZE=(empty, defaults to 1G)" docker run --rm -e DRY_RUN=1 -e SIZE= diskmark:test
          run_valid "SIZE=1 (bytes)" docker run --rm -e DRY_RUN=1 -e SIZE=1 diskmark:test
          run_valid "SIZE=999P" docker run --rm -e DRY_RUN=1 -e SIZE=999P diskmark:test
          check_step_result
      - name: "SIZE: invalid values"
        if: always()
        run: |
          source /tmp/test_helpers.sh
          run_invalid "SIZE=-1G" docker run --rm -e DRY_RUN=1 -e SIZE=-1G diskmark:test
          run_invalid "SIZE=0" docker run --rm -e DRY_RUN=1 -e SIZE=0 diskmark:test
          run_invalid "SIZE=1X (invalid unit)" docker run --rm -e DRY_RUN=1 -e SIZE=1X diskmark:test
          run_invalid "SIZE=abc (non-numeric)" docker run --rm -e DRY_RUN=1 -e SIZE=abc diskmark:test
          run_invalid "SIZE=2.5G (float)" docker run --rm -e DRY_RUN=1 -e SIZE=2.5G diskmark:test
          check_step_result
      - name: "PROFILE: valid values"
        if: always()
        run: |
          source /tmp/test_helpers.sh
          run_valid "PROFILE=auto" docker run --rm -e DRY_RUN=1 -e PROFILE=auto diskmark:test
          run_valid "PROFILE=default" docker run --rm -e DRY_RUN=1 -e PROFILE=default diskmark:test
          run_valid "PROFILE=nvme" docker run --rm -e DRY_RUN=1 -e PROFILE=nvme diskmark:test
          check_step_result
      - name: "PROFILE: invalid values"
        if: always()
        run: |
          source /tmp/test_helpers.sh
          run_invalid "PROFILE=fast" docker run --rm -e DRY_RUN=1 -e PROFILE=fast diskmark:test
          run_invalid "PROFILE=ssd" docker run --rm -e DRY_RUN=1 -e PROFILE=ssd diskmark:test
          check_step_result
      - name: "TARGET: valid values"
        if: always()
        run: |
          source /tmp/test_helpers.sh
          run_valid "TARGET=/disk" docker run --rm -e DRY_RUN=1 -e TARGET=/disk diskmark:test
          run_valid "TARGET=/tmp (tmpfs)" docker run --rm --tmpfs /tmp -e DRY_RUN=1 -e TARGET=/tmp diskmark:test
          check_step_result
      - name: "TARGET: invalid values"
        if: always()
        run: |
          source /tmp/test_helpers.sh
          run_invalid "TARGET=/ (root)" docker run --rm -e DRY_RUN=1 -e TARGET=/ diskmark:test
          run_invalid "TARGET=/nonexistent/readonly" docker run --rm -e DRY_RUN=1 -e TARGET=/nonexistent/readonly diskmark:test
          check_step_result
      - name: "IO: valid values"
        if: always()
        run: |
          source /tmp/test_helpers.sh
          run_valid "IO=direct" docker run --rm -e DRY_RUN=1 -e IO=direct diskmark:test
          run_valid "IO=buffered" docker run --rm -e DRY_RUN=1 -e IO=buffered diskmark:test
          check_step_result
      - name: "IO: invalid values"
        if: always()
        run: |
          source /tmp/test_helpers.sh
          run_invalid "IO=sync" docker run --rm -e DRY_RUN=1 -e IO=sync diskmark:test
          run_invalid "IO=async" docker run --rm -e DRY_RUN=1 -e IO=async diskmark:test
          check_step_result
      - name: "DATA: valid values"
        if: always()
        run: |
          source /tmp/test_helpers.sh
          run_valid "DATA=random" docker run --rm -e DRY_RUN=1 -e DATA=random diskmark:test
          run_valid "DATA=rand" docker run --rm -e DRY_RUN=1 -e DATA=rand diskmark:test
          run_valid "DATA=zero" docker run --rm -e DRY_RUN=1 -e DATA=zero diskmark:test
          run_valid "DATA=0" docker run --rm -e DRY_RUN=1 -e DATA=0 diskmark:test
          run_valid "DATA=0x00" docker run --rm -e DRY_RUN=1 -e DATA=0x00 diskmark:test
          check_step_result
      - name: "DATA: invalid values"
        if: always()
        run: |
          source /tmp/test_helpers.sh
          run_invalid "DATA=ones" docker run --rm -e DRY_RUN=1 -e DATA=ones diskmark:test
          run_invalid "DATA=0xFF" docker run --rm -e DRY_RUN=1 -e DATA=0xFF diskmark:test
          check_step_result
      - name: "WARMUP: valid values"
        if: always()
        run: |
          source /tmp/test_helpers.sh
          run_valid "WARMUP=0" docker run --rm -e DRY_RUN=1 -e WARMUP=0 diskmark:test
          run_valid "WARMUP=1" docker run --rm -e DRY_RUN=1 -e WARMUP=1 diskmark:test
          run_valid "WARMUP=1 WARMUP_SIZE=8M" docker run --rm -e DRY_RUN=1 -e WARMUP=1 -e WARMUP_SIZE=8M diskmark:test
          run_valid "WARMUP=1 WARMUP_SIZE=64M" docker run --rm -e DRY_RUN=1 -e WARMUP=1 -e WARMUP_SIZE=64M diskmark:test
          run_valid "WARMUP=1 WARMUP_SIZE=128M" docker run --rm -e DRY_RUN=1 -e WARMUP=1 -e WARMUP_SIZE=128M diskmark:test
          check_step_result
      - name: "WARMUP: invalid values"
        if: always()
        run: |
          source /tmp/test_helpers.sh
          run_invalid "WARMUP=2" docker run --rm -e DRY_RUN=1 -e WARMUP=2 diskmark:test
          run_invalid "WARMUP=yes" docker run --rm -e DRY_RUN=1 -e WARMUP=yes diskmark:test
          run_invalid "WARMUP=-1" docker run --rm -e DRY_RUN=1 -e WARMUP=-1 diskmark:test
          run_invalid "WARMUP_SIZE=invalid" docker run --rm -e DRY_RUN=1 -e WARMUP=1 -e WARMUP_SIZE=invalid diskmark:test
          run_invalid "WARMUP_SIZE=-8M" docker run --rm -e DRY_RUN=1 -e WARMUP=1 -e WARMUP_SIZE=-8M diskmark:test
          check_step_result
      - name: "RUNTIME: valid values"
        if: always()
        run: |
          source /tmp/test_helpers.sh
          run_valid "RUNTIME=500ms" docker run --rm -e DRY_RUN=1 -e RUNTIME=500ms diskmark:test
          run_valid "RUNTIME=5s" docker run --rm -e DRY_RUN=1 -e RUNTIME=5s diskmark:test
          run_valid "RUNTIME=2m" docker run --rm -e DRY_RUN=1 -e RUNTIME=2m diskmark:test
          run_valid "RUNTIME=1h" docker run --rm -e DRY_RUN=1 -e RUNTIME=1h diskmark:test
          run_valid "RUNTIME=1ms" docker run --rm -e DRY_RUN=1 -e RUNTIME=1ms diskmark:test
          run_valid "RUNTIME=999h" docker run --rm -e DRY_RUN=1 -e RUNTIME=999h diskmark:test
          check_step_result
      - name: "RUNTIME: invalid values"
        if: always()
        run: |
          source /tmp/test_helpers.sh
          run_invalid "RUNTIME=5 (no unit)" docker run --rm -e DRY_RUN=1 -e RUNTIME=5 diskmark:test
          run_invalid "RUNTIME=5sec" docker run --rm -e DRY_RUN=1 -e RUNTIME=5sec diskmark:test
          run_invalid "RUNTIME=-5s" docker run --rm -e DRY_RUN=1 -e RUNTIME=-5s diskmark:test
          check_step_result
      - name: "LOOPS: valid values"
        if: always()
        run: |
          source /tmp/test_helpers.sh
          run_valid "LOOPS=1" docker run --rm -e DRY_RUN=1 -e LOOPS=1 diskmark:test
          run_valid "LOOPS=5" docker run --rm -e DRY_RUN=1 -e LOOPS=5 diskmark:test
          run_valid "LOOPS=100" docker run --rm -e DRY_RUN=1 -e LOOPS=100 diskmark:test
          run_valid "LOOPS=9999" docker run --rm -e DRY_RUN=1 -e LOOPS=9999 diskmark:test
          run_valid "LOOPS+RUNTIME (hybrid mode)" docker run --rm -e DRY_RUN=1 -e LOOPS=5 -e RUNTIME=10s diskmark:test
          check_step_result
      - name: "LOOPS: invalid values"
        if: always()
        run: |
          source /tmp/test_helpers.sh
          run_invalid "LOOPS=0" docker run --rm -e DRY_RUN=1 -e LOOPS=0 diskmark:test
          run_invalid "LOOPS=-1" docker run --rm -e DRY_RUN=1 -e LOOPS=-1 diskmark:test
          run_invalid "LOOPS=abc" docker run --rm -e DRY_RUN=1 -e LOOPS=abc diskmark:test
          check_step_result
      - name: "JOB: valid values"
        if: always()
        run: |
          source /tmp/test_helpers.sh
          run_valid "JOB=SEQ1MQ8T1" docker run --rm -e DRY_RUN=1 -e JOB=SEQ1MQ8T1 diskmark:test
          run_valid "JOB=SEQ128KQ32T1" docker run --rm -e DRY_RUN=1 -e JOB=SEQ128KQ32T1 diskmark:test
          run_valid "JOB=RND4KQ32T16" docker run --rm -e DRY_RUN=1 -e JOB=RND4KQ32T16 diskmark:test
          run_valid "JOB=RND4KQ1T1" docker run --rm -e DRY_RUN=1 -e JOB=RND4KQ1T1 diskmark:test
          run_valid "JOB=SEQ4MQ1T4" docker run --rm -e DRY_RUN=1 -e JOB=SEQ4MQ1T4 diskmark:test
          check_step_result
      - name: "JOB: invalid values"
        if: always()
        run: |
          source /tmp/test_helpers.sh
          run_invalid "JOB=INVALID" docker run --rm -e DRY_RUN=1 -e JOB=INVALID diskmark:test
          run_invalid "JOB=SEQ1M (missing Q and T)" docker run --rm -e DRY_RUN=1 -e JOB=SEQ1M diskmark:test
          run_invalid "JOB=RND4KQ32 (missing T)" docker run --rm -e DRY_RUN=1 -e JOB=RND4KQ32 diskmark:test
          run_invalid "JOB=XXX4KQ32T1 (invalid prefix)" docker run --rm -e DRY_RUN=1 -e JOB=XXX4KQ32T1 diskmark:test
          check_step_result
      - name: "FORMAT: valid values"
        if: always()
        run: |
          source /tmp/test_helpers.sh
          run_valid "FORMAT unset (default)" docker run --rm -e DRY_RUN=1 diskmark:test
          run_valid "FORMAT=json" docker run --rm -e DRY_RUN=1 -e FORMAT=json diskmark:test
          run_valid "FORMAT=yaml" docker run --rm -e DRY_RUN=1 -e FORMAT=yaml diskmark:test
          run_valid "FORMAT=xml" docker run --rm -e DRY_RUN=1 -e FORMAT=xml diskmark:test
          check_step_result
      - name: "FORMAT: invalid values"
        if: always()
        run: |
          source /tmp/test_helpers.sh
          run_invalid "FORMAT=txt" docker run --rm -e DRY_RUN=1 -e FORMAT=txt diskmark:test
          run_invalid "FORMAT=csv" docker run --rm -e DRY_RUN=1 -e FORMAT=csv diskmark:test
          run_invalid "FORMAT=html" docker run --rm -e DRY_RUN=1 -e FORMAT=html diskmark:test
          run_invalid "FORMAT=TEXT" docker run --rm -e DRY_RUN=1 -e FORMAT=TEXT diskmark:test
          check_step_result
      - name: "UPDATE_CHECK: valid values"
        if: always()
        run: |
          source /tmp/test_helpers.sh
          run_valid "UPDATE_CHECK=0" docker run --rm -e DRY_RUN=1 -e UPDATE_CHECK=0 diskmark:test
          run_valid "UPDATE_CHECK=1" docker run --rm -e DRY_RUN=1 -e UPDATE_CHECK=1 diskmark:test
          check_step_result
      - name: "UPDATE_CHECK: invalid values"
        if: always()
        run: |
          source /tmp/test_helpers.sh
          run_invalid "UPDATE_CHECK=2" docker run --rm -e DRY_RUN=1 -e UPDATE_CHECK=2 diskmark:test
          run_invalid "UPDATE_CHECK=yes" docker run --rm -e DRY_RUN=1 -e UPDATE_CHECK=yes diskmark:test
          run_invalid "UPDATE_CHECK=-1" docker run --rm -e DRY_RUN=1 -e UPDATE_CHECK=-1 diskmark:test
          check_step_result
      - name: "DRY_RUN: invalid values"
        if: always()
        run: |
          source /tmp/test_helpers.sh
          run_invalid "DRY_RUN=yes" docker run --rm -e DRY_RUN=yes diskmark:test
          check_step_result
      - name: "Combined: valid parameters"
        if: always()
        run: |
          source /tmp/test_helpers.sh
          run_valid "SIZE+WARMUP+PROFILE+IO+DATA" docker run --rm -e DRY_RUN=1 -e SIZE=2G -e WARMUP=1 -e WARMUP_SIZE=64M -e PROFILE=nvme -e IO=direct -e DATA=random diskmark:test
          run_valid "SIZE+LOOPS+DATA+IO" docker run --rm -e DRY_RUN=1 -e SIZE=512M -e LOOPS=3 -e DATA=zero -e IO=buffered diskmark:test
          check_step_result
      - name: Test Summary
        if: always()
        run: |
          echo ""
          echo "============================================"
          echo " ENV VALIDATION TEST SUMMARY"
          echo "============================================"
          echo ""
          # grep -c prints the count itself (including 0 on no match, exit 1),
          # so an `|| echo 0` fallback would append a second line and break the
          # arithmetic below. Default to 0 only when grep produced no output
          # (e.g. unreadable file).
          PASS_COUNT=$(grep -c "^pass:" "$RESULTS_FILE" 2>/dev/null) || PASS_COUNT="${PASS_COUNT:-0}"
          FAIL_COUNT=$(grep -c "^fail:" "$RESULTS_FILE" 2>/dev/null) || FAIL_COUNT="${FAIL_COUNT:-0}"
          TOTAL=$((PASS_COUNT + FAIL_COUNT))
          echo "✅ Passed: $PASS_COUNT"
          echo "❌ Failed: $FAIL_COUNT"
          echo "━━━━━━━━━━━━━━━━━━━━━━━━"
          echo " Total: $TOTAL"
          echo ""
          if [ "$FAIL_COUNT" -gt 0 ]; then
            echo "Failed tests:"
            grep "^fail:" "$RESULTS_FILE" | cut -d: -f2- | while read -r test; do
              echo " ❌ $test"
            done
            exit 1
          else
            echo "All tests passed! 🎉"
          fi
# ============================================
# INPUT VALIDATION - CLI ARGUMENTS (dry-run)
# ============================================
validation-cli:
name: "Validation: CLI"
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Setup test helpers
run: |
cat > /tmp/test_helpers.sh << 'EOF'
GREEN='\033[32m'
RED='\033[31m'
RESET='\033[0m'
STEP_FAILED=0
run_valid() {
local name="$1"
shift
if "$@" > /dev/null 2>&1; then
echo -e "${GREEN}✓ Passed${RESET}: $name"
echo "pass:$name" >> "$RESULTS_FILE"
else
echo -e "${RED}✗ Failed${RESET}: $name"
echo "fail:$name" >> "$RESULTS_FILE"
STEP_FAILED=1
fi
}
run_invalid() {
local name="$1"
shift
if "$@" > /dev/null 2>&1; then
echo -e "${RED}✗ Failed (should have been rejected)${RESET}: $name"
echo "fail:$name" >> "$RESULTS_FILE"
STEP_FAILED=1
else
echo -e "${GREEN}✓ Passed (correctly rejected)${RESET}: $name"
echo "pass:$name" >> "$RESULTS_FILE"
fi
}
check_step_result() {
exit $STEP_FAILED
}
EOF
touch "$RESULTS_FILE"
- name: Build test image
run: docker build -t diskmark:test .
- name: "Help/Version: options"
if: always()
run: |
source /tmp/test_helpers.sh
run_valid "--help" docker run --rm diskmark:test --help
run_valid "-h" docker run --rm diskmark:test -h
run_valid "--version" docker run --rm diskmark:test --version
run_valid "-v" docker run --rm diskmark:test -v
check_step_result
- name: "SIZE: long options"
if: always()
run: |
source /tmp/test_helpers.sh
run_valid "--size 1G --dry-run" docker run --rm diskmark:test --size 1G --dry-run
run_valid "--size=512M --dry-run" docker run --rm diskmark:test --size=512M --dry-run
check_step_result
- name: "SIZE: short options"
if: always()
run: |
source /tmp/test_helpers.sh
run_valid "-s 1G -n" docker run --rm diskmark:test -s 1G -n
check_step_result
- name: "PROFILE: long options"
if: always()
run: |
source /tmp/test_helpers.sh
run_valid "--profile nvme --dry-run" docker run --rm diskmark:test --profile nvme --dry-run
run_valid "--profile=default --dry-run" docker run --rm diskmark:test --profile=default --dry-run
check_step_result
- name: "PROFILE: short options"
if: always()
run: |
source /tmp/test_helpers.sh
run_valid "-p nvme -n" docker run --rm diskmark:test -p nvme -n
check_step_result
- name: "TARGET: long options"
if: always()
run: |
source /tmp/test_helpers.sh
run_valid "--target /disk --dry-run" docker run --rm diskmark:test --target /disk --dry-run
run_valid "--target=/disk --dry-run" docker run --rm diskmark:test --target=/disk --dry-run
check_step_result
- name: "TARGET: short options"
if: always()
run: |
source /tmp/test_helpers.sh
run_valid "-t /disk -n" docker run --rm diskmark:test -t /disk -n
check_step_result
- name: "IO: long options"
if: always()
run: |
source /tmp/test_helpers.sh
run_valid "--io buffered --dry-run" docker run --rm diskmark:test --io buffered --dry-run
run_valid "--io=direct --dry-run" docker run --rm diskmark:test --io=direct --dry-run
check_step_result
- name: "IO: short options"
if: always()
run: |
source /tmp/test_helpers.sh
run_valid "-i buffered -n" docker run --rm diskmark:test -i buffered -n
check_step_result
- name: "DATA: long options"
if: always()
run: |
source /tmp/test_helpers.sh
run_valid "--data zero --dry-run" docker run --rm diskmark:test --data zero --dry-run
run_valid "--data=random --dry-run" docker run --rm diskmark:test --data=random --dry-run
check_step_result
- name: "DATA: short options"
if: always()
run: |
source /tmp/test_helpers.sh
run_valid "-d zero -n" docker run --rm diskmark:test -d zero -n
check_step_result
- name: "WARMUP: flag options"
if: always()
run: |
source /tmp/test_helpers.sh
run_valid "--warmup --dry-run" docker run --rm diskmark:test --warmup --dry-run
run_valid "-w --dry-run" docker run --rm diskmark:test -w --dry-run
run_valid "--no-warmup --dry-run" docker run --rm diskmark:test --no-warmup --dry-run
run_valid "--warmup-size 64M --dry-run" docker run --rm diskmark:test --warmup-size 64M --dry-run
run_valid "--warmup-size=128M --dry-run" docker run --rm diskmark:test --warmup-size=128M --dry-run
check_step_result
- name: "RUNTIME: long options"
if: always()
run: |
source /tmp/test_helpers.sh
run_valid "--runtime 5s --dry-run" docker run --rm diskmark:test --runtime 5s --dry-run
run_valid "--runtime=500ms --dry-run" docker run --rm diskmark:test --runtime=500ms --dry-run
check_step_result
- name: "RUNTIME: short options"
if: always()
run: |
source /tmp/test_helpers.sh
run_valid "-r 5s -n" docker run --rm diskmark:test -r 5s -n
check_step_result
- name: "LOOPS: long options"
if: always()
run: |
source /tmp/test_helpers.sh
run_valid "--loops 3 --dry-run" docker run --rm diskmark:test --loops 3 --dry-run
run_valid "--loops=5 --dry-run" docker run --rm diskmark:test --loops=5 --dry-run
check_step_result
- name: "LOOPS: short options"
if: always()
run: |
source /tmp/test_helpers.sh
run_valid "-l 3 -n" docker run --rm diskmark:test -l 3 -n
check_step_result
- name: "JOB: long options"
if: always()
run: |
source /tmp/test_helpers.sh
run_valid "--job SEQ1MQ1T1 --dry-run" docker run --rm diskmark:test --job SEQ1MQ1T1 --dry-run
run_valid "--job=RND4KQ32T1 --dry-run" docker run --rm diskmark:test --job=RND4KQ32T1 --dry-run
check_step_result
- name: "JOB: short options"
if: always()
run: |
source /tmp/test_helpers.sh
run_valid "-j SEQ1MQ1T1 -n" docker run --rm diskmark:test -j SEQ1MQ1T1 -n
check_step_result
- name: "FORMAT: long options"
if: always()
run: |
source /tmp/test_helpers.sh
run_valid "--format json --dry-run" docker run --rm diskmark:test --format json --dry-run
run_valid "--format=yaml --dry-run" docker run --rm diskmark:test --format=yaml --dry-run
check_step_result
- name: "FORMAT: short options"
if: always()
run: |
source /tmp/test_helpers.sh
run_valid "-f json -n" docker run --rm diskmark:test -f json -n
check_step_result
- name: "UPDATE_CHECK: flag options"
if: always()
run: |
source /tmp/test_helpers.sh
run_valid "--no-update-check --dry-run" docker run --rm diskmark:test --no-update-check --dry-run
run_valid "-u --dry-run" docker run --rm diskmark:test -u --dry-run
check_step_result
- name: "Display: flag options"
if: always()
run: |
source /tmp/test_helpers.sh
run_valid "--color --dry-run" docker run --rm diskmark:test --color --dry-run
run_valid "--no-color --dry-run" docker run --rm diskmark:test --no-color --dry-run
run_valid "--emoji --dry-run" docker run --rm diskmark:test --emoji --dry-run
run_valid "--no-emoji --dry-run" docker run --rm diskmark:test --no-emoji --dry-run
check_step_result
- name: "Combined: mixed options"
if: always()
run: |
source /tmp/test_helpers.sh
run_valid "Multiple short options" docker run --rm diskmark:test -s 1G -p nvme -w -r 5s -n
run_valid "Mixed long and short" docker run --rm diskmark:test --size 1G -p nvme --warmup -n
run_valid "All with equals" docker run --rm diskmark:test --size=1G --profile=nvme --runtime=5s --dry-run
check_step_result
- name: "Override: CLI overrides ENV"
if: always()
run: |
source /tmp/test_helpers.sh
run_valid "CLI --size overrides env SIZE" docker run --rm -e SIZE=100G diskmark:test --size 1G --dry-run
run_valid "CLI --profile overrides env PROFILE" docker run --rm -e PROFILE=default diskmark:test --profile nvme --dry-run
run_valid "CLI --target overrides env TARGET" docker run --rm -e TARGET=/tmp diskmark:test --target /disk --dry-run
check_step_result
- name: "Invalid: rejected options"
if: always()
run: |
source /tmp/test_helpers.sh
run_invalid "Unknown option --invalid" docker run --rm diskmark:test --invalid
run_invalid "Unknown short option -x" docker run --rm diskmark:test -x
run_invalid "Missing value for --size" docker run --rm diskmark:test --size
run_invalid "Missing value for -s" docker run --rm diskmark:test -s
check_step_result
- name: Test Summary
if: always()
run: |
echo ""
echo "============================================"
echo " CLI VALIDATION TEST SUMMARY"
echo "============================================"
echo ""
PASS_COUNT=$(grep -c "^pass:" "$RESULTS_FILE" 2>/dev/null || echo 0)
FAIL_COUNT=$(grep -c "^fail:" "$RESULTS_FILE" 2>/dev/null || echo 0)
TOTAL=$((PASS_COUNT + FAIL_COUNT))
echo "✅ Passed: $PASS_COUNT"
echo "❌ Failed: $FAIL_COUNT"
echo "━━━━━━━━━━━━━━━━━━━━━━━━"
echo " Total: $TOTAL"
echo ""
if [ "$FAIL_COUNT" -gt 0 ]; then
echo "Failed tests:"
grep "^fail:" "$RESULTS_FILE" | cut -d: -f2- | while read -r test; do
echo " ❌ $test"
done
exit 1
else
echo "All tests passed! 🎉"
fi
# ============================================
# BENCHMARK TESTS (actual execution, concurrent)
# ============================================
benchmark-profiles:
name: "Benchmark: Profiles"
runs-on: ubuntu-latest
needs: [validation-env, validation-cli]
env:
RESULTS_FILE: /tmp/benchmark_results.txt
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Setup test helpers
run: |
cat > /tmp/test_helpers.sh << 'EOF'
GREEN='\033[32m'
RED='\033[31m'
RESET='\033[0m'
STEP_FAILED=0
run_profile() {
local name="$1"
shift
local cmd=("$@")
local new_cmd=()
for arg in "${cmd[@]}"; do
new_cmd+=("$arg")
if [[ "$arg" == "--rm" ]]; then
new_cmd+=("-e" "UPDATE_CHECK=0" "-e" "FORMAT=json" "-e" "RUNTIME=500ms")
fi
done
if "${new_cmd[@]}" > /dev/null 2>&1; then
echo -e "${GREEN}✓ Passed${RESET}: $name"
echo "pass:$name" >> "$RESULTS_FILE"
else
echo -e "${RED}✗ Failed${RESET}: $name"
echo "fail:$name" >> "$RESULTS_FILE"
STEP_FAILED=1
fi
}
check_step_result() {
exit $STEP_FAILED
}
EOF
touch "$RESULTS_FILE"
- name: Build test image
run: docker build -t diskmark:test .
- name: "PROFILE: auto"
run: |
source /tmp/test_helpers.sh
run_profile "PROFILE=auto" docker run --rm -e SIZE=8M -e PROFILE=auto diskmark:test
check_step_result
- name: "PROFILE: default"
run: |
source /tmp/test_helpers.sh
run_profile "PROFILE=default" docker run --rm -e SIZE=8M -e PROFILE=default diskmark:test
check_step_result
- name: "PROFILE: nvme"
run: |
source /tmp/test_helpers.sh
run_profile "PROFILE=nvme" docker run --rm -e SIZE=8M -e PROFILE=nvme diskmark:test
check_step_result
- name: Test Summary
if: always()
run: |
echo ""
echo "============================================"
echo " PROFILE BENCHMARK SUMMARY"
echo "============================================"
PASS_COUNT=$(grep -c "^pass:" "$RESULTS_FILE" 2>/dev/null || echo 0)
FAIL_COUNT=$(grep -c "^fail:" "$RESULTS_FILE" 2>/dev/null || echo 0)
echo "✅ Passed: $PASS_COUNT | ❌ Failed: $FAIL_COUNT"
[ "$FAIL_COUNT" -gt 0 ] && exit 1 || echo "All tests passed! 🎉"
benchmark-params:
name: "Benchmark: Parameters"
runs-on: ubuntu-latest
needs: [validation-env, validation-cli]
env:
RESULTS_FILE: /tmp/benchmark_results.txt
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Setup test helpers
run: |
cat > /tmp/test_helpers.sh << 'EOF'
GREEN='\033[32m'
RED='\033[31m'
RESET='\033[0m'
STEP_FAILED=0
run_fast() {
local name="$1"
shift
local cmd=("$@")
local new_cmd=()
for arg in "${cmd[@]}"; do
new_cmd+=("$arg")
if [[ "$arg" == "--rm" ]]; then
new_cmd+=("-e" "UPDATE_CHECK=0" "-e" "JOB=SEQ1MQ1T1" "-e" "FORMAT=json" "-e" "RUNTIME=100ms")
fi
done
if "${new_cmd[@]}" > /dev/null 2>&1; then
echo -e "${GREEN}✓ Passed${RESET}: $name"
echo "pass:$name" >> "$RESULTS_FILE"
else
echo -e "${RED}✗ Failed${RESET}: $name"
echo "fail:$name" >> "$RESULTS_FILE"
STEP_FAILED=1
fi
}
check_step_result() {
exit $STEP_FAILED
}
EOF
touch "$RESULTS_FILE"
- name: Build test image
run: docker build -t diskmark:test .
- name: "IO: modes"
run: |
source /tmp/test_helpers.sh
run_fast "IO=direct" docker run --rm -e SIZE=8M -e IO=direct diskmark:test
run_fast "IO=buffered" docker run --rm -e SIZE=8M -e IO=buffered diskmark:test
check_step_result
- name: "DATA: patterns"
run: |
source /tmp/test_helpers.sh
run_fast "DATA=random" docker run --rm -e SIZE=8M -e DATA=random diskmark:test
run_fast "DATA=zero" docker run --rm -e SIZE=8M -e DATA=zero diskmark:test
run_fast "DATA=0x00" docker run --rm -e SIZE=8M -e DATA=0x00 diskmark:test
check_step_result
- name: "SIZE: formats"
run: |
source /tmp/test_helpers.sh
run_fast "SIZE=1048576 (bytes)" docker run --rm -e SIZE=1048576 diskmark:test
run_fast "SIZE=512K" docker run --rm -e SIZE=512K diskmark:test
run_fast "SIZE=8M" docker run --rm -e SIZE=8M diskmark:test
run_fast "SIZE=8m (lowercase)" docker run --rm -e SIZE=8m diskmark:test
check_step_result
- name: "WARMUP: options"
run: |
source /tmp/test_helpers.sh
run_fast "WARMUP=0" docker run --rm -e SIZE=8M -e WARMUP=0 diskmark:test
run_fast "WARMUP=1" docker run --rm -e SIZE=8M -e WARMUP=1 diskmark:test
run_fast "WARMUP=1 WARMUP_SIZE=2M" docker run --rm -e SIZE=8M -e WARMUP=1 -e WARMUP_SIZE=2M diskmark:test
check_step_result
- name: "RUNTIME: formats"
run: |
source /tmp/test_helpers.sh
run_fast "RUNTIME=50ms" docker run --rm -e SIZE=8M -e RUNTIME=50ms diskmark:test
run_fast "RUNTIME=100ms" docker run --rm -e SIZE=8M -e RUNTIME=100ms diskmark:test
check_step_result
- name: "LOOPS: counts"
run: |
source /tmp/test_helpers.sh
run_fast "LOOPS=1" docker run --rm -e SIZE=8M -e LOOPS=1 diskmark:test
run_fast "LOOPS=2" docker run --rm -e SIZE=8M -e LOOPS=2 diskmark:test
check_step_result
- name: "JOB: patterns"
run: |
source /tmp/test_helpers.sh
run_fast "JOB=SEQ128KQ1T1" docker run --rm -e SIZE=8M -e JOB=SEQ128KQ1T1 diskmark:test
run_fast "JOB=SEQ1MQ8T1" docker run --rm -e SIZE=8M -e JOB=SEQ1MQ8T1 diskmark:test
run_fast "JOB=RND4KQ1T1" docker run --rm -e SIZE=8M -e JOB=RND4KQ1T1 diskmark:test
run_fast "JOB=RND4KQ32T1" docker run --rm -e SIZE=8M -e JOB=RND4KQ32T1 diskmark:test
run_fast "JOB=RND4KQ1T4" docker run --rm -e SIZE=8M -e JOB=RND4KQ1T4 diskmark:test
check_step_result
- name: Test Summary
if: always()
run: |
echo ""
echo "============================================"
echo " PARAMETER BENCHMARK SUMMARY"
echo "============================================"
PASS_COUNT=$(grep -c "^pass:" "$RESULTS_FILE" 2>/dev/null || echo 0)
FAIL_COUNT=$(grep -c "^fail:" "$RESULTS_FILE" 2>/dev/null || echo 0)
echo "✅ Passed: $PASS_COUNT | ❌ Failed: $FAIL_COUNT"
[ "$FAIL_COUNT" -gt 0 ] && exit 1 || echo "All tests passed! 🎉"
benchmark-formats:
name: "Benchmark: Formats"
runs-on: ubuntu-latest
needs: [validation-env, validation-cli]
env:
RESULTS_FILE: /tmp/benchmark_results.txt
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Setup test helpers
run: |
cat > /tmp/test_helpers.sh << 'EOF'
GREEN='\033[32m'
RED='\033[31m'
RESET='\033[0m'
STEP_FAILED=0
EOF
touch "$RESULTS_FILE"
- name: Build test image
run: docker build -t diskmark:test .
- name: Install PyYAML
run: pip install pyyaml
- name: "FORMAT: json"
run: |
source /tmp/test_helpers.sh
docker run --rm -e SIZE=8M -e RUNTIME=100ms -e JOB=SEQ1MQ1T1 -e FORMAT=json -e UPDATE_CHECK=0 diskmark:test > /tmp/output.json
if python3 -c "import json; json.load(open('/tmp/output.json'))"; then
echo -e "${GREEN}✓ Passed${RESET}: FORMAT=json produces valid JSON"
echo "pass:FORMAT=json valid" >> "$RESULTS_FILE"
else
echo -e "${RED}✗ Failed${RESET}: FORMAT=json produces valid JSON"
echo "fail:FORMAT=json valid" >> "$RESULTS_FILE"
exit 1
fi
if grep -q '"results"' /tmp/output.json && grep -q '"configuration"' /tmp/output.json; then
echo -e "${GREEN}✓ Passed${RESET}: FORMAT=json contains expected structure"
echo "pass:FORMAT=json structure" >> "$RESULTS_FILE"
else
echo -e "${RED}✗ Failed${RESET}: FORMAT=json contains expected structure"
echo "fail:FORMAT=json structure" >> "$RESULTS_FILE"
exit 1
fi
- name: "FORMAT: yaml"
run: |
source /tmp/test_helpers.sh
docker run --rm -e SIZE=8M -e RUNTIME=100ms -e JOB=SEQ1MQ1T1 -e FORMAT=yaml -e UPDATE_CHECK=0 diskmark:test > /tmp/output.yaml
if python3 -c "import yaml; yaml.safe_load(open('/tmp/output.yaml'))"; then
echo -e "${GREEN}✓ Passed${RESET}: FORMAT=yaml produces valid YAML"
echo "pass:FORMAT=yaml valid" >> "$RESULTS_FILE"
else
echo -e "${RED}✗ Failed${RESET}: FORMAT=yaml produces valid YAML"
echo "fail:FORMAT=yaml valid" >> "$RESULTS_FILE"
exit 1
fi
if grep -q 'results:' /tmp/output.yaml && grep -q 'configuration:' /tmp/output.yaml; then
echo -e "${GREEN}✓ Passed${RESET}: FORMAT=yaml contains expected structure"
echo "pass:FORMAT=yaml structure" >> "$RESULTS_FILE"
else
echo -e "${RED}✗ Failed${RESET}: FORMAT=yaml contains expected structure"
echo "fail:FORMAT=yaml structure" >> "$RESULTS_FILE"
exit 1
fi
- name: "FORMAT: xml"
run: |
source /tmp/test_helpers.sh
docker run --rm -e SIZE=8M -e RUNTIME=100ms -e JOB=SEQ1MQ1T1 -e FORMAT=xml -e UPDATE_CHECK=0 diskmark:test > /tmp/output.xml
if python3 -c "import xml.etree.ElementTree as ET; ET.parse('/tmp/output.xml')"; then
echo -e "${GREEN}✓ Passed${RESET}: FORMAT=xml produces valid XML"
echo "pass:FORMAT=xml valid" >> "$RESULTS_FILE"
else
echo -e "${RED}✗ Failed${RESET}: FORMAT=xml produces valid XML"
echo "fail:FORMAT=xml valid" >> "$RESULTS_FILE"
exit 1
fi
if grep -q '<results>' /tmp/output.xml && grep -q '<configuration>' /tmp/output.xml; then
echo -e "${GREEN}✓ Passed${RESET}: FORMAT=xml contains expected structure"
echo "pass:FORMAT=xml structure" >> "$RESULTS_FILE"
else
echo -e "${RED}✗ Failed${RESET}: FORMAT=xml contains expected structure"
echo "fail:FORMAT=xml structure" >> "$RESULTS_FILE"
exit 1
fi
- name: Test Summary
if: always()
run: |
echo ""
echo "============================================"
echo " FORMAT BENCHMARK SUMMARY"
echo "============================================"
PASS_COUNT=$(grep -c "^pass:" "$RESULTS_FILE" 2>/dev/null || echo 0)
FAIL_COUNT=$(grep -c "^fail:" "$RESULTS_FILE" 2>/dev/null || echo 0)
echo "✅ Passed: $PASS_COUNT | ❌ Failed: $FAIL_COUNT"
[ "$FAIL_COUNT" -gt 0 ] && exit 1 || echo "All tests passed! 🎉"
benchmark-edge:
name: "Benchmark: Edge Cases"
runs-on: ubuntu-latest
needs: [validation-env, validation-cli]
env:
RESULTS_FILE: /tmp/benchmark_results.txt
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Setup test helpers
run: |
cat > /tmp/test_helpers.sh << 'EOF'
GREEN='\033[32m'
RED='\033[31m'
RESET='\033[0m'
STEP_FAILED=0
run_fast() {
local name="$1"
shift
local cmd=("$@")
local new_cmd=()
for arg in "${cmd[@]}"; do
new_cmd+=("$arg")
if [[ "$arg" == "--rm" ]]; then
new_cmd+=("-e" "UPDATE_CHECK=0" "-e" "JOB=SEQ1MQ1T1" "-e" "FORMAT=json" "-e" "RUNTIME=100ms")
fi
done
if "${new_cmd[@]}" > /dev/null 2>&1; then
echo -e "${GREEN}✓ Passed${RESET}: $name"
echo "pass:$name" >> "$RESULTS_FILE"
else
echo -e "${RED}✗ Failed${RESET}: $name"
echo "fail:$name" >> "$RESULTS_FILE"
STEP_FAILED=1
fi
}
check_step_result() {
exit $STEP_FAILED
}
EOF
touch "$RESULTS_FILE"
- name: Build test image
run: docker build -t diskmark:test .
- name: "Edge: combined parameters"
run: |
source /tmp/test_helpers.sh
run_fast "buffered+warmup+zero" docker run --rm -e SIZE=8M -e IO=buffered -e WARMUP=1 -e DATA=zero diskmark:test
run_fast "job+warmup+random" docker run --rm -e SIZE=8M -e JOB=SEQ1MQ1T1 -e WARMUP=1 -e DATA=random diskmark:test
run_fast "SIZE<WARMUP_SIZE" docker run --rm -e SIZE=1M -e WARMUP=1 -e WARMUP_SIZE=4M diskmark:test
run_fast "minimal size and runtime" docker run --rm -e SIZE=4K -e RUNTIME=50ms diskmark:test
check_step_result
- name: Test Summary
if: always()
run: |
echo ""
echo "============================================"
echo " EDGE CASE BENCHMARK SUMMARY"
echo "============================================"
PASS_COUNT=$(grep -c "^pass:" "$RESULTS_FILE" 2>/dev/null || echo 0)
FAIL_COUNT=$(grep -c "^fail:" "$RESULTS_FILE" 2>/dev/null || echo 0)
echo "✅ Passed: $PASS_COUNT | ❌ Failed: $FAIL_COUNT"
[ "$FAIL_COUNT" -gt 0 ] && exit 1 || echo "All tests passed! 🎉"