# Captured from the GitHub Actions UI: "Performance Benchmark" run #7
# ("Workflow file for this run" view). The workflow definition follows.
# Performance Benchmark workflow: builds the library, runs JMH micro and
# Quarkus integration benchmarks, uploads results, and publishes processed
# reports to cuioss.github.io.
name: Performance Benchmark

on:
  pull_request:
    branches: [ "main" ]
    types: [ closed ]
  push:
    tags: [ "*" ]
  workflow_dispatch:

# Declare default permissions as read only
permissions: read-all

# Prevent concurrent benchmark runs to avoid interference
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: false  # Don't cancel in-progress runs as benchmarks are expensive

jobs:
  benchmark:
    name: Run JMH Benchmarks
    runs-on: ubuntu-latest
    # Only run on merged PRs, not just closed ones
    if: github.event_name != 'pull_request' || github.event.pull_request.merged == true
    # Add timeout to prevent long-running jobs (increased for integration benchmarks)
    timeout-minutes: 45
    permissions:
      # Needed to upload artifacts
      contents: write
    steps:
      - name: Harden the runner (Audit all outbound calls)
        uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
        with:
          egress-policy: audit

      - name: Checkout code
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          fetch-depth: 0  # Fetch all history for proper versioning

      - name: Set up JDK 21
        uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1
        with:
          java-version: '21'
          distribution: 'temurin'
          cache: maven

      - name: Build api-sheriff-library
        run: |
          # Build library module first to ensure test artifact is available for benchmarking
          ./mvnw --no-transfer-progress clean install -DskipTests

      - name: Run Micro Benchmarks
        run: |
          # Create directory for benchmark results
          mkdir -p benchmark-results
          # Run micro benchmarks using configuration from pom.xml
          ./mvnw --no-transfer-progress clean verify -pl benchmarking/benchmark-library -Pbenchmark
          # Move micro benchmark results to expected location for upload
          if [ -d "benchmarking/benchmark-library/target/benchmark-results" ]; then
            echo "Moving micro benchmark results to upload location..."
            mv benchmarking/benchmark-library/target/benchmark-results/* benchmark-results/ 2>/dev/null || true
          fi

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.7.1

      - name: Run Integration Benchmarks
        run: |
          # Run integration benchmarks with native image using profile that includes container startup
          echo "🚀 Running integration benchmarks with native Quarkus..."
          ./mvnw --no-transfer-progress clean verify -pl benchmarking/benchmark-integration-quarkus -Pbenchmark-testing -DskipTests
          # Check what was actually created
          echo "📁 Checking benchmark-integration-quarkus/target directory structure:"
          ls -la benchmarking/benchmark-integration-quarkus/target/ || echo "Target directory not found"
          echo "📁 Checking benchmark-integration-quarkus/target/benchmark-results directory:"
          ls -la benchmarking/benchmark-integration-quarkus/target/benchmark-results/ || echo "Benchmark results directory not found"
          echo "🔍 Finding all JSON files in benchmark-integration-quarkus:"
          find benchmarking/benchmark-integration-quarkus -name "*.json" -type f | head -20
          # Move integration benchmark results to expected location for upload
          if [ -d "benchmarking/benchmark-integration-quarkus/target/benchmark-results" ]; then
            echo "Moving integration benchmark results to upload location..."
            # Copy ALL json files from benchmark-results directory
            for file in benchmarking/benchmark-integration-quarkus/target/benchmark-results/*.json; do
              if [ -f "$file" ]; then
                echo "Copying: $(basename "$file")"
                cp "$file" benchmark-results/
              fi
            done
          else
            echo "❌ Directory benchmarking/benchmark-integration-quarkus/target/benchmark-results does not exist!"
          fi
          # Add timestamp to results
          echo "{ \"timestamp\": \"$(date -u +"%Y-%m-%dT%H:%M:%SZ")\", \"commit\": \"${{ github.sha }}\" }" > benchmark-results/metadata.json
          # Verify that benchmark results were generated
          echo "📊 Final contents of benchmark-results directory:"
          ls -la benchmark-results/
          echo "🔍 Searching for any benchmark-result files in entire workspace:"
          find . -name "*benchmark-result*.json" -type f || echo "No benchmark result files found anywhere"

      - name: Upload benchmark results
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: benchmark-results
          path: benchmark-results/
          retention-days: 90  # Keep results for 90 days

      - name: Process All Benchmarks
        run: |
          # Get current date for badge timestamp in Berlin time
          TIMESTAMP=$(TZ='Europe/Berlin' date +"%Y-%m-%d")
          TIMESTAMP_WITH_TIME=$(TZ='Europe/Berlin' date +"%Y-%m-%d %H:%M %Z")
          # Create gh-pages directory
          mkdir -p gh-pages
          # Process all benchmarks using orchestration script
          bash benchmarking/scripts/process-all-benchmarks.sh \
            benchmark-results \
            benchmarking/doc/templates \
            gh-pages \
            "${{ github.sha }}" \
            "$TIMESTAMP" \
            "$TIMESTAMP_WITH_TIME"
          echo "📊 Benchmark processing completed"
          # Display processing results for debugging
          if [ -f "gh-pages/processing-results.json" ]; then
            echo "📋 Processing Summary:"
            jq -r '
              "🔬 Micro Benchmarks: " + .processing.micro.status + " - " + .processing.micro.message,
              "🔗 Integration Benchmarks: " + .processing.integration.status + " - " + .processing.integration.message,
              "❌ Errors: " + (.errors | length | tostring)
            ' gh-pages/processing-results.json
          fi

      - name: Deploy to cuioss.github.io
        uses: JamesIves/github-pages-deploy-action@6c2d9db40f9296374acc17b90404b6e8864128c8 # v4.7.3
        with:
          folder: gh-pages
          repository-name: cuioss/cuioss.github.io
          target-folder: api-sheriff/benchmarks
          branch: main
          token: ${{ secrets.PAGES_DEPLOY_TOKEN }}