---
# Benchmark workflow.
#   - Micro-benchmarks run on every relevant push to main (and nightly/manual).
#   - Integration benchmarks need a Postgres service, so they run only on the
#     nightly schedule or an explicit workflow_dispatch.
name: Benchmarks

on:
  workflow_dispatch:
  schedule:
    # Nightly at 02:00 UTC.
    - cron: '0 2 * * *'
  push:
    branches: [main]
    paths:
      - 'src/**'
      - 'benches/**'
      - 'Cargo.toml'
      - '.github/workflows/benchmarks.yml'

env:
  CARGO_TERM_COLOR: always
  RUST_BACKTRACE: 1

jobs:
  micro-benchmarks:
    name: Micro-benchmarks
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - name: Install Rust
        uses: dtolnay/rust-toolchain@stable

      # actions/cache@v3 is deprecated; v4 is the supported release.
      - name: Cache cargo registry
        uses: actions/cache@v4
        with:
          path: ~/.cargo/registry
          key: ${{ runner.os }}-cargo-registry-${{ hashFiles('**/Cargo.lock') }}

      - name: Cache cargo index
        uses: actions/cache@v4
        with:
          path: ~/.cargo/git
          key: ${{ runner.os }}-cargo-git-${{ hashFiles('**/Cargo.lock') }}

      - name: Cache cargo build
        uses: actions/cache@v4
        with:
          path: target
          key: ${{ runner.os }}-cargo-build-target-${{ hashFiles('**/Cargo.lock') }}

      - name: Run micro-benchmarks
        run: cargo bench --bench micro_benchmarks -- --verbose

      - name: Upload benchmark results
        uses: actions/upload-artifact@v4
        # Upload even when a benchmark step failed, so regressions can be inspected.
        if: always()
        with:
          name: micro-benchmark-results
          path: target/criterion
          retention-days: 30

  integration-benchmarks:
    name: Integration Benchmarks
    runs-on: ubuntu-latest
    # Postgres-backed benchmarks are slow; run them only nightly or on demand.
    if: github.event_name == 'schedule' || github.event_name == 'workflow_dispatch'
    services:
      postgres:
        image: postgres:17-alpine
        env:
          POSTGRES_USER: postgres
          POSTGRES_PASSWORD: postgres
          # The container entrypoint creates this database on startup, so no
          # separate "CREATE DATABASE" step is needed (one would fail with
          # "database already exists").
          POSTGRES_DB: fraiseql_bench
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
        ports:
          # Quoted: bare 5432:5432 is a YAML 1.1 sexagesimal-integer trap.
          - '5432:5432'
    steps:
      - uses: actions/checkout@v4

      - name: Install Rust
        uses: dtolnay/rust-toolchain@stable

      # actions/cache@v3 is deprecated; v4 is the supported release.
      - name: Cache cargo registry
        uses: actions/cache@v4
        with:
          path: ~/.cargo/registry
          key: ${{ runner.os }}-cargo-registry-${{ hashFiles('**/Cargo.lock') }}

      - name: Cache cargo index
        uses: actions/cache@v4
        with:
          path: ~/.cargo/git
          key: ${{ runner.os }}-cargo-git-${{ hashFiles('**/Cargo.lock') }}

      - name: Cache cargo build
        uses: actions/cache@v4
        with:
          path: target
          key: ${{ runner.os }}-cargo-build-target-${{ hashFiles('**/Cargo.lock') }}

      # The service health check gates container start, but the port may not be
      # reachable from the runner yet; poll until it accepts connections.
      - name: Wait for Postgres to be ready
        run: |
          until pg_isready -h localhost -U postgres; do
            echo "Waiting for Postgres..."
            sleep 1
          done
        env:
          PGPASSWORD: postgres

      - name: Load benchmark test data
        run: psql -U postgres -h localhost fraiseql_bench < benches/setup.sql
        env:
          PGPASSWORD: postgres

      - name: Verify test data loaded
        run: |
          # `psql -t` pads its output with whitespace; strip it, otherwise the
          # string comparison below can never match.
          COUNT=$(psql -U postgres -h localhost fraiseql_bench -t -c "SELECT COUNT(*) FROM v_test_100k" | tr -d '[:space:]')
          echo "Loaded $COUNT rows in v_test_100k"
          if [ "$COUNT" = "100000" ]; then
            echo "✓ Test data verified"
          else
            echo "✗ Test data count mismatch"
            exit 1
          fi
        env:
          PGPASSWORD: postgres

      - name: Run integration benchmarks
        run: cargo bench --bench integration_benchmarks --features bench-with-postgres -- --verbose
        env:
          POSTGRES_HOST: localhost
          POSTGRES_USER: postgres
          POSTGRES_PASSWORD: postgres
          POSTGRES_DB: fraiseql_bench

      - name: Upload benchmark results
        uses: actions/upload-artifact@v4
        # Upload even when a benchmark step failed, so regressions can be inspected.
        if: always()
        with:
          name: integration-benchmark-results
          path: target/criterion
          retention-days: 30

  benchmark-summary:
    name: Benchmark Summary
    runs-on: ubuntu-latest
    # Must depend on BOTH jobs: `needs.<job>.result` is populated only for jobs
    # listed in `needs`. (Previously integration-benchmarks was omitted, so its
    # result read as empty and the summary wrongly failed on scheduled runs.)
    needs: [micro-benchmarks, integration-benchmarks]
    # Run even when a dependency failed or was skipped, so we always report.
    if: always()
    steps:
      - name: Check micro-benchmarks status
        run: |
          if [ "${{ needs.micro-benchmarks.result }}" = "success" ]; then
            echo "✓ Micro-benchmarks passed"
          else
            echo "✗ Micro-benchmarks failed"
            exit 1
          fi

      - name: Check integration-benchmarks status (if run)
        run: |
          # "skipped" is the expected result on push events (the job only runs
          # for schedule/workflow_dispatch) and is not a failure.
          RESULT="${{ needs.integration-benchmarks.result }}"
          if [ "$RESULT" = "skipped" ]; then
            echo "⊘ Integration benchmarks skipped (schedule/manual only)"
          elif [ "$RESULT" = "success" ]; then
            echo "✓ Integration benchmarks passed"
          else
            echo "✗ Integration benchmarks failed"
            exit 1
          fi