name: Nightly Benchmarks (Heavy)

# Nightly heavy-benchmark pipeline: runs the full criterion suite at
# 02:00 UTC, uploads results to Bencher, and opens a deduplicated
# GitHub issue when Bencher flags a statistically significant regression.
on:
  schedule:
    # 02:00 UTC daily — off-peak for hosted runners.
    - cron: '0 2 * * *'
  # Allow manual runs from the Actions UI (previously fused onto the
  # cron line, which made the trigger invalid).
  workflow_dispatch:

# Least privilege: read the repo, write issues for regression reports.
permissions:
  contents: read
  issues: write

jobs:
  benchmark:
    name: Run heavy benchmarks
    runs-on: ubuntu-latest
    # Heavy suites can run for hours; cap at 6h so a hung bench
    # doesn't occupy a runner indefinitely.
    timeout-minutes: 360
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Install Rust toolchain
        uses: dtolnay/rust-toolchain@stable

      - name: Cache cargo registry
        uses: actions/cache@v4
        with:
          path: |
            ~/.cargo/registry
            ~/.cargo/git
          key: ${{ runner.os }}-cargo-bench-${{ hashFiles('**/Cargo.lock') }}
          restore-keys: |
            ${{ runner.os }}-cargo-bench-

      - name: Run heavy benchmarks (negation through concurrent_btree_scan)
        run: |
          # -e: abort on the first failing bench — without it, a failure in
          # any bench except the last is swallowed because the subshell's
          # exit status is that of its final command.
          # -o pipefail: propagate the subshell's failure through `tee`.
          set -e -o pipefail
          # CARGO_TERM_COLOR=never keeps ANSI codes out of bench_heavy.txt
          # so the Bencher criterion adapter can parse it.
          (
            CARGO_TERM_COLOR=never cargo bench -- negation
            CARGO_TERM_COLOR=never cargo bench -- disjunction
            CARGO_TERM_COLOR=never cargo bench -- aggregation
            CARGO_TERM_COLOR=never cargo bench -- expr
            CARGO_TERM_COLOR=never cargo bench -- window
            CARGO_TERM_COLOR=never cargo bench -- temporal_metadata
            CARGO_TERM_COLOR=never cargo bench -- udf
            CARGO_TERM_COLOR=never cargo bench -- aggregation_extras
            CARGO_TERM_COLOR=never cargo bench -- query_extras
            CARGO_TERM_COLOR=never cargo bench -- concurrent_btree_scan
          ) 2>&1 | tee bench_heavy.txt

      - name: Install Bencher CLI
        uses: bencherdev/bencher@v0.4.25

      - name: Upload to Bencher
        id: bencher
        # A regression makes `bencher run` exit non-zero (--err); keep the
        # job alive so the next step can file an issue instead of failing.
        continue-on-error: true
        run: |
          bencher run \
            --project minigraf \
            --token "${{ secrets.BENCHER_API_TOKEN }}" \
            --branch main \
            --testbed ubuntu-latest \
            --threshold-measure latency \
            --threshold-test t_test \
            --threshold-upper-boundary 0.99 \
            --err \
            --adapter rust_criterion \
            --file bench_heavy.txt

      - name: Open regression issue
        if: steps.bencher.outcome == 'failure'
        uses: actions/github-script@v6
        with:
          script: |
            const today = new Date().toISOString().slice(0, 10);
            const title = `Benchmark regression (heavy) - ${today}`;
            // Deduplication: skip if an open regression issue already exists
            const { data: openIssues } = await github.rest.issues.listForRepo({
              owner: context.repo.owner,
              repo: context.repo.repo,
              labels: 'performance',
              state: 'open',
              per_page: 100,
            });
            const existing = openIssues.filter(i =>
              i.title.startsWith('Benchmark regression')
            );
            if (existing.length > 0) {
              console.log(`Skipping: open regression issue already exists (#${existing[0].number})`);
              return;
            }
            const runUrl = `${context.serverUrl}/${context.repo.owner}/${context.repo.repo}/actions/runs/${context.runId}`;
            await github.rest.issues.create({
              owner: context.repo.owner,
              repo: context.repo.repo,
              title: title,
              body: [
                'Bencher detected a performance regression in the nightly heavy benchmark run.',
                '',
                `**Run:** ${runUrl}`,
                '',
                'Please review the Bencher dashboard for details on which benchmarks regressed.',
              ].join('\n'),
              labels: ['performance'],
            });