# Release pipeline: cross-compiles release binaries for Linux/macOS/Windows,
# publishes the crate to crates.io, and creates a GitHub release with
# generated notes.
name: Release
on:
  # Pushing a tag like v1.3.1 triggers a full release.
  push:
    tags:
      - 'v*'
  # Manual runs re-release an existing tag (e.g. to rebuild assets).
  workflow_dispatch:
    inputs:
      tag:
        description: Existing git tag to release, for example v1.3.1
        required: true
        type: string
# contents: write is needed to create the release and upload assets;
# pull-requests: read lets the release-notes script classify PRs.
permissions:
  contents: write
  pull-requests: read
# One in-flight run per released tag; never cancel a release mid-flight.
concurrency:
  group: release-${{ github.event_name == 'workflow_dispatch' && inputs.tag || github.ref }}
  cancel-in-progress: false
env:
  CARGO_TERM_COLOR: always
  # Binary name produced by cargo; differs from the crates.io package name.
  BIN_NAME: oc-stats
  PACKAGE_NAME: opencode-stats
jobs:
  build-linux:
    name: Build Linux (${{ matrix.target }})
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        include:
          - target: x86_64-unknown-linux-gnu
            artifact_name: oc-stats-x86_64-unknown-linux-gnu
            archive_name: oc-stats-x86_64-unknown-linux-gnu.tar.gz
          - target: x86_64-unknown-linux-musl
            artifact_name: oc-stats-x86_64-unknown-linux-musl
            archive_name: oc-stats-x86_64-unknown-linux-musl.tar.gz
    steps:
      - uses: actions/checkout@v4
        with:
          # Build from the requested tag on manual dispatch; otherwise the
          # pushed ref.
          ref: ${{ github.event_name == 'workflow_dispatch' && inputs.tag || github.ref }}
      - uses: dtolnay/rust-toolchain@stable
        with:
          targets: ${{ matrix.target }}
      # zig serves as the cross C toolchain so both gnu and musl targets
      # build on a single runner via cargo-zigbuild.
      - uses: goto-bus-stop/setup-zig@v2
        with:
          version: 0.13.0
      - name: Install cargo-zigbuild
        run: cargo install --locked cargo-zigbuild
      - name: Build binary
        run: cargo zigbuild --release --locked --target ${{ matrix.target }}
      - name: Package archive
        shell: bash
        run: |
          mkdir -p dist
          cp "target/${{ matrix.target }}/release/${BIN_NAME}" "dist/${BIN_NAME}"
          tar -C dist -czf "dist/${{ matrix.archive_name }}" "${BIN_NAME}"
      - uses: actions/upload-artifact@v4
        with:
          name: ${{ matrix.artifact_name }}
          path: dist/${{ matrix.archive_name }}
          # Fail loudly if the packaging step produced nothing.
          if-no-files-found: error
build-macos:
name: Build macOS (${{ matrix.target }})
runs-on: macos-latest
strategy:
fail-fast: false
matrix:
target:
- x86_64-apple-darwin
- aarch64-apple-darwin
steps:
- uses: actions/checkout@v4
with:
ref: ${{ github.event_name == 'workflow_dispatch' && inputs.tag || github.ref }}
- uses: dtolnay/rust-toolchain@stable
with:
targets: ${{ matrix.target }}
- name: Build binary
run: cargo build --release --locked --target ${{ matrix.target }}
- name: Package archive
run: |
mkdir -p dist
cp "target/${{ matrix.target }}/release/${BIN_NAME}" "dist/${BIN_NAME}"
tar -C dist -czf "dist/oc-stats-${{ matrix.target }}.tar.gz" "${BIN_NAME}"
- uses: actions/upload-artifact@v4
with:
name: oc-stats-${{ matrix.target }}
path: dist/oc-stats-${{ matrix.target }}.tar.gz
if-no-files-found: error
  build-windows:
    name: Build Windows
    runs-on: windows-latest
    steps:
      - uses: actions/checkout@v4
        with:
          # Build from the requested tag on manual dispatch; otherwise the
          # pushed ref.
          ref: ${{ github.event_name == 'workflow_dispatch' && inputs.tag || github.ref }}
      - uses: dtolnay/rust-toolchain@stable
        with:
          targets: x86_64-pc-windows-msvc
      - name: Build binary
        run: cargo build --release --locked --target x86_64-pc-windows-msvc
      - name: Package archive
        # PowerShell packaging: Windows assets ship as .zip, not .tar.gz.
        shell: pwsh
        run: |
          New-Item -ItemType Directory -Force -Path dist | Out-Null
          Copy-Item "target/x86_64-pc-windows-msvc/release/${env:BIN_NAME}.exe" "dist/${env:BIN_NAME}.exe"
          Compress-Archive -Path "dist/${env:BIN_NAME}.exe" -DestinationPath "dist/oc-stats-x86_64-pc-windows-msvc.zip" -Force
      - uses: actions/upload-artifact@v4
        with:
          name: oc-stats-x86_64-pc-windows-msvc
          path: dist/oc-stats-x86_64-pc-windows-msvc.zip
          # Fail loudly if the packaging step produced nothing.
          if-no-files-found: error
publish-crates:
name: Publish crate
needs: [build-linux, build-macos, build-windows]
runs-on: ubuntu-latest
if: github.event_name == 'push'
steps:
- uses: actions/checkout@v4
with:
ref: ${{ github.event_name == 'workflow_dispatch' && inputs.tag || github.ref }}
- uses: dtolnay/rust-toolchain@stable
- name: Ensure crates.io token is configured
shell: bash
run: |
if [ -z "${{ secrets.CRATES_IO_TOKEN }}" ]; then
echo "CRATES_IO_TOKEN secret is not configured."
exit 1
fi
- name: Publish to crates.io
run: cargo publish --locked --token ${{ secrets.CRATES_IO_TOKEN }}
release:
name: Create GitHub release
needs: [build-linux, build-macos, build-windows, publish-crates]
if: |
always() &&
needs.build-linux.result == 'success' &&
needs.build-macos.result == 'success' &&
needs.build-windows.result == 'success' &&
(needs.publish-crates.result == 'success' || needs.publish-crates.result == 'skipped')
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Resolve release tags
id: tags
shell: bash
run: |
set -euo pipefail
current_tag="${{ github.event_name == 'workflow_dispatch' && inputs.tag || github.ref_name }}"
echo "current=${current_tag}" >> "$GITHUB_OUTPUT"
previous_tag="$(git tag --sort=-version:refname | grep -Fxv "${current_tag}" | head -n 1 || true)"
echo "previous=${previous_tag}" >> "$GITHUB_OUTPUT"
      - name: Download build artifacts
        uses: actions/download-artifact@v4
        with:
          path: dist
          # Flatten every per-target artifact into dist/ so the release step
          # can attach them all with a single `dist/*` glob.
          merge-multiple: true
      - name: Generate structured release notes
        uses: actions/github-script@v7
        env:
          CURRENT_TAG: ${{ steps.tags.outputs.current }}
          PREVIOUS_TAG: ${{ steps.tags.outputs.previous }}
          # NOTE(review): workflow-level env should already reach the script's
          # process.env; re-exporting here keeps the script's inputs explicit.
          BIN_NAME: ${{ env.BIN_NAME }}
          PACKAGE_NAME: ${{ env.PACKAGE_NAME }}
        with:
          script: |
const fs = require('node:fs');
const currentTag = process.env.CURRENT_TAG;
const previousTag = process.env.PREVIOUS_TAG;
const owner = context.repo.owner;
const repo = context.repo.repo;
const { data: repoData } = await github.rest.repos.get({ owner, repo });
const repoOwner = (repoData.owner?.login || '').toLowerCase();
const myChanges = [];
const communityPRs = new Map();
const seenEntries = new Set();
const prCache = new Map();
function parseConventional(subject) {
const match = subject.match(/^([a-z]+)(?:\(([^)]+)\))?!?:\s*(.+)$/i);
if (!match) {
return { type: '', scope: '', title: subject.trim() };
}
return {
type: match[1].toLowerCase(),
scope: (match[2] || '').toLowerCase(),
title: match[3].trim(),
};
}
function normalizeTitle(raw) {
return raw
.replace(/^\s*[-*]\s*/, '')
.replace(/\s+/g, ' ')
.trim();
}
function shouldSkip(raw, type) {
const text = raw.toLowerCase();
if (text.startsWith('merge pull request')) return true;
if (text.startsWith('merge branch')) return true;
if (!type) return false;
if (type !== 'chore') return false;
return /release|version|bump|publish|发布|版本/.test(text);
}
function extractPrNumber(text) {
const match = text.match(/\(#(\d+)\)\s*$/) || text.match(/#(\d+)/);
return match ? Number.parseInt(match[1], 10) : null;
}
function isCommunity(pr) {
if (!pr?.user?.login) return false;
const login = pr.user.login.toLowerCase();
if (login === repoOwner) return false;
if (login.endsWith('[bot]')) return false;
const association = (pr.author_association || '').toUpperCase();
return ['CONTRIBUTOR', 'FIRST_TIME_CONTRIBUTOR', 'FIRST_TIMER', 'NONE'].includes(association);
}
async function loadPullByNumber(number) {
if (!number) return null;
if (prCache.has(number)) return prCache.get(number);
try {
const { data } = await github.rest.pulls.get({ owner, repo, pull_number: number });
prCache.set(number, data);
return data;
} catch {
return null;
}
}
async function loadPullByCommit(sha) {
try {
const { data } = await github.rest.repos.listPullRequestsAssociatedWithCommit({
owner,
repo,
commit_sha: sha,
});
if (!Array.isArray(data) || data.length === 0) return null;
const pull = data.find((item) => item.merged_at) || data[0];
if (pull?.number) prCache.set(pull.number, pull);
return pull;
} catch {
return null;
}
}
const commits = [];
if (previousTag) {
const { data } = await github.rest.repos.compareCommitsWithBasehead({
owner,
repo,
basehead: `${previousTag}...${currentTag}`,
per_page: 250,
});
commits.push(...(data.commits || []));
} else {
const data = await github.paginate(github.rest.repos.listCommits, {
owner,
repo,
sha: currentTag,
per_page: 50,
});
commits.push(...data);
}
for (const commit of commits) {
const subject = (commit.commit?.message || '').split('\n')[0].trim();
if (!subject) continue;
const fromCommit = parseConventional(subject);
if (shouldSkip(subject, fromCommit.type)) continue;
const inlinePrNumber = extractPrNumber(subject);
const pull = (await loadPullByNumber(inlinePrNumber)) || (await loadPullByCommit(commit.sha));
const sourceText = pull?.title || subject;
const parsed = parseConventional(sourceText);
if (shouldSkip(sourceText, parsed.type || fromCommit.type)) continue;
const cleanTitle = normalizeTitle(parsed.title || sourceText);
if (!cleanTitle) continue;
const prNumber = pull?.number || inlinePrNumber;
const dedupeKey = prNumber ? `pr-${prNumber}` : `commit-${cleanTitle.toLowerCase()}`;
if (seenEntries.has(dedupeKey)) continue;
seenEntries.add(dedupeKey);
const itemText = `${cleanTitle}${prNumber ? ` (#${prNumber})` : ''}`;
if (pull && isCommunity(pull)) {
const login = pull.user.login;
const list = communityPRs.get(login) || [];
list.push(itemText);
communityPRs.set(login, list);
} else {
myChanges.push(`- ${itemText}`);
}
}
const body = [
'## Install',
'',
`- From crates.io: \`cargo install ${process.env.PACKAGE_NAME}\``,
`- Installed command: \`${process.env.BIN_NAME}\``,
'- From GitHub: download the archive for your platform from the assets below',
'',
];
if (myChanges.length > 0) {
body.push('## Changes', '', ...myChanges, '');
} else if (communityPRs.size === 0) {
body.push('## Changes', '', '- Maintenance release with no notable user-facing changes.', '');
}
if (communityPRs.size > 0) {
const contributorCount = communityPRs.size;
body.push(`**Thank you to ${contributorCount} community contributor${contributorCount === 1 ? '' : 's'}:**`);
const orderedContributors = [...communityPRs.entries()].sort(([a], [b]) => a.localeCompare(b));
for (const [login, items] of orderedContributors) {
body.push(`- @${login}:`);
for (const item of items) {
body.push(` - ${item}`);
}
}
body.push('');
}
if (previousTag) {
body.push(`**Full Changelog**: https://github.com/${owner}/${repo}/compare/${previousTag}...${currentTag}`);
}
fs.writeFileSync('release-notes.md', `${body.join('\n').trim()}\n`, 'utf8');
      - name: Create or update release
        uses: softprops/action-gh-release@v2
        with:
          tag_name: ${{ steps.tags.outputs.current }}
          name: ${{ steps.tags.outputs.current }}
          body_path: release-notes.md
          files: dist/*
          # Error out if the glob matches nothing (e.g. artifact download
          # produced an empty dist/).
          fail_on_unmatched_files: true