# This file was autogenerated by cargo-dist: https://opensource.axo.dev/cargo-dist/
#
# Copyright 2022-2024, axodotdev
# SPDX-License-Identifier: MIT or Apache-2.0
#
# CI that:
#
# * checks for a Git Tag that looks like a release
# * builds artifacts with cargo-dist (archives, installers, hashes)
# * uploads those artifacts to a temporary workflow artifact (zip)
# * on success, uploads the artifacts to a GitHub Release
#
# Note that the GitHub Release will be created with a generated
# title/body based on your changelogs.
name: Release
# Default to read-only; the host job elevates itself to contents: write.
permissions:
  contents: read
  actions: read
env:
  FORCE_JAVASCRIPT_ACTIONS_TO_NODE24: "true"
# This task will run whenever you push a git tag that looks like a version
# like "1.0.0", "v0.1.0-prerelease.1", "my-app/0.1.0", "releases/v1.0.0", etc.
# Various formats will be parsed into a VERSION and an optional PACKAGE_NAME, where
# PACKAGE_NAME must be the name of a Cargo package in your workspace, and VERSION
# must be a Cargo-style SemVer Version (must have at least major.minor.patch).
#
# If PACKAGE_NAME is specified, then the announcement will be for that
# package (erroring out if it doesn't have the given version or isn't cargo-dist-able).
#
# If PACKAGE_NAME isn't specified, then the announcement will be for all
# (cargo-dist-able) packages in the workspace with that version (this mode is
# intended for workspaces with only one dist-able package, or with all dist-able
# packages versioned/released in lockstep).
#
# If you push multiple tags at once, separate instances of this workflow will
# spin up, creating an independent announcement for each one. However, GitHub
# will hard limit this to 3 tags per commit, as it will assume more tags is a
# mistake.
#
# If there's a prerelease-style suffix to the version, then the release(s)
# will be marked as a prerelease.
on:
  # Manual re-run/backfill for a specific existing tag.
  workflow_dispatch:
    inputs:
      tag:
        description: "Release tag to build and publish, for example v0.1.0"
        required: true
        type: string
  # PRs exercise `cargo dist plan` as a dry run (publishing=false).
  pull_request:
  push:
    tags:
      - '**[0-9]+.[0-9]+.[0-9]+*'
jobs:
# Run 'cargo dist plan' (or host) to determine what tasks we need to do
  plan:
    runs-on: "ubuntu-22.04"
    # Downstream jobs consume these outputs to decide what to build and publish.
    outputs:
      val: ${{ steps.plan.outputs.manifest }}
      tag: ${{ steps.release-context.outputs.tag }}
      tag-flag: ${{ steps.release-context.outputs.tag-flag }}
      publishing: ${{ steps.release-context.outputs.publishing }}
    env:
      GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
    steps:
      - uses: actions/checkout@v4
        with:
          submodules: recursive
      # Resolve the release context: on pull_request emit empty tag outputs and
      # publishing=false (dry run); otherwise use the workflow_dispatch input
      # or the pushed tag ref, and mark the run as publishing.
      - id: release-context
        shell: bash
        env:
          EVENT_NAME: ${{ github.event_name }}
          RELEASE_TAG: ${{ inputs.tag || github.ref_name }}
        run: |
          if [[ "$EVENT_NAME" == "pull_request" ]]; then
            {
              echo "tag="
              echo "tag-flag="
              echo "publishing=false"
            } >> "$GITHUB_OUTPUT"
            exit 0
          fi
          {
            echo "tag=$RELEASE_TAG"
            echo "tag-flag=--tag=$RELEASE_TAG"
            echo "publishing=true"
          } >> "$GITHUB_OUTPUT"
      - name: Install cargo-dist
        # we specify bash to get pipefail; it guards against the `curl` command
        # failing. otherwise `sh` won't catch that `curl` returned non-0
        shell: bash
        run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.22.0/cargo-dist-installer.sh | sh"
      # Stash the installed binary as an artifact so later jobs (global build,
      # host) can reuse the exact same cargo-dist without reinstalling.
      - name: Cache cargo-dist
        uses: actions/upload-artifact@v4
        with:
          name: cargo-dist-cache
          path: ~/.cargo/bin/cargo-dist
      # sure would be cool if github gave us proper conditionals...
      # so here's a doubly-nested ternary-via-truthiness to try to provide the best possible
      # functionality based on whether this is a pull_request, and whether it's from a fork.
      # (PRs run on the *source* but secrets are usually on the *target* -- that's *good*
      # but also really annoying to build CI around when it needs secrets to work right.)
      - id: plan
        env:
          EVENT_NAME: ${{ github.event_name }}
          RELEASE_TAG: ${{ steps.release-context.outputs.tag }}
        run: |
          if [[ "$EVENT_NAME" == "pull_request" ]]; then
            cargo dist plan --allow-dirty --output-format=json > plan-dist-manifest.json
          else
            cargo dist host --steps=create --tag="$RELEASE_TAG" --allow-dirty --output-format=json > plan-dist-manifest.json
          fi
          echo "cargo dist ran successfully"
          cat plan-dist-manifest.json
          echo "manifest=$(jq -c "." plan-dist-manifest.json)" >> "$GITHUB_OUTPUT"
      - name: "Upload dist-manifest.json"
        uses: actions/upload-artifact@v4
        with:
          name: artifacts-plan-dist-manifest
          path: plan-dist-manifest.json
# Build and packages all the platform-specific things
  build-local-artifacts:
    name: build-local-artifacts (${{ join(matrix.targets, ', ') }})
    # Let the initial task tell us to not run (currently very blunt)
    needs:
      - plan
    if: ${{ fromJson(needs.plan.outputs.val).ci.github.artifacts_matrix.include != null && (needs.plan.outputs.publishing == 'true' || fromJson(needs.plan.outputs.val).ci.github.pr_run_mode == 'upload') }}
    strategy:
      fail-fast: false
      # Target platforms/runners are computed by cargo-dist in create-release.
      # Each member of the matrix has the following arguments:
      #
      # - runner: the github runner
      # - dist-args: cli flags to pass to cargo dist
      # - install-dist: expression to run to install cargo-dist on the runner
      #
      # Typically there will be:
      # - 1 "global" task that builds universal installers
      # - N "local" tasks that build each platform's binaries and platform-specific installers
      matrix: ${{ fromJson(needs.plan.outputs.val).ci.github.artifacts_matrix }}
    runs-on: ${{ matrix.runner }}
    env:
      GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      # Per-target manifest path; picked up by the Post-build step below.
      BUILD_MANIFEST_NAME: target/distrib/${{ join(matrix.targets, '-') }}-dist-manifest.json
    steps:
      # Harmless on non-Windows runners; required on Windows for deep paths.
      - name: enable windows longpaths
        run: |
          git config --global core.longpaths true
      - uses: actions/checkout@v4
        with:
          submodules: recursive
      # Native toolchain packages needed to build the vcpkg ports, per OS.
      - name: "Install vcpkg build dependencies (Linux)"
        if: runner.os == 'Linux'
        run: "sudo apt-get update -y && sudo apt-get --assume-yes install nasm autoconf autoconf-archive automake libtool"
      - name: "Install vcpkg build dependencies (macOS)"
        if: runner.os == 'macOS'
        run: "brew install nasm autoconf autoconf-archive automake libtool pkg-config"
      - name: "Install vcpkg build dependencies (Windows)"
        if: runner.os == 'Windows'
        run: "choco install nasm strawberryperl -y"
      - name: "Cache cargo-vcpkg binary"
        id: "cargo-vcpkg-cache"
        uses: "actions/cache@v4"
        with:
          "key": "cargo-vcpkg-${{ runner.os }}-${{ runner.arch }}-v1"
          "path": "~/.cargo/bin/cargo-vcpkg"
      - name: "Install vcpkg"
        if: "steps.cargo-vcpkg-cache.outputs.cache-hit != 'true'"
        run: "cargo install cargo-vcpkg"
      # Cache the vcpkg workspace keyed on Cargo.toml; restore-keys allows a
      # stale-but-partial restore, which the next step resets on a miss.
      - name: "Cache vcpkg workspace"
        id: "vcpkg-cache"
        uses: "actions/cache@v4"
        with:
          "key": "vcpkg-root-${{ runner.os }}-${{ join(matrix.targets, '_') }}-${{ hashFiles('Cargo.toml') }}"
          "path": |
            target/vcpkg/.vcpkg-root
            target/vcpkg/downloads
            target/vcpkg/installed
            target/vcpkg/packages
            target/vcpkg/ports
            target/vcpkg/scripts
            target/vcpkg/triplets
            target/vcpkg/vcpkg
          "restore-keys": |
            vcpkg-root-${{ runner.os }}-${{ join(matrix.targets, '_') }}-
      - name: "Reset partial vcpkg workspace cache"
        if: "steps.vcpkg-cache.outputs.cache-hit != 'true'"
        shell: bash
        run: rm -rf target/vcpkg
      # Retry up to 3 times with increasing backoff; vcpkg downloads are flaky.
      - name: "Build vcpkg dependencies"
        if: "steps.vcpkg-cache.outputs.cache-hit != 'true'"
        shell: bash
        run: |
          set -euo pipefail
          for attempt in 1 2 3; do
            if cargo vcpkg --verbose build; then
              exit 0
            fi
            if [[ "$attempt" == "3" ]]; then
              exit 1
            fi
            sleep $((attempt * 30))
          done
        env:
          "CMAKE_BUILD_PARALLEL_LEVEL": 2
          # NOTE(review): the x-gha binary-cache provider needs
          # ACTIONS_CACHE_URL/ACTIONS_RUNTIME_TOKEN exported to the step;
          # confirm vcpkg actually uses it here rather than silently skipping.
          "VCPKG_BINARY_SOURCES": "clear;x-gha,readwrite"
          "VCPKG_CMAKE_CONFIGURE_OPTIONS": "-Wno-dev -DCMAKE_POLICY_DEFAULT_CMP0174=NEW"
          "VCPKG_FEATURE_FLAGS": "manifests,binarycaching"
          "VCPKG_MAX_CONCURRENCY": 2
      - name: Install cargo-dist
        run: ${{ matrix.install_dist }}
      # Get the dist-manifest
      - name: Fetch local artifacts
        uses: actions/download-artifact@v4
        with:
          pattern: artifacts-*
          path: target/distrib/
          merge-multiple: true
      - name: Install dependencies
        run: |
          ${{ matrix.packages_install }}
      # Per-OS link flags, then the actual cargo-dist build for this target.
      - name: Build artifacts
        shell: bash
        run: |
          if [[ "${RUNNER_OS}" == "macOS" ]]; then
            export RUSTFLAGS="${RUSTFLAGS:-} -C link-arg=-framework -C link-arg=Foundation -C link-arg=-framework -C link-arg=CoreAudio -C link-arg=-framework -C link-arg=AVFoundation -C link-arg=-framework -C link-arg=CoreGraphics -C link-arg=-framework -C link-arg=OpenGL -C link-arg=-framework -C link-arg=CoreImage -C link-arg=-framework -C link-arg=AppKit -C link-arg=-framework -C link-arg=AudioToolbox -C link-arg=-framework -C link-arg=CoreFoundation -C link-arg=-framework -C link-arg=CoreMedia -C link-arg=-framework -C link-arg=CoreVideo -C link-arg=-framework -C link-arg=CoreServices -C link-arg=-framework -C link-arg=Security -C link-arg=-framework -C link-arg=VideoToolbox"
          fi
          if [[ "${RUNNER_OS}" == "Windows" ]]; then
            export VCPKGRS_TRIPLET="x64-windows-static"
            export VCPKG_DEFAULT_TRIPLET="x64-windows-static"
            export RUSTFLAGS="${RUSTFLAGS:-} -C target-feature=+crt-static -C link-arg=Mfplat.lib -C link-arg=Strmiids.lib -C link-arg=Mfuuid.lib -C link-arg=Bcrypt.lib -C link-arg=Ncrypt.lib -C link-arg=Crypt32.lib -C link-arg=Secur32.lib -C link-arg=Ole32.lib -C link-arg=User32.lib"
          fi
          # Actually do builds and make zips and whatnot
          cargo dist build ${{ needs.plan.outputs.tag-flag }} --allow-dirty --print=linkage --output-format=json ${{ matrix.dist_args }} > dist-manifest.json
          echo "cargo dist ran successfully"
      - id: cargo-dist
        name: Post-build
        # We force bash here just because github makes it really hard to get values up
        # to "real" actions without writing to env-vars, and writing to env-vars has
        # inconsistent syntax between shell and powershell.
        shell: bash
        run: |
          # Parse out what we just built and upload it to scratch storage
          echo "paths<<EOF" >> "$GITHUB_OUTPUT"
          jq --raw-output ".upload_files[]" dist-manifest.json >> "$GITHUB_OUTPUT"
          echo "EOF" >> "$GITHUB_OUTPUT"
          cp dist-manifest.json "$BUILD_MANIFEST_NAME"
      - name: "Upload artifacts"
        uses: actions/upload-artifact@v4
        with:
          name: artifacts-build-local-${{ join(matrix.targets, '_') }}
          path: |
            ${{ steps.cargo-dist.outputs.paths }}
            ${{ env.BUILD_MANIFEST_NAME }}
# Build and package all the platform-agnostic(ish) things
  build-global-artifacts:
    needs:
      - plan
      - build-local-artifacts
    runs-on: "ubuntu-22.04"
    env:
      GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      BUILD_MANIFEST_NAME: target/distrib/global-dist-manifest.json
    steps:
      - uses: actions/checkout@v4
        with:
          submodules: recursive
      # Reuse the exact cargo-dist binary the plan job installed and uploaded.
      - name: Install cached cargo-dist
        uses: actions/download-artifact@v4
        with:
          name: cargo-dist-cache
          path: ~/.cargo/bin/
      # download-artifact does not preserve the executable bit.
      - run: chmod +x ~/.cargo/bin/cargo-dist
      # Get all the local artifacts for the global tasks to use (for e.g. checksums)
      - name: Fetch local artifacts
        uses: actions/download-artifact@v4
        with:
          pattern: artifacts-*
          path: target/distrib/
          merge-multiple: true
      # Build the platform-agnostic artifacts (installers, checksums) and
      # expose their paths via the multiline `paths` output.
      - id: cargo-dist
        shell: bash
        run: |
          cargo dist build ${{ needs.plan.outputs.tag-flag }} --allow-dirty --output-format=json "--artifacts=global" > dist-manifest.json
          echo "cargo dist ran successfully"
          # Parse out what we just built and upload it to scratch storage
          echo "paths<<EOF" >> "$GITHUB_OUTPUT"
          jq --raw-output ".upload_files[]" dist-manifest.json >> "$GITHUB_OUTPUT"
          echo "EOF" >> "$GITHUB_OUTPUT"
          cp dist-manifest.json "$BUILD_MANIFEST_NAME"
      - name: "Upload artifacts"
        uses: actions/upload-artifact@v4
        with:
          name: artifacts-build-global
          path: |
            ${{ steps.cargo-dist.outputs.paths }}
            ${{ env.BUILD_MANIFEST_NAME }}
  # Probes whether an online self-hosted runner labeled 'plex-bench' exists;
  # the benchmarks job only runs when this emits run-benchmarks=true.
  benchmark-runner-check:
    needs:
      - plan
    if: ${{ needs.plan.outputs.publishing == 'true' && github.event_name != 'workflow_dispatch' }}
    runs-on: "ubuntu-22.04"
    permissions:
      contents: read
      actions: read
    outputs:
      run-benchmarks: ${{ steps.check.outputs.run-benchmarks }}
    env:
      BENCHMARK_SOURCE_PATH: ${{ secrets.BENCHMARK_SOURCE_PATH }}
      # NOTE(review): these Rust home overrides are not referenced by any step
      # in this job; they look intended for the `benchmarks` job (which runs
      # `mkdir -p "$CARGO_HOME" ...`) — confirm they belong here.
      HOME: /tmp/direct-play-nice-gha-home
      CARGO_HOME: /tmp/direct-play-nice-gha-home/.cargo
      RUSTUP_HOME: /tmp/direct-play-nice-gha-home/.rustup
      FALLBACK_GH_TOKEN: ${{ secrets.RELEASE_PLZ_TOKEN != '' && secrets.RELEASE_PLZ_TOKEN || secrets.GITHUB_TOKEN }}
    steps:
      # NOTE(review): this action fails hard when RUNNER_APP_ID /
      # RUNNER_APP_PRIVATE_KEY are unset; confirm those secrets always exist
      # or that a guard/continue-on-error is intended.
      - name: Generate runner app token
        id: runner-app-token
        uses: actions/create-github-app-token@v1
        with:
          app-id: ${{ secrets.RUNNER_APP_ID }}
          private-key: ${{ secrets.RUNNER_APP_PRIVATE_KEY }}
          owner: ${{ github.repository_owner }}
          repositories: ${{ github.event.repository.name }}
      # Prefer the app token; fall back to RELEASE_PLZ_TOKEN/GITHUB_TOKEN.
      # Every failure mode degrades to run-benchmarks=false rather than
      # failing the release.
      - id: check
        shell: bash
        env:
          GH_TOKEN: ${{ steps.runner-app-token.outputs.token != '' && steps.runner-app-token.outputs.token || env.FALLBACK_GH_TOKEN }}
        run: |
          set -euo pipefail
          if [[ -z "${BENCHMARK_SOURCE_PATH:-}" ]]; then
            echo "run-benchmarks=false" >> "$GITHUB_OUTPUT"
            echo "Skipping benchmarks: BENCHMARK_SOURCE_PATH secret is not set."
            exit 0
          fi
          if ! has_runner="$(gh api repos/${{ github.repository }}/actions/runners --paginate \
            --jq '[.runners[] | select(.status=="online" and ([.labels[].name] | index("plex-bench")))] | length > 0')"; then
            echo "::warning::Unable to query self-hosted runner inventory via API; skipping benchmarks."
            echo "run-benchmarks=false" >> "$GITHUB_OUTPUT"
            exit 0
          fi
          if [[ "$has_runner" == "true" ]]; then
            echo "run-benchmarks=true" >> "$GITHUB_OUTPUT"
            echo "Benchmark runner is online."
          else
            echo "run-benchmarks=false" >> "$GITHUB_OUTPUT"
            echo "Skipping benchmarks: no online self-hosted runner with label 'plex-bench'."
          fi
benchmarks:
needs:
- plan
- benchmark-runner-check
if: ${{ needs.plan.outputs.publishing == 'true' && needs.benchmark-runner-check.outputs.run-benchmarks == 'true' }}
runs-on:
- self-hosted
- Linux
- X64
- plex-bench
timeout-minutes: 180
permissions:
contents: read
actions: read
env:
BENCHMARK_SOURCE_PATH: ${{ secrets.BENCHMARK_SOURCE_PATH }}
    steps:
      - uses: actions/checkout@v4
        with:
          submodules: recursive
      # Fail fast (with a clear error) if the runner image lacks any required
      # CLI tool; this job never installs them itself.
      - name: Ensure benchmark dependencies
        shell: bash
        run: |
          set -euo pipefail
          missing=()
          for tool in ffmpeg ffprobe nasm autoconf automake libtool zip unzip tar cmake; do
            command -v "$tool" >/dev/null 2>&1 || missing+=("$tool")
          done
          if [[ "${#missing[@]}" -eq 0 ]]; then
            echo "Benchmark dependencies already present."
            exit 0
          fi
          echo "::error title=Missing benchmark dependencies::Runner image is missing ${missing[*]}. Preinstall them in the self-hosted runner image."
          exit 1
      # NOTE(review): assumes HOME/CARGO_HOME/RUSTUP_HOME are defined for this
      # job (under `set -u` an unset variable aborts the step); verify the
      # job-level env provides them.
      - name: Prepare Rust home directories
        shell: bash
        run: |
          set -euo pipefail
          mkdir -p "$HOME" "$CARGO_HOME" "$RUSTUP_HOME"
      - name: Install Rust
        uses: dtolnay/rust-toolchain@stable
      - name: Install cargo-vcpkg
        shell: bash
        run: |
          set -euo pipefail
          command -v cargo-vcpkg >/dev/null 2>&1 || cargo install cargo-vcpkg --locked
      - name: Cache vcpkg artifacts
        uses: actions/cache@v4
        with:
          path: |
            target/vcpkg/installed
            target/vcpkg/packages
            target/vcpkg/downloads
            target/vcpkg/buildtrees
          key: vcpkg-release-${{ runner.os }}-${{ hashFiles('Cargo.lock', 'vcpkg.json', 'vcpkg-configuration.json', 'VCPKG_DEPS_LIST.txt') }}
          restore-keys: |
            vcpkg-release-${{ runner.os }}-
      - name: Build vcpkg dependencies
        env:
          VCPKG_FEATURE_FLAGS: manifests,binarycaching
          VCPKG_BINARY_SOURCES: clear;x-gha,readwrite
          VCPKG_CMAKE_CONFIGURE_OPTIONS: -Wno-dev -DCMAKE_POLICY_DEFAULT_CMP0174=NEW
          VCPKG_MAX_CONCURRENCY: 2
          CMAKE_BUILD_PARALLEL_LEVEL: 2
        run: cargo vcpkg --verbose build
      - name: Export VCPKG_ROOT
        run: echo "VCPKG_ROOT=$(pwd)/target/vcpkg" >> "$GITHUB_ENV"
      # Locate the shared libclang on the runner (needed by bindgen-style
      # builds); hard error if the runner image lacks it.
      - name: Export LIBCLANG_PATH
        shell: bash
        run: |
          set -euo pipefail
          libclang_path="$(ldconfig -p | awk '/libclang\.so/{print $4; exit} /libclang-[0-9]+\.so/{print $4; exit}')"
          if [[ -z "$libclang_path" ]]; then
            echo "::error title=Missing libclang::Runner image must include libclang shared library."
            exit 1
          fi
          echo "LIBCLANG_PATH=$(dirname "$libclang_path")" >> "$GITHUB_ENV"
      - name: Force regenerate rusty_ffmpeg bindings
        run: cargo clean -p rusty_ffmpeg
      - name: Cache cargo build artifacts
        uses: Swatinem/rust-cache@v2
        with:
          prefix-key: release-bench-v1
      # Build the benchmark binary and normalize its location to
      # target/release/direct_play_nice regardless of target-dir layout.
      - name: Build release binary
        shell: bash
        run: |
          set -euo pipefail
          cargo build --release --bin direct_play_nice
          bin_path="$(find target -path '*/release/direct_play_nice' -type f | head -n1)"
          if [[ -z "$bin_path" ]]; then
            echo "::error title=Missing benchmark binary::cargo build did not produce direct_play_nice."
            find target -maxdepth 4 -type f -name 'direct_play_nice*' -print || true
            exit 1
          fi
          mkdir -p target/release
          if [[ "$bin_path" != "target/release/direct_play_nice" ]]; then
            cp "$bin_path" target/release/direct_play_nice
          fi
          chmod +x target/release/direct_play_nice
          ls -l target/release/direct_play_nice
      - name: Validate benchmark source path
        run: |
          test -n "$BENCHMARK_SOURCE_PATH"
          test -f "$BENCHMARK_SOURCE_PATH"
          mkdir -p benchmark_artifacts
      # Require >= 10 GiB free on / before producing large media outputs.
      - name: Ensure free disk headroom
        shell: bash
        run: |
          set -euo pipefail
          avail_kb="$(df --output=avail -k / | tail -n1 | tr -d ' ')"
          min_kb="$((10 * 1024 * 1024))"
          if (( avail_kb < min_kb )); then
            echo "::error title=Insufficient disk space::Need at least 10GiB free on /. Available: ${avail_kb} KiB."
            exit 1
          fi
      - name: Validate GPU acceleration path
        shell: bash
        run: |
          set -euo pipefail
          command -v nvidia-smi >/dev/null 2>&1
          nvidia-smi -L
          ffmpeg -hide_banner -encoders | grep -q 'h264_nvenc'
      # Preinstalled CUDA/ONNX runtime libraries expected on the runner image.
      - name: Validate OCR runtime path
        shell: bash
        run: |
          set -euo pipefail
          test -f /opt/direct-play-nice/ort122-runtime/lib/libonnxruntime.so
          test -f /opt/direct-play-nice/ort116-runtime/lib/libcublasLt.so.12
          test -f /opt/direct-play-nice/cudnn9-runtime/lib/libcudnn.so.9
          test -f /opt/direct-play-nice/nvrtc12-runtime/lib/libnvrtc.so.12
          test -f /opt/direct-play-nice/cudnn8-runtime/usr/lib/libcudnn.so.8
      - name: Run OCR benchmark
        shell: bash
        run: |
          set -euo pipefail
          bash scripts/ocr-tools/run_ocr_stress_benchmark.sh \
            --bin target/release/direct_play_nice \
            --source "$BENCHMARK_SOURCE_PATH" \
            --run-dir benchmark_artifacts/ocr \
            --output-name ocr.mp4 \
            --ocr-engine pp-ocr-v3 \
            --sub-mode force \
            --max-source-seconds 240 \
            --sample-ms 200 \
            --ocr-max-jobs 1 \
            --jobs-per-gpu 1 \
            --cuda-devices 0 \
            --ort-lib /opt/direct-play-nice/ort122-runtime/lib \
            --env "LD_LIBRARY_PATH=/opt/direct-play-nice/cudnn9-runtime/lib:/opt/direct-play-nice/nvrtc12-runtime/lib:/opt/direct-play-nice/cuda12-runtime/lib:/opt/direct-play-nice/cudnn8-runtime/usr/lib:/opt/direct-play-nice/ort122-runtime/lib:/opt/direct-play-nice/ort116-runtime/lib:/opt/cuda/lib64:/usr/lib:${LD_LIBRARY_PATH:-}" \
            --env "DPN_OCR_SKIP_CLS=1"
      - name: Run transcoding benchmark
        run: |
          bash scripts/transcode-tools/run_transcode_benchmark.sh \
            --bin target/release/direct_play_nice \
            --source "$BENCHMARK_SOURCE_PATH" \
            --run-dir benchmark_artifacts/transcode \
            --hw-accel nvenc \
            --video-quality 1080p \
            --sub-mode skip \
            --max-source-seconds 300 \
            --sample-ms 200
      # Copy summaries into flat, stable names for upload; each copy is
      # best-effort (`|| true`) so a missing summary doesn't fail the job.
      - name: Collect benchmark artifacts
        if: always()
        run: |
          [[ -f benchmark_artifacts/transcode/benchmark_summary.md ]] && cp -f benchmark_artifacts/transcode/benchmark_summary.md benchmark_artifacts/TRANSCODE_BENCHMARK.md || true
          [[ -f benchmark_artifacts/transcode/benchmark_summary.json ]] && cp -f benchmark_artifacts/transcode/benchmark_summary.json benchmark_artifacts/TRANSCODE_BENCHMARK.json || true
          [[ -f benchmark_artifacts/ocr/benchmark_summary.md ]] && cp -f benchmark_artifacts/ocr/benchmark_summary.md benchmark_artifacts/OCR_BENCHMARK.md || true
          [[ -f benchmark_artifacts/ocr/benchmark_summary.json ]] && cp -f benchmark_artifacts/ocr/benchmark_summary.json benchmark_artifacts/OCR_BENCHMARK.json || true
          [[ -f benchmark_artifacts/ocr/run.log ]] && cp -f benchmark_artifacts/ocr/run.log benchmark_artifacts/OCR_BENCHMARK.log || true
          [[ -f benchmark_artifacts/ocr/gpu_smi.csv ]] && cp -f benchmark_artifacts/ocr/gpu_smi.csv benchmark_artifacts/OCR_BENCHMARK_GPU.csv || true
      - name: Upload benchmark artifacts
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: artifacts-benchmarks
          path: |
            benchmark_artifacts/TRANSCODE_BENCHMARK.md
            benchmark_artifacts/TRANSCODE_BENCHMARK.json
            benchmark_artifacts/OCR_BENCHMARK.md
            benchmark_artifacts/OCR_BENCHMARK.json
            benchmark_artifacts/OCR_BENCHMARK.log
            benchmark_artifacts/OCR_BENCHMARK_GPU.csv
      # Self-hosted runner workspaces persist between jobs; reclaim disk.
      - name: Cleanup local benchmark outputs
        if: always()
        shell: bash
        run: |
          set -euo pipefail
          rm -rf benchmark_artifacts
# Determines if we should publish/announce
  host:
    needs:
      - plan
      - build-local-artifacts
      - build-global-artifacts
      - benchmarks
    # Only run if we're "publishing", and only if local and global didn't fail (skipped is fine)
    if: ${{ always() && needs.plan.outputs.publishing == 'true' && (needs.build-global-artifacts.result == 'skipped' || needs.build-global-artifacts.result == 'success') && (needs.build-local-artifacts.result == 'skipped' || needs.build-local-artifacts.result == 'success') && (needs.benchmarks.result == 'skipped' || needs.benchmarks.result == 'success') }}
    permissions:
      contents: write
      actions: read
    env:
      GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
    runs-on: "ubuntu-22.04"
    outputs:
      val: ${{ steps.host.outputs.manifest }}
    steps:
      - uses: actions/checkout@v4
        with:
          submodules: recursive
      # Reuse the cargo-dist binary the plan job uploaded.
      - name: Install cached cargo-dist
        uses: actions/download-artifact@v4
        with:
          name: cargo-dist-cache
          path: ~/.cargo/bin/
      # download-artifact does not preserve the executable bit.
      - run: chmod +x ~/.cargo/bin/cargo-dist
      # Fetch artifacts from scratch-storage
      - name: Fetch artifacts
        uses: actions/download-artifact@v4
        with:
          pattern: artifacts-*
          path: target/distrib/
          merge-multiple: true
      # Run the upload/release hosting steps and capture the final manifest,
      # which carries the announcement title/body used below.
      - id: host
        shell: bash
        run: |
          cargo dist host ${{ needs.plan.outputs.tag-flag }} --allow-dirty --steps=upload --steps=release --output-format=json > dist-manifest.json
          echo "artifacts uploaded and released successfully"
          cat dist-manifest.json
          echo "manifest=$(jq -c "." dist-manifest.json)" >> "$GITHUB_OUTPUT"
      - name: "Upload dist-manifest.json"
        uses: actions/upload-artifact@v4
        with:
          # Overwrite the previous copy
          name: artifacts-dist-manifest
          path: dist-manifest.json
      # Create a GitHub Release while uploading all files to it
      - name: "Download GitHub Artifacts"
        uses: actions/download-artifact@v4
        with:
          pattern: artifacts-*
          path: artifacts
          merge-multiple: true
      - name: Cleanup
        run: |
          # Remove the granular manifests
          rm -f artifacts/*-dist-manifest.json
      # Prefer a CHANGELOG.md section for the tag; fall back to the
      # cargo-dist-generated announcement body.
      - id: release-notes
        name: Resolve GitHub Release notes
        env:
          ANNOUNCEMENT_BODY: "${{ fromJson(steps.host.outputs.manifest).announcement_github_body }}"
          RELEASE_TAG: "${{ needs.plan.outputs.tag }}"
        shell: bash
        run: |
          set -euo pipefail
          notes_file="$RUNNER_TEMP/release-notes.md"
          if bash scripts/extract_changelog_release_notes.sh "$RELEASE_TAG" "$notes_file"; then
            echo "Using CHANGELOG.md notes for $RELEASE_TAG."
          else
            echo "::warning title=Using cargo-dist release notes::No concrete CHANGELOG.md section found for $RELEASE_TAG."
            printf '%s\n' "$ANNOUNCEMENT_BODY" > "$notes_file"
          fi
          echo "notes-file=$notes_file" >> "$GITHUB_OUTPUT"
      # Idempotent release creation: edit + clobber-upload if the release
      # already exists, otherwise create it. Targets the tag's commit when the
      # tag resolves, else the workflow's commit.
      - name: Create GitHub Release
        env:
          PRERELEASE_FLAG: "${{ fromJson(steps.host.outputs.manifest).announcement_is_prerelease && '--prerelease' || '' }}"
          ANNOUNCEMENT_TITLE: "${{ fromJson(steps.host.outputs.manifest).announcement_title }}"
          RELEASE_COMMIT: "${{ github.sha }}"
          RELEASE_TAG: "${{ needs.plan.outputs.tag }}"
          RELEASE_NOTES_FILE: "${{ steps.release-notes.outputs.notes-file }}"
        run: |
          git fetch --force --tags origin
          release_commit="$(git rev-list -n1 "$RELEASE_TAG" 2>/dev/null || true)"
          if [[ -z "$release_commit" ]]; then
            release_commit="$RELEASE_COMMIT"
          fi
          release_args=()
          if [[ -n "$PRERELEASE_FLAG" ]]; then
            release_args+=("$PRERELEASE_FLAG")
          fi
          if gh release view "$RELEASE_TAG" >/dev/null 2>&1; then
            gh release upload "$RELEASE_TAG" artifacts/* --clobber
            gh release edit "$RELEASE_TAG" \
              --target "$release_commit" \
              "${release_args[@]}" \
              --title "$ANNOUNCEMENT_TITLE" \
              --notes-file "$RELEASE_NOTES_FILE"
          else
            gh release create "$RELEASE_TAG" \
              --target "$release_commit" \
              "${release_args[@]}" \
              --title "$ANNOUNCEMENT_TITLE" \
              --notes-file "$RELEASE_NOTES_FILE" \
              artifacts/*
          fi
announce:
needs:
- plan
- host
# use "always() && ..." to allow us to wait for all publish jobs while
# still allowing individual publish jobs to skip themselves (for prereleases).
# "host" however must run to completion, no skipping allowed!
if: ${{ always() && needs.host.result == 'success' }}
runs-on: "ubuntu-22.04"
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps:
- uses: actions/checkout@v4
with:
submodules: recursive