use crate::backend::SecurityFeature;
use crate::backend::VersionInfo;
use crate::backend::asset_matcher::{self, Asset, AssetPicker, ChecksumFetcher};
use crate::backend::backend_type::BackendType;
use crate::backend::platform_target::PlatformTarget;
use crate::backend::static_helpers::{
get_filename_from_url, install_artifact, lookup_platform_key, lookup_platform_key_for_target,
template_string, try_with_v_prefix, try_with_v_prefix_and_repo, verify_artifact,
};
use crate::cli::args::{BackendArg, ToolVersionType};
use crate::config::{Config, Settings};
use crate::env;
use crate::file;
use crate::http::HTTP;
use crate::install_context::InstallContext;
use crate::lockfile::{PlatformInfo, ProvenanceType};
use crate::toolset::ToolVersionOptions;
use crate::toolset::{ToolRequest, ToolVersion};
use crate::{backend::Backend, forgejo, github, gitlab};
use async_trait::async_trait;
use eyre::Result;
use regex::Regex;
use std::collections::{BTreeMap, HashMap};
use std::fmt::Debug;
use std::sync::Arc;
use xx::regex;
/// Backend that installs tools from release assets hosted on GitHub,
/// GitLab, or Forgejo/Codeberg. Which forge is targeted is determined by
/// the wrapped [`BackendArg`]'s backend type (see `get_type`).
#[derive(Debug)]
pub struct UnifiedGitBackend {
    // Shared backend argument: tool name ("owner/repo"), options, etc.
    ba: Arc<BackendArg>,
}
/// A single downloadable release asset, resolved from a forge release.
struct ReleaseAsset {
    // Asset filename as published on the release.
    name: String,
    // Browser-facing download URL (may redirect or serve auth pages).
    url: String,
    // API download URL, used as a fallback or for self-hosted instances.
    url_api: String,
    // Digest reported by the forge API (GitHub only), if any.
    digest: Option<String>,
}
// Default API endpoints for the public instances of each forge; a tool may
// override these with the `api_url` option for self-hosted deployments.
const DEFAULT_GITHUB_API_BASE_URL: &str = "https://api.github.com";
const DEFAULT_GITLAB_API_BASE_URL: &str = "https://gitlab.com/api/v4";
const DEFAULT_FORGEJO_API_BASE_URL: &str = "https://codeberg.org/api/v1";
/// Outcome of an attestation/provenance verification attempt, used as the
/// error type by the `try_verify_*` helpers.
enum VerificationStatus {
    // No attestation material exists for the artifact — not fatal; the
    // caller may fall through to the next verification mechanism.
    NoAttestations,
    // Verification was attempted but failed with the given message.
    Error(String),
}
fn is_slsa_format_issue(e: &sigstore_verification::AttestationError) -> bool {
match e {
sigstore_verification::AttestationError::NoAttestations => true,
sigstore_verification::AttestationError::Verification(msg) => {
msg.contains("does not contain valid attestations")
|| msg.contains("No certificate found")
|| msg.contains("neither DSSE envelope nor message signature")
}
_ => false,
}
}
/// Tool option keys that influence how an asset is selected and installed,
/// and therefore must be considered at install time.
pub fn install_time_option_keys() -> Vec<String> {
    ["asset_pattern", "url", "version_prefix", "no_app"]
        .iter()
        .map(|key| key.to_string())
        .collect()
}
#[async_trait]
impl Backend for UnifiedGitBackend {
/// Maps the wrapped backend arg onto the forge this backend talks to,
/// defaulting to GitHub.
fn get_type(&self) -> BackendType {
    match (self.is_gitlab(), self.is_forgejo()) {
        (true, _) => BackendType::Gitlab,
        (_, true) => BackendType::Forgejo,
        _ => BackendType::Github,
    }
}
/// Accessor for the backend argument this backend was constructed from.
fn ba(&self) -> &Arc<BackendArg> {
    &self.ba
}
/// Inspects the most recent GitHub release to report which security
/// features (checksums, GitHub attestations, SLSA provenance) appear to be
/// available for this tool. Detection is heuristic, based on asset names.
///
/// Best-effort: network errors yield an empty feature list. GitLab and
/// Forgejo are not supported and always report no features.
async fn security_info(&self) -> Vec<SecurityFeature> {
    if self.is_gitlab() || self.is_forgejo() {
        return vec![];
    }
    let mut features = vec![];
    let repo = self.ba.tool_name();
    let opts = self.ba.opts();
    let api_url = self.get_api_url(&opts);
    let releases = github::list_releases_from_url(api_url.as_str(), &repo)
        .await
        .unwrap_or_default();
    // Only the latest release is inspected; all three checks share it, so a
    // single `if let` replaces the previous three redundant Option checks.
    if let Some(release) = releases.first() {
        let has_checksum = release.assets.iter().any(|a| {
            let name = a.name.to_lowercase();
            name.contains("sha256")
                || name.contains("checksum")
                || name.ends_with(".sha256")
                || name.ends_with(".sha512")
        });
        if has_checksum {
            features.push(SecurityFeature::Checksum {
                algorithm: Some("sha256".to_string()),
            });
        }
        let has_attestations = release.assets.iter().any(|a| {
            let name = a.name.to_lowercase();
            name.ends_with(".sigstore.json") || name.ends_with(".sigstore")
        });
        if has_attestations {
            features.push(SecurityFeature::GithubAttestations {
                signer_workflow: None,
            });
        }
        let has_slsa = release.assets.iter().any(|a| {
            let name = a.name.to_lowercase();
            name.contains(".intoto.jsonl")
                || name.contains("provenance")
                || name.ends_with(".attestation")
        });
        if has_slsa {
            features.push(SecurityFeature::Slsa { level: None });
        }
    }
    features
}
/// Lists all remote releases for the repo and normalizes their tags into
/// mise version strings, attaching a human-facing release URL to each.
/// Tags that do not parse as plain versions are warned about and dropped.
async fn _list_remote_versions(&self, config: &Arc<Config>) -> Result<Vec<VersionInfo>> {
    let repo = self.ba.tool_name();
    let id = self.ba.to_string();
    // Config-level tool options take precedence over the backend arg's own.
    let opts = config
        .get_tool_opts(&self.ba)
        .await?
        .unwrap_or_else(|| self.ba.opts());
    let api_url = self.get_api_url(&opts);
    let version_prefix = opts.get("version_prefix");
    // Derive the browsable web URL base from the API URL: public instances
    // use the well-known host, self-hosted ones get the API path stripped.
    let web_url_base = if self.is_gitlab() {
        if api_url == DEFAULT_GITLAB_API_BASE_URL {
            format!("https://gitlab.com/{}", repo)
        } else {
            let web_url = api_url.replace("/api/v4", "");
            format!("{}/{}", web_url, repo)
        }
    } else if self.is_forgejo() {
        if api_url == DEFAULT_FORGEJO_API_BASE_URL {
            format!("https://codeberg.org/{}", repo)
        } else {
            let web_url = api_url.replace("/api/v1", "");
            format!("{}/{}", web_url, repo)
        }
    } else if api_url == DEFAULT_GITHUB_API_BASE_URL {
        format!("https://github.com/{}", repo)
    } else {
        // GitHub Enterprise style, e.g. https://github.example.com/api/v3
        // or https://api.github.example.com.
        let web_url = api_url.replace("/api/v3", "").replace("api.", "");
        format!("{}/{}", web_url, repo)
    };
    // Fetch and map releases per forge; when version_prefix is set, only
    // tags carrying that prefix are considered.
    let raw_versions: Vec<VersionInfo> = if self.is_gitlab() {
        gitlab::list_releases_from_url(api_url.as_str(), &repo)
            .await?
            .into_iter()
            .filter(|r| version_prefix.is_none_or(|p| r.tag_name.starts_with(p)))
            .map(|r| VersionInfo {
                version: self.strip_version_prefix(&r.tag_name, &opts),
                created_at: r.released_at,
                release_url: Some(format!("{}/-/releases/{}", web_url_base, r.tag_name)),
                ..Default::default()
            })
            .collect()
    } else if self.is_forgejo() {
        forgejo::list_releases_from_url(api_url.as_str(), &repo)
            .await?
            .into_iter()
            .filter(|r| version_prefix.is_none_or(|p| r.tag_name.starts_with(p)))
            .map(|r| VersionInfo {
                version: self.strip_version_prefix(&r.tag_name, &opts),
                created_at: Some(r.created_at),
                release_url: Some(format!("{}/releases/tag/{}", web_url_base, r.tag_name)),
                ..Default::default()
            })
            .collect()
    } else {
        github::list_releases_from_url(api_url.as_str(), &repo)
            .await?
            .into_iter()
            .filter(|r| version_prefix.is_none_or(|p| r.tag_name.starts_with(p)))
            .map(|r| VersionInfo {
                version: self.strip_version_prefix(&r.tag_name, &opts),
                created_at: Some(r.created_at),
                release_url: Some(format!("{}/releases/tag/{}", web_url_base, r.tag_name)),
                ..Default::default()
            })
            .collect()
    };
    // Drop anything that is not a plain version (e.g. "latest", branches),
    // then reverse so the list runs oldest-to-newest (APIs return
    // newest-first — presumably; confirm against the forge clients).
    let versions = raw_versions
        .into_iter()
        .filter(|v| match v.version.parse::<ToolVersionType>() {
            Ok(ToolVersionType::Version(_)) => true,
            _ => {
                warn!("Invalid version: {id}@{}", v.version);
                false
            }
        })
        .rev()
        .collect();
    Ok(versions)
}
/// Resolves the latest stable version, preferring the forge's own "latest
/// release" endpoint and falling back to the newest listed version.
async fn latest_stable_version(&self, config: &Arc<Config>) -> eyre::Result<Option<String>> {
    if Settings::get().offline() {
        trace!("Skipping latest stable version due to offline mode");
        return Ok(None);
    }
    let repo = self.ba.tool_name();
    let opts = config
        .get_tool_opts(&self.ba)
        .await?
        .unwrap_or_else(|| self.ba.opts());
    let api_url = self.get_api_url(&opts);
    let version_prefix = opts.get("version_prefix");
    let latest_tag = if self.is_gitlab() {
        // GitLab has no "latest release" endpoint here; short-circuit to
        // the generic version resolution instead.
        return self.latest_version(config, Some("latest".into())).await;
    } else if self.is_forgejo() {
        match forgejo::get_release_for_url(&api_url, &repo, "latest").await {
            Ok(r) => Some(r.tag_name),
            Err(e) => {
                debug!("Failed to fetch latest Forgejo release for {repo}: {e}");
                None
            }
        }
    } else {
        match github::get_release_for_url(&api_url, &repo, "latest").await {
            Ok(r) => Some(r.tag_name),
            Err(e) => {
                debug!("Failed to fetch latest GitHub release for {repo}: {e}");
                None
            }
        }
    };
    // Discard the tag if it doesn't carry the configured prefix, then
    // normalize it; on any miss, fall back to generic resolution.
    let latest_version = latest_tag
        .filter(|tag| version_prefix.is_none_or(|p| tag.starts_with(p)))
        .map(|tag| self.strip_version_prefix(&tag, &opts));
    match latest_version {
        Some(version) => Ok(Some(version)),
        None => self.latest_version(config, Some("latest".into())).await,
    }
}
/// Installs the requested tool version: resolves (or reuses from the
/// lockfile) the release asset for the current platform, then downloads,
/// verifies, and installs it.
async fn install_version_(
    &self,
    ctx: &InstallContext,
    mut tv: ToolVersion,
) -> Result<ToolVersion> {
    let repo = self.repo();
    let opts = ctx
        .config
        .get_tool_opts(&self.ba)
        .await?
        .unwrap_or_else(|| self.ba.opts());
    let api_url = self.get_api_url(&opts);
    let platform_key = self.get_platform_key();
    // Prefer the URL pinned in the lockfile for this platform so repeat
    // installs are reproducible; otherwise resolve the asset via the API.
    let asset = if let Some(existing_platform) = tv.lock_platforms.get(&platform_key)
        && existing_platform.url.is_some()
    {
        debug!(
            "Using existing URL from lockfile for platform {}: {}",
            platform_key,
            existing_platform.url.clone().unwrap_or_default()
        );
        ReleaseAsset {
            name: get_filename_from_url(existing_platform.url.as_deref().unwrap_or("")),
            url: existing_platform.url.clone().unwrap_or_default(),
            url_api: existing_platform.url_api.clone().unwrap_or_default(),
            // Checksum, if any, already lives in the lockfile entry.
            digest: None,
        }
    } else {
        self.resolve_asset_url(&tv, &opts, &repo, &api_url).await?
    };
    self.download_and_install(ctx, &mut tv, &asset, &opts)
        .await?;
    Ok(tv)
}
/// Returns the directories containing this tool's executables. When bins
/// are filtered (or a previous install already produced the curated
/// `.mise-bins` symlink dir), only that directory is exposed; otherwise the
/// bin paths are discovered from the install layout.
async fn list_bin_paths(
    &self,
    _config: &Arc<Config>,
    tv: &ToolVersion,
) -> Result<Vec<std::path::PathBuf>> {
    let mise_bins_dir = tv.install_path().join(".mise-bins");
    let use_curated_dir = self.get_filter_bins(tv).is_some() || mise_bins_dir.is_dir();
    if use_curated_dir {
        Ok(vec![mise_bins_dir])
    } else {
        self.discover_bin_paths(tv)
    }
}
fn resolve_lockfile_options(
&self,
request: &ToolRequest,
_target: &PlatformTarget,
) -> BTreeMap<String, String> {
let opts = request.options();
let mut result = BTreeMap::new();
for key in ["asset_pattern", "url", "version_prefix"] {
if let Some(value) = opts.get(key) {
result.insert(key.to_string(), value.to_string());
}
}
result
}
/// Resolves the lockfile platform entry (URL, checksum, provenance) for a
/// given target platform without installing anything.
async fn resolve_lock_info(
    &self,
    tv: &ToolVersion,
    target: &PlatformTarget,
) -> Result<PlatformInfo> {
    let repo = self.repo();
    let opts = tv.request.options();
    let api_url = self.get_api_url(&opts);
    let asset = self
        .resolve_asset_url_for_target(tv, &opts, &repo, &api_url, target)
        .await;
    match asset {
        Ok(asset) => {
            // Provenance detection is GitHub-only.
            let mut provenance = if !self.is_gitlab() && !self.is_forgejo() {
                self.detect_provenance_type(
                    tv,
                    &opts,
                    &repo,
                    &api_url,
                    asset.digest.as_deref(),
                    target,
                )
                .await
            } else {
                None
            };
            // Only verify at lock time when locking for the platform we are
            // running on — cross-platform artifacts can't be verified here.
            if provenance.is_some() && target.is_current() {
                match self
                    .verify_provenance_at_lock_time(tv, &opts, &repo, &api_url, &asset)
                    .await
                {
                    Ok(verified) => provenance = Some(verified),
                    Err(e) => {
                        // Don't record unverified provenance; install-time
                        // verification will re-attempt it.
                        warn!(
                            "lock-time provenance verification failed for {}, \
                             will be verified at install time: {e}",
                            self.ba.full()
                        );
                        provenance = None;
                    }
                }
            }
            Ok(PlatformInfo {
                url: Some(asset.url),
                url_api: Some(asset.url_api),
                checksum: asset.digest,
                provenance,
                ..Default::default()
            })
        }
        Err(e) => {
            // Resolution failures are non-fatal here: emit an empty entry
            // so locking other platforms can proceed.
            debug!(
                "Failed to resolve asset for {} on {}: {}",
                self.ba.full(),
                target.to_key(),
                e
            );
            Ok(PlatformInfo::default())
        }
    }
}
}
impl UnifiedGitBackend {
/// Constructs the backend from a parsed backend argument.
pub fn from_arg(ba: BackendArg) -> Self {
    Self { ba: Arc::new(ba) }
}
/// Detects, without downloading the artifact, which provenance mechanism
/// (GitHub attestations via the API, or SLSA provenance files among the
/// release assets) is available for the asset. Returns `None` when neither
/// applies or the relevant settings are disabled.
async fn detect_provenance_type(
    &self,
    tv: &ToolVersion,
    opts: &ToolVersionOptions,
    repo: &str,
    api_url: &str,
    asset_digest: Option<&str>,
    target: &PlatformTarget,
) -> Option<ProvenanceType> {
    let settings = Settings::get();
    let version = &tv.version;
    let version_prefix = opts.get("version_prefix");
    // Releases may be tagged with or without a leading "v"; try both.
    let release =
        try_with_v_prefix_and_repo(version, version_prefix, Some(repo), |candidate| {
            let api_url = api_url.to_string();
            let repo = repo.to_string();
            async move { github::get_release_for_url(&api_url, &repo, &candidate).await }
        })
        .await
        .ok()?;
    // GitHub attestations require both the global and the github-specific
    // setting, plus a digest to query the attestation API with.
    if settings.github_attestations
        && settings.github.github_attestations
        && let Some(digest) = asset_digest
    {
        let parts: Vec<&str> = repo.split('/').collect();
        if parts.len() == 2 {
            let (owner, repo_name) = (parts[0], parts[1]);
            match sigstore_verification::sources::github::GitHubSource::new(
                owner,
                repo_name,
                env::GITHUB_TOKEN.as_deref(),
            ) {
                Ok(source) => {
                    use sigstore_verification::AttestationSource;
                    let artifact_ref = sigstore_verification::ArtifactRef::from_digest(digest);
                    match source.fetch_attestations(&artifact_ref).await {
                        Ok(attestations) if !attestations.is_empty() => {
                            return Some(ProvenanceType::GithubAttestations);
                        }
                        // No attestations: fall through to SLSA detection.
                        Ok(_) => {}
                        Err(e) => {
                            warn!(
                                "GitHub attestation API query failed for {owner}/{repo_name}: {e}. \
                                 Lockfile may not record github-attestations provenance."
                            );
                        }
                    }
                }
                Err(e) => {
                    warn!(
                        "Failed to create GitHub attestation source for {owner}/{repo_name}: {e}. \
                         Lockfile may not record github-attestations provenance."
                    );
                }
            }
        }
    }
    // SLSA: look for a provenance file among the release assets that
    // matches the target platform.
    if settings.slsa && settings.github.slsa {
        let asset_names: Vec<String> = release.assets.iter().map(|a| a.name.clone()).collect();
        let picker = AssetPicker::with_libc(
            target.os_name().to_string(),
            target.arch_name().to_string(),
            target.qualifier().map(|s| s.to_string()),
        );
        if let Some(provenance_name) = picker.pick_best_provenance(&asset_names) {
            let url = release
                .assets
                .iter()
                .find(|a| a.name == provenance_name)
                .map(|a| a.browser_download_url.clone());
            return Some(ProvenanceType::Slsa { url });
        }
    }
    None
}
/// Downloads the asset to a temp dir and verifies its provenance at lock
/// time, preferring GitHub attestations and falling back to SLSA. Returns
/// the verified provenance type, or an error if nothing could be verified
/// (callers then defer verification to install time).
async fn verify_provenance_at_lock_time(
    &self,
    tv: &ToolVersion,
    opts: &ToolVersionOptions,
    repo: &str,
    api_url: &str,
    asset: &ReleaseAsset,
) -> Result<ProvenanceType> {
    let tmp_dir = tempfile::tempdir()?;
    let filename = get_filename_from_url(&asset.url);
    let artifact_path = tmp_dir.path().join(&filename);
    info!(
        "downloading artifact for lock-time provenance verification: {}",
        filename
    );
    // NOTE(review): GitLab downloads use the browser URL while other forges
    // use the API URL — presumably due to GitLab auth semantics; confirm.
    let download_url = if self.is_gitlab() {
        asset.url.clone()
    } else {
        asset.url_api.clone()
    };
    // Forge-specific auth/accept headers.
    let headers = if self.is_gitlab() {
        gitlab::get_headers(&download_url)
    } else if self.is_forgejo() {
        forgejo::get_headers(&download_url)
    } else {
        github::get_headers(&download_url)
    };
    HTTP.download_file_with_headers(&download_url, &artifact_path, &headers, None)
        .await?;
    let settings = Settings::get();
    // First attempt: GitHub artifact attestations.
    if settings.github_attestations && settings.github.github_attestations {
        let parts: Vec<&str> = repo.split('/').collect();
        if parts.len() == 2 {
            let (owner, repo_name) = (parts[0], parts[1]);
            match sigstore_verification::verify_github_attestation(
                &artifact_path,
                owner,
                repo_name,
                env::GITHUB_TOKEN.as_deref(),
                None,
            )
            .await
            {
                Ok(true) => {
                    debug!("lock-time GitHub attestations verified for {}", repo);
                    return Ok(ProvenanceType::GithubAttestations);
                }
                Ok(false) => {
                    return Err(eyre::eyre!(
                        "GitHub artifact attestations verification returned false"
                    ));
                }
                // Missing attestations are not fatal — fall through to SLSA.
                Err(sigstore_verification::AttestationError::NoAttestations) => {
                    debug!("no GitHub attestations found at lock time, trying SLSA");
                }
                Err(e) => {
                    return Err(eyre::eyre!(
                        "GitHub artifact attestations verification failed: {e}"
                    ));
                }
            }
        }
    }
    // Second attempt: SLSA provenance asset published alongside the release.
    if settings.slsa && settings.github.slsa {
        let version = &tv.version;
        let version_prefix = opts.get("version_prefix");
        // Releases may be tagged with or without a leading "v"; try both.
        let release =
            try_with_v_prefix_and_repo(version, version_prefix, Some(repo), |candidate| {
                let api_url = api_url.to_string();
                let repo = repo.to_string();
                async move { github::get_release_for_url(&api_url, &repo, &candidate).await }
            })
            .await?;
        let asset_names: Vec<String> = release.assets.iter().map(|a| a.name.clone()).collect();
        let current_platform = PlatformTarget::from_current();
        let picker = AssetPicker::with_libc(
            current_platform.os_name().to_string(),
            current_platform.arch_name().to_string(),
            current_platform.qualifier().map(|s| s.to_string()),
        );
        if let Some(provenance_name) = picker.pick_best_provenance(&asset_names) {
            let provenance_asset = release
                .assets
                .iter()
                .find(|a| a.name == provenance_name)
                .expect("provenance asset should exist since we found its name");
            let provenance_path = tmp_dir.path().join(&provenance_asset.name);
            HTTP.download_file(
                &provenance_asset.browser_download_url,
                &provenance_path,
                None,
            )
            .await?;
            let provenance_url = provenance_asset.browser_download_url.clone();
            match sigstore_verification::verify_slsa_provenance(
                &artifact_path,
                &provenance_path,
                1u8,
            )
            .await
            {
                Ok(true) => {
                    debug!("lock-time SLSA provenance verified for {}", repo);
                    return Ok(ProvenanceType::Slsa {
                        url: Some(provenance_url),
                    });
                }
                Ok(false) => {
                    return Err(eyre::eyre!("SLSA provenance verification failed"));
                }
                Err(e) => {
                    // Unverifiable formats are tolerated; real verification
                    // errors are fatal.
                    if is_slsa_format_issue(&e) {
                        debug!("SLSA provenance file not in verifiable format: {e}");
                    } else {
                        return Err(eyre::eyre!("SLSA verification error: {e}"));
                    }
                }
            }
        }
    }
    Err(eyre::eyre!(
        "provenance was detected but could not be verified at lock time"
    ))
}
/// True when this backend targets a GitLab instance.
fn is_gitlab(&self) -> bool {
    matches!(self.ba.backend_type(), BackendType::Gitlab)
}
/// True when this backend targets a Forgejo/Codeberg instance.
fn is_forgejo(&self) -> bool {
    matches!(self.ba.backend_type(), BackendType::Forgejo)
}
/// The "owner/repo" slug this backend installs from.
fn repo(&self) -> String {
    self.ba.tool_name()
}
/// Joins asset names into a comma-separated list for error messages.
fn format_asset_list<'a, I>(assets: I) -> String
where
    I: Iterator<Item = &'a String>,
{
    // Borrow each name as &str; the previous version cloned every String
    // only to immediately join and drop the copies.
    assets.map(String::as_str).collect::<Vec<_>>().join(", ")
}
/// Returns the API base URL for this tool: the `api_url` option when set,
/// otherwise the public default for the backing forge.
fn get_api_url(&self, opts: &ToolVersionOptions) -> String {
    let default_url = if self.is_gitlab() {
        DEFAULT_GITLAB_API_BASE_URL
    } else if self.is_forgejo() {
        DEFAULT_FORGEJO_API_BASE_URL
    } else {
        DEFAULT_GITHUB_API_BASE_URL
    };
    opts.get("api_url").unwrap_or(default_url).to_string()
}
/// Downloads the resolved asset, verifies it (checksum, then provenance),
/// installs it, and records URL/checksum/provenance in the lockfile entry.
async fn download_and_install(
    &self,
    ctx: &InstallContext,
    tv: &mut ToolVersion,
    asset: &ReleaseAsset,
    opts: &ToolVersionOptions,
) -> Result<()> {
    let filename = asset.name.clone();
    let file_path = tv.download_path().join(&filename);
    // A checksum may come from a platform-specific key or a generic option.
    let has_checksum = lookup_platform_key(opts, "checksum")
        .or_else(|| opts.get("checksum").map(|s| s.to_string()))
        .is_some();
    let platform_key = self.get_platform_key();
    let platform_info = tv.lock_platforms.entry(platform_key).or_default();
    platform_info.url = Some(asset.url.clone());
    platform_info.url_api = Some(asset.url_api.clone());
    if let Some(digest) = &asset.digest {
        debug!("using GitHub API digest for checksum verification");
        platform_info.checksum = Some(digest.clone());
    }
    // Choose between the browser URL and the API URL. Public instances
    // probe the browser URL first (a text/html response usually means an
    // auth page); custom instances always go through their API.
    let url = if asset.url_api.starts_with(DEFAULT_GITHUB_API_BASE_URL)
        || asset.url_api.starts_with(DEFAULT_GITLAB_API_BASE_URL)
        || asset.url_api.starts_with(DEFAULT_FORGEJO_API_BASE_URL)
    {
        match HTTP.head(asset.url.clone()).await {
            Ok(resp) => {
                let content_type = resp
                    .headers()
                    .get("content-type")
                    .and_then(|v| v.to_str().ok())
                    .unwrap_or("");
                if content_type.contains("text/html") {
                    debug!("Browser URL returned HTML (likely auth page), using API URL");
                    asset.url_api.clone()
                } else {
                    asset.url.clone()
                }
            }
            Err(_) => asset.url_api.clone(),
        }
    } else {
        debug!(
            "Since the tool resides on a custom GitHub/GitLab API ({:?}), the asset download will be performed using the given API instead of browser URL download",
            asset.url_api
        );
        asset.url_api.clone()
    };
    let headers = if self.is_gitlab() {
        gitlab::get_headers(&url)
    } else if self.is_forgejo() {
        forgejo::get_headers(&url)
    } else {
        github::get_headers(&url)
    };
    // Show the actual filename in the progress message; the previous
    // `format!("download (unknown)")` interpolated nothing (clippy
    // useless_format) and hid the file being fetched.
    ctx.pr.set_message(format!("download {filename}"));
    HTTP.download_file_with_headers(url, &file_path, &headers, Some(ctx.pr.as_ref()))
        .await?;
    ctx.pr.next_operation();
    if has_checksum {
        verify_artifact(tv, &file_path, opts, Some(ctx.pr.as_ref()))?;
    }
    let platform_key = self.get_platform_key();
    // When the lockfile already pins both a checksum and provenance, the
    // expensive provenance verification can be skipped (unless forced).
    let has_lockfile_integrity = tv
        .lock_platforms
        .get(&platform_key)
        .is_some_and(|pi| pi.checksum.is_some() && pi.provenance.is_some());
    self.verify_checksum(ctx, tv, &file_path)?;
    let settings = Settings::get();
    let force_verify = settings.force_provenance_verify();
    if has_lockfile_integrity && !force_verify {
        // Still ensure the matching verification setting hasn't been
        // disabled since the lockfile was written (downgrade protection).
        self.ensure_provenance_setting_enabled(tv, &platform_key)?;
    } else {
        let provenance_result = self
            .verify_attestations_or_slsa(ctx, tv, &file_path)
            .await?;
        if let Some(provenance_type) = provenance_result {
            let platform_info = tv.lock_platforms.entry(platform_key).or_default();
            platform_info.provenance = Some(provenance_type);
        }
    }
    ctx.pr.next_operation();
    install_artifact(tv, &file_path, opts, Some(ctx.pr.as_ref()))?;
    if let Some(bins) = self.get_filter_bins(tv) {
        self.create_symlink_bin_dir(tv, bins)?;
    }
    Ok(())
}
/// Discovers which directories inside the install path contain executables,
/// trying in order: an explicit `bin_path` option, a top-level `bin/` dir,
/// a macOS `Contents/MacOS` bundle dir, executables at the install root,
/// then executables inside first-level subdirectories. Falls back to the
/// install root itself when nothing matches.
fn discover_bin_paths(&self, tv: &ToolVersion) -> Result<Vec<std::path::PathBuf>> {
    let opts = tv.request.options();
    // Explicit override always wins; the template may reference version etc.
    if let Some(bin_path_template) = lookup_platform_key(&opts, "bin_path")
        .or_else(|| opts.get("bin_path").map(|s| s.to_string()))
    {
        let bin_path = template_string(&bin_path_template, tv);
        return Ok(vec![tv.install_path().join(&bin_path)]);
    }
    let bin_path = tv.install_path().join("bin");
    if bin_path.exists() {
        return Ok(vec![bin_path]);
    }
    let contents_macos = tv.install_path().join("Contents").join("MacOS");
    if contents_macos.is_dir() {
        return Ok(vec![contents_macos]);
    }
    // Any executable file directly at the root means the root is the bin dir.
    if let Ok(entries) = std::fs::read_dir(tv.install_path()) {
        for entry in entries.flatten() {
            let path = entry.path();
            if path.is_file() && file::is_executable(&path) {
                return Ok(vec![tv.install_path()]);
            }
        }
    }
    // Otherwise scan one level of subdirectories for bundles, bin/ dirs,
    // or loose executables.
    let mut paths = Vec::new();
    if let Ok(entries) = std::fs::read_dir(tv.install_path()) {
        for entry in entries.flatten() {
            let path = entry.path();
            if path.is_dir() {
                let path_str = path.file_name().unwrap_or_default().to_string_lossy();
                if path_str.ends_with(".app") {
                    let macos_dir = path.join("Contents").join("MacOS");
                    if macos_dir.is_dir() {
                        paths.push(macos_dir);
                        continue;
                    }
                }
                let sub_bin_path = path.join("bin");
                if sub_bin_path.exists() {
                    paths.push(sub_bin_path);
                } else if let Ok(sub_entries) = std::fs::read_dir(&path) {
                    // collapsed from `else { if let … }` (clippy
                    // collapsible_else_if); behavior unchanged.
                    for sub_entry in sub_entries.flatten() {
                        let sub_path = sub_entry.path();
                        if sub_path.is_file() && file::is_executable(&sub_path) {
                            paths.push(path.clone());
                            break;
                        }
                    }
                }
            }
        }
    }
    if paths.is_empty() {
        Ok(vec![tv.install_path()])
    } else {
        Ok(paths)
    }
}
/// Convenience wrapper: resolves the release asset for the platform mise is
/// currently running on.
async fn resolve_asset_url(
    &self,
    tv: &ToolVersion,
    opts: &ToolVersionOptions,
    repo: &str,
    api_url: &str,
) -> Result<ReleaseAsset> {
    let current_platform = PlatformTarget::from_current();
    self.resolve_asset_url_for_target(tv, opts, repo, api_url, &current_platform)
        .await
}
/// Resolves the release asset for an arbitrary target platform. An explicit
/// per-platform `url` option bypasses the forge API entirely; otherwise the
/// forge-specific resolver is called, retrying the version with and without
/// a leading "v" since tag conventions vary between repos.
async fn resolve_asset_url_for_target(
    &self,
    tv: &ToolVersion,
    opts: &ToolVersionOptions,
    repo: &str,
    api_url: &str,
    target: &PlatformTarget,
) -> Result<ReleaseAsset> {
    if let Some(direct_url) = lookup_platform_key_for_target(opts, "url", target) {
        return Ok(ReleaseAsset {
            name: get_filename_from_url(&direct_url),
            url: direct_url.clone(),
            url_api: direct_url.clone(),
            // No forge metadata available for direct URLs.
            digest: None,
        });
    }
    let version = &tv.version;
    let version_prefix = opts.get("version_prefix");
    if self.is_gitlab() {
        try_with_v_prefix(version, version_prefix, |candidate| async move {
            self.resolve_gitlab_asset_url_for_target(
                tv, opts, repo, api_url, &candidate, target,
            )
            .await
        })
        .await
    } else if self.is_forgejo() {
        try_with_v_prefix(version, version_prefix, |candidate| async move {
            self.resolve_forgejo_asset_url_for_target(
                tv, opts, repo, api_url, &candidate, target,
            )
            .await
        })
        .await
    } else {
        try_with_v_prefix_and_repo(
            version,
            version_prefix,
            Some(repo),
            |candidate| async move {
                self.resolve_github_asset_url_for_target(
                    tv, opts, repo, api_url, &candidate, target,
                )
                .await
            },
        )
        .await
    }
}
/// Picks the right asset from a GitHub release: an explicit `asset_pattern`
/// (templated, glob-style) takes precedence; otherwise the automatic
/// platform matcher chooses. A digest is taken from the API when present,
/// else fetched from sibling checksum files.
async fn resolve_github_asset_url_for_target(
    &self,
    tv: &ToolVersion,
    opts: &ToolVersionOptions,
    repo: &str,
    api_url: &str,
    version: &str,
    target: &PlatformTarget,
) -> Result<ReleaseAsset> {
    let release = github::get_release_for_url(api_url, repo, version).await?;
    let available_assets: Vec<String> = release.assets.iter().map(|a| a.name.clone()).collect();
    let assets_with_urls: Vec<Asset> = release
        .assets
        .iter()
        .map(|a| Asset::new(&a.name, &a.browser_download_url))
        .collect();
    if let Some(pattern) = lookup_platform_key_for_target(opts, "asset_pattern", target)
        .or_else(|| opts.get("asset_pattern").map(|s| s.to_string()))
    {
        // The pattern may reference version/os/arch template variables.
        let templated_pattern = template_string_for_target(&pattern, tv, target);
        let asset = release
            .assets
            .into_iter()
            .find(|a| self.matches_pattern(&a.name, &templated_pattern))
            .ok_or_else(|| {
                eyre::eyre!(
                    "No matching asset found for pattern: {}\nAvailable assets: {}",
                    templated_pattern,
                    Self::format_asset_list(available_assets.iter())
                )
            })?;
        // Prefer the API-reported digest; fall back to checksum files
        // published alongside the asset.
        let digest = if asset.digest.is_some() {
            asset.digest
        } else {
            self.try_fetch_checksum_from_assets(&assets_with_urls, &asset.name)
                .await
        };
        return Ok(ReleaseAsset {
            name: asset.name,
            url: asset.browser_download_url,
            url_api: asset.url,
            digest,
        });
    }
    // No explicit pattern: let the matcher auto-detect by platform.
    let no_app = opts
        .get("no_app")
        .and_then(|v| v.parse::<bool>().ok())
        .unwrap_or(false);
    let asset_name = asset_matcher::AssetMatcher::new()
        .for_target(target)
        .with_no_app(no_app)
        .pick_from(&available_assets)?
        .name;
    let asset = self
        .find_asset_case_insensitive(&release.assets, &asset_name, |a| &a.name)
        .ok_or_else(|| {
            eyre::eyre!(
                "Auto-detected asset not found: {}\nAvailable assets: {}",
                asset_name,
                Self::format_asset_list(available_assets.iter())
            )
        })?;
    let digest = if asset.digest.is_some() {
        asset.digest.clone()
    } else {
        self.try_fetch_checksum_from_assets(&assets_with_urls, &asset.name)
            .await
    };
    Ok(ReleaseAsset {
        name: asset.name.clone(),
        url: asset.browser_download_url.clone(),
        url_api: asset.url.clone(),
        digest,
    })
}
/// GitLab counterpart of the GitHub resolver. Assets live under release
/// links; GitLab's API reports no digest, so checksums only come from
/// sibling checksum files.
async fn resolve_gitlab_asset_url_for_target(
    &self,
    tv: &ToolVersion,
    opts: &ToolVersionOptions,
    repo: &str,
    api_url: &str,
    version: &str,
    target: &PlatformTarget,
) -> Result<ReleaseAsset> {
    let release = gitlab::get_release_for_url(api_url, repo, version).await?;
    let available_assets: Vec<String> = release
        .assets
        .links
        .iter()
        .map(|a| a.name.clone())
        .collect();
    let assets_with_urls: Vec<Asset> = release
        .assets
        .links
        .iter()
        .map(|a| Asset::new(&a.name, &a.direct_asset_url))
        .collect();
    if let Some(pattern) = lookup_platform_key_for_target(opts, "asset_pattern", target)
        .or_else(|| opts.get("asset_pattern").map(|s| s.to_string()))
    {
        // The pattern may reference version/os/arch template variables.
        let templated_pattern = template_string_for_target(&pattern, tv, target);
        let asset = release
            .assets
            .links
            .into_iter()
            .find(|a| self.matches_pattern(&a.name, &templated_pattern))
            .ok_or_else(|| {
                eyre::eyre!(
                    "No matching asset found for pattern: {}\nAvailable assets: {}",
                    templated_pattern,
                    Self::format_asset_list(available_assets.iter())
                )
            })?;
        let digest = self
            .try_fetch_checksum_from_assets(&assets_with_urls, &asset.name)
            .await;
        return Ok(ReleaseAsset {
            name: asset.name,
            url: asset.direct_asset_url.clone(),
            url_api: asset.url,
            digest,
        });
    }
    // No explicit pattern: let the matcher auto-detect by platform.
    let no_app = opts
        .get("no_app")
        .and_then(|v| v.parse::<bool>().ok())
        .unwrap_or(false);
    let asset_name = asset_matcher::AssetMatcher::new()
        .for_target(target)
        .with_no_app(no_app)
        .pick_from(&available_assets)?
        .name;
    let asset = self
        .find_asset_case_insensitive(&release.assets.links, &asset_name, |a| &a.name)
        .ok_or_else(|| {
            eyre::eyre!(
                "Auto-detected asset not found: {}\nAvailable assets: {}",
                asset_name,
                Self::format_asset_list(available_assets.iter())
            )
        })?;
    let digest = self
        .try_fetch_checksum_from_assets(&assets_with_urls, &asset.name)
        .await;
    Ok(ReleaseAsset {
        name: asset.name.clone(),
        url: asset.direct_asset_url.clone(),
        url_api: asset.url.clone(),
        digest,
    })
}
/// Forgejo/Codeberg counterpart of the GitHub resolver. The API download
/// URL is synthesized from the asset attachment UUID since Forgejo does not
/// return one directly.
async fn resolve_forgejo_asset_url_for_target(
    &self,
    tv: &ToolVersion,
    opts: &ToolVersionOptions,
    repo: &str,
    api_url: &str,
    version: &str,
    target: &PlatformTarget,
) -> Result<ReleaseAsset> {
    let release = forgejo::get_release_for_url(api_url, repo, version).await?;
    let available_assets: Vec<String> = release.assets.iter().map(|a| a.name.clone()).collect();
    let assets_with_urls: Vec<Asset> = release
        .assets
        .iter()
        .map(|a| Asset::new(&a.name, &a.browser_download_url))
        .collect();
    // Builds the attachment download URL from the instance root + UUID.
    let asset_url_api = |asset_uuid: &str| {
        format!(
            "{}/attachments/{}",
            api_url.replace("/api/v1", ""),
            asset_uuid
        )
    };
    if let Some(pattern) = lookup_platform_key_for_target(opts, "asset_pattern", target)
        .or_else(|| opts.get("asset_pattern").map(|s| s.to_string()))
    {
        // The pattern may reference version/os/arch template variables.
        let templated_pattern = template_string_for_target(&pattern, tv, target);
        let asset = release
            .assets
            .into_iter()
            .find(|a| self.matches_pattern(&a.name, &templated_pattern))
            .ok_or_else(|| {
                eyre::eyre!(
                    "No matching asset found for pattern: {}\nAvailable assets: {}",
                    templated_pattern,
                    Self::format_asset_list(available_assets.iter())
                )
            })?;
        let digest = self
            .try_fetch_checksum_from_assets(&assets_with_urls, &asset.name)
            .await;
        return Ok(ReleaseAsset {
            name: asset.name,
            url: asset.browser_download_url,
            url_api: asset_url_api(&asset.uuid),
            digest,
        });
    }
    // No explicit pattern: let the matcher auto-detect by platform.
    let no_app = opts
        .get("no_app")
        .and_then(|v| v.parse::<bool>().ok())
        .unwrap_or(false);
    let asset_name = asset_matcher::AssetMatcher::new()
        .for_target(target)
        .with_no_app(no_app)
        .pick_from(&available_assets)?
        .name;
    let asset = self
        .find_asset_case_insensitive(&release.assets, &asset_name, |a| &a.name)
        .ok_or_else(|| {
            eyre::eyre!(
                "Auto-detected asset not found: {}\nAvailable assets: {}",
                asset_name,
                Self::format_asset_list(available_assets.iter())
            )
        })?;
    let digest = self
        .try_fetch_checksum_from_assets(&assets_with_urls, &asset.name)
        .await;
    Ok(ReleaseAsset {
        name: asset.name.clone(),
        url: asset.browser_download_url.clone(),
        url_api: asset_url_api(&asset.uuid),
        digest,
    })
}
/// Finds an asset by name, trying an exact match first and falling back to
/// a case-insensitive comparison.
fn find_asset_case_insensitive<'a, T>(
    &self,
    assets: &'a [T],
    target_name: &str,
    get_name: impl Fn(&T) -> &str,
) -> Option<&'a T> {
    if let Some(exact) = assets.iter().find(|a| get_name(a) == target_name) {
        return Some(exact);
    }
    let wanted = target_name.to_lowercase();
    assets
        .iter()
        .find(|a| get_name(a).to_lowercase() == wanted)
}
fn matches_pattern(&self, asset_name: &str, pattern: &str) -> bool {
let regex_pattern = pattern
.replace(".", "\\.")
.replace("*", ".*")
.replace("?", ".");
if let Ok(re) = Regex::new(&format!("^{regex_pattern}$")) {
re.is_match(asset_name)
} else {
asset_name.contains(pattern)
}
}
/// Normalizes a release tag into a version string by stripping, in order:
/// an explicit `version_prefix` option, a monorepo-style `name@` prefix
/// matching this repo, or a single leading `v`.
fn strip_version_prefix(&self, tag_name: &str, opts: &ToolVersionOptions) -> String {
    // 1) Explicit prefix configured by the user.
    if let Some(prefix) = opts.get("version_prefix")
        && let Some(stripped) = tag_name.strip_prefix(prefix)
    {
        return stripped.to_string();
    }
    // 2) Monorepo tags like "tool@1.2.3" — only when the prefix matches
    // this repo's name (full slug or final path segment).
    if let Some(caps) = regex!(r"^([^@]+)@(\d.*)$").captures(tag_name) {
        let prefix = caps.get(1).unwrap().as_str();
        let version = caps.get(2).unwrap().as_str();
        let repo = self.repo();
        let repo_short_name = repo.split('/').next_back();
        if repo_short_name == Some(prefix) || repo == prefix {
            return version.to_string();
        }
    }
    // 3) Plain "v1.2.3" tags: strip exactly one leading 'v'. The previous
    // `trim_start_matches('v')` removed every leading 'v' (e.g. "vv1.0"
    // became "1.0" instead of "v1.0").
    match tag_name.strip_prefix('v') {
        Some(stripped) => stripped.to_string(),
        None => tag_name.to_string(),
    }
}
/// Attempts to find a checksum for `asset_name` in checksum files published
/// among the release assets; returns the formatted "algo:hex" string.
async fn try_fetch_checksum_from_assets(
    &self,
    assets: &[Asset],
    asset_name: &str,
) -> Option<String> {
    let fetcher = ChecksumFetcher::new(assets);
    if let Some(result) = fetcher.fetch_checksum_for(asset_name).await {
        debug!(
            "Found checksum for {} from {}: {}",
            asset_name,
            result.source_file,
            result.to_string_formatted()
        );
        Some(result.to_string_formatted())
    } else {
        trace!("No checksum file found for {}", asset_name);
        None
    }
}
/// Parses the `filter_bins` option (comma-separated binary names) if set;
/// blank entries are dropped. Returns `None` when the option is absent.
fn get_filter_bins(&self, tv: &ToolVersion) -> Option<Vec<String>> {
    let opts = tv.request.options();
    let raw = lookup_platform_key(&opts, "filter_bins")
        .or_else(|| opts.get("filter_bins").map(|s| s.to_string()))?;
    let bins = raw
        .split(',')
        .filter_map(|part| {
            let trimmed = part.trim();
            (!trimmed.is_empty()).then(|| trimmed.to_string())
        })
        .collect();
    Some(bins)
}
/// Creates the `.mise-bins` directory and populates it with symlinks (or
/// copies, on platforms without symlinks) to only the binaries listed in
/// `filter_bins`, searching all discovered bin paths plus the install root.
fn create_symlink_bin_dir(&self, tv: &ToolVersion, bins: Vec<String>) -> Result<()> {
    let symlink_dir = tv.install_path().join(".mise-bins");
    file::create_dir_all(&symlink_dir)?;
    let install_path = tv.install_path();
    let bin_paths = self.discover_bin_paths(tv)?;
    // Search discovered bin dirs first, then the install root as a catch-all.
    let mut src_dirs = bin_paths;
    if !src_dirs.contains(&install_path) {
        src_dirs.push(install_path);
    }
    for bin_name in bins {
        let mut found = false;
        for dir in &src_dirs {
            let src = dir.join(&bin_name);
            if src.exists() {
                let dst = symlink_dir.join(&bin_name);
                // Don't clobber links from a previous run.
                if !dst.exists() {
                    file::make_symlink_or_copy(&src, &dst)?;
                }
                found = true;
                break;
            }
        }
        // A missing binary is a warning, not an error — the user may have
        // listed a name that only exists on some platforms.
        if !found {
            warn!(
                "Could not find binary '{}' in install directories. Available paths: {:?}",
                bin_name, src_dirs
            );
        }
    }
    Ok(())
}
/// Guards against provenance-verification downgrades: delegates to the
/// shared helper with a closure that reports whether the setting matching
/// the lockfile's provenance type is disabled. (NOTE(review): the closure
/// appears to return Ok(true) when verification would be *skipped* — confirm
/// against `super::ensure_provenance_setting_enabled`.) Unknown provenance
/// types in the lockfile are a hard error.
fn ensure_provenance_setting_enabled(
    &self,
    tv: &ToolVersion,
    platform_key: &str,
) -> Result<()> {
    super::ensure_provenance_setting_enabled(tv, platform_key, |provenance| {
        let settings = Settings::get();
        match provenance {
            ProvenanceType::GithubAttestations => {
                Ok(!settings.github_attestations || !settings.github.github_attestations)
            }
            ProvenanceType::Slsa { .. } => Ok(!settings.slsa || !settings.github.slsa),
            _ => Err(eyre::eyre!(
                "Lockfile has unexpected provenance type {provenance} for github backend tool {tv}. \
                 Update the lockfile to remove the stale provenance entry."
            )),
        }
    })
}
/// Verifies the downloaded artifact's provenance at install time, trying
/// GitHub attestations first, then SLSA. Enforces consistency with any
/// provenance type recorded in the lockfile: a recorded type must be the
/// one that actually verifies, and recorded provenance with no verification
/// performed is treated as a potential downgrade attack.
async fn verify_attestations_or_slsa(
    &self,
    ctx: &InstallContext,
    tv: &ToolVersion,
    file_path: &std::path::Path,
) -> Result<Option<ProvenanceType>> {
    let settings = Settings::get();
    let platform_key = self.get_platform_key();
    let locked_provenance = tv
        .lock_platforms
        .get(&platform_key)
        .and_then(|pi| pi.provenance.clone());
    // GitLab/Forgejo artifacts can't be verified; a lockfile demanding it
    // is suspicious.
    if self.is_gitlab() || self.is_forgejo() {
        if let Some(ref expected) = locked_provenance {
            return Err(eyre::eyre!(
                "Lockfile requires {expected} provenance for {tv} but verification is not available \
                 for GitLab/Forgejo backends. This may indicate a downgrade attack."
            ));
        }
        return Ok(None);
    }
    // If the lockfile pins a specific type, skip the other mechanism.
    let skip_attestations = locked_provenance
        .as_ref()
        .is_some_and(|l| !l.is_github_attestations());
    let skip_slsa = locked_provenance.as_ref().is_some_and(|l| !l.is_slsa());
    if !skip_attestations && settings.github_attestations && settings.github.github_attestations
    {
        match self
            .try_verify_github_attestations(ctx, tv, file_path)
            .await
        {
            Ok(true) => {
                // Defensive: can only trip if skip_attestations logic and
                // the lockfile disagree.
                if let Some(ref expected) = locked_provenance
                    && !expected.is_github_attestations()
                {
                    return Err(eyre::eyre!(
                        "Lockfile requires {expected} provenance for {tv} but github-attestations was verified. \
                         This may indicate a provenance type mismatch."
                    ));
                }
                return Ok(Some(ProvenanceType::GithubAttestations));
            }
            Ok(false) => {
                return Err(eyre::eyre!(
                    "GitHub artifact attestations verification failed for {tv}"
                ));
            }
            // Absent attestations are not fatal; fall through to SLSA.
            Err(VerificationStatus::NoAttestations) => {
                debug!("No GitHub artifact attestations found for {tv}, trying SLSA");
            }
            Err(VerificationStatus::Error(e)) => {
                return Err(eyre::eyre!(
                    "GitHub artifact attestations verification error for {tv}: {e}"
                ));
            }
        }
    }
    if !skip_slsa && settings.slsa && settings.github.slsa {
        match self.try_verify_slsa(ctx, tv, file_path).await {
            Ok((true, provenance_url)) => {
                if let Some(ref expected) = locked_provenance
                    && !expected.is_slsa()
                {
                    return Err(eyre::eyre!(
                        "Lockfile requires {expected} provenance for {tv} but slsa was verified. \
                         This may indicate a provenance type mismatch."
                    ));
                }
                return Ok(Some(ProvenanceType::Slsa {
                    url: provenance_url,
                }));
            }
            Ok((false, _)) => {
                return Err(eyre::eyre!("SLSA provenance verification failed for {tv}"));
            }
            Err(VerificationStatus::NoAttestations) => {
                debug!("No SLSA provenance found for {tv}");
            }
            Err(VerificationStatus::Error(e)) => {
                return Err(eyre::eyre!("SLSA verification error for {tv}: {e}"));
            }
        }
    }
    // Nothing verified but the lockfile expected something: fail closed.
    if let Some(ref expected) = locked_provenance {
        return Err(eyre::eyre!(
            "Lockfile requires {expected} provenance for {tv} but verification was not performed. \
             This may indicate a downgrade attack. Enable the corresponding verification setting \
             or update the lockfile."
        ));
    }
    Ok(None)
}
async fn try_verify_github_attestations(
&self,
ctx: &InstallContext,
tv: &ToolVersion,
file_path: &std::path::Path,
) -> std::result::Result<bool, VerificationStatus> {
ctx.pr
.set_message("verify GitHub artifact attestations".to_string());
let repo = self.repo();
let parts: Vec<&str> = repo.split('/').collect();
if parts.len() != 2 {
return Err(VerificationStatus::Error(format!(
"Invalid repo format: {repo}"
)));
}
let (owner, repo_name) = (parts[0], parts[1]);
match sigstore_verification::verify_github_attestation(
file_path,
owner,
repo_name,
env::GITHUB_TOKEN.as_deref(),
None, )
.await
{
Ok(verified) => {
if verified {
ctx.pr
.set_message("✓ GitHub artifact attestations verified".to_string());
debug!("GitHub artifact attestations verified successfully for {tv}");
}
Ok(verified)
}
Err(sigstore_verification::AttestationError::NoAttestations) => {
Err(VerificationStatus::NoAttestations)
}
Err(e) => Err(VerificationStatus::Error(e.to_string())),
}
}
/// Attempt SLSA provenance verification against a provenance asset attached
/// to the GitHub release.
///
/// Returns `(true, Some(url))` on success (the URL of the provenance file,
/// for lockfile pinning), `(false, None)` on an explicit verification
/// failure, `Err(NoAttestations)` when no suitable provenance asset exists
/// or the file is not in a verifiable format, and `Err(Error(..))` for any
/// other failure (release lookup, download, verification error).
async fn try_verify_slsa(
    &self,
    ctx: &InstallContext,
    tv: &ToolVersion,
    file_path: &std::path::Path,
) -> std::result::Result<(bool, Option<String>), VerificationStatus> {
    // SLSA verification is only implemented for GitHub releases.
    if self.is_gitlab() || self.is_forgejo() {
        return Err(VerificationStatus::NoAttestations);
    }
    ctx.pr.set_message("verify SLSA provenance".to_string());
    let repo = self.repo();
    let opts = tv.request.options();
    let api_url = self.get_api_url(&opts);
    let version = &tv.version;
    let version_prefix = opts.get("version_prefix");
    // Resolve the release, retrying candidate tag spellings (with/without a
    // `v` or custom prefix) since tags don't always match the plain version.
    let release =
        match try_with_v_prefix_and_repo(version, version_prefix, Some(&repo), |candidate| {
            // Clone so the async closure owns its captures.
            let api_url = api_url.clone();
            let repo = repo.clone();
            async move { github::get_release_for_url(&api_url, &repo, &candidate).await }
        })
        .await
        {
            Ok(r) => r,
            Err(e) => {
                return Err(VerificationStatus::Error(format!(
                    "Failed to get release: {e}"
                )));
            }
        };
    // Pick the provenance asset that best matches the current platform.
    let asset_names: Vec<String> = release.assets.iter().map(|a| a.name.clone()).collect();
    let current_platform = PlatformTarget::from_current();
    let picker = AssetPicker::with_libc(
        current_platform.os_name().to_string(),
        current_platform.arch_name().to_string(),
        current_platform.qualifier().map(|s| s.to_string()),
    );
    let provenance_name = match picker.pick_best_provenance(&asset_names) {
        Some(name) => name,
        None => return Err(VerificationStatus::NoAttestations),
    };
    let provenance_asset = release
        .assets
        .iter()
        .find(|a| a.name == provenance_name)
        .expect("provenance asset should exist since we found its name");
    // Download the provenance file into this tool's download directory.
    let download_dir = tv.download_path();
    let provenance_path = download_dir.join(&provenance_asset.name);
    ctx.pr
        .set_message(format!("download {}", provenance_asset.name));
    if let Err(e) = HTTP
        .download_file(
            &provenance_asset.browser_download_url,
            &provenance_path,
            Some(ctx.pr.as_ref()),
        )
        .await
    {
        return Err(VerificationStatus::Error(format!(
            "Failed to download provenance: {e}"
        )));
    }
    ctx.pr.set_message("verify SLSA provenance".to_string());
    let provenance_download_url = provenance_asset.browser_download_url.clone();
    match sigstore_verification::verify_slsa_provenance(
        file_path,
        &provenance_path,
        // NOTE(review): assuming this `1` is a minimum level/count expected by
        // the verifier — confirm against sigstore_verification's API docs.
        1, )
    .await
    {
        Ok(verified) => {
            if verified {
                debug!("SLSA provenance verified successfully for {tv}");
                Ok((true, Some(provenance_download_url)))
            } else {
                Ok((false, None))
            }
        }
        Err(e) => {
            // Format problems (not actual signature failures) mean the file
            // isn't verifiable provenance; treat it like "no attestations".
            if is_slsa_format_issue(&e) {
                debug!("SLSA provenance file not in verifiable format for {tv}: {e}");
                Err(VerificationStatus::NoAttestations)
            } else {
                Err(VerificationStatus::Error(e.to_string()))
            }
        }
    }
}
}
/// Render a filename/url template for the given platform target.
///
/// Two syntaxes are supported: deprecated single-brace legacy placeholders
/// (`{version}`, `{os}`, ...) and Tera templates (`{{ version }}`,
/// `{{ os() }}`, ...). The `os()`/`arch()` Tera functions return a
/// caller-supplied override for the current value when one is given as a
/// named argument (e.g. `{{ os(macos="darwin") }}`), otherwise the value
/// itself. On a Tera render error the template is returned unrendered.
fn template_string_for_target(template: &str, tv: &ToolVersion, target: &PlatformTarget) -> String {
    let version = &tv.version;
    let os = target.os_name();
    let arch = target.arch_name();
    // Alternate spellings commonly used in release asset names.
    let darwin_os = if os == "macos" { "darwin" } else { os };
    let amd64_arch = if arch == "x64" { "amd64" } else { arch };
    let x86_64_arch = match arch {
        "x64" => "x86_64",
        "arm64" => "aarch64",
        _ => arch,
    };
    let gnu_arch = if arch == "x64" { "x86_64" } else { arch };
    let substitutions = [
        ("{version}", version.as_str()),
        ("{os}", os),
        ("{arch}", arch),
        ("{darwin_os}", darwin_os),
        ("{amd64_arch}", amd64_arch),
        ("{x86_64_arch}", x86_64_arch),
        ("{gnu_arch}", gnu_arch),
    ];
    // A placeholder counts as legacy only when it appears outside a
    // double-brace Tera expression such as `{{version}}`.
    let uses_legacy_syntax = substitutions
        .iter()
        .any(|(p, _)| template.contains(p) && !template.contains(&format!("{{{p}}}")));
    if uses_legacy_syntax {
        deprecated_at!(
            "2026.3.0",
            "2027.3.0",
            "legacy-version-template",
            "Use Tera syntax (e.g., {{{{ version }}}}) instead of legacy {{version}} in templates"
        );
        return substitutions
            .iter()
            .fold(template.to_string(), |acc, (from, to)| acc.replace(from, to));
    }
    // Tera path: expose the same values as template variables.
    let mut ctx = crate::tera::BASE_CONTEXT.clone();
    ctx.insert("version", version);
    ctx.insert("os", os);
    ctx.insert("arch", arch);
    ctx.insert("darwin_os", darwin_os);
    ctx.insert("amd64_arch", amd64_arch);
    ctx.insert("x86_64_arch", x86_64_arch);
    ctx.insert("gnu_arch", gnu_arch);
    let mut tera = crate::tera::get_tera(None);
    // Builds a Tera function that maps the current value through optional
    // named-argument overrides, falling back to the value itself.
    let remapper = |current: String| {
        move |args: &HashMap<String, tera::Value>| -> tera::Result<tera::Value> {
            match args.get(current.as_str()).and_then(|v| v.as_str()) {
                Some(mapped) => Ok(tera::Value::String(mapped.to_string())),
                None => Ok(tera::Value::String(current.clone())),
            }
        }
    };
    tera.register_function("os", remapper(os.to_string()));
    tera.register_function("arch", remapper(arch.to_string()));
    tera.render_str(template, &ctx).unwrap_or_else(|e| {
        warn!("Failed to render template '{}': {}", template, e);
        template.to_string()
    })
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::cli::args::BackendArg;

    /// Build a backend pointed at a dummy `github:test/repo` tool.
    fn create_test_backend() -> UnifiedGitBackend {
        UnifiedGitBackend::from_arg(BackendArg::new(
            "github".to_string(),
            Some("github:test/repo".to_string()),
        ))
    }

    /// Shorthand for constructing a `Verification` attestation error.
    fn verification_error(msg: &str) -> sigstore_verification::AttestationError {
        sigstore_verification::AttestationError::Verification(msg.to_string())
    }

    #[test]
    fn test_pattern_matching() {
        let backend = create_test_backend();
        for (asset, should_match) in [("test-v1.0.0.zip", true), ("other-v1.0.0.zip", false)] {
            assert_eq!(backend.matches_pattern(asset, "test-*"), should_match);
        }
    }

    #[test]
    fn test_version_prefix_functionality() {
        let backend = create_test_backend();
        let default_opts = ToolVersionOptions::default();
        // Without an explicit prefix option: a leading `v` and matching
        // `repo@` / `owner/repo@` prefixes are stripped when the remainder is
        // a version; everything else passes through untouched.
        let cases = [
            ("v1.0.0", "1.0.0"),
            ("1.0.0", "1.0.0"),
            ("repo@0.15.0", "0.15.0"),
            ("repo@1.2.3", "1.2.3"),
            ("test/repo@2.0.0", "2.0.0"),
            ("other_package@0.15.0", "other_package@0.15.0"),
            ("repo@beta", "repo@beta"),
        ];
        for (input, expected) in cases {
            assert_eq!(backend.strip_version_prefix(input, &default_opts), expected);
        }
        // A configured `version_prefix` is removed when present and ignored
        // when absent.
        let mut opts = ToolVersionOptions::default();
        opts.opts.insert(
            "version_prefix".to_string(),
            toml::Value::String("release-".to_string()),
        );
        assert_eq!(backend.strip_version_prefix("release-1.0.0", &opts), "1.0.0");
        assert_eq!(backend.strip_version_prefix("1.0.0", &opts), "1.0.0");
    }

    #[test]
    fn test_find_asset_case_insensitive() {
        let backend = create_test_backend();
        struct TestAsset {
            name: String,
        }
        let assets: Vec<TestAsset> = [
            "tool-1.0.0-linux-x86_64.tar.gz",
            "tool-1.0.0-Darwin-x86_64.tar.gz",
            "tool-1.0.0-Windows-x86_64.zip",
        ]
        .into_iter()
        .map(|name| TestAsset {
            name: name.to_string(),
        })
        .collect();
        // Lookups match regardless of case and return the stored asset.
        let lookups = [
            (
                "tool-1.0.0-linux-x86_64.tar.gz",
                "tool-1.0.0-linux-x86_64.tar.gz",
            ),
            (
                "tool-1.0.0-darwin-x86_64.tar.gz",
                "tool-1.0.0-Darwin-x86_64.tar.gz",
            ),
            (
                "tool-1.0.0-windows-x86_64.zip",
                "tool-1.0.0-Windows-x86_64.zip",
            ),
        ];
        for (query, expected) in lookups {
            let found = backend.find_asset_case_insensitive(&assets, query, |a| &a.name);
            assert_eq!(found.expect("asset should be found").name, expected);
        }
        // Names that exist in no case variant are not matched.
        assert!(
            backend
                .find_asset_case_insensitive(&assets, "nonexistent-asset.tar.gz", |a| &a.name)
                .is_none()
        );
    }

    #[test]
    fn test_is_slsa_format_issue_no_attestations() {
        assert!(is_slsa_format_issue(
            &sigstore_verification::AttestationError::NoAttestations
        ));
    }

    #[test]
    fn test_is_slsa_format_issue_invalid_format() {
        assert!(is_slsa_format_issue(&verification_error(
            "File does not contain valid attestations or SLSA provenance"
        )));
    }

    #[test]
    fn test_is_slsa_format_issue_no_certificate() {
        assert!(is_slsa_format_issue(&verification_error(
            "No certificate found in attestation bundle"
        )));
    }

    #[test]
    fn test_is_slsa_format_issue_no_dsse_envelope() {
        assert!(is_slsa_format_issue(&verification_error(
            "Bundle has neither DSSE envelope nor message signature"
        )));
    }

    #[test]
    fn test_is_slsa_format_issue_real_verification_failure() {
        assert!(!is_slsa_format_issue(&verification_error(
            "Artifact digest mismatch: expected abc123"
        )));
    }

    #[test]
    fn test_is_slsa_format_issue_signature_failure() {
        assert!(!is_slsa_format_issue(&verification_error(
            "P-256 signature verification failed: invalid signature"
        )));
    }

    #[test]
    fn test_is_slsa_format_issue_api_error() {
        assert!(!is_slsa_format_issue(
            &sigstore_verification::AttestationError::Api("connection refused".to_string())
        ));
    }
}