use crate::cache::{CacheManager, CacheManagerBuilder};
use crate::config::Settings;
use crate::tokens;
use crate::{dirs, duration, env};
use eyre::Result;
use heck::ToKebabCase;
use reqwest::IntoUrl;
use reqwest::header::{HeaderMap, HeaderValue};
use serde_derive::{Deserialize, Serialize};
use std::collections::HashMap;
use std::fmt;
use std::path::PathBuf;
use std::sync::LazyLock as Lazy;
use tokio::sync::RwLock;
use tokio::sync::RwLockReadGuard;
use xx::regex;
/// A release object as returned by the GitHub REST API
/// (`/repos/{owner}/{repo}/releases`).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct GithubRelease {
/// Git tag the release points at, e.g. "v1.2.3".
pub tag_name: String,
/// True for unpublished draft releases (filtered out by `list_releases_`).
pub draft: bool,
/// True for prereleases (filtered out by `list_releases_`).
pub prerelease: bool,
/// Creation timestamp, kept as the raw string from the API.
pub created_at: String,
/// Downloadable files attached to the release.
pub assets: Vec<GithubAsset>,
}
/// A tag entry as returned by `/repos/{owner}/{repo}/tags`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct GithubTag {
/// Tag name, e.g. "v1.2.3".
pub name: String,
/// Commit the tag points at; optional in case a response omits it.
pub commit: Option<GithubTagCommit>,
}
/// Commit reference embedded in a tag listing entry.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct GithubTagCommit {
/// Commit SHA.
pub sha: String,
/// Commit API URL; fetched by `list_tags_with_dates_` to resolve dates.
pub url: String,
}
/// Top-level commit object from the commit API; only the nested commit
/// metadata needed for date lookups is deserialized.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct GithubCommit {
pub commit: GithubCommitInfo,
}
/// Inner commit metadata; only the committer is used (for the date).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct GithubCommitInfo {
pub committer: GithubCommitPerson,
}
/// Committer identity fragment; only the date string is retained.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct GithubCommitPerson {
/// Commit date, kept as the raw string from the API.
pub date: String,
}
/// A tag name paired with its committer date, when one could be fetched.
#[derive(Debug, Clone)]
pub struct GithubTagWithDate {
pub name: String,
/// `None` when the tag carried no commit reference or the commit lookup
/// failed (see `list_tags_with_dates_`).
pub date: Option<String>,
}
/// A downloadable file attached to a release.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct GithubAsset {
pub name: String,
/// Public browser-facing download URL.
pub browser_download_url: String,
/// API asset URL (requested with `accept: application/octet-stream`,
/// see `get_headers`).
pub url: String,
/// Checksum digest when reported by the API; defaults to `None` when the
/// field is absent from the response.
#[serde(default)]
pub digest: Option<String>,
}
// Lazily-populated groups of per-key on-disk cache managers. The outer
// RwLock guards insertion of new CacheManagers (done in the get_*_cache
// helpers below); lookups then go through read guards.
type CacheGroup<T> = HashMap<String, CacheManager<T>>;
static RELEASES_CACHE: Lazy<RwLock<CacheGroup<Vec<GithubRelease>>>> = Lazy::new(Default::default);
static RELEASE_CACHE: Lazy<RwLock<CacheGroup<GithubRelease>>> = Lazy::new(Default::default);
static TAGS_CACHE: Lazy<RwLock<CacheGroup<Vec<String>>>> = Lazy::new(Default::default);
/// Base URL of the public GitHub REST API.
pub static API_URL: &str = "https://api.github.com";
/// REST API path prefix for GitHub Enterprise Server hosts — presumably
/// appended to enterprise base URLs by callers outside this file; not used
/// in this chunk, TODO confirm.
pub static API_PATH: &str = "/api/v3";
/// Ensure a tags `CacheManager` exists for `key`, then return a read guard
/// over the whole tags cache group.
///
/// The manager is created on first use, backed by
/// `<cache>/github/{key}-tags.msgpack.z` with a daily freshness window.
/// Entries are never removed, so the subsequent read is guaranteed to find
/// the key.
async fn get_tags_cache(key: &str) -> RwLockReadGuard<'_, CacheGroup<Vec<String>>> {
    {
        let mut group = TAGS_CACHE.write().await;
        if !group.contains_key(key) {
            let path = cache_dir().join(format!("{key}-tags.msgpack.z"));
            let manager = CacheManagerBuilder::new(path)
                .with_fresh_duration(Some(duration::DAILY))
                .build();
            group.insert(key.to_string(), manager);
        }
    }
    TAGS_CACHE.read().await
}
/// Ensure a releases `CacheManager` exists for `key`, then return a read
/// guard over the whole releases cache group.
///
/// The manager is created on first use, backed by
/// `<cache>/github/{key}-releases.msgpack.z` with a daily freshness window.
/// Entries are never removed, so the subsequent read is guaranteed to find
/// the key.
async fn get_releases_cache(key: &str) -> RwLockReadGuard<'_, CacheGroup<Vec<GithubRelease>>> {
    {
        let mut group = RELEASES_CACHE.write().await;
        if !group.contains_key(key) {
            let path = cache_dir().join(format!("{key}-releases.msgpack.z"));
            let manager = CacheManagerBuilder::new(path)
                .with_fresh_duration(Some(duration::DAILY))
                .build();
            group.insert(key.to_string(), manager);
        }
    }
    RELEASES_CACHE.read().await
}
/// Ensure a single-release `CacheManager` exists for `key`, then return a
/// read guard over the whole release cache group.
///
/// The manager is created on first use, backed by
/// `<cache>/github/{key}.msgpack.z` with a daily freshness window.
///
/// Consistency fix: uses the elided `'_` lifetime like its siblings
/// `get_tags_cache`/`get_releases_cache` instead of a separate, otherwise
/// unconstrained `<'a>` parameter.
async fn get_release_cache(key: &str) -> RwLockReadGuard<'_, CacheGroup<GithubRelease>> {
    RELEASE_CACHE
        .write()
        .await
        .entry(key.to_string())
        .or_insert_with(|| {
            CacheManagerBuilder::new(cache_dir().join(format!("{key}.msgpack.z")))
                .with_fresh_duration(Some(duration::DAILY))
                .build()
        });
    RELEASE_CACHE.read().await
}
/// List non-draft, non-prerelease releases for `repo` on github.com, served
/// from the daily on-disk cache when fresh.
pub async fn list_releases(repo: &str) -> Result<Vec<GithubRelease>> {
    let key = repo.to_kebab_case();
    let group = get_releases_cache(&key).await;
    // The getter above guarantees the entry exists.
    let manager = group.get(&key).unwrap();
    let releases = manager
        .get_or_try_init_async(async || list_releases_(API_URL, repo).await)
        .await?;
    Ok(releases.to_vec())
}
/// List releases for `repo` hosted at `api_url` (e.g. a GitHub Enterprise
/// instance), served from the daily on-disk cache when fresh.
pub async fn list_releases_from_url(api_url: &str, repo: &str) -> Result<Vec<GithubRelease>> {
    let key = format!("{api_url}-{repo}").to_kebab_case();
    let group = get_releases_cache(&key).await;
    // The getter above guarantees the entry exists.
    let manager = group.get(&key).unwrap();
    let releases = manager
        .get_or_try_init_async(async || list_releases_(api_url, repo).await)
        .await?;
    Ok(releases.to_vec())
}
/// Fetch releases for `repo` from `api_url`, following `link`-header
/// pagination only when `MISE_LIST_ALL_VERSIONS` is set.
/// Draft and prerelease entries are removed before returning.
async fn list_releases_(api_url: &str, repo: &str) -> Result<Vec<GithubRelease>> {
let url = format!("{api_url}/repos/{repo}/releases");
let headers = get_headers(&url);
// `headers` is rebound to the *response* headers here so `next_page` can
// inspect the pagination `link` header below.
let (mut releases, mut headers) = crate::http::HTTP_FETCH
.json_headers_with_headers::<Vec<GithubRelease>, _>(url, &headers)
.await?;
if *env::MISE_LIST_ALL_VERSIONS {
while let Some(next) = next_page(&headers) {
// Fresh *request* headers for the next page URL...
headers = get_headers(&next);
let (more, h) = crate::http::HTTP_FETCH
.json_headers_with_headers::<Vec<GithubRelease>, _>(next, &headers)
.await?;
releases.extend(more);
// ...then back to the *response* headers for the next iteration's
// `next_page` check.
headers = h;
}
}
releases.retain(|r| !r.draft && !r.prerelease);
Ok(releases)
}
/// List tag names for `repo` on github.com, served from the daily on-disk
/// cache when fresh.
pub async fn list_tags(repo: &str) -> Result<Vec<String>> {
    let key = repo.to_kebab_case();
    let group = get_tags_cache(&key).await;
    // The getter above guarantees the entry exists.
    let manager = group.get(&key).unwrap();
    let tags = manager
        .get_or_try_init_async(async || list_tags_(API_URL, repo).await)
        .await?;
    Ok(tags.to_vec())
}
/// List tag names for `repo` hosted at `api_url` (e.g. a GitHub Enterprise
/// instance), served from the daily on-disk cache when fresh.
pub async fn list_tags_from_url(api_url: &str, repo: &str) -> Result<Vec<String>> {
    let key = format!("{api_url}-{repo}").to_kebab_case();
    let group = get_tags_cache(&key).await;
    // The getter above guarantees the entry exists.
    let manager = group.get(&key).unwrap();
    let tags = manager
        .get_or_try_init_async(async || list_tags_(api_url, repo).await)
        .await?;
    Ok(tags.to_vec())
}
/// Fetch tag names for `repo` from `api_url`, following `link`-header
/// pagination only when `MISE_LIST_ALL_VERSIONS` is set.
async fn list_tags_(api_url: &str, repo: &str) -> Result<Vec<String>> {
let url = format!("{api_url}/repos/{repo}/tags");
let headers = get_headers(&url);
// `headers` is rebound to the *response* headers here so `next_page` can
// inspect the pagination `link` header below.
let (mut tags, mut headers) = crate::http::HTTP_FETCH
.json_headers_with_headers::<Vec<GithubTag>, _>(url, &headers)
.await?;
if *env::MISE_LIST_ALL_VERSIONS {
while let Some(next) = next_page(&headers) {
// Fresh *request* headers for the next page URL...
headers = get_headers(&next);
let (more, h) = crate::http::HTTP_FETCH
.json_headers_with_headers::<Vec<GithubTag>, _>(next, &headers)
.await?;
tags.extend(more);
// ...then back to the *response* headers for the next iteration.
headers = h;
}
}
Ok(tags.into_iter().map(|t| t.name).collect())
}
/// List tags for `repo` on github.com together with their committer dates.
/// Unlike `list_tags`, results are not cached — every call hits the API.
pub async fn list_tags_with_dates(repo: &str) -> Result<Vec<GithubTagWithDate>> {
list_tags_with_dates_(API_URL, repo).await
}
/// Fetch all tags for `repo` (following pagination) and, in parallel, look up
/// each tag's committer date via the commit API.
/// Date lookups are best-effort: failures are logged and yield `None`.
// NOTE(review): unlike list_releases_/list_tags_, pagination here is not
// gated on MISE_LIST_ALL_VERSIONS — all pages are always fetched; confirm
// this is intentional.
async fn list_tags_with_dates_(api_url: &str, repo: &str) -> Result<Vec<GithubTagWithDate>> {
let url = format!("{api_url}/repos/{repo}/tags");
let headers = get_headers(&url);
let (mut tags, mut response_headers) = crate::http::HTTP_FETCH
.json_headers_with_headers::<Vec<GithubTag>, _>(url, &headers)
.await?;
while let Some(next) = next_page(&response_headers) {
// Fresh *request* headers for the next page; replaced by the *response*
// headers again below for the following `next_page` check.
response_headers = get_headers(&next);
let (more, h) = crate::http::HTTP_FETCH
.json_headers_with_headers::<Vec<GithubTag>, _>(next, &response_headers)
.await?;
tags.extend(more);
response_headers = h;
}
// One commit-API request per tag, run in parallel.
let results = crate::parallel::parallel(tags, |tag| async move {
let date = if let Some(commit) = tag.commit {
let headers = get_headers(&commit.url);
match crate::http::HTTP_FETCH
.json_with_headers::<GithubCommit, _>(&commit.url, &headers)
.await
{
Ok(commit_info) => Some(commit_info.commit.committer.date),
Err(e) => {
// Best-effort: a failed date lookup must not fail the whole listing.
warn!("Failed to fetch commit date for tag {}: {}", tag.name, e);
None
}
}
} else {
None
};
Ok((tag.name, date))
})
.await?;
Ok(results
.into_iter()
.map(|(name, date)| GithubTagWithDate { name, date })
.collect())
}
/// Fetch a single release for `repo` by `tag` (or "latest") from github.com,
/// served from the daily on-disk cache when fresh.
pub async fn get_release(repo: &str, tag: &str) -> Result<GithubRelease> {
    let key = format!("{repo}-{tag}").to_kebab_case();
    let group = get_release_cache(&key).await;
    // The getter above guarantees the entry exists.
    let manager = group.get(&key).unwrap();
    let release = manager
        .get_or_try_init_async(async || get_release_(API_URL, repo, tag).await)
        .await?;
    Ok(release.clone())
}
/// Fetch a single release for `repo` by `tag` from `api_url` (e.g. a GitHub
/// Enterprise instance), served from the daily on-disk cache when fresh.
pub async fn get_release_for_url(api_url: &str, repo: &str, tag: &str) -> Result<GithubRelease> {
    let key = format!("{api_url}-{repo}-{tag}").to_kebab_case();
    let group = get_release_cache(&key).await;
    // The getter above guarantees the entry exists.
    let manager = group.get(&key).unwrap();
    let release = manager
        .get_or_try_init_async(async || get_release_(api_url, repo, tag).await)
        .await?;
    Ok(release.clone())
}
/// Resolve `version` to a release, preferring the highest numeric "-N"
/// build-revision tag (e.g. "1.2.3-2" over "1.2.3"). When no listed release
/// matches, falls back to fetching the tag directly.
pub async fn get_release_with_build_revision(repo: &str, version: &str) -> Result<GithubRelease> {
    let releases = list_releases(repo).await?;
    if let Some(best) = pick_best_build_revision(releases, version) {
        return Ok(best);
    }
    get_release(repo, version).await
}
/// From `releases`, pick the best match for `version`: tags of the form
/// "{version}-N" (N a u32) compete by largest N; the exact tag "{version}"
/// participates with revision 0. Returns `None` when nothing matches.
fn pick_best_build_revision(releases: Vec<GithubRelease>, version: &str) -> Option<GithubRelease> {
    let prefix = format!("{version}-");
    // Numeric build revision of a tag, or None for non-matching suffixes.
    let revision_of =
        |tag: &str| -> Option<u32> { tag.strip_prefix(&prefix).and_then(|s| s.parse().ok()) };
    releases
        .into_iter()
        .filter(|r| r.tag_name == version || revision_of(&r.tag_name).is_some())
        .max_by_key(|r| revision_of(&r.tag_name).unwrap_or(0))
}
/// Fetch one release from the API: the "latest" endpoint for the special tag
/// "latest", otherwise the by-tag endpoint.
async fn get_release_(api_url: &str, repo: &str, tag: &str) -> Result<GithubRelease> {
    let url = match tag {
        "latest" => format!("{api_url}/repos/{repo}/releases/latest"),
        _ => format!("{api_url}/repos/{repo}/releases/tags/{tag}"),
    };
    let headers = get_headers(&url);
    crate::http::HTTP_FETCH
        .json_with_headers(url, &headers)
        .await
}
/// Extract the next-page URL from an RFC 5988 `link` response header, if the
/// header is present and advertises a `rel="next"` target.
fn next_page(headers: &HeaderMap) -> Option<String> {
    let link = match headers.get("link") {
        Some(value) => value.to_str().unwrap_or_default().to_string(),
        None => String::new(),
    };
    let captures = regex!(r#"<([^>]+)>; rel="next""#).captures(&link)?;
    Some(captures.get(1).unwrap().as_str().to_string())
}
/// Subdirectory of the mise cache root where GitHub API responses are stored.
fn cache_dir() -> PathBuf {
dirs::CACHE.join("github")
}
/// Where a GitHub token was discovered; rendered for diagnostics via the
/// `Display` impl below.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum TokenSource {
/// An environment variable; the payload is the variable's name.
EnvVar(&'static str),
/// mise's own github_tokens.toml file.
TokensFile,
/// The gh CLI's hosts.yml configuration.
GhCli,
/// A user-configured credential command (settings.github.credential_command).
CredentialCommand,
/// `git credential fill`.
GitCredential,
}
impl fmt::Display for TokenSource {
    /// Render a short human-readable label for where the token came from.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let label = match self {
            TokenSource::EnvVar(name) => *name,
            TokenSource::TokensFile => "github_tokens.toml",
            TokenSource::GhCli => "gh CLI (hosts.yml)",
            TokenSource::CredentialCommand => "credential_command",
            TokenSource::GitCredential => "git credential fill",
        };
        f.write_str(label)
    }
}
/// Map GitHub-owned API/CDN hostnames onto the canonical "github.com" host.
///
/// `api.github.com` and any `*.githubusercontent.com` subdomain share
/// github.com credentials; every other host (and `None`) passes through
/// unchanged.
fn canonical_host(host: Option<&str>) -> Option<&str> {
    host.map(|h| {
        if h == "api.github.com" || h.ends_with(".githubusercontent.com") {
            "github.com"
        } else {
            h
        }
    })
}
/// Look up a GitHub API token for `host`, returning it together with where
/// it was found.
///
/// Sources are consulted in precedence order:
/// 1. `MISE_GITHUB_ENTERPRISE_TOKEN` (non-github.com hosts only)
/// 2. `MISE_GITHUB_TOKEN` / `GITHUB_API_TOKEN` / `GITHUB_TOKEN` env vars
/// 3. settings.github.credential_command
/// 4. github_tokens.toml
/// 5. gh CLI hosts.yml (when settings.github.gh_cli_tokens)
/// 6. `git credential fill` (when settings.github.use_git_credentials)
pub fn resolve_token(host: &str) -> Option<(String, TokenSource)> {
    let settings = Settings::get();
    let is_github_dotcom = host == "github.com"
        || host == "api.github.com"
        || host.ends_with(".githubusercontent.com");
    // api.github.com and *.githubusercontent.com share github.com credentials.
    let lookup_host = if host == "api.github.com" || host.ends_with(".githubusercontent.com") {
        "github.com"
    } else {
        host
    };
    if !is_github_dotcom {
        if let Some(token) = env::MISE_GITHUB_ENTERPRISE_TOKEN.as_deref() {
            return Some((
                token.to_string(),
                TokenSource::EnvVar("MISE_GITHUB_ENTERPRISE_TOKEN"),
            ));
        }
    }
    // First non-empty generic env var wins, in declaration order.
    let from_env = ["MISE_GITHUB_TOKEN", "GITHUB_API_TOKEN", "GITHUB_TOKEN"]
        .into_iter()
        .find_map(|name| {
            let value = std::env::var(name).ok()?;
            let trimmed = value.trim();
            if trimmed.is_empty() {
                None
            } else {
                Some((trimmed.to_string(), TokenSource::EnvVar(name)))
            }
        });
    if from_env.is_some() {
        return from_env;
    }
    let credential_command = &settings.github.credential_command;
    if !credential_command.is_empty() {
        if let Some(token) =
            tokens::get_credential_command_token("github", credential_command, lookup_host)
        {
            return Some((token, TokenSource::CredentialCommand));
        }
    }
    if let Some(token) = MISE_GITHUB_TOKENS.get(lookup_host) {
        return Some((token.clone(), TokenSource::TokensFile));
    }
    if settings.github.gh_cli_tokens {
        if let Some(token) = GH_HOSTS.get(lookup_host) {
            return Some((token.clone(), TokenSource::GhCli));
        }
    }
    if settings.github.use_git_credentials {
        if let Some(token) = tokens::get_git_credential_token("github", lookup_host) {
            return Some((token, TokenSource::GitCredential));
        }
    }
    None
}
/// Build request headers for a GitHub API `url`: bearer authorization plus
/// the API-version header when a token is known for the host, and an
/// octet-stream `accept` header for release-asset downloads.
pub fn get_headers<U: IntoUrl>(url: U) -> HeaderMap {
    let url = url.into_url().unwrap();
    // Hosts without an extractable hostname fall back to github.com lookup.
    let lookup_host = canonical_host(url.host_str()).unwrap_or("github.com");
    let mut headers = HeaderMap::new();
    if let Some((token, _source)) = resolve_token(lookup_host) {
        let bearer = format!("Bearer {token}");
        headers.insert(
            reqwest::header::AUTHORIZATION,
            HeaderValue::from_str(&bearer).unwrap(),
        );
        headers.insert(
            "x-github-api-version",
            HeaderValue::from_static("2022-11-28"),
        );
    }
    if url.path().contains("/releases/assets/") {
        headers.insert(
            "accept",
            HeaderValue::from_static("application/octet-stream"),
        );
    }
    headers
}
/// Whether `host` has a known GitHub token configured, either in
/// github_tokens.toml or (when enabled in settings) in the gh CLI's hosts.yml.
pub fn is_gh_host(host: &str) -> bool {
    if MISE_GITHUB_TOKENS.contains_key(host) {
        return true;
    }
    Settings::get().github.gh_cli_tokens && GH_HOSTS.contains_key(host)
}
/// Host → token map loaded once from github_tokens.toml; empty when the file
/// could not be read.
static MISE_GITHUB_TOKENS: Lazy<HashMap<String, String>> =
Lazy::new(|| read_mise_github_tokens().unwrap_or_default());
/// Test-only shim so the unit tests below can exercise tokens-toml parsing
/// directly on a string.
#[cfg(test)]
fn parse_github_tokens(contents: &str) -> Option<HashMap<String, String>> {
tokens::parse_tokens_toml(contents)
}
/// Load github_tokens.toml via the shared tokens helper; `None` when no
/// tokens could be read (exact failure semantics live in
/// `tokens::read_tokens_toml`).
fn read_mise_github_tokens() -> Option<HashMap<String, String>> {
tokens::read_tokens_toml("github_tokens.toml", "github_tokens.toml")
}
/// Host → oauth token map read once from the gh CLI's hosts.yml; empty when
/// the file is missing or unparsable.
static GH_HOSTS: Lazy<HashMap<String, String>> = Lazy::new(|| read_gh_hosts().unwrap_or_default());
/// Locate the gh CLI's hosts.yml configuration file.
///
/// Precedence: `$GH_CONFIG_DIR` (used even if the file is absent), then the
/// XDG config location when it exists, then (macOS only) the legacy
/// Application Support path when it exists. Otherwise the XDG path is
/// returned regardless of existence — the caller copes with unreadable files.
fn gh_hosts_path() -> Option<PathBuf> {
    if let Ok(config_dir) = std::env::var("GH_CONFIG_DIR") {
        return Some(PathBuf::from(config_dir).join("hosts.yml"));
    }
    let xdg = env::XDG_CONFIG_HOME.join("gh/hosts.yml");
    if xdg.exists() {
        return Some(xdg);
    }
    #[cfg(target_os = "macos")]
    {
        let legacy = dirs::HOME.join("Library/Application Support/gh/hosts.yml");
        if legacy.exists() {
            return Some(legacy);
        }
    }
    Some(xdg)
}
/// Read and parse gh's hosts.yml into a host → oauth token map.
///
/// Returns `None` (after logging at trace/debug level) when the file is
/// missing, unreadable, or malformed; hosts without an oauth token are
/// skipped.
fn read_gh_hosts() -> Option<HashMap<String, String>> {
    let hosts_path = gh_hosts_path()?;
    let contents = match std::fs::read_to_string(&hosts_path) {
        Ok(contents) => contents,
        Err(e) => {
            trace!("gh hosts.yml not readable at {}: {e}", hosts_path.display());
            return None;
        }
    };
    let parsed: Result<HashMap<String, GhHostEntry>, _> = serde_yaml::from_str(&contents);
    let hosts = match parsed {
        Ok(hosts) => hosts,
        Err(e) => {
            debug!(
                "failed to parse gh hosts.yml at {}: {e}",
                hosts_path.display()
            );
            return None;
        }
    };
    let tokens = hosts
        .into_iter()
        .filter_map(|(host, entry)| Some((host, entry.oauth_token?)))
        .collect();
    Some(tokens)
}
/// Minimal shape of one host entry in gh's hosts.yml; only the oauth token
/// is of interest here, all other fields are ignored.
#[derive(Deserialize)]
struct GhHostEntry {
oauth_token: Option<String>,
}
#[cfg(test)]
mod tests {
use super::*;
// github_tokens.toml parsing: happy path with two hosts.
#[test]
fn test_parse_github_tokens() {
let toml = r#"
[tokens."github.com"]
token = "ghp_abc123"
[tokens."github.mycompany.com"]
token = "ghp_def456"
"#;
let result = parse_github_tokens(toml).unwrap();
assert_eq!(result.get("github.com").unwrap(), "ghp_abc123");
assert_eq!(result.get("github.mycompany.com").unwrap(), "ghp_def456");
}
// Empty input yields None rather than an empty map.
#[test]
fn test_parse_github_tokens_empty() {
assert!(parse_github_tokens("").is_none());
}
// A bare [tokens] table parses to an empty map.
#[test]
fn test_parse_github_tokens_empty_tokens() {
let toml = "[tokens]\n";
let result = parse_github_tokens(toml).unwrap();
assert!(result.is_empty());
}
// Host entries without a `token` field are skipped.
#[test]
fn test_parse_github_tokens_missing_token_field() {
let toml = r#"
[tokens."github.com"]
something_else = "value"
"#;
let result = parse_github_tokens(toml).unwrap();
assert!(result.is_empty());
}
// Helper: minimal non-draft, non-prerelease release with the given tag.
fn make_release(tag: &str) -> GithubRelease {
GithubRelease {
tag_name: tag.to_string(),
draft: false,
prerelease: false,
created_at: String::new(),
assets: vec![],
}
}
// pick_best_build_revision: highest numeric "-N" suffix wins.
#[test]
fn test_build_revision_selects_highest() {
let releases = vec![
make_release("3.3.11"),
make_release("3.3.11-1"),
make_release("3.3.11-2"),
make_release("3.3.10-1"),
];
let best = pick_best_build_revision(releases, "3.3.11").unwrap();
assert_eq!(best.tag_name, "3.3.11-2");
}
// Exact version tag is used when no build-revision tags exist.
#[test]
fn test_build_revision_falls_back_to_base() {
let releases = vec![make_release("3.3.11"), make_release("3.3.10-1")];
let best = pick_best_build_revision(releases, "3.3.11").unwrap();
assert_eq!(best.tag_name, "3.3.11");
}
// No matching tag at all yields None.
#[test]
fn test_build_revision_no_match() {
let releases = vec![make_release("3.3.10"), make_release("3.3.10-1")];
let best = pick_best_build_revision(releases, "3.3.11");
assert!(best.is_none());
}
// Non-numeric suffixes (e.g. "-rc1") do not count as build revisions.
#[test]
fn test_build_revision_ignores_non_numeric_suffix() {
let releases = vec![
make_release("3.3.11"),
make_release("3.3.11-rc1"),
make_release("3.3.11-1"),
];
let best = pick_best_build_revision(releases, "3.3.11").unwrap();
assert_eq!(best.tag_name, "3.3.11-1");
}
}