use crate::cache::{CacheManager, CacheManagerBuilder};
use crate::{dirs, duration, env};
use eyre::Result;
use heck::ToKebabCase;
use reqwest::IntoUrl;
use reqwest::header::{HeaderMap, HeaderValue};
use serde_derive::{Deserialize, Serialize};
use std::collections::HashMap;
use std::path::PathBuf;
use std::sync::LazyLock as Lazy;
use std::sync::{RwLock, RwLockReadGuard};
use xx::regex;
/// A release object as returned by the GitHub releases API
/// (`/repos/{owner}/{repo}/releases`). Only the fields this module
/// consumes are deserialized.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct GithubRelease {
    // Git tag the release points at (e.g. "v1.2.3").
    pub tag_name: String,
    // Drafts and prereleases are filtered out by `list_releases_`.
    pub draft: bool,
    pub prerelease: bool,
    // Downloadable artifacts attached to the release.
    pub assets: Vec<GithubAsset>,
}
/// A tag object from the GitHub tags API (`/repos/{owner}/{repo}/tags`);
/// only the tag name is needed here.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct GithubTag {
    pub name: String,
}
/// A single downloadable asset attached to a [`GithubRelease`].
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct GithubAsset {
    // File name of the asset as uploaded.
    pub name: String,
    // Direct download URL for the asset.
    pub browser_download_url: String,
}
/// A group of on-disk cache managers keyed by a kebab-cased identifier
/// derived from the repo (and optionally the API URL).
type CacheGroup<T> = HashMap<String, CacheManager<T>>;

// Process-wide cache-manager registries, populated lazily by the
// `get_*_cache` helpers below. Entries are inserted but never removed.
static RELEASES_CACHE: Lazy<RwLock<CacheGroup<Vec<GithubRelease>>>> = Lazy::new(Default::default);
static RELEASE_CACHE: Lazy<RwLock<CacheGroup<GithubRelease>>> = Lazy::new(Default::default);
static TAGS_CACHE: Lazy<RwLock<CacheGroup<Vec<String>>>> = Lazy::new(Default::default);

// Base URL for the public GitHub REST API; overridable per call via the
// `*_from_url` / `*_for_url` variants.
pub static API_URL: &str = "https://api.github.com";
/// Returns a read guard over the tags cache group, lazily creating the
/// per-`key` `CacheManager` (daily freshness, msgpack file under the
/// github cache dir) on first use.
fn get_tags_cache(key: &str) -> RwLockReadGuard<'_, CacheGroup<Vec<String>>> {
    {
        let mut group = TAGS_CACHE.write().unwrap();
        if !group.contains_key(key) {
            let path = cache_dir().join(format!("{key}-tags.msgpack.z"));
            let manager = CacheManagerBuilder::new(path)
                .with_fresh_duration(Some(duration::DAILY))
                .build();
            group.insert(key.to_string(), manager);
        }
    }
    // Entries are never removed, so the key is still present after we
    // drop the write guard and re-acquire for reading.
    TAGS_CACHE.read().unwrap()
}
/// Returns a read guard over the release-list cache group, lazily creating
/// the per-`key` `CacheManager` (daily freshness) on first use.
fn get_releases_cache(key: &str) -> RwLockReadGuard<'_, CacheGroup<Vec<GithubRelease>>> {
    let mut group = RELEASES_CACHE.write().unwrap();
    group.entry(key.to_string()).or_insert_with(|| {
        CacheManagerBuilder::new(cache_dir().join(format!("{key}-releases.msgpack.z")))
            .with_fresh_duration(Some(duration::DAILY))
            .build()
    });
    // Release the write guard before handing back a read guard; entries are
    // never removed so the key remains present.
    drop(group);
    RELEASES_CACHE.read().unwrap()
}
/// Returns a read guard over the single-release cache group, lazily
/// creating the per-`key` `CacheManager` (daily freshness) on first use.
///
/// Uses lifetime elision (`'_`) in the return type, matching
/// `get_tags_cache` and `get_releases_cache`, instead of an unconstrained
/// named lifetime.
fn get_release_cache(key: &str) -> RwLockReadGuard<'_, CacheGroup<GithubRelease>> {
    RELEASE_CACHE
        .write()
        .unwrap()
        .entry(key.to_string())
        .or_insert_with(|| {
            CacheManagerBuilder::new(cache_dir().join(format!("{key}.msgpack.z")))
                .with_fresh_duration(Some(duration::DAILY))
                .build()
        });
    // Entries are never removed, so the key inserted above is guaranteed to
    // still be present when the read guard is taken.
    RELEASE_CACHE.read().unwrap()
}
/// Lists releases for `repo` (e.g. "owner/name") on github.com, backed by
/// a daily on-disk cache. Drafts and prereleases are excluded by the
/// underlying fetch.
pub fn list_releases(repo: &str) -> Result<Vec<GithubRelease>> {
    let key = repo.to_kebab_case();
    let group = get_releases_cache(&key);
    // `get_releases_cache` guarantees an entry for `key` exists.
    let manager = group.get(&key).unwrap();
    let releases = manager.get_or_try_init(|| list_releases_(API_URL, repo))?;
    Ok(releases.to_vec())
}
/// Like [`list_releases`] but against an arbitrary GitHub API base URL
/// (e.g. a GitHub Enterprise instance).
pub fn list_releases_from_url(api_url: &str, repo: &str) -> Result<Vec<GithubRelease>> {
    // Include the API URL in the cache key so different hosts don't collide.
    let key = format!("{api_url}-{repo}").to_kebab_case();
    let group = get_releases_cache(&key);
    let manager = group.get(&key).unwrap();
    let releases = manager.get_or_try_init(|| list_releases_(api_url, repo))?;
    Ok(releases.to_vec())
}
/// Fetches all releases for `repo` from `api_url`, following pagination
/// when `MISE_LIST_ALL_VERSIONS` is set, and filtering out drafts and
/// prereleases.
fn list_releases_(api_url: &str, repo: &str) -> Result<Vec<GithubRelease>> {
    let url = format!("{api_url}/repos/{repo}/releases");
    // Request headers (auth token + API version). Kept separate from the
    // response headers so every paginated request stays authenticated — the
    // previous code re-sent the prior page's *response* headers as request
    // headers, dropping the authorization token after page 1.
    let req_headers = get_headers(&url);
    let (mut releases, mut resp_headers) = crate::http::HTTP_FETCH
        .json_headers_with_headers::<Vec<GithubRelease>, _>(url, &req_headers)?;
    if *env::MISE_LIST_ALL_VERSIONS {
        // Follow `Link: <...>; rel="next"` response headers until exhausted.
        while let Some(next) = next_page(&resp_headers) {
            let (more, h) = crate::http::HTTP_FETCH
                .json_headers_with_headers::<Vec<GithubRelease>, _>(next, &req_headers)?;
            releases.extend(more);
            resp_headers = h;
        }
    }
    // Only published, stable releases are of interest to callers.
    releases.retain(|r| !r.draft && !r.prerelease);
    Ok(releases)
}
/// Lists tag names for `repo` on github.com, backed by a daily on-disk
/// cache.
pub fn list_tags(repo: &str) -> Result<Vec<String>> {
    let key = repo.to_kebab_case();
    let group = get_tags_cache(&key);
    // `get_tags_cache` guarantees an entry for `key` exists.
    let tags = group
        .get(&key)
        .unwrap()
        .get_or_try_init(|| list_tags_(API_URL, repo))?;
    Ok(tags.to_vec())
}
/// Like [`list_tags`] but against an arbitrary GitHub API base URL.
pub fn list_tags_from_url(api_url: &str, repo: &str) -> Result<Vec<String>> {
    // Include the API URL in the cache key so different hosts don't collide.
    let key = format!("{api_url}-{repo}").to_kebab_case();
    let group = get_tags_cache(&key);
    let manager = group.get(&key).unwrap();
    let tags = manager.get_or_try_init(|| list_tags_(api_url, repo))?;
    Ok(tags.to_vec())
}
/// Fetches all tag names for `repo` from `api_url`, following pagination
/// when `MISE_LIST_ALL_VERSIONS` is set.
fn list_tags_(api_url: &str, repo: &str) -> Result<Vec<String>> {
    let url = format!("{api_url}/repos/{repo}/tags");
    // Request headers (auth token + API version). Kept separate from the
    // response headers so every paginated request stays authenticated — the
    // previous code re-sent the prior page's *response* headers as request
    // headers, dropping the authorization token after page 1.
    let req_headers = get_headers(&url);
    let (mut tags, mut resp_headers) = crate::http::HTTP_FETCH
        .json_headers_with_headers::<Vec<GithubTag>, _>(url, &req_headers)?;
    if *env::MISE_LIST_ALL_VERSIONS {
        // Follow `Link: <...>; rel="next"` response headers until exhausted.
        while let Some(next) = next_page(&resp_headers) {
            let (more, h) = crate::http::HTTP_FETCH
                .json_headers_with_headers::<Vec<GithubTag>, _>(next, &req_headers)?;
            tags.extend(more);
            resp_headers = h;
        }
    }
    Ok(tags.into_iter().map(|t| t.name).collect())
}
/// Fetches a single release of `repo` by `tag` from github.com, backed by
/// a daily on-disk cache.
pub fn get_release(repo: &str, tag: &str) -> Result<GithubRelease> {
    let key = format!("{repo}-{tag}").to_kebab_case();
    let group = get_release_cache(&key);
    // `get_release_cache` guarantees an entry for `key` exists.
    let release = group
        .get(&key)
        .unwrap()
        .get_or_try_init(|| get_release_(API_URL, repo, tag))?;
    Ok(release.clone())
}
/// Like [`get_release`] but against an arbitrary GitHub API base URL.
pub fn get_release_for_url(api_url: &str, repo: &str, tag: &str) -> Result<GithubRelease> {
    // Include the API URL in the cache key so different hosts don't collide.
    let key = format!("{api_url}-{repo}-{tag}").to_kebab_case();
    let group = get_release_cache(&key);
    let release = group
        .get(&key)
        .unwrap()
        .get_or_try_init(|| get_release_(api_url, repo, tag))?;
    Ok(release.clone())
}
/// Fetches one release by tag from the given GitHub API base URL
/// (uncached; callers go through the cache wrappers above).
fn get_release_(api_url: &str, repo: &str, tag: &str) -> Result<GithubRelease> {
    let url = format!("{api_url}/repos/{repo}/releases/tags/{tag}");
    let auth_headers = get_headers(&url);
    crate::http::HTTP_FETCH.json_with_headers(url, &auth_headers)
}
/// Extracts the URL of the next page from a GitHub `Link` response header,
/// e.g. `<https://api.github.com/...&page=2>; rel="next"`. Returns `None`
/// when the header is absent, non-ASCII, or has no `rel="next"` entry.
fn next_page(headers: &HeaderMap) -> Option<String> {
    let link = headers.get("link")?.to_str().ok()?;
    let caps = regex!(r#"<([^>]+)>; rel="next""#).captures(link)?;
    // Group 1 always exists when the pattern matches.
    Some(caps[1].to_string())
}
fn cache_dir() -> PathBuf {
dirs::CACHE.join("github")
}
/// Builds request headers for a GitHub API call.
///
/// The token source depends on the host: `GITHUB_TOKEN` for
/// `api.github.com`, `MISE_GITHUB_ENTERPRISE_TOKEN` for any other host.
/// When no token is configured the map is returned empty; the
/// `x-github-api-version` header is only sent alongside a token.
fn get_headers<U: IntoUrl>(url: U) -> HeaderMap {
    let mut headers = HeaderMap::new();
    let url = url.into_url().unwrap();
    let token = if url.host_str() == Some("api.github.com") {
        env::GITHUB_TOKEN.as_ref()
    } else {
        env::MISE_GITHUB_ENTERPRISE_TOKEN.as_ref()
    };
    if let Some(token) = token {
        headers.insert(
            "authorization",
            HeaderValue::from_str(format!("token {token}").as_str()).unwrap(),
        );
        headers.insert(
            "x-github-api-version",
            HeaderValue::from_static("2022-11-28"),
        );
    }
    headers
}