use crate::backend::Backend;
use crate::backend::VersionInfo;
use crate::backend::backend_type::BackendType;
use crate::backend::static_helpers::{
clean_binary_name, get_filename_from_url, list_available_platforms_with_key,
lookup_platform_key, rename_executable_in_dir, template_string, verify_artifact,
};
use crate::backend::version_list;
use crate::cli::args::BackendArg;
use crate::config::Config;
use crate::config::Settings;
use crate::http::HTTP;
use crate::install_context::InstallContext;
use crate::toolset::ToolVersion;
use crate::toolset::ToolVersionOptions;
use crate::ui::progress_report::SingleReport;
use crate::{dirs, file, hash};
use async_trait::async_trait;
use eyre::Result;
use serde::{Deserialize, Serialize};
use std::fmt::Debug;
use std::path::{Path, PathBuf};
use std::sync::Arc;
use std::time::{SystemTime, UNIX_EPOCH};
/// Subdirectory of the mise data dir where downloaded artifacts are extracted and cached.
const HTTP_TARBALLS_DIR: &str = "http-tarballs";
/// Per-entry metadata file written alongside the extracted contents; its
/// presence marks a cache entry as complete.
const METADATA_FILE: &str = "metadata.json";
/// Looks up `key` in the tool options, preferring a platform-specific value
/// (via `lookup_platform_key`) over the generic top-level one.
fn get_opt(opts: &ToolVersionOptions, key: &str) -> Option<String> {
    match lookup_platform_key(opts, key) {
        Some(platform_value) => Some(platform_value),
        None => opts.get(key).map(|v| v.to_string()),
    }
}
/// Metadata persisted as `metadata.json` next to each cached extraction,
/// recording where the artifact came from and when it was extracted.
#[derive(Debug, Serialize, Deserialize)]
struct CacheMetadata {
    // Original download URL the artifact was fetched from.
    url: String,
    // Checksum from the tool options, if one was configured.
    checksum: Option<String>,
    // Size in bytes of the downloaded file.
    size: u64,
    // Unix timestamp (seconds) of when the extraction happened.
    extracted_at: u64,
    // Platform key this artifact was cached for.
    platform: String,
}
/// How a downloaded artifact was laid out in the cache directory.
#[derive(Debug, Clone)]
enum ExtractionType {
    /// A single (possibly decompressed) executable file with the given name.
    RawFile { filename: String },
    /// An archive extracted into a directory tree.
    Archive,
}
/// Derived information about a downloaded file's name and archive format.
struct FileInfo {
    // Path with any `format` option extension appended; used only for
    // format/name detection, may differ from the on-disk path.
    effective_path: PathBuf,
    // Extension used for name trimming (canonical for the detected format
    // when known, otherwise taken from the path).
    extension: String,
    // Detected archive/compression format.
    format: file::TarFormat,
    // True for a compressed single binary (e.g. ".gz") that is not an archive.
    is_compressed_binary: bool,
}
impl FileInfo {
    /// Derives name/format info for a downloaded file. The `format` tool
    /// option can force a format by appending an extra extension (useful when
    /// the URL has no recognizable extension).
    fn new(file_path: &Path, opts: &ToolVersionOptions) -> Self {
        let effective_path = if let Some(added_ext) = get_opt(opts, "format") {
            let mut path = file_path.to_path_buf();
            let current_ext = path.extension().and_then(|e| e.to_str()).unwrap_or("");
            // Append (not replace) the forced extension, e.g. "tar" + "gz" -> "tar.gz".
            let new_ext = if current_ext.is_empty() {
                added_ext
            } else {
                format!("{}.{}", current_ext, added_ext)
            };
            path.set_extension(new_ext);
            path
        } else {
            file_path.to_path_buf()
        };
        let file_name = effective_path.file_name().unwrap().to_string_lossy();
        let format = file::TarFormat::from_file_name(&file_name);
        // Prefer the canonical extension for the detected format; otherwise
        // fall back to whatever extension the path carries.
        let extension = format
            .extension()
            .map(|s| s.to_string())
            .unwrap_or_else(|| {
                effective_path
                    .extension()
                    .and_then(|s| s.to_str())
                    .unwrap_or("")
                    .to_string()
            });
        // A compressed single binary (e.g. ".gz") is neither a raw file nor an archive.
        let is_compressed_binary = !format.is_archive() && format != file::TarFormat::Raw;
        Self {
            effective_path,
            extension,
            format,
            is_compressed_binary,
        }
    }

    /// File name (with extensions) of the effective path.
    fn file_name(&self) -> String {
        self.effective_path
            .file_name()
            .unwrap()
            .to_string_lossy()
            .to_string()
    }

    /// File name with the format extension removed once, e.g. "tool.gz" -> "tool".
    /// Uses `strip_suffix` rather than `trim_end_matches`: the latter strips the
    /// pattern repeatedly, so a name like "tool.gz.gz" would lose both suffixes.
    fn decompressed_name(&self) -> String {
        let name = self.file_name();
        match name.strip_suffix(&format!(".{}", self.extension)) {
            Some(stripped) => stripped.to_string(),
            None => name,
        }
    }
}
/// Backend that installs tools from plain HTTP(S) URLs.
#[derive(Debug)]
pub struct HttpBackend {
    // Backend argument (tool name, options, install paths) shared with callers.
    ba: Arc<BackendArg>,
}
impl HttpBackend {
    pub fn from_arg(ba: BackendArg) -> Self {
        Self { ba: Arc::new(ba) }
    }

    /// Root directory under the mise data dir where artifacts are cached.
    fn tarballs_dir() -> PathBuf {
        dirs::DATA.join(HTTP_TARBALLS_DIR)
    }

    /// Directory holding the extracted contents for a given cache key.
    fn cache_path(&self, cache_key: &str) -> PathBuf {
        Self::tarballs_dir().join(cache_key)
    }

    /// Path of the metadata file for a given cache key.
    fn metadata_path(&self, cache_key: &str) -> PathBuf {
        self.cache_path(cache_key).join(METADATA_FILE)
    }

    /// A cache entry counts as present only when both the content directory
    /// and its metadata file exist (missing metadata indicates an interrupted
    /// extraction).
    fn is_cached(&self, cache_key: &str) -> bool {
        self.cache_path(cache_key).exists() && self.metadata_path(cache_key).exists()
    }

    /// Builds the cache key: the blake3 hash of the downloaded file plus any
    /// options that change the extracted layout (strip_components, rename_exe,
    /// bin_path), so different layouts of the same artifact never collide.
    fn cache_key(&self, file_path: &Path, opts: &ToolVersionOptions) -> Result<String> {
        let checksum = hash::file_hash_blake3(file_path, None)?;
        let mut parts = vec![checksum];
        if let Some(strip) = get_opt(opts, "strip_components") {
            parts.push(format!("strip_{strip}"));
        }
        if let Some(rename) = get_opt(opts, "rename_exe") {
            parts.push(format!("rename_{rename}"));
            // bin_path only affects the cached layout when rename_exe is also
            // set, since it selects where the rename is applied.
            if let Some(bin_path) = get_opt(opts, "bin_path") {
                parts.push(format!("binpath_{bin_path}"));
            }
        }
        let key = parts.join("_");
        debug!("Cache key: {}", key);
        Ok(key)
    }

    /// Name the extracted/copied binary should have in the cache: an explicit
    /// `bin` option wins; otherwise derive from the (decompressed) file name,
    /// cleaned up against the tool name.
    fn dest_filename(
        &self,
        file_path: &Path,
        file_info: &FileInfo,
        opts: &ToolVersionOptions,
    ) -> String {
        if let Some(bin_name) = get_opt(opts, "bin") {
            return bin_name;
        }
        let raw_name = if file_info.is_compressed_binary {
            // e.g. "tool.gz" -> "tool"
            file_info.decompressed_name()
        } else {
            file_path.file_name().unwrap().to_string_lossy().to_string()
        };
        clean_binary_name(&raw_name, Some(&self.ba.tool_name))
    }

    /// Reconstructs the extraction type for an existing cache entry by looking
    /// at its contents (used when the download is already cached).
    fn extraction_type_from_cache(&self, cache_key: &str, file_info: &FileInfo) -> ExtractionType {
        // Anything that is neither a compressed binary nor a raw file was
        // extracted as an archive tree.
        if !file_info.is_compressed_binary && file_info.format != file::TarFormat::Raw {
            return ExtractionType::Archive;
        }
        let cache_path = self.cache_path(cache_key);
        // A raw-file cache entry contains exactly one file besides metadata.json.
        for entry in xx::file::ls(&cache_path).unwrap_or_default() {
            if let Some(name) = entry.file_name().map(|n| n.to_string_lossy().to_string()) {
                if name != METADATA_FILE {
                    return ExtractionType::RawFile { filename: name };
                }
            }
        }
        // Fallback if the cache dir is empty or unreadable: assume the tool name.
        ExtractionType::RawFile {
            filename: self.ba.tool_name.clone(),
        }
    }

    /// Extracts the downloaded artifact into the cache atomically: extract
    /// into a unique temp dir, then rename it over the final cache path.
    fn extract_to_cache(
        &self,
        tv: &ToolVersion,
        file_path: &Path,
        cache_key: &str,
        url: &str,
        opts: &ToolVersionOptions,
        pr: Option<&dyn SingleReport>,
    ) -> Result<ExtractionType> {
        let cache_path = self.cache_path(cache_key);
        file::create_dir_all(Self::tarballs_dir())?;
        // Temp name is unique per pid + timestamp so concurrent installs
        // cannot clobber each other's work.
        let tmp_path = Self::tarballs_dir().join(format!(
            "{}.tmp-{}-{}",
            cache_key,
            std::process::id(),
            SystemTime::now().duration_since(UNIX_EPOCH)?.as_millis()
        ));
        if tmp_path.exists() {
            let _ = file::remove_all(&tmp_path);
        }
        let extraction_type = self.extract_artifact(tv, &tmp_path, file_path, opts, pr)?;
        if cache_path.exists() {
            file::remove_all(&cache_path)?;
        }
        std::fs::rename(&tmp_path, &cache_path)?;
        // Metadata is written last; its presence marks the entry as complete.
        self.write_metadata(cache_key, url, file_path, opts)?;
        Ok(extraction_type)
    }

    /// Dispatches to the right extraction strategy based on the detected format.
    fn extract_artifact(
        &self,
        tv: &ToolVersion,
        dest: &Path,
        file_path: &Path,
        opts: &ToolVersionOptions,
        pr: Option<&dyn SingleReport>,
    ) -> Result<ExtractionType> {
        file::create_dir_all(dest)?;
        let file_info = FileInfo::new(file_path, opts);
        if file_info.is_compressed_binary {
            self.extract_compressed_binary(dest, file_path, &file_info, opts, pr)
        } else if file_info.format == file::TarFormat::Raw {
            self.extract_raw_file(dest, file_path, &file_info, opts, pr)
        } else {
            self.extract_archive(tv, dest, file_path, &file_info, opts, pr)
        }
    }

    /// Decompresses a single compressed binary (e.g. "tool.gz") into `dest`
    /// and marks it executable.
    fn extract_compressed_binary(
        &self,
        dest: &Path,
        file_path: &Path,
        file_info: &FileInfo,
        opts: &ToolVersionOptions,
        pr: Option<&dyn SingleReport>,
    ) -> Result<ExtractionType> {
        let filename = self.dest_filename(file_path, file_info, opts);
        let dest_file = dest.join(&filename);
        if let Some(pr) = pr {
            pr.set_message(format!("extract {}", file_info.file_name()));
        }
        file::untar(
            file_path,
            &dest_file,
            &file::TarOptions {
                pr,
                ..file::TarOptions::new(file_info.format)
            },
        )?;
        file::make_executable(&dest_file)?;
        Ok(ExtractionType::RawFile { filename })
    }

    /// Copies an uncompressed raw file into `dest` and marks it executable.
    fn extract_raw_file(
        &self,
        dest: &Path,
        file_path: &Path,
        file_info: &FileInfo,
        opts: &ToolVersionOptions,
        pr: Option<&dyn SingleReport>,
    ) -> Result<ExtractionType> {
        let filename = self.dest_filename(file_path, file_info, opts);
        let dest_file = dest.join(&filename);
        if let Some(pr) = pr {
            pr.set_message(format!("extract {}", file_info.file_name()));
        }
        file::copy(file_path, &dest_file)?;
        file::make_executable(&dest_file)?;
        Ok(ExtractionType::RawFile { filename })
    }

    /// Extracts an archive into `dest`, optionally stripping a leading
    /// directory and renaming the executable per the tool options.
    fn extract_archive(
        &self,
        tv: &ToolVersion,
        dest: &Path,
        file_path: &Path,
        file_info: &FileInfo,
        opts: &ToolVersionOptions,
        pr: Option<&dyn SingleReport>,
    ) -> Result<ExtractionType> {
        let mut strip_components: Option<usize> =
            get_opt(opts, "strip_components").and_then(|s| s.parse().ok());
        // If the archive is a single top-level directory and the user gave no
        // explicit layout options, strip that directory automatically.
        if strip_components.is_none()
            && get_opt(opts, "bin_path").is_none()
            && file::should_strip_components(file_path, file_info.format).unwrap_or(false)
        {
            debug!("Auto-detected single directory archive, using strip_components=1");
            strip_components = Some(1);
        }
        let tar_opts = file::TarOptions {
            format: file_info.format,
            strip_components: strip_components.unwrap_or(0),
            pr,
            preserve_mtime: false,
        };
        file::untar(file_path, dest, &tar_opts)?;
        if let Some(rename_to) = get_opt(opts, "rename_exe") {
            // Look for the executable in bin_path, then bin/, then the root.
            let search_dir = if let Some(bin_path_template) = get_opt(opts, "bin_path") {
                let bin_path = template_string(&bin_path_template, tv);
                dest.join(&bin_path)
            } else {
                let bin_dir = dest.join("bin");
                if bin_dir.is_dir() {
                    bin_dir
                } else {
                    dest.to_path_buf()
                }
            };
            // Use only the last path segment of a namespaced tool name.
            let tool_name = self.ba.tool_name.rsplit('/').next().unwrap();
            rename_executable_in_dir(&search_dir, &rename_to, Some(tool_name))?;
        }
        Ok(ExtractionType::Archive)
    }

    /// Writes the metadata.json describing a cache entry.
    fn write_metadata(
        &self,
        cache_key: &str,
        url: &str,
        file_path: &Path,
        opts: &ToolVersionOptions,
    ) -> Result<()> {
        let metadata = CacheMetadata {
            url: url.to_string(),
            checksum: get_opt(opts, "checksum"),
            size: file_path.metadata()?.len(),
            extracted_at: SystemTime::now().duration_since(UNIX_EPOCH)?.as_secs(),
            platform: self.get_platform_key(),
        };
        let json = serde_json::to_string_pretty(&metadata)?;
        file::write(self.metadata_path(cache_key), json)?;
        Ok(())
    }

    /// Symlinks the cached contents into the tool's install path. For raw
    /// files with a `bin_path` option, a per-file symlink is created inside
    /// that directory; otherwise the whole cache dir is symlinked.
    fn create_install_symlink(
        &self,
        tv: &ToolVersion,
        cache_key: &str,
        extraction_type: &ExtractionType,
        opts: &ToolVersionOptions,
    ) -> Result<()> {
        let cache_path = self.cache_path(cache_key);
        // For unresolved versions ("latest" or empty) name the install after a
        // short prefix of the content hash so distinct contents don't collide.
        let version_name = if tv.version == "latest" || tv.version.is_empty() {
            &cache_key[..7.min(cache_key.len())]
        } else {
            &tv.version
        };
        let install_path = tv.ba().installs_path.join(version_name);
        if install_path.exists() {
            file::remove_all(&install_path)?;
        }
        if let Some(parent) = install_path.parent() {
            file::create_dir_all(parent)?;
        }
        if let ExtractionType::RawFile { filename } = extraction_type
            && let Some(bin_path_template) = get_opt(opts, "bin_path")
        {
            let bin_path = template_string(&bin_path_template, tv);
            let dest_dir = install_path.join(&bin_path);
            file::create_dir_all(&dest_dir)?;
            let cached_file = cache_path.join(filename);
            let install_file = dest_dir.join(filename);
            file::make_symlink(&cached_file, &install_file)?;
            return Ok(());
        }
        file::make_symlink(&cache_path, &install_path)?;
        Ok(())
    }

    /// For "latest"/empty versions, aliases the requested version name to the
    /// content-hash-named install directory; a no-op for concrete versions.
    fn create_version_alias_symlink(&self, tv: &ToolVersion, cache_key: &str) -> Result<()> {
        if tv.version != "latest" && !tv.version.is_empty() {
            return Ok(());
        }
        let content_version = &cache_key[..7.min(cache_key.len())];
        let original_path = tv.ba().installs_path.join(&tv.version);
        let content_path = tv.ba().installs_path.join(content_version);
        if original_path.exists() {
            file::remove_all(&original_path)?;
        }
        if let Some(parent) = original_path.parent() {
            file::create_dir_all(parent)?;
        }
        file::make_symlink(&content_path, &original_path)?;
        Ok(())
    }

    /// Verifies (or, when the lockfile is enabled, records) the artifact's
    /// checksum and size against the platform entry in the lockfile.
    fn verify_checksum(
        &self,
        ctx: &InstallContext,
        tv: &mut ToolVersion,
        file_path: &Path,
    ) -> Result<()> {
        let settings = Settings::get();
        let filename = file_path.file_name().unwrap().to_string_lossy();
        let lockfile_enabled = settings.lockfile_enabled();
        let platform_key = self.get_platform_key();
        let platform_info = tv.lock_platforms.entry(platform_key).or_default();
        if let Some(checksum) = &platform_info.checksum {
            ctx.pr.set_message(format!("checksum {filename}"));
            // Checksums are stored as "<algorithm>:<hex digest>".
            let (algo, check) = checksum
                .split_once(':')
                .ok_or_else(|| eyre::eyre!("Invalid checksum format: {checksum}"))?;
            hash::ensure_checksum(file_path, check, Some(ctx.pr.as_ref()), algo)?;
        } else if lockfile_enabled {
            // No recorded checksum yet: generate one so it lands in the lockfile.
            ctx.pr.set_message(format!("generate checksum {filename}"));
            let h = hash::file_hash_blake3(file_path, Some(ctx.pr.as_ref()))?;
            platform_info.checksum = Some(format!("blake3:{h}"));
        }
        if let Some(expected_size) = platform_info.size {
            ctx.pr.set_message(format!("verify size {filename}"));
            let actual_size = file_path.metadata()?.len();
            if actual_size != expected_size {
                return Err(eyre::eyre!(
                    "Size mismatch for {filename}: expected {expected_size}, got {actual_size}"
                ));
            }
        } else if lockfile_enabled {
            platform_info.size = Some(file_path.metadata()?.len());
        }
        Ok(())
    }

    /// Fetches the available versions from `version_list_url`, parsed per the
    /// version_regex/version_json_path/version_expr options; empty when no
    /// version list URL is configured.
    async fn fetch_versions(&self, config: &Arc<Config>) -> Result<Vec<String>> {
        // Prefer options on the backend arg; fall back to tool opts from config.
        let opts = if !self.ba.opts().contains_key("version_list_url") {
            config.get_tool_opts(&self.ba).await?.unwrap_or_default()
        } else {
            self.ba.opts()
        };
        let url = match opts.get("version_list_url") {
            Some(url) => url.to_string(),
            None => return Ok(vec![]),
        };
        let regex = opts.get("version_regex");
        let json_path = opts.get("version_json_path");
        let version_expr = opts.get("version_expr");
        version_list::fetch_versions(&url, regex, json_path, version_expr).await
    }
}
/// Option keys that only affect installation (not version resolution) for
/// http tools.
pub fn install_time_option_keys() -> Vec<String> {
    [
        "url",
        "checksum",
        "version_list_url",
        "version_regex",
        "version_json_path",
        "version_expr",
        "format",
        "rename_exe",
    ]
    .iter()
    .map(|key| key.to_string())
    .collect()
}
#[async_trait]
impl Backend for HttpBackend {
    fn get_type(&self) -> BackendType {
        BackendType::Http
    }

    fn ba(&self) -> &Arc<BackendArg> {
        &self.ba
    }

    /// Number of progress operations an install will report; depends on
    /// whether an explicit checksum is configured and on the platform entry.
    async fn install_operation_count(&self, tv: &ToolVersion, _ctx: &InstallContext) -> usize {
        let opts = tv.request.options();
        super::http_install_operation_count(
            get_opt(&opts, "checksum").is_some(),
            &self.get_platform_key(),
            tv,
        )
    }

    /// Lists remote versions via `version_list_url` (empty if not configured).
    async fn _list_remote_versions(&self, config: &Arc<Config>) -> Result<Vec<VersionInfo>> {
        let versions = self.fetch_versions(config).await?;
        Ok(versions
            .into_iter()
            .map(|v| VersionInfo {
                version: v,
                ..Default::default()
            })
            .collect())
    }

    /// Downloads, verifies, extracts (or reuses the cache), and symlinks the
    /// tool into its install path.
    async fn install_version_(
        &self,
        ctx: &InstallContext,
        mut tv: ToolVersion,
    ) -> Result<ToolVersion> {
        let opts = tv.request.options();
        let url_template = get_opt(&opts, "url").ok_or_else(|| {
            let platform_key = self.get_platform_key();
            let available = list_available_platforms_with_key(&opts, "url");
            if !available.is_empty() {
                eyre::eyre!(
                    "No URL for platform {platform_key}. Available: {}. \
                    Provide 'url' or add 'platforms.{platform_key}.url'",
                    available.join(", ")
                )
            } else {
                eyre::eyre!("Http backend requires 'url' option")
            }
        })?;
        let url = template_string(&url_template, &tv);
        let filename = get_filename_from_url(&url);
        let file_path = tv.download_path().join(&filename);
        // Record the resolved URL in the lockfile platform entry.
        let platform_key = self.get_platform_key();
        tv.lock_platforms
            .entry(platform_key.clone())
            .or_default()
            .url = Some(url.clone());
        let settings = Settings::get();
        let lockfile_enabled = settings.lockfile_enabled();
        let has_lockfile_checksum = tv
            .lock_platforms
            .get(&platform_key)
            .and_then(|p| p.checksum.as_ref())
            .is_some();
        ctx.pr.set_message(format!("download {filename}"));
        HTTP.download_file(&url, &file_path, Some(ctx.pr.as_ref()))
            .await?;
        // An explicit `checksum` option counts as its own progress operation.
        if get_opt(&opts, "checksum").is_some() {
            ctx.pr.next_operation();
        }
        verify_artifact(&tv, &file_path, &opts, Some(ctx.pr.as_ref()))?;
        let cache_key = self.cache_key(&file_path, &opts)?;
        let file_info = FileInfo::new(&file_path, &opts);
        let cache_path = self.cache_path(&cache_key);
        // Serialize extraction of the same artifact across processes.
        let _lock = crate::lock_file::get(&cache_path, ctx.force)?;
        ctx.pr.next_operation();
        let extraction_type = if self.is_cached(&cache_key) {
            ctx.pr.set_message("using cached tarball".into());
            ctx.pr.set_length(1);
            ctx.pr.set_position(1);
            self.extraction_type_from_cache(&cache_key, &file_info)
        } else {
            ctx.pr.set_message("extracting to cache".into());
            self.extract_to_cache(
                &tv,
                &file_path,
                &cache_key,
                &url,
                &opts,
                Some(ctx.pr.as_ref()),
            )?
        };
        self.create_install_symlink(&tv, &cache_key, &extraction_type, &opts)?;
        self.create_version_alias_symlink(&tv, &cache_key)?;
        if lockfile_enabled || has_lockfile_checksum {
            ctx.pr.next_operation();
        }
        self.verify_checksum(ctx, &mut tv, &file_path)?;
        Ok(tv)
    }

    /// Directories to put on PATH: explicit `bin_path`, else `bin/`, else any
    /// immediate subdirectory containing `bin/`, else the install root itself.
    async fn list_bin_paths(
        &self,
        _config: &Arc<Config>,
        tv: &ToolVersion,
    ) -> Result<Vec<PathBuf>> {
        let opts = tv.request.options();
        if let Some(bin_path_template) = get_opt(&opts, "bin_path") {
            let bin_path = template_string(&bin_path_template, tv);
            return Ok(vec![tv.install_path().join(bin_path)]);
        }
        let bin_dir = tv.install_path().join("bin");
        if bin_dir.exists() {
            return Ok(vec![bin_dir]);
        }
        let mut paths = Vec::new();
        if let Ok(entries) = std::fs::read_dir(tv.install_path()) {
            for entry in entries.flatten() {
                let path = entry.path();
                if path.is_dir() {
                    let sub_bin = path.join("bin");
                    if sub_bin.exists() {
                        paths.push(sub_bin);
                    }
                }
            }
        }
        if paths.is_empty() {
            Ok(vec![tv.install_path()])
        } else {
            Ok(paths)
        }
    }
}