use std::collections::{HashMap, HashSet, VecDeque};
use std::path::{Path, PathBuf};
use serde::{Deserialize, Serialize};
use tracing::{debug, info, warn};
use crate::error::{BuildError, Result};
use crate::macos_toolchain::{
ensure_base_rootfs, extract_version_from_tag, provision_toolchain, ToolchainSpec,
};
use crate::sandbox_builder::SandboxImageConfig;
const ZLAYER_REGISTRY: &str = "ghcr.io/blackleafdigital/zlayer";
const REPO_SOURCES_BASE: &str = "https://zachhandley.github.io/RepoSources/maps";
const PACKAGE_MAP_CACHE_TTL_SECS: u64 = 7 * 24 * 3600;
/// On-disk / over-the-wire schema of a RepoSources package map file
/// (Linux package name -> Homebrew formula name) plus its provenance.
#[derive(Debug, Deserialize, Serialize)]
struct PackageMapFile {
    metadata: PackageMapMetadata,
    /// Linux package name -> Homebrew formula name.
    mappings: HashMap<String, String>,
}
/// Provenance metadata carried alongside a package map (see [`PackageMapFile`]).
#[derive(Debug, Deserialize, Serialize)]
struct PackageMapMetadata {
    /// Generation timestamp as recorded by the map producer (opaque string).
    generated_at: String,
    source: String,
    distro: String,
    total_mappings: usize,
}
/// Rewrite a Linux container image reference onto the ZLayer macOS registry.
///
/// Returns `None` when the reference is already a ZLayer image or when no
/// known mapping exists (the caller keeps the original reference). Plain
/// distro bases all collapse onto `base:latest`; known language images map
/// to their canonical ZLayer name with the version extracted from the tag.
#[must_use]
pub fn rewrite_image_for_macos(image_ref: &str) -> Option<String> {
    // Never rewrite something that is already ours.
    if image_ref.starts_with(ZLAYER_REGISTRY) {
        return None;
    }
    let normalized = strip_registry_prefix(image_ref);
    let (name, tag) = split_name_tag(&normalized);
    // Only the final path segment decides the mapping ("library/node" -> "node").
    let short_name = name.rsplit('/').next().unwrap_or(&name);
    if is_base_distro(short_name) {
        return Some(format!("{ZLAYER_REGISTRY}/base:latest"));
    }
    let canonical = if short_name.contains("graalvm") {
        "graalvm"
    } else {
        match short_name {
            "golang" | "go" => "golang",
            "node" => "node",
            "rust" => "rust",
            "python" | "python3" => "python",
            "deno" => "deno",
            "bun" => "bun",
            "swift" => "swift",
            "zig" => "zig",
            "eclipse-temurin" | "amazoncorretto" | "openjdk" => "java",
            _ => return None,
        }
    };
    let version = extract_version_from_tag(&tag);
    Some(format!("{ZLAYER_REGISTRY}/{canonical}:{version}"))
}
/// Whether `name` is a bare Linux distro base image (mapped to the shared
/// ZLayer base image rather than a language toolchain image).
fn is_base_distro(name: &str) -> bool {
    const BASE_DISTROS: [&str; 10] = [
        "ubuntu",
        "debian",
        "alpine",
        "centos",
        "fedora",
        "rockylinux",
        "almalinux",
        "archlinux",
        "amazonlinux",
        "busybox",
    ];
    BASE_DISTROS.contains(&name)
}
/// Drop a Docker Hub registry prefix from an image reference, if present.
///
/// Longer prefixes are tried first so "docker.io/library/x" loses the whole
/// prefix rather than just "docker.io/". Non-Hub references pass through.
fn strip_registry_prefix(image_ref: &str) -> String {
    [
        "docker.io/library/",
        "docker.io/",
        "index.docker.io/library/",
        "index.docker.io/",
    ]
    .iter()
    .find_map(|prefix| image_ref.strip_prefix(prefix))
    .unwrap_or(image_ref)
    .to_string()
}
/// Split an image reference into `(name, tag)`, defaulting the tag to
/// `"latest"` when absent.
///
/// A `:` only separates the tag when the text after it contains no `/`;
/// otherwise the colon belongs to a registry port (e.g. `localhost:5000/app`)
/// and the whole reference is the name. The previous implementation split on
/// the last `:` unconditionally and mis-parsed port-qualified references.
fn split_name_tag(image_ref: &str) -> (String, String) {
    match image_ref.rsplit_once(':') {
        Some((name, tag)) if !tag.contains('/') => (name.to_string(), tag.to_string()),
        _ => (image_ref.to_string(), "latest".to_string()),
    }
}
/// Resolve credentials for pulling from GHCR.
///
/// Precedence: the `GHCR_TOKEN` env var, then `GITHUB_TOKEN`, then the
/// user's Docker config (`~/.docker/config.json`), finally anonymous access.
/// The two env-var branches were duplicated verbatim; they now share one
/// ordered loop (log messages are unchanged).
pub fn resolve_ghcr_auth() -> zlayer_registry::RegistryAuth {
    use zlayer_registry::RegistryAuth;
    for var in ["GHCR_TOKEN", "GITHUB_TOKEN"] {
        if let Ok(token) = std::env::var(var) {
            // Empty values are treated as unset.
            if !token.is_empty() {
                debug!("Using {var} for registry auth");
                return RegistryAuth::Basic("_token".to_string(), token);
            }
        }
    }
    if let Some(creds) = read_docker_config_ghcr_auth() {
        debug!("Using Docker config credentials for GHCR");
        return creds;
    }
    debug!("No GHCR credentials found, using anonymous auth");
    RegistryAuth::Anonymous
}
/// Extract GHCR basic-auth credentials from the user's Docker config
/// (`~/.docker/config.json`), if present.
///
/// Returns `None` on any failure (missing file, malformed JSON, no
/// `ghcr.io` entry, undecodable auth blob) — callers treat all of those as
/// "no credentials available".
fn read_docker_config_ghcr_auth() -> Option<zlayer_registry::RegistryAuth> {
    use base64::prelude::*;
    use zlayer_registry::RegistryAuth;
    let home = dirs::home_dir()?;
    let config_path = home.join(".docker").join("config.json");
    let contents = std::fs::read_to_string(&config_path).ok()?;
    let config: serde_json::Value = serde_json::from_str(&contents).ok()?;
    // Docker stores "user:password" base64-encoded under auths.<registry>.auth.
    let auth_b64 = config.get("auths")?.get("ghcr.io")?.get("auth")?.as_str()?;
    let decoded = BASE64_STANDARD.decode(auth_b64).ok()?;
    let decoded_str = String::from_utf8(decoded).ok()?;
    let (user, pass) = decoded_str.split_once(':')?;
    Some(RegistryAuth::Basic(user.to_string(), pass.to_string()))
}
#[cfg(feature = "cache")]
pub async fn try_pull_zlayer_image(
image_ref: &str,
image_dir: &Path,
rootfs_dir: &Path,
) -> Result<bool> {
use zlayer_registry::{BlobCache, ImagePuller, LayerUnpacker};
info!("Attempting to pull ZLayer image: {}", image_ref);
let cache = match BlobCache::new() {
Ok(c) => c,
Err(e) => {
warn!("Failed to create blob cache for GHCR pull: {e}");
return Ok(false);
}
};
let puller = ImagePuller::new(cache);
let auth = resolve_ghcr_auth();
let layers = match puller.pull_image(image_ref, &auth).await {
Ok(l) => l,
Err(e) => {
warn!("Failed to pull ZLayer image {image_ref}: {e}");
return Ok(false);
}
};
info!(
"Pulled {} layers for {}, extracting to rootfs",
layers.len(),
image_ref
);
tokio::fs::create_dir_all(rootfs_dir).await?;
let mut unpacker = LayerUnpacker::new(rootfs_dir.to_path_buf());
let layer_refs: Vec<(Vec<u8>, String)> = layers;
if let Err(e) = unpacker.unpack_layers(&layer_refs).await {
warn!("Failed to unpack layers for {image_ref}: {e}");
return Ok(false);
}
match puller.pull_image_config(image_ref, &auth).await {
Ok(ic) => {
if let Ok(json) = serde_json::to_string_pretty(&ic) {
let _ = tokio::fs::write(image_dir.join("image_config.json"), json).await;
}
}
Err(e) => debug!("Could not pull image config for {image_ref}: {e}"),
}
info!(
"Successfully pulled and extracted ZLayer image: {}",
image_ref
);
Ok(true)
}
/// Build a toolchain image on disk under `<data_dir>/images/<sanitized ref>/`:
/// base rootfs, provisioned toolchain, and a serialized `SandboxImageConfig`.
///
/// Returns the image directory. Errors propagate from filesystem operations,
/// rootfs setup, provisioning, or config serialization.
pub async fn build_toolchain_as_image(
    spec: &ToolchainSpec,
    image_ref: &str,
    data_dir: &Path,
) -> Result<PathBuf> {
    let image_name = sanitize_image_name(image_ref);
    let image_dir = data_dir.join("images").join(&image_name);
    let rootfs_dir = image_dir.join("rootfs");
    tokio::fs::create_dir_all(&rootfs_dir).await?;
    ensure_base_rootfs(&rootfs_dir).await?;
    let cache_dir = data_dir.join("toolchain-cache");
    let tmp_dir = data_dir.join("tmp");
    tokio::fs::create_dir_all(&cache_dir).await?;
    tokio::fs::create_dir_all(&tmp_dir).await?;
    provision_toolchain(spec, &rootfs_dir, &cache_dir, &tmp_dir).await?;
    let mut config = toolchain_spec_to_config(spec);
    // Hash the spec's Debug representation so identical specs yield the same
    // source_hash (used downstream as a cache identity).
    {
        use sha2::{Digest, Sha256};
        let mut hasher = Sha256::new();
        hasher.update(format!("{spec:?}").as_bytes());
        config.source_hash = Some(format!("{:x}", hasher.finalize()));
    }
    let config_json =
        serde_json::to_string_pretty(&config).map_err(|e| BuildError::CacheError {
            message: format!("failed to serialise image config: {e}"),
        })?;
    tokio::fs::write(image_dir.join("config.json"), config_json).await?;
    info!(
        "Built toolchain image for {} v{} at {}",
        spec.language,
        spec.version,
        image_dir.display()
    );
    Ok(image_dir)
}
/// Build a plain base image (no toolchain): base rootfs plus a default
/// `SandboxImageConfig` written to `<data_dir>/images/<sanitized ref>/config.json`.
///
/// Returns the image directory.
pub async fn build_base_image(image_ref: &str, data_dir: &Path) -> Result<PathBuf> {
    let image_name = sanitize_image_name(image_ref);
    let image_dir = data_dir.join("images").join(&image_name);
    let rootfs_dir = image_dir.join("rootfs");
    tokio::fs::create_dir_all(&rootfs_dir).await?;
    ensure_base_rootfs(&rootfs_dir).await?;
    // Cache identity derives from the image reference alone (the base rootfs
    // itself is assumed identical across builds).
    let source_hash = {
        use sha2::{Digest, Sha256};
        let mut hasher = Sha256::new();
        hasher.update(format!("base_image:{image_ref}").as_bytes());
        format!("{:x}", hasher.finalize())
    };
    let config = SandboxImageConfig {
        env: vec![
            "PATH=/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin".to_string(),
            "HOME=/root".to_string(),
        ],
        working_dir: "/".to_string(),
        entrypoint: None,
        cmd: Some(vec!["/bin/sh".to_string()]),
        exposed_ports: HashMap::new(),
        labels: HashMap::new(),
        user: None,
        volumes: Vec::new(),
        stop_signal: None,
        shell: None,
        healthcheck: None,
        source_hash: Some(source_hash),
    };
    let config_json =
        serde_json::to_string_pretty(&config).map_err(|e| BuildError::CacheError {
            message: format!("failed to serialise image config: {e}"),
        })?;
    tokio::fs::write(image_dir.join("config.json"), config_json).await?;
    info!("Built base image at {}", image_dir.display());
    Ok(image_dir)
}
/// Turn a `ToolchainSpec` into a default `SandboxImageConfig`.
///
/// The spec's env vars are carried over; its PATH directories are placed
/// ahead of the standard system directories so toolchain binaries win.
/// `source_hash` is left unset for the caller to fill in.
#[must_use]
pub fn toolchain_spec_to_config(spec: &ToolchainSpec) -> SandboxImageConfig {
    let path_value = spec
        .path_dirs
        .iter()
        .map(String::as_str)
        .chain(["/usr/local/bin", "/usr/bin", "/bin", "/usr/sbin", "/sbin"])
        .collect::<Vec<&str>>()
        .join(":");
    let mut env: Vec<String> = spec.env.iter().map(|(k, v)| format!("{k}={v}")).collect();
    env.push(format!("PATH={path_value}"));
    env.push("HOME=/root".to_string());
    SandboxImageConfig {
        env,
        working_dir: "/".to_string(),
        entrypoint: None,
        cmd: Some(vec!["/bin/sh".to_string()]),
        exposed_ports: HashMap::new(),
        labels: HashMap::new(),
        user: None,
        volumes: Vec::new(),
        stop_signal: None,
        shell: None,
        healthcheck: None,
        source_hash: None,
    }
}
/// Subset of the Homebrew formula API JSON that this module consumes.
#[derive(Debug, Deserialize)]
struct BrewFormulaInfo {
    bottle: BrewBottle,
    /// Runtime dependency formula names (used for transitive installs).
    #[serde(default)]
    dependencies: Vec<String>,
    #[serde(default)]
    versions: BrewVersions,
}
/// Version block of a Homebrew formula; only the stable version is used.
#[derive(Debug, Default, Deserialize)]
struct BrewVersions {
    #[serde(default)]
    stable: Option<String>,
}
/// Bottle section of a Homebrew formula (stable channel only).
#[derive(Debug, Deserialize)]
struct BrewBottle {
    stable: BrewBottleStable,
}
/// Per-platform bottle downloads, keyed by platform tag
/// (e.g. "arm64_sequoia", or "all" for platform-independent bottles).
#[derive(Debug, Deserialize)]
struct BrewBottleStable {
    files: HashMap<String, BrewBottleFile>,
}
/// A single downloadable bottle artifact.
#[derive(Debug, Deserialize)]
struct BrewBottleFile {
    url: String,
}
/// How a requested package will be obtained for installation into the rootfs.
#[derive(Debug)]
enum ResolvedPackage {
    /// Standard Homebrew bottle described by formula API JSON.
    HomebrewBottle(BrewFormulaInfo),
    /// A binary release downloaded directly from a code forge.
    DirectRelease {
        name: String,
        /// Discovery source tag (e.g. "github-release:…"); retained for debugging.
        #[allow(dead_code)]
        source: String,
        url: String,
        asset_name: String,
    },
    /// A formula living in a third-party Homebrew tap; installed like a
    /// direct release from its source URL.
    Tap {
        name: String,
        #[allow(dead_code)]
        tap: String,
        url: String,
    },
    /// Python runtime installed via the `uv` tool instead of a bottle.
    UvPython { version: String },
}
/// Response shape returned by the RepoSourceSyncer discovery endpoint.
#[derive(Debug, Deserialize)]
struct DiscoveryResponse {
    name: String,
    /// Source tag such as "tap:…" or "github-release:…".
    #[serde(default)]
    source: Option<String>,
    /// Download URL for tap / direct-release sources.
    #[serde(default)]
    source_url: Option<String>,
    /// Optional embedded Homebrew formula JSON.
    #[serde(default)]
    data: Option<serde_json::Value>,
}
/// Work out how to obtain `formula` for installation into the rootfs.
///
/// Resolution order:
/// 1. Python aliases are special-cased to a `uv`-managed install.
/// 2. Cached formula JSON from the RepoSources static site.
/// 3. The live Homebrew formula API (successful responses are mirrored back
///    to the RepoSourceSyncer service in a fire-and-forget background task).
/// 4. RepoSourceSyncer forge discovery (taps, direct forge releases, or
///    embedded bottle data).
///
/// Errors only when every source fails to produce a usable package.
#[allow(clippy::too_many_lines)]
async fn resolve_package(formula: &str) -> Result<ResolvedPackage> {
    // Python never ships as a bottle here; it is installed via uv.
    if formula == "python3" || formula == "python" || formula == "python@3" {
        return Ok(ResolvedPackage::UvPython {
            version: "3".to_string(),
        });
    }
    // NOTE(review): "/../" hops from .../maps up to a sibling /formulas
    // directory on the static site — confirm the server resolves this as intended.
    let cached_url = format!("{REPO_SOURCES_BASE}/../formulas/{formula}.json");
    if let Ok(resp) = reqwest::get(&cached_url).await {
        if resp.status().is_success() {
            if let Ok(info) = resp.json::<BrewFormulaInfo>().await {
                debug!("Using cached formula from RepoSources: {}", formula);
                return Ok(ResolvedPackage::HomebrewBottle(info));
            }
        }
    }
    let api_url = format!("https://formulae.brew.sh/api/formula/{formula}.json");
    info!("Fetching Homebrew formula info for: {}", formula);
    let response = reqwest::get(&api_url)
        .await
        .map_err(|e| BuildError::RegistryError {
            message: format!("failed to fetch Homebrew formula info for {formula}: {e}"),
        })?;
    if response.status().is_success() {
        let body = response
            .bytes()
            .await
            .map_err(|e| BuildError::RegistryError {
                message: format!("failed to read Homebrew formula response for {formula}: {e}"),
            })?;
        let info: BrewFormulaInfo =
            serde_json::from_slice(&body).map_err(|e| BuildError::RegistryError {
                message: format!("failed to parse Homebrew formula JSON for {formula}: {e}"),
            })?;
        // Mirror the formula JSON to RepoSourceSyncer in the background;
        // failures are deliberately ignored.
        // NOTE(review): the request body is built by string interpolation — a
        // formula name containing a quote would produce invalid JSON; confirm
        // formula names are always plain identifiers.
        let formula_name = formula.to_string();
        let body_clone = body.to_vec();
        tokio::spawn(async move {
            let now = utc_iso8601(
                std::time::SystemTime::now()
                    .duration_since(std::time::UNIX_EPOCH)
                    .unwrap_or_default()
                    .as_secs(),
            );
            let _ = reqwest::Client::new()
                .post("https://reposync.blackleafdigital.com/formula")
                .header("zlayer-repo-sync", &now)
                .header("content-type", "application/json")
                .body(format!(
                    r#"{{"name":"{}","data":{}}}"#,
                    formula_name,
                    String::from_utf8_lossy(&body_clone)
                ))
                .send()
                .await;
        });
        return Ok(ResolvedPackage::HomebrewBottle(info));
    }
    info!(
        "Homebrew API returned {} for {}, asking RepoSourceSyncer to discover",
        response.status(),
        formula
    );
    let discover_url =
        format!("https://reposync.blackleafdigital.com/formula/{formula}?discover=true");
    if let Ok(resp) = reqwest::get(&discover_url).await {
        if resp.status().is_success() {
            if let Ok(text) = resp.text().await {
                // The discovery endpoint may answer in several shapes; try a
                // structured DiscoveryResponse first.
                if let Ok(discovery) = serde_json::from_str::<DiscoveryResponse>(&text) {
                    if let Some(ref source) = discovery.source {
                        if let Some(tap_name) = source.strip_prefix("tap:") {
                            let url = discovery.source_url.unwrap_or_default();
                            info!(
                                "Discovered {} as tap {} via RepoSourceSyncer",
                                formula, tap_name
                            );
                            return Ok(ResolvedPackage::Tap {
                                name: discovery.name,
                                tap: tap_name.to_string(),
                                url,
                            });
                        }
                        if source.starts_with("github-release:")
                            || source.starts_with("gitlab-release:")
                            || source.starts_with("codeberg-release:")
                            || source.starts_with("forgejo-release:")
                        {
                            let url = discovery.source_url.clone().unwrap_or_default();
                            // Asset name defaults to the URL's last path segment.
                            let asset_name = url.rsplit('/').next().unwrap_or(formula).to_string();
                            info!(
                                "Discovered {} as direct release via RepoSourceSyncer",
                                formula
                            );
                            return Ok(ResolvedPackage::DirectRelease {
                                name: discovery.name,
                                source: source.clone(),
                                url,
                                asset_name,
                            });
                        }
                    }
                    // No recognised source tag: the response may embed raw
                    // Homebrew formula JSON under `data`.
                    if let Some(ref data) = discovery.data {
                        if let Ok(info) = serde_json::from_value::<BrewFormulaInfo>(data.clone()) {
                            info!("Discovered {} via RepoSourceSyncer (bottle data)", formula);
                            return Ok(ResolvedPackage::HomebrewBottle(info));
                        }
                    }
                }
                // Last attempt: the whole body might be formula JSON itself.
                if let Ok(info) = serde_json::from_str::<BrewFormulaInfo>(&text) {
                    info!("Discovered {} via RepoSourceSyncer", formula);
                    return Ok(ResolvedPackage::HomebrewBottle(info));
                }
                debug!(
                    "RepoSourceSyncer returned data for {} but not in any recognised format",
                    formula
                );
            }
        }
    }
    Err(BuildError::RegistryError {
        message: format!(
            "Formula '{formula}' not found in Homebrew, RepoSources, or forge discovery"
        ),
    })
}
/// Install a resolved package into the rootfs, dispatching on how it was
/// resolved (Homebrew bottle, direct forge release, tap, or uv-managed Python).
#[allow(clippy::too_many_lines)]
async fn install_package(
    formula: &str,
    package: &ResolvedPackage,
    rootfs_dir: &Path,
    tmp_dir: &Path,
) -> Result<()> {
    match package {
        ResolvedPackage::HomebrewBottle(info) => {
            install_homebrew_bottle(formula, info, rootfs_dir, tmp_dir).await
        }
        ResolvedPackage::DirectRelease {
            name,
            url,
            asset_name,
            ..
        } => install_direct_release(name, url, asset_name, rootfs_dir, tmp_dir).await,
        // Tap packages are fetched like direct releases; the asset name is
        // derived from the URL's last path segment.
        ResolvedPackage::Tap { name, url, .. } => {
            let asset_name = url.rsplit('/').next().unwrap_or(name);
            install_direct_release(name, url, asset_name, rootfs_dir, tmp_dir).await
        }
        ResolvedPackage::UvPython { version } => {
            install_uv_python(version, rootfs_dir, tmp_dir).await
        }
    }
}
/// Download a forge release asset and install it into the rootfs.
///
/// Archives (.zip/.tar.gz/.tgz/.tar.xz) are extracted and every file that
/// `file(1)` identifies as Mach-O is copied into `<rootfs>/usr/local/bin`
/// and marked executable. A non-archive asset is treated as a single binary
/// and installed under the package name. Per-file copy/chmod failures are
/// deliberately best-effort; only download/extract failures error out.
#[allow(
    clippy::too_many_lines,
    clippy::case_sensitive_file_extension_comparisons
)]
async fn install_direct_release(
    name: &str,
    url: &str,
    asset_name: &str,
    rootfs_dir: &Path,
    tmp_dir: &Path,
) -> Result<()> {
    tokio::fs::create_dir_all(tmp_dir).await?;
    let download_path = tmp_dir.join(asset_name);
    info!("Downloading release asset for {} from {}", name, url);
    let bytes = reqwest::get(url)
        .await
        .map_err(|e| BuildError::RegistryError {
            message: format!("failed to download release for {name}: {e}"),
        })?
        .bytes()
        .await
        .map_err(|e| BuildError::RegistryError {
            message: format!("failed to read release bytes for {name}: {e}"),
        })?;
    tokio::fs::write(&download_path, &bytes).await?;
    let usr_local_bin = rootfs_dir.join("usr/local/bin");
    tokio::fs::create_dir_all(&usr_local_bin).await?;
    // Fresh extraction scratch directory per package.
    let extract_dir = tmp_dir.join(format!("{name}_release_extract"));
    let _ = tokio::fs::remove_dir_all(&extract_dir).await;
    tokio::fs::create_dir_all(&extract_dir).await?;
    let lower = asset_name.to_lowercase();
    let is_archive = lower.ends_with(".zip")
        || lower.ends_with(".tar.gz")
        || lower.ends_with(".tgz")
        || lower.ends_with(".tar.xz");
    if is_archive {
        // Pick the extraction tool from the file extension.
        let output = if lower.ends_with(".zip") {
            tokio::process::Command::new("unzip")
                .args(["-o"])
                .arg(&download_path)
                .arg("-d")
                .arg(&extract_dir)
                .output()
                .await?
        } else if lower.ends_with(".tar.gz") || lower.ends_with(".tgz") {
            tokio::process::Command::new("tar")
                .args(["xzf"])
                .arg(&download_path)
                .arg("-C")
                .arg(&extract_dir)
                .output()
                .await?
        } else {
            tokio::process::Command::new("tar")
                .args(["xJf"])
                .arg(&download_path)
                .arg("-C")
                .arg(&extract_dir)
                .output()
                .await?
        };
        if !output.status.success() {
            return Err(BuildError::RegistryError {
                message: format!(
                    "failed to extract release archive for {name}: {}",
                    String::from_utf8_lossy(&output.stderr)
                ),
            });
        }
        // Walk every extracted file and keep only Mach-O executables.
        let find_output = tokio::process::Command::new("find")
            .arg(&extract_dir)
            .args(["-type", "f"])
            .output()
            .await?;
        let files_list = String::from_utf8_lossy(&find_output.stdout);
        for line in files_list.lines() {
            let path = Path::new(line.trim());
            if !path.exists() {
                continue;
            }
            let file_output = tokio::process::Command::new("file")
                .arg(path)
                .output()
                .await;
            if let Ok(fo) = file_output {
                let file_desc = String::from_utf8_lossy(&fo.stdout);
                if file_desc.contains("Mach-O") {
                    if let Some(file_name) = path.file_name() {
                        let dest = usr_local_bin.join(file_name);
                        // Best-effort copy + chmod; individual failures ignored.
                        let _ = tokio::fs::copy(path, &dest).await;
                        let _ = tokio::process::Command::new("chmod")
                            .args(["+x"])
                            .arg(&dest)
                            .status()
                            .await;
                        info!(
                            "Installed binary {} from release {}",
                            file_name.to_string_lossy(),
                            name
                        );
                    }
                }
            }
        }
    } else {
        // Not an archive: assume the asset itself is the binary.
        let dest = usr_local_bin.join(name);
        tokio::fs::copy(&download_path, &dest).await?;
        let _ = tokio::process::Command::new("chmod")
            .args(["+x"])
            .arg(&dest)
            .status()
            .await;
        info!("Installed single binary {} from release", name);
    }
    // Clean up scratch files; errors here are non-fatal.
    let _ = tokio::fs::remove_file(&download_path).await;
    let _ = tokio::process::Command::new("rm")
        .args(["-rf"])
        .arg(&extract_dir)
        .status()
        .await;
    Ok(())
}
/// Install a Python runtime into the rootfs via `uv python install`.
///
/// Bootstraps `uv` itself (through its Homebrew bottle) when absent, then
/// symlinks the installed interpreter as `python3` and `python` in
/// `<rootfs>/usr/local/bin`.
async fn install_uv_python(version: &str, rootfs_dir: &Path, tmp_dir: &Path) -> Result<()> {
    let uv_bin = rootfs_dir.join("opt/homebrew/bin/uv");
    if !uv_bin.exists() {
        info!("uv not found in rootfs, installing via Homebrew bottle");
        let uv_pkg = resolve_package("uv").await?;
        // Box::pin breaks the async recursion cycle: install_package can call
        // back into install_uv_python.
        Box::pin(install_package("uv", &uv_pkg, rootfs_dir, tmp_dir)).await?;
    }
    if !uv_bin.exists() {
        return Err(BuildError::RegistryError {
            message: "failed to install uv — binary not found after installation".to_string(),
        });
    }
    let python_install_dir = rootfs_dir.join("usr/local/python");
    tokio::fs::create_dir_all(&python_install_dir).await?;
    info!("Installing Python {} via uv", version);
    let output = tokio::process::Command::new(&uv_bin)
        .args(["python", "install", version])
        .env("UV_PYTHON_INSTALL_DIR", &python_install_dir)
        .output()
        .await?;
    if !output.status.success() {
        return Err(BuildError::RegistryError {
            message: format!(
                "uv python install failed: {}",
                String::from_utf8_lossy(&output.stderr)
            ),
        });
    }
    let usr_local_bin = rootfs_dir.join("usr/local/bin");
    tokio::fs::create_dir_all(&usr_local_bin).await?;
    // Locate the installed interpreter; only the first match gets symlinked.
    let find_output = tokio::process::Command::new("find")
        .arg(&python_install_dir)
        .args(["-name", "python3", "-type", "f"])
        .output()
        .await?;
    let python_bin_path = String::from_utf8_lossy(&find_output.stdout);
    if let Some(line) = python_bin_path.lines().next() {
        let python_path = PathBuf::from(line.trim());
        if python_path.exists() {
            let symlink_python3 = usr_local_bin.join("python3");
            let symlink_python = usr_local_bin.join("python");
            // Replace stale links; all four operations are best-effort.
            let _ = tokio::fs::remove_file(&symlink_python3).await;
            let _ = tokio::fs::remove_file(&symlink_python).await;
            let _ = tokio::fs::symlink(&python_path, &symlink_python3).await;
            let _ = tokio::fs::symlink(&python_path, &symlink_python).await;
            info!("Symlinked python3 and python to {}", python_path.display());
        }
    }
    Ok(())
}
/// Download and unpack a Homebrew bottle into the rootfs Cellar, then
/// symlink its `bin/` entries into `<rootfs>/opt/homebrew/bin`.
///
/// Prefers the bottle matching the current platform tag, falling back to the
/// platform-independent "all" bottle. GHCR-hosted bottles are fetched via
/// the registry blob API (with local caching); other URLs over plain HTTP.
#[allow(clippy::too_many_lines)]
async fn install_homebrew_bottle(
    formula: &str,
    info: &BrewFormulaInfo,
    rootfs_dir: &Path,
    tmp_dir: &Path,
) -> Result<()> {
    let platform_tag = bottle_platform_tag();
    let bottle_file = info
        .bottle
        .stable
        .files
        .get(&platform_tag)
        .or_else(|| info.bottle.stable.files.get("all"))
        .ok_or_else(|| BuildError::RegistryError {
            message: format!(
                "no Homebrew bottle for {formula} on platform {platform_tag}; \
                available: {:?}",
                info.bottle.stable.files.keys().collect::<Vec<_>>()
            ),
        })?;
    let tarball_path = tmp_dir.join(format!("{formula}.tar.gz"));
    tokio::fs::create_dir_all(tmp_dir).await?;
    info!(
        "Downloading bottle for {} from {}",
        formula, bottle_file.url
    );
    let bottle_bytes = if let Some((image_ref, digest)) = parse_ghcr_blob_url(&bottle_file.url) {
        let cache = zlayer_registry::BlobCache::new().map_err(|e| BuildError::RegistryError {
            message: format!("failed to create blob cache: {e}"),
        })?;
        let puller = zlayer_registry::ImagePuller::new(cache);
        let auth = zlayer_registry::RegistryAuth::Anonymous;
        let data = puller
            .pull_blob(&image_ref, &digest, &auth)
            .await
            .map_err(|e| BuildError::RegistryError {
                message: format!("failed to download bottle for {formula}: {e}"),
            })?;
        data
    } else {
        reqwest::get(&bottle_file.url)
            .await
            .map_err(|e| BuildError::RegistryError {
                message: format!("failed to download bottle for {formula}: {e}"),
            })?
            .bytes()
            .await
            .map_err(|e| BuildError::RegistryError {
                message: format!("failed to read bottle bytes for {formula}: {e}"),
            })?
            .to_vec()
    };
    tokio::fs::write(&tarball_path, &bottle_bytes).await?;
    let extract_tmp = tmp_dir.join(format!("{formula}_extract"));
    tokio::fs::create_dir_all(&extract_tmp).await?;
    let output = tokio::process::Command::new("tar")
        .args(["xzf"])
        .arg(&tarball_path)
        .arg("-C")
        .arg(&extract_tmp)
        .output()
        .await?;
    if !output.status.success() {
        return Err(BuildError::RegistryError {
            message: format!(
                "failed to extract bottle for {formula}: {}",
                String::from_utf8_lossy(&output.stderr)
            ),
        });
    }
    // Bottles unpack as <formula>/<version>/...; read the version dir name
    // from the first entry.
    let formula_dir = extract_tmp.join(formula);
    let version = if formula_dir.exists() {
        let mut entries = tokio::fs::read_dir(&formula_dir).await?;
        let mut found_version = String::from("unknown");
        if let Some(entry) = entries.next_entry().await? {
            found_version = entry.file_name().to_string_lossy().to_string();
        }
        found_version
    } else {
        "latest".to_string()
    };
    let cellar_dir = rootfs_dir
        .join("opt/homebrew/Cellar")
        .join(formula)
        .join(&version);
    tokio::fs::create_dir_all(&cellar_dir).await?;
    let src_version_dir = formula_dir.join(&version);
    if src_version_dir.exists() {
        // Copy the bottle contents into the Cellar; retry with the "dir/."
        // form if the first cp invocation fails.
        let cp_output = tokio::process::Command::new("cp")
            .args(["-R"])
            .arg(format!("{}/", src_version_dir.display()))
            .arg(format!("{}/", cellar_dir.display()))
            .output()
            .await?;
        if !cp_output.status.success() {
            let _ = tokio::process::Command::new("cp")
                .args(["-R"])
                .arg(format!("{}/.", src_version_dir.display()))
                .arg(cellar_dir.display().to_string())
                .output()
                .await;
        }
    }
    // Symlink each Cellar bin/ entry into opt/homebrew/bin.
    let homebrew_bin = rootfs_dir.join("opt/homebrew/bin");
    tokio::fs::create_dir_all(&homebrew_bin).await?;
    let cellar_bin = cellar_dir.join("bin");
    if cellar_bin.exists() {
        let mut bin_entries = tokio::fs::read_dir(&cellar_bin).await?;
        while let Some(entry) = bin_entries.next_entry().await? {
            let entry_path = entry.path();
            let file_name = entry.file_name();
            let link_path = homebrew_bin.join(&file_name);
            let _ = tokio::fs::remove_file(&link_path).await;
            if let Err(e) = tokio::fs::symlink(&entry_path, &link_path).await {
                debug!("Failed to symlink {}: {e}", file_name.to_string_lossy());
            }
        }
    }
    // Make extracted files writable before removal (bottle contents may be
    // read-only), then clean up scratch files; all best-effort.
    let _ = tokio::process::Command::new("chmod")
        .args(["-R", "u+w"])
        .arg(&extract_tmp)
        .status()
        .await;
    let _ = tokio::process::Command::new("rm")
        .args(["-rf"])
        .arg(&extract_tmp)
        .status()
        .await;
    let _ = tokio::fs::remove_file(&tarball_path).await;
    Ok(())
}
/// Install `formula` plus its transitive Homebrew dependencies into the
/// rootfs using a breadth-first work queue.
///
/// Already-present packages are skipped; resolve/install failures for an
/// individual package are logged and skipped so one bad dependency does not
/// abort the whole build.
///
/// Fix: the source contained mojibake — `&current` had been corrupted to
/// `¤t` (the HTML entity `&curren;` plus `t`) at five call sites, which
/// does not compile; the original identifier is restored.
pub async fn install_with_deps(formula: &str, rootfs_dir: &Path, tmp_dir: &Path) -> Result<()> {
    let mut installed = HashSet::new();
    let mut queue = VecDeque::new();
    queue.push_back(formula.to_string());
    while let Some(current) = queue.pop_front() {
        if installed.contains(&current) {
            continue;
        }
        // Skip anything already present in the rootfs (Cellar or /usr/local/bin).
        let cellar = rootfs_dir.join("opt/homebrew/Cellar").join(&current);
        let usr_bin = rootfs_dir.join("usr/local/bin").join(&current);
        if cellar.exists() || usr_bin.exists() {
            debug!("Skipping {} (already in rootfs)", current);
            installed.insert(current);
            continue;
        }
        let package = match resolve_package(&current).await {
            Ok(pkg) => pkg,
            Err(e) => {
                warn!("Failed to resolve package {}: {} (skipping)", current, e);
                installed.insert(current);
                continue;
            }
        };
        // Enqueue declared dependencies before installing the current package.
        if let ResolvedPackage::HomebrewBottle(ref info) = package {
            for dep in &info.dependencies {
                if !installed.contains(dep) {
                    queue.push_back(dep.clone());
                }
            }
        }
        match install_package(&current, &package, rootfs_dir, tmp_dir).await {
            Ok(()) => {
                let version_str = match &package {
                    ResolvedPackage::HomebrewBottle(info) => info
                        .versions
                        .stable
                        .as_deref()
                        .unwrap_or("unknown")
                        .to_string(),
                    ResolvedPackage::UvPython { version } => version.clone(),
                    _ => "latest".to_string(),
                };
                info!("Installed {} v{}", current, version_str);
            }
            Err(e) => {
                warn!("Failed to install {}: {} (continuing)", current, e);
            }
        }
        installed.insert(current);
    }
    Ok(())
}
/// Map Linux package names to `(homebrew_name, skip)` pairs for `distro`,
/// using the (cached or freshly fetched) package map plus fallbacks.
pub async fn map_linux_packages(
    packages: &[&str],
    distro: &str,
    cache_dir: &Path,
) -> Vec<(String, bool)> {
    let map = load_or_fetch_package_map(distro, cache_dir).await;
    let mut results = Vec::with_capacity(packages.len());
    for &pkg in packages {
        results.push(resolve_single_package(pkg, &map));
    }
    results
}
/// Resolve one Linux package name to `(homebrew_name, skip)`.
///
/// Order: Linux-only packages are flagged for skipping; then the fetched
/// map is consulted directly, then via name transforms, then the hardcoded
/// fallback table.
fn resolve_single_package(pkg: &str, map: &HashMap<String, String>) -> (String, bool) {
    if is_linux_only_package(pkg) {
        return (pkg.to_string(), true);
    }
    let from_map = map
        .get(pkg)
        .cloned()
        .or_else(|| try_name_transforms(pkg, map));
    match from_map {
        Some(brew) => (brew, false),
        None => map_single_package_hardcoded(pkg),
    }
}
/// Packages that only make sense on Linux (toolchain/libc/apt plumbing) and
/// therefore have no macOS/Homebrew equivalent — they are skipped entirely.
fn is_linux_only_package(pkg: &str) -> bool {
    const LINUX_ONLY: [&str; 17] = [
        "build-essential",
        "gcc",
        "g++",
        "make",
        "ca-certificates",
        "apt-transport-https",
        "gnupg",
        "gnupg2",
        "musl-dev",
        "musl-tools",
        "musl",
        "libc-dev",
        "libc6-dev",
        "linux-headers",
        "linux-headers-generic",
        "software-properties-common",
        "procps",
    ];
    LINUX_ONLY.contains(&pkg)
}
/// Try common Debian-style name transforms against the package map:
/// strip a `-dev` suffix, a `lib` prefix (with and without `-dev`), trailing
/// version digits/dots, and finally a trailing `g` (zlib1g-style naming).
/// Candidates are probed in that order; the first hit wins.
fn try_name_transforms(pkg: &str, map: &HashMap<String, String>) -> Option<String> {
    let mut candidates: Vec<String> = Vec::new();
    if let Some(base) = pkg.strip_suffix("-dev") {
        candidates.push(base.to_string());
    }
    if let Some(unprefixed) = pkg.strip_prefix("lib") {
        candidates.push(unprefixed.to_string());
        if let Some(base) = unprefixed.strip_suffix("-dev") {
            candidates.push(base.to_string());
        }
    }
    let stem = pkg.trim_end_matches(|c: char| c.is_ascii_digit() || c == '.');
    if stem != pkg && !stem.is_empty() {
        candidates.push(stem.to_string());
        let no_g = stem.trim_end_matches('g');
        if no_g != stem && !no_g.is_empty() {
            candidates.push(no_g.to_string());
        }
    }
    candidates
        .into_iter()
        .find_map(|candidate| map.get(&candidate).cloned())
}
/// Hardcoded fallback mapping from common Linux package names to Homebrew
/// formula names. Unknown names pass through unchanged.
///
/// The skip flag is always `false` here: Linux-only packages were already
/// filtered out by `is_linux_only_package` before this fallback is reached.
fn map_single_package_hardcoded(pkg: &str) -> (String, bool) {
    let brew_name = match pkg {
        "curl" | "libcurl4-openssl-dev" | "libcurl-dev" => "curl",
        "git" => "git",
        "wget" => "wget",
        "jq" => "jq",
        "cmake" => "cmake",
        "pkg-config" => "pkg-config",
        "autoconf" => "autoconf",
        "automake" => "automake",
        "unzip" => "unzip",
        "zip" => "zip",
        "rsync" => "rsync",
        "tree" => "tree",
        "htop" => "htop",
        "tmux" => "tmux",
        "vim" => "vim",
        "libssl-dev" | "openssl-dev" | "libssl3" => "openssl",
        "libpq-dev" | "postgresql-client" => "libpq",
        "libsqlite3-dev" | "sqlite-dev" => "sqlite",
        "libffi-dev" => "libffi",
        "libxml2-dev" | "libxml2" => "libxml2",
        "libyaml-dev" => "libyaml",
        "libreadline-dev" => "readline",
        "libncurses-dev" | "libncurses5-dev" | "ncurses-dev" => "ncurses",
        "zlib1g-dev" | "zlib-dev" => "zlib",
        "libbz2-dev" => "bzip2",
        "liblzma-dev" | "xz-dev" => "xz",
        "libzstd-dev" => "zstd",
        "python3" | "python3-dev" | "python3-pip" => "python@3",
        "nodejs" => "node",
        "default-jdk" | "openjdk-17-jdk" | "openjdk-21-jdk" => "openjdk",
        "imagemagick" | "libmagickwand-dev" => "imagemagick",
        "ffmpeg" | "libavcodec-dev" => "ffmpeg",
        "libprotobuf-dev" | "protobuf-compiler" => "protobuf",
        "golang" => "go",
        other => other,
    };
    (brew_name.to_string(), false)
}
/// Load the Linux-package → Homebrew-formula map for `distro`.
///
/// Preference order: a fresh on-disk cache (younger than the TTL), then a
/// fetch from RepoSources (cached on success), then a stale cache, finally
/// an empty map (the hardcoded fallback table still applies downstream).
/// This function never fails — every error degrades to the next source.
async fn load_or_fetch_package_map(distro: &str, cache_dir: &Path) -> HashMap<String, String> {
    let map_dir = cache_dir.join("package-maps");
    let cache_path = map_dir.join(format!("{distro}.json"));
    if let Ok(meta) = tokio::fs::metadata(&cache_path).await {
        if let Ok(modified) = meta.modified() {
            // A clock error makes the cache look infinitely old (forces refetch).
            let age = modified
                .elapsed()
                .unwrap_or(std::time::Duration::from_secs(u64::MAX));
            if age.as_secs() < PACKAGE_MAP_CACHE_TTL_SECS {
                if let Some(map) = read_cached_map(&cache_path).await {
                    debug!(
                        "Using cached package map for {distro} ({} mappings, age {}s)",
                        map.len(),
                        age.as_secs()
                    );
                    return map;
                }
            }
        }
    }
    let url = format!("{REPO_SOURCES_BASE}/{distro}.json");
    debug!("Fetching package map from {url}");
    match fetch_package_map(&url).await {
        Ok(map_file) => {
            info!(
                "Fetched {} package mappings for {distro} from RepoSources",
                map_file.mappings.len()
            );
            // Cache write failures are logged but never fatal.
            if let Err(e) = write_cached_map(&map_dir, &cache_path, &map_file).await {
                warn!("Failed to cache package map for {distro}: {e}");
            }
            map_file.mappings
        }
        Err(e) => {
            debug!("Failed to fetch package map for {distro}: {e}");
            // A stale cache beats no map at all.
            if let Some(map) = read_cached_map(&cache_path).await {
                info!(
                    "Using stale cached package map for {distro} ({} mappings)",
                    map.len()
                );
                return map;
            }
            debug!("No package map available for {distro}, using hardcoded fallback only");
            HashMap::new()
        }
    }
}
/// Fetch and deserialize a package map from `url`.
///
/// Errors are plain strings so the caller can log-and-degrade; a non-2xx
/// status is reported as `"HTTP <status>"`.
async fn fetch_package_map(url: &str) -> std::result::Result<PackageMapFile, String> {
    let response = reqwest::get(url)
        .await
        .map_err(|e| format!("HTTP request failed: {e}"))?;
    let status = response.status();
    if status.is_success() {
        response
            .json::<PackageMapFile>()
            .await
            .map_err(|e| format!("JSON parse failed: {e}"))
    } else {
        Err(format!("HTTP {status}"))
    }
}
/// Read a cached package map file; `None` on any read or parse failure.
async fn read_cached_map(path: &Path) -> Option<HashMap<String, String>> {
    let raw = tokio::fs::read_to_string(path).await.ok()?;
    serde_json::from_str::<PackageMapFile>(&raw)
        .ok()
        .map(|file| file.mappings)
}
/// Persist a fetched package map to the on-disk cache.
///
/// Errors are plain strings tagged with the failing step so the caller can
/// log them; a failed cache write is never fatal upstream.
async fn write_cached_map(
    map_dir: &Path,
    cache_path: &Path,
    map_file: &PackageMapFile,
) -> std::result::Result<(), String> {
    tokio::fs::create_dir_all(map_dir)
        .await
        .map_err(|e| format!("create dir: {e}"))?;
    match serde_json::to_string_pretty(map_file) {
        Ok(json) => tokio::fs::write(cache_path, json)
            .await
            .map_err(|e| format!("write: {e}")),
        Err(e) => Err(format!("serialize: {e}")),
    }
}
/// Parse an OCI registry blob URL of the shape
/// `http(s)://<registry>/v2/<repo>/blobs/<digest>` into
/// `("<registry>/<repo>:latest", "<digest>")` for the blob puller.
/// Returns `None` for anything that does not match that shape.
fn parse_ghcr_blob_url(url: &str) -> Option<(String, String)> {
    let remainder = url
        .strip_prefix("https://")
        .or_else(|| url.strip_prefix("http://"))?;
    let (registry, tail) = remainder.split_once("/v2/")?;
    let (repo, digest) = tail.split_once("/blobs/")?;
    Some((format!("{registry}/{repo}:latest"), digest.to_string()))
}
/// Compute the Homebrew bottle platform tag for the current machine,
/// e.g. `arm64_sequoia` on Apple Silicon running macOS 15.
///
/// Fix: each codename check was `starts_with("NN.") || starts_with("NN")`,
/// where the second test subsumes the first; the redundant dotted variant
/// is dropped with no behavior change.
#[must_use]
pub fn bottle_platform_tag() -> String {
    // Ask macOS for its product version; fall back to the newest known
    // release when sw_vers is unavailable (e.g. running off-platform).
    let version_output = std::process::Command::new("sw_vers")
        .arg("-productVersion")
        .output();
    let macos_version = match version_output {
        Ok(ref out) if out.status.success() => {
            String::from_utf8_lossy(&out.stdout).trim().to_string()
        }
        _ => {
            warn!("Could not determine macOS version via sw_vers, defaulting to sequoia");
            "15.0".to_string()
        }
    };
    // Map the major version prefix to Homebrew's codename; unknown/future
    // versions default to the newest known codename.
    let codename = if macos_version.starts_with("15") {
        "sequoia"
    } else if macos_version.starts_with("14") {
        "sonoma"
    } else if macos_version.starts_with("13") {
        "ventura"
    } else {
        "sequoia"
    };
    // Homebrew prefixes Apple Silicon bottles with "arm64_".
    if std::env::consts::ARCH == "aarch64" {
        format!("arm64_{codename}")
    } else {
        codename.to_string()
    }
}
/// Make an image reference safe to use as a directory name by replacing the
/// path/tag/digest separators (`/`, `:`, `@`) with underscores.
fn sanitize_image_name(image: &str) -> String {
    image
        .chars()
        .map(|c| if matches!(c, '/' | ':' | '@') { '_' } else { c })
        .collect()
}
/// Format seconds since the Unix epoch as an ISO-8601 UTC timestamp
/// (`YYYY-MM-DDThh:mm:ss.000Z`), without any date/time dependency.
///
/// Works by peeling whole years (leap-aware) off the day count, then whole
/// months, leaving the zero-based day of month.
fn utc_iso8601(epoch_secs: u64) -> String {
    const SECS_PER_DAY: u64 = 86_400;
    let is_leap = |y: u64| y % 4 == 0 && (y % 100 != 0 || y % 400 == 0);
    let time_of_day = epoch_secs % SECS_PER_DAY;
    let hour = time_of_day / 3600;
    let min = (time_of_day / 60) % 60;
    let sec = time_of_day % 60;
    let mut days = epoch_secs / SECS_PER_DAY;
    let mut year: u64 = 1970;
    while days >= if is_leap(year) { 366 } else { 365 } {
        days -= if is_leap(year) { 366 } else { 365 };
        year += 1;
    }
    let feb = if is_leap(year) { 29 } else { 28 };
    let month_lengths = [31, feb, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31];
    let mut month: u64 = 1;
    for len in month_lengths {
        if days < len {
            break;
        }
        days -= len;
        month += 1;
    }
    format!(
        "{year:04}-{month:02}-{:02}T{hour:02}:{min:02}:{sec:02}.000Z",
        days + 1
    )
}
#[cfg(test)]
mod tests {
    use super::*;

    // --- rewrite_image_for_macos: language images map into ZLAYER_REGISTRY,
    // --- keeping the version component extracted from the tag.

    // Plain language image with a version tag keeps its version.
    #[test]
    fn test_rewrite_golang() {
        let result = rewrite_image_for_macos("golang:1.23");
        assert_eq!(result, Some(format!("{ZLAYER_REGISTRY}/golang:1.23")));
    }

    // Distro suffixes like `-alpine` are dropped from the tag; only the
    // version survives.
    #[test]
    fn test_rewrite_golang_alpine() {
        let result = rewrite_image_for_macos("golang:1.23-alpine");
        assert_eq!(result, Some(format!("{ZLAYER_REGISTRY}/golang:1.23")));
    }

    // Base distro images (see is_base_distro) all collapse to base:latest,
    // regardless of their original tag.
    #[test]
    fn test_rewrite_ubuntu_to_base() {
        let result = rewrite_image_for_macos("ubuntu:22.04");
        assert_eq!(result, Some(format!("{ZLAYER_REGISTRY}/base:latest")));
    }

    #[test]
    fn test_rewrite_alpine_to_base() {
        let result = rewrite_image_for_macos("alpine:3.19");
        assert_eq!(result, Some(format!("{ZLAYER_REGISTRY}/base:latest")));
    }

    // `latest` passes through the version extraction unchanged.
    #[test]
    fn test_rewrite_node_latest() {
        let result = rewrite_image_for_macos("node:latest");
        assert_eq!(result, Some(format!("{ZLAYER_REGISTRY}/node:latest")));
    }

    // `-slim` suffix is stripped like `-alpine` above.
    #[test]
    fn test_rewrite_node_slim() {
        let result = rewrite_image_for_macos("node:20-slim");
        assert_eq!(result, Some(format!("{ZLAYER_REGISTRY}/node:20")));
    }

    // Debian codename suffixes (`-bookworm`) are stripped as well.
    #[test]
    fn test_rewrite_python_bookworm() {
        let result = rewrite_image_for_macos("python:3.12-bookworm");
        assert_eq!(result, Some(format!("{ZLAYER_REGISTRY}/python:3.12")));
    }

    // Fully-qualified docker.io/library/ references are normalized by
    // strip_registry_prefix before matching.
    #[test]
    fn test_rewrite_qualified_golang() {
        let result = rewrite_image_for_macos("docker.io/library/golang:1.22");
        assert_eq!(result, Some(format!("{ZLAYER_REGISTRY}/golang:1.22")));
    }

    // Unknown images from third-party registries are left alone (None).
    #[test]
    fn test_no_rewrite_custom_image() {
        let result = rewrite_image_for_macos("myregistry.io/myteam/myapp:v1.0");
        assert_eq!(result, None);
    }

    // Images already in the zlayer registry must not be rewritten again.
    #[test]
    fn test_no_rewrite_already_zlayer() {
        let result = rewrite_image_for_macos("ghcr.io/blackleafdigital/zlayer/golang:1.23");
        assert_eq!(result, None);
    }

    // --- toolchain_spec_to_config: spec -> sandbox image config env/paths ---

    // A Go spec should surface GOROOT and put the Go bin dir on PATH.
    #[test]
    fn test_toolchain_spec_to_config_go() {
        let spec = ToolchainSpec::go("1.23");
        let config = toolchain_spec_to_config(&spec);
        assert!(config.env.iter().any(|e| e.starts_with("GOROOT=")));
        assert!(config.env.iter().any(|e| e.starts_with("PATH=")));
        let path_entry = config.env.iter().find(|e| e.starts_with("PATH=")).unwrap();
        assert!(path_entry.contains("/usr/local/go/bin"));
        assert_eq!(config.working_dir, "/");
    }

    // A Java spec should surface JAVA_HOME and the Java bin dir on PATH.
    #[test]
    fn test_toolchain_spec_to_config_java() {
        let spec = ToolchainSpec::java("21");
        let config = toolchain_spec_to_config(&spec);
        assert!(config.env.iter().any(|e| e.starts_with("JAVA_HOME=")));
        let path_entry = config.env.iter().find(|e| e.starts_with("PATH=")).unwrap();
        assert!(path_entry.contains("/usr/local/java/bin"));
    }

    // --- resolve_single_package: apt-style name -> brew-style name,
    // --- plus a "skip on macOS" flag ---

    // Common tools resolve to themselves even with an empty remote map
    // (presumably via a built-in/hardcoded table — verify in resolver).
    #[test]
    fn test_resolve_common_packages_hardcoded() {
        let empty_map = HashMap::new();
        for pkg in &["curl", "git", "wget", "jq"] {
            let (name, skipped) = resolve_single_package(pkg, &empty_map);
            assert!(!skipped, "{pkg} should not be skipped");
            assert_eq!(name, *pkg);
        }
    }

    // Linux-only packages are flagged as skipped rather than translated.
    #[test]
    fn test_resolve_skip_linux_only() {
        let empty_map = HashMap::new();
        for pkg in &["build-essential", "ca-certificates", "musl-dev", "libc-dev"] {
            let (_name, skipped) = resolve_single_package(pkg, &empty_map);
            assert!(skipped, "{pkg} should be skipped");
        }
    }

    // Unknown names pass through unchanged and unskipped (best-effort).
    #[test]
    fn test_resolve_passthrough_unknown() {
        let empty_map = HashMap::new();
        let (name, skipped) = resolve_single_package("some-obscure-package", &empty_map);
        assert_eq!(name, "some-obscure-package");
        assert!(!skipped);
    }

    // Entries in the remote map win: both exact keys and lib*-dev forms.
    #[test]
    fn test_resolve_with_remote_map() {
        let mut map = HashMap::new();
        map.insert("libfoo-dev".to_string(), "foo".to_string());
        map.insert("custom-pkg".to_string(), "custom-brew".to_string());
        let (name, skipped) = resolve_single_package("custom-pkg", &map);
        assert_eq!(name, "custom-brew");
        assert!(!skipped);
        let (name, skipped) = resolve_single_package("libfoo-dev", &map);
        assert_eq!(name, "foo");
        assert!(!skipped);
    }

    // The resolver apparently also tries stripped forms ("lib" prefix,
    // "-dev" suffix) as map keys — these cases exercise that.
    #[test]
    fn test_resolve_name_transforms() {
        let mut map = HashMap::new();
        map.insert("ssl".to_string(), "openssl".to_string());
        map.insert("yaml".to_string(), "libyaml".to_string());
        let (name, _) = resolve_single_package("libssl", &map);
        assert_eq!(name, "openssl");
        let (name, _) = resolve_single_package("libyaml-dev", &map);
        assert_eq!(name, "libyaml");
    }

    // End-to-end mapping with no cached remote map on disk.
    // NOTE(review): uses a fixed temp dir name, which could collide if
    // tests run concurrently — consider suffixing with std::process::id().
    // The chmod-then-rm shell-out (before and after) presumably clears
    // read-only permissions left by the cache writer — confirm.
    #[tokio::test]
    async fn test_map_linux_packages_with_empty_cache() {
        let tmp = std::env::temp_dir().join("zlayer-test-pkg-map");
        // Best-effort cleanup of any stale cache dir; failures ignored.
        let _ = tokio::process::Command::new("chmod")
            .args(["-R", "u+w"])
            .arg(&tmp)
            .status()
            .await;
        let _ = tokio::process::Command::new("rm")
            .args(["-rf"])
            .arg(&tmp)
            .status()
            .await;
        let result =
            map_linux_packages(&["curl", "libssl-dev", "musl-dev"], "debian_12", &tmp).await;
        assert_eq!(result.len(), 3);
        // curl: known tool, kept as-is.
        assert_eq!(result[0].0, "curl");
        assert!(!result[0].1);
        // libssl-dev -> openssl@3 (presumably from the built-in fallback
        // map, since no cache exists — verify against the resolver).
        assert_eq!(result[1].0, "openssl@3");
        assert!(!result[1].1);
        // musl-dev: Linux-only, flagged skipped.
        assert_eq!(result[2].0, "musl-dev");
        assert!(result[2].1);
        // Clean up after ourselves, again best-effort.
        let _ = tokio::process::Command::new("chmod")
            .args(["-R", "u+w"])
            .arg(&tmp)
            .status()
            .await;
        let _ = tokio::process::Command::new("rm")
            .args(["-rf"])
            .arg(&tmp)
            .status()
            .await;
    }

    // Sanity check only — the tag depends on the host macOS version and
    // architecture, so we just assert it names a known codename.
    #[test]
    fn test_bottle_platform_tag() {
        let tag = bottle_platform_tag();
        assert!(!tag.is_empty(), "platform tag should not be empty");
        assert!(
            tag.contains("sequoia") || tag.contains("sonoma") || tag.contains("ventura"),
            "unexpected platform tag: {tag}"
        );
    }
}