#![deny(missing_docs)]
#![allow(clippy::single_match, clippy::result_large_err)]
use std::{
collections::{BTreeMap, HashMap},
env,
fs::{self, File},
io::{Cursor, Read},
process::Command,
};
use axoasset::{LocalAsset, SourceFile};
use backend::{
ci::CiInfo,
installer::{self, homebrew::HomebrewInstallerInfo, npm::NpmInstallerInfo, InstallerImpl},
templates::{TemplateEntry, TEMPLATE_INSTALLER_NPM},
};
use camino::{Utf8Path, Utf8PathBuf};
use cargo_dist_schema::{Asset, AssetKind, DistManifest, ExecutableAsset};
use comfy_table::{presets::UTF8_FULL, Table};
use config::{
ArtifactMode, ChecksumStyle, CompressionImpl, Config, DirtyMode, GenerateMode, ZipStyle,
};
use goblin::Object;
use mach_object::{LoadCommand, OFile};
use semver::Version;
use serde::{Deserialize, Serialize};
use tracing::{info, warn};
use errors::*;
pub use init::{do_init, InitArgs};
use miette::{miette, Context, IntoDiagnostic};
pub use tasks::*;
pub mod backend;
pub mod config;
pub mod errors;
mod init;
pub mod tasks;
#[cfg(test)]
mod tests;
/// Top-level command impl: build all the artifacts the config asks for.
///
/// Runs the integrity preflight, computes the build graph, executes every
/// local build step, then every global build step (which receive a manifest
/// computed from the local results), and returns a manifest recomputed after
/// everything has run.
pub fn do_build(cfg: &Config) -> Result<DistManifest> {
    check_integrity(cfg)?;

    let dist = tasks::gather_work(cfg)?;

    // Make sure the dist dir exists before we create artifact dirs inside it.
    if !dist.dist_dir.exists() {
        LocalAsset::create_dir_all(&dist.dist_dir)?;
    }

    eprintln!("building artifacts:");
    for artifact in &dist.artifacts {
        eprintln!(" {}", artifact.id);
        init_artifact_dir(&dist, artifact)?;
    }
    eprintln!();

    // Local steps build per-target output; they get no manifests.
    for step in &dist.local_build_steps {
        run_build_step(&dist, step, &[])?;
    }

    // Global steps (e.g. installer generation) get a manifest describing the
    // local results; the manifest is rebuilt again at the end so the returned
    // value reflects everything that was produced.
    let manifests = vec![build_manifest(cfg, &dist)?];
    for step in &dist.global_build_steps {
        run_build_step(&dist, step, &manifests)?;
    }

    Ok(build_manifest(cfg, &dist)?)
}
/// Compute the dist manifest for the given config without building anything.
pub fn do_manifest(cfg: &Config) -> Result<DistManifest> {
    check_integrity(cfg)?;
    let graph = gather_work(cfg)?;
    let manifest = build_manifest(cfg, &graph)?;
    Ok(manifest)
}
/// Assemble a `DistManifest` describing the releases/artifacts in the graph.
fn build_manifest(cfg: &Config, dist: &DistGraph) -> DistResult<DistManifest> {
    // Collect up all the releases and their artifacts.
    let mut releases = vec![];
    let mut all_artifacts = BTreeMap::<String, cargo_dist_schema::Artifact>::new();
    for release in &dist.releases {
        let mut artifacts = vec![];
        // Global artifacts (installers, checksums, ...)
        for &artifact_idx in &release.global_artifacts {
            let id = &dist.artifact(artifact_idx).id;
            all_artifacts.insert(id.clone(), manifest_artifact(cfg, dist, artifact_idx));
            artifacts.push(id.clone());
        }
        // Per-variant local artifacts (executable zips, symbols, ...)
        for &variant_idx in &release.variants {
            let variant = dist.variant(variant_idx);
            for &artifact_idx in &variant.local_artifacts {
                let id = &dist.artifact(artifact_idx).id;
                all_artifacts.insert(id.clone(), manifest_artifact(cfg, dist, artifact_idx));
                artifacts.push(id.clone());
            }
        }
        releases.push(cargo_dist_schema::Release {
            app_name: release.app_name.clone(),
            app_version: release.version.to_string(),
            artifacts,
        })
    }

    let linkage = fetch_linkage(
        cfg.targets.clone(),
        dist.artifacts.clone(),
        dist.dist_dir.clone(),
    )?;
    let linkage = linkage.iter().map(|l| l.to_schema()).collect();

    let mut manifest = DistManifest::new(releases, all_artifacts);
    manifest.dist_version = Some(env!("CARGO_PKG_VERSION").to_owned());
    // Set system_info exactly once (it used to be assigned twice with the
    // same value — pure duplication).
    manifest.system_info = Some(cargo_dist_schema::SystemInfo {
        cargo_version_line: dist.tools.cargo.version_line.clone(),
    });
    manifest.announcement_tag = dist.announcement_tag.clone();
    manifest.announcement_is_prerelease = dist.announcement_is_prerelease;
    manifest.announcement_title = dist.announcement_title.clone();
    manifest.announcement_changelog = dist.announcement_changelog.clone();
    manifest.announcement_github_body = dist.announcement_github_body.clone();
    if !dist.ci_style.is_empty() {
        let CiInfo { github } = &dist.ci;
        let github = github.as_ref().map(|info| cargo_dist_schema::GithubCiInfo {
            artifacts_matrix: Some(info.artifacts_matrix.clone()),
            pr_run_mode: Some(info.pr_run_mode),
        });
        manifest.ci = Some(cargo_dist_schema::CiInfo { github });
    }
    manifest.publish_prereleases = dist.publish_prereleases;
    manifest.linkage = linkage;
    Ok(manifest)
}
/// Compute the schema-level description of a single artifact (its assets,
/// kind, install hint, checksum id, ...) for inclusion in the manifest.
fn manifest_artifact(
    cfg: &Config,
    dist: &DistGraph,
    artifact_idx: ArtifactIdx,
) -> cargo_dist_schema::Artifact {
    let artifact = dist.artifact(artifact_idx);
    let mut assets = vec![];

    // Executables bundled in this artifact, plus their symbols artifact id (if any).
    let built_assets = artifact
        .required_binaries
        .iter()
        .map(|(&binary_idx, exe_path)| {
            let binary = &dist.binary(binary_idx);
            let symbols_artifact = binary.symbols_artifact.map(|a| dist.artifact(a).id.clone());
            Asset {
                name: Some(binary.name.clone()),
                // only the filename is recorded here
                path: Some(exe_path.file_name().unwrap().to_owned()),
                kind: AssetKind::Executable(ExecutableAsset { symbols_artifact }),
            }
        });

    // Non-executable files in the archive (README, LICENSE, CHANGELOG, ...).
    let mut static_assets = artifact
        .archive
        .as_ref()
        .map(|archive| {
            archive
                .static_assets
                .iter()
                .map(|(kind, asset)| {
                    let kind = match kind {
                        StaticAssetKind::Changelog => AssetKind::Changelog,
                        StaticAssetKind::License => AssetKind::License,
                        StaticAssetKind::Readme => AssetKind::Readme,
                        StaticAssetKind::Other => AssetKind::Unknown,
                    };
                    Asset {
                        name: Some(asset.file_name().unwrap().to_owned()),
                        path: Some(asset.file_name().unwrap().to_owned()),
                        kind,
                    }
                })
                .collect::<Vec<_>>()
        })
        .unwrap_or_default();

    // The npm installer is built from a whole template dir; walk it
    // (depth-first via an explicit stack) to list every file it will contain.
    if let ArtifactKind::Installer(InstallerImpl::Npm(..)) = &artifact.kind {
        let root_dir = dist
            .templates
            .get_template_dir(TEMPLATE_INSTALLER_NPM)
            .expect("npm template missing!?");
        let mut queue = vec![root_dir];
        while let Some(dir) = queue.pop() {
            for entry in dir.entries.values() {
                match entry {
                    TemplateEntry::Dir(dir) => {
                        queue.push(dir);
                    }
                    TemplateEntry::File(file) => {
                        static_assets.push(Asset {
                            name: Some(file.name.clone()),
                            path: Some(file.path_from_ancestor(root_dir).to_string()),
                            kind: AssetKind::Unknown,
                        });
                    }
                }
            }
        }
    }

    assets.extend(built_assets);
    assets.extend(static_assets);
    // Sort by name so the manifest output is stable and readable.
    assets.sort_by(|k1, k2| k1.name.cmp(&k2.name));

    // Map the internal artifact kind to the schema kind + install info.
    let install_hint;
    let description;
    let kind;
    match &artifact.kind {
        ArtifactKind::ExecutableZip(_) => {
            install_hint = None;
            description = None;
            kind = cargo_dist_schema::ArtifactKind::ExecutableZip;
        }
        ArtifactKind::Symbols(_) => {
            install_hint = None;
            description = None;
            kind = cargo_dist_schema::ArtifactKind::Symbols;
        }
        // These installer flavors all share the same "inner" info with hint/desc.
        ArtifactKind::Installer(
            InstallerImpl::Powershell(info)
            | InstallerImpl::Shell(info)
            | InstallerImpl::Homebrew(HomebrewInstallerInfo { inner: info, .. })
            | InstallerImpl::Npm(NpmInstallerInfo { inner: info, .. }),
        ) => {
            install_hint = Some(info.hint.clone());
            description = Some(info.desc.clone());
            kind = cargo_dist_schema::ArtifactKind::Installer;
        }
        ArtifactKind::Installer(InstallerImpl::Msi(..)) => {
            install_hint = None;
            description = Some("install via msi".to_owned());
            kind = cargo_dist_schema::ArtifactKind::Installer;
        }
        ArtifactKind::Checksum(_) => {
            install_hint = None;
            description = None;
            kind = cargo_dist_schema::ArtifactKind::Checksum;
        }
    };

    // Id of this artifact's checksum artifact, if it has one.
    let checksum = artifact.checksum.map(|idx| dist.artifact(idx).id.clone());

    cargo_dist_schema::Artifact {
        name: Some(artifact.id.clone()),
        // Local paths are omitted when the caller asked for portable output.
        path: if cfg.no_local_paths {
            None
        } else {
            Some(artifact.file_path.to_string())
        },
        target_triples: artifact.target_triples.clone(),
        install_hint,
        description,
        assets,
        kind,
        checksum,
    }
}
/// Run one build step, dispatching on its kind.
///
/// `manifests` is only consumed by installer generation (global steps);
/// local steps pass an empty slice.
fn run_build_step(
    dist_graph: &DistGraph,
    target: &BuildStep,
    manifests: &[DistManifest],
) -> Result<()> {
    match target {
        BuildStep::Cargo(target) => build_cargo_target(dist_graph, target),
        BuildStep::Rustup(cmd) => rustup_toolchain(dist_graph, cmd),
        BuildStep::CopyFile(CopyFileStep {
            src_path,
            dest_path,
        }) => copy_file(src_path, dest_path),
        BuildStep::CopyDir(CopyDirStep {
            src_path,
            dest_path,
        }) => copy_dir(src_path, dest_path),
        BuildStep::Zip(ZipDirStep {
            src_path,
            dest_path,
            zip_style,
            with_root,
        }) => zip_dir(src_path, dest_path, zip_style, with_root.as_deref()),
        BuildStep::GenerateInstaller(installer) => {
            generate_installer(dist_graph, installer, manifests)
        }
        BuildStep::Checksum(ChecksumImpl {
            checksum,
            src_path,
            dest_path,
        }) => Ok(generate_and_write_checksum(checksum, src_path, dest_path)?),
    }
}
/// Compute a checksum of `src_path` and write it to `dest_path`.
fn generate_and_write_checksum(
    checksum: &ChecksumStyle,
    src_path: &Utf8Path,
    dest_path: &Utf8Path,
) -> DistResult<()> {
    let hash = generate_checksum(checksum, src_path)?;
    write_checksum(&hash, src_path, dest_path)
}
/// Compute the requested checksum of the file at `src_path`,
/// returned as a lowercase hex string.
fn generate_checksum(checksum: &ChecksumStyle, src_path: &Utf8Path) -> DistResult<String> {
    info!("generating {checksum:?} for {src_path}");
    use sha2::Digest;
    use std::fmt::Write;

    let file_bytes = axoasset::LocalAsset::load_bytes(src_path.as_str())?;
    let digest_bytes: Vec<u8> = match checksum {
        ChecksumStyle::Sha256 => sha2::Sha256::digest(&file_bytes).to_vec(),
        ChecksumStyle::Sha512 => sha2::Sha512::digest(&file_bytes).to_vec(),
        // Callers never request a checksum when the style is False.
        ChecksumStyle::False => unreachable!(),
    };

    // Hex-encode the digest.
    let mut hex = String::with_capacity(digest_bytes.len() * 2);
    for byte in digest_bytes {
        write!(&mut hex, "{:02x}", byte).unwrap();
    }
    Ok(hex)
}
/// Write `checksum` to `dest_path` in the conventional `<hash> *<filename>`
/// format (the one `shasum`-style tools emit).
fn write_checksum(checksum: &str, src_path: &Utf8Path, dest_path: &Utf8Path) -> DistResult<()> {
    let name = src_path.file_name().expect("hashing file with no name!?");
    let contents = format!("{checksum} *{name}\n");
    axoasset::LocalAsset::write_new(&contents, dest_path)?;
    Ok(())
}
/// Parse the output of `env` (one `NAME=value` per line) into a map.
///
/// Errors on the first line that doesn't contain an `=`.
fn parse_env(env_string: &str) -> DistResult<BTreeMap<&str, &str>> {
    env_string
        .trim_end()
        .split('\n')
        .map(|line| {
            line.split_once('=').ok_or_else(|| DistError::EnvParseError {
                line: line.to_owned(),
            })
        })
        .collect()
}
/// Turn Homebrew's environment variables into `(formula, opt path)` pairs.
///
/// `HOMEBREW_DEPENDENCIES` is a comma-separated list of formulas (possibly
/// tap-qualified, e.g. `owner/tap/formula`); `HOMEBREW_OPT` is the prefix
/// under which their opt dirs live. If either variable is missing, the
/// result is empty.
fn formulas_from_env(environment: &BTreeMap<&str, &str>) -> Vec<(String, String)> {
    let (Some(formulastring), Some(opt_prefix)) = (
        environment.get("HOMEBREW_DEPENDENCIES"),
        environment.get("HOMEBREW_OPT"),
    ) else {
        return vec![];
    };
    formulastring
        .split(',')
        .map(|dep| {
            // The opt dir is keyed by the bare formula name (last path segment).
            let short_name = dep.split('/').last().unwrap();
            (dep.to_owned(), format!("{opt_prefix}/{short_name}"))
        })
        .collect()
}
/// Select the variables from brew's environment that we want to forward to
/// the cargo build, plus a PATH extended with each formula's bin/sbin dirs.
fn select_brew_env(environment: &BTreeMap<&str, &str>) -> Vec<(String, String)> {
    let mut desired_env = vec![];

    // pkg-config / cmake search paths that Homebrew set up.
    // (Order matters for byte-compatibility of the output.)
    for key in [
        "PKG_CONFIG_PATH",
        "PKG_CONFIG_LIBDIR",
        "CMAKE_INCLUDE_PATH",
        "CMAKE_LIBRARY_PATH",
    ] {
        if let Some(value) = environment.get(key) {
            desired_env.push((key.to_owned(), value.to_string()))
        }
    }

    // Prepend-list of each formula's bin/sbin so build scripts can find
    // the tools the formulas provide.
    let mut paths = vec![];
    for (_, pkg_opt) in formulas_from_env(environment) {
        paths.push(format!("{pkg_opt}/bin"));
        paths.push(format!("{pkg_opt}/sbin"));
    }
    if !paths.is_empty() {
        // Use the *runtime* PATH. The previous `env!("PATH")` was a
        // compile-time macro, which baked in the PATH of the machine
        // cargo-dist itself was built on.
        let our_path = env::var("PATH").unwrap_or_default();
        let desired_path = format!("{our_path}:{}", paths.join(":"));
        desired_env.insert(0, ("PATH".to_owned(), desired_path));
    }

    desired_env
}
/// Append a `-L <opt>/lib` linker search path for every Homebrew formula
/// to the base rustflags.
fn determine_brew_rustflags(base_rustflags: &str, environment: &BTreeMap<&str, &str>) -> String {
    use std::fmt::Write;
    let mut rustflags = base_rustflags.to_owned();
    for (_, pkg_opt) in formulas_from_env(environment) {
        // Appending in place is equivalent to rebuilding with format!.
        write!(&mut rustflags, " -L{pkg_opt}/lib").unwrap();
    }
    rustflags
}
/// Build a cargo target: run `cargo build` for one (target triple, profile)
/// pair and copy the produced executables/symbols to their staging paths.
fn build_cargo_target(dist_graph: &DistGraph, target: &CargoBuildStep) -> Result<()> {
    eprint!(
        "building cargo target ({}/{}",
        target.target_triple, target.profile
    );

    let mut rustflags = target.rustflags.clone();
    let mut desired_extra_env = vec![];
    // Escape hatch: lets a build opt out of Brewfile handling entirely.
    let skip_brewfile = env::var("DO_NOT_USE_BREWFILE").is_ok();
    if let Some(brew) = &dist_graph.tools.brew {
        if Utf8Path::new("Brewfile").exists() && !skip_brewfile {
            // Ask `brew bundle exec` to print the environment it would run
            // the build with, then adopt the brew-relevant parts of it
            // (PATH, pkg-config/cmake vars, and -L rustflags).
            let result = Command::new(&brew.cmd)
                .arg("bundle")
                .arg("exec")
                .arg("--")
                .arg("/usr/bin/env")
                .output()
                .into_diagnostic()
                .wrap_err_with(|| "failed to exec brew bundle exec".to_string())?;
            let env_output = String::from_utf8_lossy(&result.stdout).to_string();

            let brew_env = parse_env(&env_output)?;
            desired_extra_env = select_brew_env(&brew_env);
            rustflags = determine_brew_rustflags(&rustflags, &brew_env);
        }
    }

    let mut command = Command::new(&dist_graph.tools.cargo.cmd);
    command
        .arg("build")
        .arg("--profile")
        .arg(&target.profile)
        // json messages let us discover where the executables were written
        .arg("--message-format=json-render-diagnostics")
        .arg("--target")
        .arg(&target.target_triple)
        .env("RUSTFLAGS", &rustflags)
        .stdout(std::process::Stdio::piped());
    if !target.features.default_features {
        command.arg("--no-default-features");
    }
    match &target.features.features {
        CargoTargetFeatureList::All => {
            command.arg("--all-features");
        }
        CargoTargetFeatureList::List(features) => {
            if !features.is_empty() {
                for feature in features {
                    command.arg("--features");
                    command.arg(feature);
                }
            }
        }
    }
    match &target.package {
        CargoTargetPackages::Workspace => {
            command.arg("--workspace");
            eprintln!(" --workspace)");
        }
        CargoTargetPackages::Package(package) => {
            command.arg("--package").arg(package);
            eprintln!(" --package={})", package);
        }
    }
    command.envs(desired_extra_env);
    info!("exec: {:?}", command);
    let mut task = command
        .spawn()
        .into_diagnostic()
        .wrap_err_with(|| format!("failed to exec cargo build: {command:?}"))?;

    // Index (source, destination) copy pairs by package-id and exe name so we
    // can match them against cargo's compiler-artifact messages below. Source
    // paths start empty and get filled in when cargo reports the built file.
    let mut expected_exes =
        HashMap::<String, HashMap<String, Vec<(Utf8PathBuf, Utf8PathBuf)>>>::new();
    let mut expected_symbols =
        HashMap::<String, HashMap<String, Vec<(Utf8PathBuf, Utf8PathBuf)>>>::new();
    for &binary_idx in &target.expected_binaries {
        let binary = &dist_graph.binary(binary_idx);
        let package_id = binary.pkg_id.to_string();
        let exe_name = binary.name.clone();
        for exe_dest in &binary.copy_exe_to {
            expected_exes
                .entry(package_id.clone())
                .or_default()
                .entry(exe_name.clone())
                .or_default()
                .push((Utf8PathBuf::new(), exe_dest.clone()));
        }
        for sym_dest in &binary.copy_symbols_to {
            expected_symbols
                .entry(package_id.clone())
                .or_default()
                .entry(exe_name.clone())
                .or_default()
                .push((Utf8PathBuf::new(), sym_dest.clone()));
        }
    }

    // Stream cargo's json messages, recording where each expected exe (and
    // its symbols, when present) actually got built.
    // NOTE(review): the child's exit status is never waited on or checked;
    // build failures only surface as missing exes below — confirm intended.
    let reader = std::io::BufReader::new(task.stdout.take().unwrap());
    for message in cargo_metadata::Message::parse_stream(reader) {
        // Malformed messages are logged and skipped, not fatal.
        let Ok(message) = message
            .into_diagnostic()
            .wrap_err("failed to parse cargo json message")
            .map_err(|e| warn!("{:?}", e))
        else {
            continue;
        };
        match message {
            cargo_metadata::Message::CompilerArtifact(artifact) => {
                if let Some(new_exe) = artifact.executable {
                    info!("got a new exe: {}", new_exe);
                    let package_id = artifact.package_id.to_string();
                    let exe_name = new_exe.file_stem().unwrap();

                    // If we expected symbols for this exe, look for a .pdb
                    // among the artifact's files (only MSVC-style .pdb
                    // symbols are recognized here).
                    let expected_sym = expected_symbols
                        .get_mut(&package_id)
                        .and_then(|m| m.get_mut(exe_name));
                    if let Some(expected) = expected_sym {
                        for (src_sym_path, _) in expected {
                            for path in &artifact.filenames {
                                let is_symbols =
                                    path.extension().map(|e| e == "pdb").unwrap_or(false);
                                if is_symbols {
                                    *src_sym_path = path.to_owned();
                                }
                            }
                        }
                    }

                    // Record where the exe itself was built.
                    let expected_exe = expected_exes
                        .get_mut(&package_id)
                        .and_then(|m| m.get_mut(exe_name));
                    if let Some(expected) = expected_exe {
                        for (src_bin_path, _) in expected {
                            *src_bin_path = new_exe.clone();
                        }
                    }
                }
            }
            _ => {
                // Other message kinds (diagnostics, build-script output) are ignored.
            }
        }
    }

    // Copy everything to its final home. An empty source path means cargo
    // never reported building it, which we treat as an error.
    for (package_id, exes) in expected_exes {
        for (exe_name, to_copy) in &exes {
            for (src_path, dest_path) in to_copy {
                if src_path.as_str().is_empty() {
                    return Err(miette!(
                        "failed to find bin {} ({}) -- did the cargo build above have errors?",
                        exe_name,
                        package_id
                    ));
                }
                copy_file(src_path, dest_path)?;
            }
        }
    }
    for (package_id, symbols) in expected_symbols {
        for (exe, to_copy) in &symbols {
            for (src_path, dest_path) in to_copy {
                if src_path.as_str().is_empty() {
                    return Err(miette!(
                        "failed to find symbols for bin {} ({}) -- did the cargo build above have errors?",
                        exe,
                        package_id
                    ));
                }
                copy_file(src_path, dest_path)?;
            }
        }
    }
    Ok(())
}
/// Use rustup to ensure the requested target's std is installed.
fn rustup_toolchain(_dist_graph: &DistGraph, cmd: &RustupStep) -> Result<()> {
    eprintln!("running rustup to ensure you have {} installed", cmd.target);
    let status = Command::new(&cmd.rustup.cmd)
        .args(["target", "add"])
        .arg(&cmd.target)
        .status()
        .into_diagnostic()
        .wrap_err("Failed to install rustup toolchain")?;

    if !status.success() {
        return Err(miette!("Failed to install rustup toolchain"));
    }
    Ok(())
}
/// Prepare the output locations an artifact will be built into.
///
/// Deletes any stale output from previous runs, then (for archives only)
/// recreates an empty dir to assemble the archive's contents in.
fn init_artifact_dir(_dist: &DistGraph, artifact: &Artifact) -> Result<()> {
    // Clear out any previous copy of the final artifact file.
    if artifact.file_path.exists() {
        LocalAsset::remove_file(&artifact.file_path)?;
    }

    // Only archive-style artifacts need a staging dir.
    let Some(archive) = &artifact.archive else {
        return Ok(());
    };
    info!("recreating artifact dir: {}", archive.dir_path);

    // Recreate it from scratch so no stale files leak into the archive.
    if archive.dir_path.exists() {
        LocalAsset::remove_dir_all(&archive.dir_path)?;
    }
    LocalAsset::create_dir(&archive.dir_path)?;

    Ok(())
}
/// Copy a file to `dest_path`, giving the copy that name.
pub(crate) fn copy_file(src_path: &Utf8Path, dest_path: &Utf8Path) -> Result<()> {
    LocalAsset::copy_named(src_path, dest_path)?;
    Ok(())
}
/// Recursively copy a dir to `dest_path`, giving the copy that name.
pub(crate) fn copy_dir(src_path: &Utf8Path, dest_path: &Utf8Path) -> Result<()> {
    LocalAsset::copy_dir_named(src_path, dest_path)?;
    Ok(())
}
/// Compress `src_path` into an archive at `dest_path` in the given style.
///
/// `with_root` optionally nests the dir's contents under a root dir inside
/// the archive.
fn zip_dir(
    src_path: &Utf8Path,
    dest_path: &Utf8Path,
    zip_style: &ZipStyle,
    with_root: Option<&Utf8Path>,
) -> Result<()> {
    match zip_style {
        ZipStyle::Zip => LocalAsset::zip_dir(src_path, dest_path, with_root)?,
        ZipStyle::Tar(CompressionImpl::Gzip) => {
            LocalAsset::tar_gz_dir(src_path, dest_path, with_root)?
        }
        ZipStyle::Tar(CompressionImpl::Xzip) => {
            LocalAsset::tar_xz_dir(src_path, dest_path, with_root)?
        }
        ZipStyle::Tar(CompressionImpl::Zstd) => {
            LocalAsset::tar_zstd_dir(src_path, dest_path, with_root)?
        }
        ZipStyle::TempDir => {
            // Intentionally a no-op: there's no archive file to produce.
        }
    }
    Ok(())
}
/// Arguments for `cargo dist generate`
#[derive(Debug)]
pub struct GenerateArgs {
    /// Check whether the output differs from what's on disk instead of writing it
    pub check: bool,
    /// Which type(s) of config to generate (empty means "infer all")
    pub modes: Vec<GenerateMode>,
}
/// Arguments for `cargo dist linkage`
#[derive(Debug)]
pub struct LinkageArgs {
    /// Print human-readable output
    pub print_output: bool,
    /// Print output as JSON
    pub print_json: bool,
    /// Read linkage data from JSON rather than performing a live check
    pub from_json: Option<String>,
}
/// Shared preflight for the generate commands.
///
/// Checks that the running cargo-dist matches the version the project pins
/// (unless the pin is a `github-` prerelease or all dirt is allowed), and
/// that the project has been initialized.
fn do_generate_preflight_checks(dist: &DistGraph) -> Result<()> {
    if let Some(desired_version) = &dist.desired_cargo_dist_version {
        let current_version: Version = std::env!("CARGO_PKG_VERSION").parse().unwrap();
        // (fixed mojibake: `¤t_version` was a mangled `&current_version`)
        if desired_version != &current_version
            && !desired_version.pre.starts_with("github-")
            && !matches!(dist.allow_dirty, DirtyMode::AllowAll)
        {
            return Err(miette!("you're running cargo-dist {}, but 'cargo-dist-version = {}' is set in your Cargo.toml\n\nYou should update cargo-dist-version if you want to update to this version", current_version, desired_version));
        }
    }
    if !dist.is_init {
        return Err(miette!(
            "please run 'cargo dist init' before running any other commands!"
        ));
    }

    Ok(())
}
/// Generate any relevant config files (impl of `cargo dist generate`).
pub fn do_generate(cfg: &Config, args: &GenerateArgs) -> Result<()> {
    let dist = gather_work(cfg)?;
    run_generate(&dist, args)
}
/// Generate or check the requested config files.
///
/// If `args.modes` is empty, all modes are inferred, and allow-dirty simply
/// filters out excluded ones. Explicitly requested modes that conflict with
/// allow-dirty are an error instead.
pub fn run_generate(dist: &DistGraph, args: &GenerateArgs) -> Result<()> {
    do_generate_preflight_checks(dist)?;

    let inferred = args.modes.is_empty();
    let modes = if inferred {
        &[GenerateMode::Ci, GenerateMode::Msi]
    } else {
        for &mode in &args.modes {
            // Explicitly asking for a mode that allow-dirty excludes is a
            // contradiction the user must resolve.
            if !dist.allow_dirty.should_run(mode)
                && matches!(dist.allow_dirty, DirtyMode::AllowList(..))
            {
                Err(DistError::ContradictoryGenerateModes {
                    generate_mode: mode,
                })?;
            }
        }
        &args.modes[..]
    };

    for &mode in modes {
        if dist.allow_dirty.should_run(mode) {
            match mode {
                GenerateMode::Ci => {
                    // Only github CI is represented in CiInfo currently.
                    let CiInfo { github } = &dist.ci;
                    if let Some(github) = github {
                        if args.check {
                            github.check(dist)?;
                        } else {
                            github.write_to_disk(dist)?;
                        }
                    }
                }
                GenerateMode::Msi => {
                    // Each msi installer has its own config to check/write.
                    for artifact in &dist.artifacts {
                        if let ArtifactKind::Installer(InstallerImpl::Msi(msi)) = &artifact.kind {
                            if args.check {
                                msi.check_config()?;
                            } else {
                                msi.write_config_to_disk()?;
                            }
                        }
                    }
                }
            }
        }
    }
    Ok(())
}
/// Determine dynamic-library linkage for every built binary matching one of
/// the given targets.
///
/// Binaries that haven't been built yet are skipped with a warning.
fn fetch_linkage(
    targets: Vec<String>,
    artifacts: Vec<Artifact>,
    dist_dir: Utf8PathBuf,
) -> DistResult<Vec<Linkage>> {
    let mut reports = vec![];
    for target in &targets {
        // Filter by reference instead of cloning the whole artifact list per target.
        let matching: Vec<&Artifact> = artifacts
            .iter()
            .filter(|r| r.target_triples.contains(target))
            .collect();
        if matching.is_empty() {
            eprintln!("No matching artifact for target {target}");
            continue;
        }

        for artifact in matching {
            let path = dist_dir.join(format!("{}-{target}", artifact.id));
            for (_, binary) in &artifact.required_binaries {
                let bin_path = path.join(binary);
                if !bin_path.exists() {
                    eprintln!("Binary {bin_path} missing; skipping check");
                } else {
                    reports.push(determine_linkage(&bin_path, target)?);
                }
            }
        }
    }
    Ok(reports)
}
/// Report dynamic-library linkage of built artifacts (impl of `cargo dist linkage`).
pub fn do_linkage(cfg: &Config, args: &LinkageArgs) -> Result<()> {
    let dist = gather_work(cfg)?;

    let reports: Vec<Linkage> = if let Some(json_path) = args.from_json.clone() {
        // Reuse a previously-saved report instead of inspecting binaries.
        let file = SourceFile::load_local(&json_path)?;
        file.deserialize_json()?
    } else {
        fetch_linkage(cfg.targets.clone(), dist.artifacts, dist.dist_dir)?
    };

    if args.print_output {
        for report in &reports {
            eprintln!("{}", report.report());
        }
    }
    if args.print_json {
        println!("{}", serde_json::to_string(&reports).unwrap());
    }

    Ok(())
}
/// Information about the dynamic libraries a binary links against,
/// grouped by where each library comes from (see `determine_linkage`).
#[derive(Debug, Deserialize, Serialize)]
pub struct Linkage {
    /// The filename of the binary that was inspected
    pub binary: String,
    /// The target triple the binary was built for
    pub target: String,
    /// Libraries from OS locations (/usr/lib, /lib)
    pub system: Vec<Library>,
    /// Libraries managed by Homebrew
    pub homebrew: Vec<Library>,
    /// Libraries under /usr/local that no package manager owns
    pub public_unmanaged: Vec<Library>,
    /// Libraries that don't fall into any other category
    pub other: Vec<Library>,
    /// macOS framework libraries (/System/Library/Frameworks, /Library/Frameworks)
    pub frameworks: Vec<Library>,
}
impl Linkage {
pub fn report(&self) -> String {
let mut table = Table::new();
table
.load_preset(UTF8_FULL)
.set_header(vec!["Category", "Libraries"])
.add_row(vec![
"System",
self.system
.clone()
.into_iter()
.map(|l| l.to_string_pretty())
.collect::<Vec<String>>()
.join("\n")
.as_str(),
])
.add_row(vec![
"Homebrew",
self.homebrew
.clone()
.into_iter()
.map(|l| l.to_string_pretty())
.collect::<Vec<String>>()
.join("\n")
.as_str(),
])
.add_row(vec![
"Public (unmanaged)",
self.public_unmanaged
.clone()
.into_iter()
.map(|l| l.path)
.collect::<Vec<String>>()
.join("\n")
.as_str(),
])
.add_row(vec![
"Frameworks",
self.frameworks
.clone()
.into_iter()
.map(|l| l.path)
.collect::<Vec<String>>()
.join("\n")
.as_str(),
])
.add_row(vec![
"Other",
self.other
.clone()
.into_iter()
.map(|l| l.to_string_pretty())
.collect::<Vec<String>>()
.join("\n")
.as_str(),
]);
let s = format!(
r#"{} ({}):
{table}"#,
self.binary, self.target,
);
s.to_owned()
}
fn to_schema(&self) -> cargo_dist_schema::Linkage {
cargo_dist_schema::Linkage {
binary: self.binary.clone(),
target: self.target.clone(),
system: self.system.iter().map(|s| s.to_schema()).collect(),
homebrew: self.homebrew.iter().map(|s| s.to_schema()).collect(),
public_unmanaged: self
.public_unmanaged
.iter()
.map(|s| s.to_schema())
.collect(),
other: self.other.iter().map(|s| s.to_schema()).collect(),
frameworks: self.frameworks.iter().map(|s| s.to_schema()).collect(),
}
}
pub fn from_schema(other: &cargo_dist_schema::Linkage) -> Self {
Self {
binary: other.binary.clone(),
target: other.target.clone(),
system: other.system.iter().map(Library::from_schema).collect(),
homebrew: other.homebrew.iter().map(Library::from_schema).collect(),
public_unmanaged: other
.public_unmanaged
.iter()
.map(Library::from_schema)
.collect(),
other: other.other.iter().map(Library::from_schema).collect(),
frameworks: other.frameworks.iter().map(Library::from_schema).collect(),
}
}
}
/// A dynamic library located somewhere on the system
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct Library {
    /// The path to the library
    pub path: String,
    /// The package the library comes from (Homebrew formula or dpkg
    /// package), when we could determine one
    #[serde(skip_serializing_if = "Option::is_none")]
    pub source: Option<String>,
}
impl Library {
    /// A library with no known package source.
    fn new(library: String) -> Self {
        Self {
            path: library,
            source: None,
        }
    }

    /// Convert to the schema (serializable) representation.
    fn to_schema(&self) -> cargo_dist_schema::Library {
        cargo_dist_schema::Library {
            path: self.path.clone(),
            source: self.source.clone(),
        }
    }

    /// Build from the schema representation.
    fn from_schema(other: &cargo_dist_schema::Library) -> Self {
        Self {
            path: other.path.clone(),
            source: other.source.clone(),
        }
    }

    /// Classify a library under a Homebrew opt prefix, resolving which
    /// formula (tap-qualified when not homebrew/core) provides it.
    fn from_homebrew(library: String) -> Self {
        // Match both Apple Silicon and Intel homebrew prefixes.
        let brew_prefix = if library.starts_with("/opt/homebrew/opt/") {
            Some("/opt/homebrew/opt/")
        } else if library.starts_with("/usr/local/opt/") {
            Some("/usr/local/opt/")
        } else {
            None
        };

        let Some(prefix) = brew_prefix else {
            return Self {
                path: library,
                source: None,
            };
        };

        // First path component after the prefix is the formula name.
        // (strip_prefix borrows; no clone of `library` needed.)
        let stripped = library.strip_prefix(prefix).unwrap();
        let mut package = stripped.split('/').next().unwrap().to_owned();

        // If the formula's install receipt says it came from a non-core tap,
        // qualify the name as `tap/formula`.
        let receipt = Utf8PathBuf::from(prefix)
            .join(&package)
            .join("INSTALL_RECEIPT.json");
        if receipt.exists() {
            // Best-effort: a missing/malformed receipt leaves the bare name.
            let _ = SourceFile::load_local(&receipt)
                .and_then(|file| file.deserialize_json())
                .map(|parsed: serde_json::Value| {
                    if let Some(tap) = parsed["source"]["tap"].as_str() {
                        if tap != "homebrew/core" {
                            package = format!("{tap}/{package}");
                        }
                    }
                });
        }

        Self {
            path: library,
            // `package` is already owned; no extra to_owned needed
            source: Some(package),
        }
    }

    /// On Linux, ask dpkg which package owns the library; elsewhere (or if
    /// dpkg can't be run) report no source.
    fn maybe_apt(library: String) -> DistResult<Self> {
        // dpkg only exists on Linux.
        if std::env::consts::OS != "linux" {
            return Ok(Self {
                path: library,
                source: None,
            });
        }

        let process = Command::new("dpkg")
            .arg("--search")
            .arg(&library)
            .output()
            .into_diagnostic();
        match process {
            Ok(output) => {
                // dpkg prints `package: /path/to/file`; keep the package name.
                let output = String::from_utf8(output.stdout)?;
                let package = output.split(':').next().unwrap();
                let source = if package.is_empty() {
                    None
                } else {
                    Some(package.to_owned())
                };
                Ok(Self {
                    path: library,
                    source,
                })
            }
            // dpkg unavailable (e.g. non-Debian distro): best-effort, no source.
            Err(_) => Ok(Self {
                path: library,
                source: None,
            }),
        }
    }

    /// `path (package)` when the source package is known, else just the path.
    fn to_string_pretty(&self) -> String {
        if let Some(package) = &self.source {
            format!("{} ({package})", self.path)
        } else {
            self.path.clone()
        }
    }
}
/// List the dylibs a Mach-O binary links against (like `otool -L`).
fn do_otool(path: &Utf8PathBuf) -> DistResult<Vec<String>> {
    let mut libraries = vec![];

    let mut f = File::open(path)?;
    let mut buf = vec![];
    // Propagate read errors instead of panicking (was `.unwrap()`).
    let size = f.read_to_end(&mut buf)?;
    let mut cur = Cursor::new(&buf[..size]);

    if let OFile::MachFile {
        header: _,
        commands,
    } = OFile::parse(&mut cur).unwrap()
    {
        let commands = commands
            .iter()
            .map(|load| load.command())
            .cloned()
            .collect::<Vec<LoadCommand>>();

        for command in commands {
            match command {
                // Every load-command variant that references a dylib.
                LoadCommand::IdDyLib(ref dylib)
                | LoadCommand::LoadDyLib(ref dylib)
                | LoadCommand::LoadWeakDyLib(ref dylib)
                | LoadCommand::ReexportDyLib(ref dylib)
                | LoadCommand::LoadUpwardDylib(ref dylib)
                | LoadCommand::LazyLoadDylib(ref dylib) => {
                    libraries.push(dylib.name.to_string());
                }
                _ => {}
            }
        }
    }

    Ok(libraries)
}
/// List the shared libraries an ELF binary links against, via `ldd`.
///
/// Paths are canonicalized so symlinked libraries resolve to their real
/// location.
fn do_ldd(path: &Utf8PathBuf) -> DistResult<Vec<String>> {
    let mut libraries = vec![];

    // Propagate failure to launch ldd instead of panicking (was `.expect`).
    let output = Command::new("ldd").arg(path).output()?;

    let result = String::from_utf8_lossy(&output.stdout).to_string();
    let lines = result.trim_end().split('\n');
    for line in lines {
        let line = line.trim();

        // Statically linked: nothing to report.
        if line.starts_with("not a dynamic executable") {
            break;
        }
        // The vDSO is injected by the kernel, not a real file on disk.
        if line.starts_with("linux-vdso") {
            continue;
        }

        // Lines look like `libfoo.so => /path/to/libfoo.so (0x...)`;
        // entries with no ` => ` are skipped.
        if let Some(path) = line.split(" => ").nth(1) {
            let lib = (path.split(' ').next().unwrap()).to_owned();
            let realpath = fs::canonicalize(&lib)?;
            libraries.push(realpath.to_string_lossy().to_string());
        } else {
            continue;
        }
    }

    Ok(libraries)
}
/// List the DLLs a PE (Windows) binary imports.
fn do_pe(path: &Utf8PathBuf) -> DistResult<Vec<String>> {
    let buf = std::fs::read(path)?;
    if let Object::PE(pe) = Object::parse(&buf)? {
        Ok(pe.libraries.into_iter().map(|s| s.to_owned()).collect())
    } else {
        Err(DistError::LinkageCheckUnsupportedBinary {})
    }
}
/// Inspect a built binary and classify each dynamic library it links
/// against by where it comes from (system, Homebrew, frameworks, ...).
fn determine_linkage(path: &Utf8PathBuf, target: &str) -> DistResult<Linkage> {
    let libraries = match target {
        // macOS: read the Mach-O load commands directly.
        "i686-apple-darwin" | "x86_64-apple-darwin" | "aarch64-apple-darwin" => do_otool(path)?,
        // Linux: ldd must run on a Linux host.
        "i686-unknown-linux-gnu"
        | "x86_64-unknown-linux-gnu"
        | "aarch64-unknown-linux-gnu"
        | "i686-unknown-linux-musl"
        | "x86_64-unknown-linux-musl"
        | "aarch64-unknown-linux-musl" => {
            if std::env::consts::OS != "linux" {
                return Err(DistError::LinkageCheckInvalidOS {
                    host: std::env::consts::OS.to_owned(),
                    target: target.to_owned(),
                });
            }
            do_ldd(path)?
        }
        // Windows: parse the PE import table.
        "i686-pc-windows-msvc" | "x86_64-pc-windows-msvc" | "aarch64-pc-windows-msvc" => {
            do_pe(path)?
        }
        _ => return Err(DistError::LinkageCheckUnsupportedBinary {}),
    };

    let mut linkage = Linkage {
        binary: path.file_name().unwrap().to_owned(),
        target: target.to_owned(),
        system: vec![],
        homebrew: vec![],
        public_unmanaged: vec![],
        frameworks: vec![],
        other: vec![],
    };
    for library in libraries {
        // Exactly one branch consumes `library` per iteration, so the
        // previous per-branch clones were unnecessary.
        if library.starts_with("/opt/homebrew") {
            linkage.homebrew.push(Library::from_homebrew(library));
        } else if library.starts_with("/usr/lib") || library.starts_with("/lib") {
            linkage.system.push(Library::maybe_apt(library)?);
        } else if library.starts_with("/System/Library/Frameworks")
            || library.starts_with("/Library/Frameworks")
        {
            linkage.frameworks.push(Library::new(library));
        } else if library.starts_with("/usr/local") {
            // /usr/local may be Intel Homebrew (Cellar) or genuinely unmanaged.
            if std::fs::canonicalize(&library)?.starts_with("/usr/local/Cellar") {
                linkage.homebrew.push(Library::from_homebrew(library));
            } else {
                linkage.public_unmanaged.push(Library::new(library));
            }
        } else {
            linkage.other.push(Library::maybe_apt(library)?);
        }
    }

    Ok(linkage)
}
/// Run the "dirty file" checks before other commands, so we don't operate
/// on stale or incorrect generated output.
pub fn check_integrity(cfg: &Config) -> Result<()> {
    // Use a broadened config so the checks cover everything, not just what
    // the current command asked for.
    let check_config = Config {
        needs_coherent_announcement_tag: false,
        artifact_mode: ArtifactMode::All,
        no_local_paths: false,
        allow_all_dirty: cfg.allow_all_dirty,
        targets: vec![],
        ci: vec![],
        installers: vec![],
        announcement_tag: None,
    };
    let dist = tasks::gather_work(&check_config)?;

    // Empty `modes` lets run_generate infer all modes; `check` avoids writes.
    let args = GenerateArgs {
        modes: vec![],
        check: true,
    };
    run_generate(&dist, &args)
}
/// Generate one installer from its info: scripts/projects/formulas are
/// written to disk, while msi installers are built.
fn generate_installer(
    dist: &DistGraph,
    style: &InstallerImpl,
    manifests: &[DistManifest],
) -> Result<()> {
    match style {
        InstallerImpl::Shell(info) => {
            installer::shell::write_install_sh_script(&dist.templates, info)?
        }
        InstallerImpl::Powershell(info) => {
            installer::powershell::write_install_ps_script(&dist.templates, info)?
        }
        InstallerImpl::Npm(info) => installer::npm::write_npm_project(&dist.templates, info)?,
        InstallerImpl::Homebrew(info) => {
            // Homebrew formulas are the only installer that consumes manifests.
            installer::homebrew::write_homebrew_formula(&dist.templates, dist, info, manifests)?
        }
        InstallerImpl::Msi(info) => info.build()?,
    }
    Ok(())
}
/// The desktop target triples cargo-dist enables by default.
pub fn default_desktop_targets() -> Vec<String> {
    [
        axoproject::platforms::TARGET_X64_LINUX_GNU,
        axoproject::platforms::TARGET_X64_WINDOWS,
        axoproject::platforms::TARGET_X64_MAC,
        axoproject::platforms::TARGET_ARM64_MAC,
    ]
    .into_iter()
    .map(|t| t.to_owned())
    .collect()
}
/// All the desktop target triples cargo-dist knows how to handle.
pub fn known_desktop_targets() -> Vec<String> {
    [
        axoproject::platforms::TARGET_X64_LINUX_GNU,
        axoproject::platforms::TARGET_X64_LINUX_MUSL,
        axoproject::platforms::TARGET_X64_WINDOWS,
        axoproject::platforms::TARGET_X64_MAC,
        axoproject::platforms::TARGET_ARM64_MAC,
    ]
    .into_iter()
    .map(|t| t.to_owned())
    .collect()
}