// cargo_dist/tasks.rs
//! Code to compute the tasks dist should do
//!
//! This is the heart and soul of dist, and ideally the [`gather_work`][] function
//! should compute every minute detail dist will perform ahead of time. This is done with
//! the DistGraphBuilder, which roughly builds up the work to do as follows:
//!
//! 1. [`config::get_project`][]: find out everything we want to know about the workspace (binaries, configs, etc)
//! 2. compute the TargetTriples we're interested in based on ArtifactMode and target configs/flags
//! 3. add Releases for all the binaries selected by the above steps
//! 4. for each TargetTriple, create a ReleaseVariant of each Release
//! 5. add target-specific Binaries to each ReleaseVariant
//! 6. add Artifacts to each Release, which will be propagated to each ReleaseVariant as necessary
//!   1. add archives, propagated to ReleaseVariants
//!   2. add installers, each one decides if it's global or local
//! 7. compute actual BuildSteps from the current graph (a Binary will only induce an actual `cargo build`
//!    here if one of the Artifacts that was added requires outputs from it!)
//! 8. generate release/announcement notes
//!
//! During step 6 a lot of extra magic happens:
//!
//! * We drop artifacts on the ground if the current ArtifactMode disallows them
//! * We also try to automatically detect that a Binary That Needs To Be Built Now
//!   can produce symbols and make an Artifact for that too.
//!
//! In summary, the DistGraph has roughly the following hierarchy
//!
//! * Announcement: all the releases together
//!   * Releases: a specific version of an app (my-app-v1.0.0)
//!     * global Artifacts: artifacts that have only one version across all platforms
//!     * ReleaseVariants: a target-specific part of a Release (my-app-v1.0.0-x86_64-apple-darwin)
//!       * local Artifacts: artifacts that are per-Variant
//!       * Binaries: a binary that should be built for a specific Variant
//!   * BuildSteps: steps we should take to build the artifacts
//!
//! Note that much of this hierarchy is rearranged/simplified in dist-manifest.json!
//!
//! Binaries are a little bit weird in that they are in principle nested under ReleaseVariants
//! but can/should be shared between them when possible (e.g. if you have a crash reporter
//! binary that's shared across various apps). This is... not well-supported and things will
//! go a bit wonky if you actually try to do this right now. Notably what to parent a Symbols
//! Artifact to becomes ambiguous! Probably we should just be fine with duplicating things in
//! this case..?
//!
//! Also note that most of these things have (ideally, unchecked) globally unique "ids"
//! that are used to create ids for things nested under them, to ensure final
//! artifacts/folders/files always have unique names.
//!
//! Also note that the BuildSteps for installers are basically monolithic "build that installer"
//! steps to give them the freedom to do whatever they need to do.

51use std::collections::BTreeMap;
52
53use crate::backend::installer::{ExecutableZipFragment, HomebrewImpl};
54use crate::platform::targets::{
55    TARGET_ARM64_LINUX_GNU, TARGET_ARM64_MAC, TARGET_X64_LINUX_GNU, TARGET_X64_MAC,
56};
57use axoasset::AxoClient;
58use axoprocess::Cmd;
59use axoproject::{PackageId, PackageIdx, WorkspaceGraph};
60use camino::{Utf8Path, Utf8PathBuf};
61use cargo_dist_schema::target_lexicon::{OperatingSystem, Triple};
62use cargo_dist_schema::{
63    ArtifactId, BuildEnvironment, DistManifest, HomebrewPackageName, SystemId, SystemInfo,
64    TripleName, TripleNameRef,
65};
66use semver::Version;
67use serde::Serialize;
68use tracing::{info, warn};
69
70use crate::announce::{self, AnnouncementTag, TagMode};
71use crate::backend::ci::github::GithubCiInfo;
72use crate::backend::ci::CiInfo;
73use crate::backend::installer::homebrew::{to_homebrew_license_format, HomebrewFragments};
74use crate::backend::installer::macpkg::PkgInstallerInfo;
75use crate::config::v1::builds::cargo::AppCargoBuildConfig;
76use crate::config::v1::ci::CiConfig;
77use crate::config::v1::installers::CommonInstallerConfig;
78use crate::config::v1::publishers::PublisherConfig;
79use crate::config::v1::{app_config, workspace_config, AppConfig, WorkspaceConfig};
80use crate::config::{DependencyKind, DirtyMode, LibraryStyle};
81use crate::linkage::determine_build_environment;
82use crate::net::ClientSettings;
83use crate::platform::{PlatformSupport, RuntimeConditions};
84use crate::sign::Signing;
85use crate::{
86    backend::{
87        installer::{
88            homebrew::{to_class_case, HomebrewInstallerInfo},
89            msi::MsiInstallerInfo,
90            npm::NpmInstallerInfo,
91            InstallerImpl, InstallerInfo,
92        },
93        templates::Templates,
94    },
95    config::{
96        self, ArtifactMode, ChecksumStyle, CompressionImpl, Config, HostingStyle, InstallerStyle,
97        ZipStyle,
98    },
99    errors::{DistError, DistResult},
100};
101
/// Key in workspace.metadata or package.metadata for our config
pub const METADATA_DIST: &str = "dist";
/// Dir in target/ for us to build our packages in
/// NOTE: DO NOT GIVE THIS THE SAME NAME AS A PROFILE!
pub const TARGET_DIST: &str = "distrib";
/// The profile we will build with
pub const PROFILE_DIST: &str = "dist";

/// The key for referring to linux as an "os"
pub const OS_LINUX: &str = "linux";
/// The key for referring to macos as an "os"
pub const OS_MACOS: &str = "macos";
/// The key for referring to windows as an "os"
pub const OS_WINDOWS: &str = "windows";

/// The key for referring to 64-bit x86_64 (AKA amd64) as an "cpu"
pub const CPU_X64: &str = "x86_64";
/// The key for referring to 32-bit x86 (AKA i686) as an "cpu"
pub const CPU_X86: &str = "x86";
/// The key for referring to 64-bit arm64 (AKA aarch64) as an "cpu"
pub const CPU_ARM64: &str = "arm64";
/// The key for referring to 32-bit arm as an "cpu"
pub const CPU_ARM: &str = "arm";

/// A map whose iteration order is unspecified (hash-based)
pub type FastMap<K, V> = std::collections::HashMap<K, V>;
/// A map with a stable, sorted iteration order
pub type SortedMap<K, V> = std::collections::BTreeMap<K, V>;
/// A set with a stable, sorted iteration order
pub type SortedSet<T> = std::collections::BTreeSet<T>;
132
/// A unique id for a [`Artifact`][]
#[derive(Copy, Clone, PartialEq, PartialOrd, Eq, Ord, Hash, Debug)]
pub struct ArtifactIdx(pub usize);

/// A unique id for a [`ReleaseVariant`][]
#[derive(Copy, Clone, PartialEq, PartialOrd, Eq, Ord, Hash, Debug)]
pub struct ReleaseVariantIdx(pub usize);

/// A unique id for a [`Release`][]
#[derive(Copy, Clone, PartialEq, PartialOrd, Eq, Ord, Hash, Debug)]
pub struct ReleaseIdx(pub usize);

/// A unique id for a [`Binary`][]
#[derive(Copy, Clone, PartialEq, PartialOrd, Eq, Ord, Hash, Debug)]
pub struct BinaryIdx(pub usize);
148
149/// A convenience wrapper around a map of binary aliases
150#[derive(Clone, Debug)]
151pub struct BinaryAliases(BTreeMap<String, Vec<String>>);
152
153impl BinaryAliases {
154    /// Returns a formatted copy of the map, with file extensions added
155    /// if necessary.
156    pub fn for_target(&self, target: &TripleNameRef) -> BTreeMap<String, Vec<String>> {
157        if target.is_windows() {
158            BTreeMap::from_iter(self.0.iter().map(|(k, v)| {
159                (
160                    format!("{k}.exe"),
161                    v.iter().map(|name| format!("{name}.exe")).collect(),
162                )
163            }))
164        } else {
165            self.0.clone()
166        }
167    }
168
169    /// Returns a map of binary aliases for each target triple, with
170    /// executable extensions added if necessary.
171    pub fn for_targets(
172        &self,
173        targets: &[TripleName],
174    ) -> BTreeMap<TripleName, BTreeMap<String, Vec<String>>> {
175        BTreeMap::from_iter(
176            targets
177                .iter()
178                .map(|target| (target.to_owned(), self.for_target(target))),
179        )
180    }
181}
182
183/// The graph of all work that dist needs to do on this invocation.
184///
185/// All work is precomputed at the start of execution because only discovering
186/// what you need to do in the middle of building/packing things is a mess.
187/// It also allows us to report what *should* happen without actually doing it.
188#[derive(Debug)]
189pub struct DistGraph {
190    /// Unique id for the system we're building on.
191    ///
192    /// Since the whole premise of dist is to invoke it once on each machine, and no
193    /// two machines have any reason to have the exact same CLI args for dist, we
194    /// just use a mangled form of the CLI arguments here.
195    pub system_id: SystemId,
196    /// Whether it looks like `dist init` has been run
197    pub is_init: bool,
198    /// What to allow to be dirty
199    pub allow_dirty: DirtyMode,
200    /// Homebrew tap all packages agree on
201    pub global_homebrew_tap: Option<String>,
202    /// builtin publish jobs all packages agree on
203    pub global_publishers: Option<PublisherConfig>,
204    /// Whether we can just build the workspace or need to build each package
205    pub precise_cargo_builds: bool,
206
207    /// Info about the tools we're using to build
208    pub tools: Tools,
209    /// Signing tools
210    pub signer: Signing,
211    /// Minijinja templates we might want to render
212    pub templates: Templates,
213
214    /// The cargo target dir.
215    pub target_dir: Utf8PathBuf,
216    /// The root directory of the current git repo
217    pub repo_dir: Utf8PathBuf,
218    /// The root directory of the current cargo workspace.
219    pub workspace_dir: Utf8PathBuf,
220    /// dist's target dir (generally nested under `target_dir`).
221    pub dist_dir: Utf8PathBuf,
222    /// misc workspace-global config
223    pub config: WorkspaceConfig,
224    /// Targets we need to build (local artifacts)
225    pub local_build_steps: Vec<BuildStep>,
226    /// Targets we need to build (global artifacts)
227    pub global_build_steps: Vec<BuildStep>,
228    /// Distributable artifacts we want to produce for the releases
229    pub artifacts: Vec<Artifact>,
230    /// Binaries we want to build
231    pub binaries: Vec<Binary>,
232    /// Variants of Releases
233    pub variants: Vec<ReleaseVariant>,
234    /// Logical releases that artifacts are grouped under
235    pub releases: Vec<Release>,
236    /// Info about CI backends
237    pub ci: CiInfo,
238    /// List of hosting providers to use
239    pub hosting: Option<HostingInfo>,
240    /// LIES ALL LIES
241    pub local_builds_are_lies: bool,
242    /// HTTP client settings
243    pub client_settings: ClientSettings,
244    /// A reusable client for basic http fetches
245    pub axoclient: AxoClient,
246}
247
248/// Info about artifacts should be hosted
249#[derive(Debug, Clone)]
250pub struct HostingInfo {
251    /// Hosting backends
252    pub hosts: Vec<HostingStyle>,
253    /// The domain at which the repo is hosted, (e.g. `"https://github.com"`)
254    pub domain: String,
255    /// Path at the domain
256    pub repo_path: String,
257    /// Source hosting provider (e.g. "github")
258    pub source_host: String,
259    /// Project owner
260    pub owner: String,
261    /// Project name
262    pub project: String,
263}
264
265/// Various tools we have found installed on the system
266#[derive(Debug, Clone)]
267pub struct Tools {
268    /// Info on the host
269    pub host_target: TripleName,
270    /// Info on cargo
271    pub cargo: Option<CargoInfo>,
272    /// rustup, useful for getting specific toolchains
273    pub rustup: Option<Tool>,
274    /// homebrew, only available on macOS
275    pub brew: Option<Tool>,
276    /// git, used if the repository is a git repo
277    pub git: Option<Tool>,
278    /// omnibor, used for generating OmniBOR Artifact IDs
279    pub omnibor: Option<Tool>,
280    /// ssl.com's CodeSignTool, for Windows Code Signing
281    ///
282    /// <https://www.ssl.com/guide/esigner-codesigntool-command-guide/>
283    pub code_sign_tool: Option<Tool>,
284    /// cargo-auditable, used for auditable builds
285    pub cargo_auditable: Option<Tool>,
286    /// cargo-cyclonedx, for generating CycloneDX artifacts
287    pub cargo_cyclonedx: Option<Tool>,
288    /// cargo-xwin, for some cross builds
289    pub cargo_xwin: Option<Tool>,
290    /// cargo-zigbuild, for some cross builds
291    pub cargo_zigbuild: Option<Tool>,
292}
293
294impl Tools {
295    /// Returns the cargo info or an error
296    pub fn cargo(&self) -> DistResult<&CargoInfo> {
297        self.cargo.as_ref().ok_or(DistError::ToolMissing {
298            tool: "cargo".to_owned(),
299        })
300    }
301
302    /// Returns the omnibor info or an error
303    pub fn omnibor(&self) -> DistResult<&Tool> {
304        self.omnibor.as_ref().ok_or(DistError::ToolMissing {
305            tool: "omnibor-cli".to_owned(),
306        })
307    }
308
309    /// Returns cargo-auditable info or an error
310    pub fn cargo_auditable(&self) -> DistResult<&Tool> {
311        self.cargo_auditable.as_ref().ok_or(DistError::ToolMissing {
312            tool: "cargo-auditable".to_owned(),
313        })
314    }
315
316    /// Returns cargo-cyclonedx info or an error
317    pub fn cargo_cyclonedx(&self) -> DistResult<&Tool> {
318        self.cargo_cyclonedx.as_ref().ok_or(DistError::ToolMissing {
319            tool: "cargo-cyclonedx".to_owned(),
320        })
321    }
322
323    /// Returns cargo-xwin info or an error
324    pub fn cargo_xwin(&self) -> DistResult<&Tool> {
325        self.cargo_xwin.as_ref().ok_or(DistError::ToolMissing {
326            tool: "cargo-xwin".to_owned(),
327        })
328    }
329
330    /// Returns cargo-zigbuild info or an error
331    pub fn cargo_zigbuild(&self) -> DistResult<&Tool> {
332        self.cargo_zigbuild.as_ref().ok_or(DistError::ToolMissing {
333            tool: "cargo-zigbuild".to_owned(),
334        })
335    }
336}
337
338/// Info about the cargo toolchain we're using
339#[derive(Debug, Clone)]
340pub struct CargoInfo {
341    /// The path/command used to refer to cargo (usually from the CARGO env var)
342    pub cmd: String,
343    /// The first line of running cargo with `-vV`, should be version info
344    pub version_line: Option<String>,
345    /// The host target triple (obtained from `-vV`)
346    pub host_target: TripleName,
347}
348
/// A tool we have found installed on the system
#[derive(Debug, Clone, Default)]
pub struct Tool {
    /// The string to pass to Cmd::new
    pub cmd: String,
    /// The version the tool reported (in case useful)
    pub version: String,
}
357
358/// A binary we want to build (specific to a Variant)
359#[derive(Debug)]
360pub struct Binary {
361    /// A unique id to use for things derived from this binary
362    ///
363    /// (e.g. my-binary-v1.0.0-x86_64-pc-windows-msvc)
364    pub id: String,
365    /// The idx of the package this binary is defined by
366    pub pkg_idx: PackageIdx,
367    /// The cargo package this binary is defined by
368    ///
369    /// This is an "opaque" string that will show up in things like cargo machine-readable output,
370    /// but **this is not the format that cargo -p flags expect**. Use pkg_spec for that.
371    pub pkg_id: Option<PackageId>,
372    /// An ideally unambiguous way to refer to a package for the purpose of cargo -p flags.
373    pub pkg_spec: String,
374    /// The name of the binary (as defined by the Cargo.toml)
375    pub name: String,
376    /// The filename the binary will have
377    pub file_name: String,
378    /// The target triple to build it for
379    pub target: TripleName,
380    /// The artifact for this Binary's symbols
381    pub symbols_artifact: Option<ArtifactIdx>,
382    /// Places the executable needs to be copied to
383    ///
384    /// If this is empty by the time we compute the precise build steps
385    /// we will determine that this Binary doesn't actually need to be built.
386    pub copy_exe_to: Vec<Utf8PathBuf>,
387    /// Places the symbols need to be copied to
388    pub copy_symbols_to: Vec<Utf8PathBuf>,
389    /// feature flags!
390    pub features: CargoTargetFeatures,
391    /// What kind of binary this is
392    pub kind: BinaryKind,
393}
394
/// Different kinds of binaries dist knows about
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum BinaryKind {
    /// Standard executable
    Executable,
    /// C-style dynamic library (.so/.dylib/.dll)
    DynamicLibrary,
    /// C-style static library (.a/.lib)
    StaticLibrary,
}
405
406/// A build step we would like to perform
407#[derive(Debug)]
408#[allow(clippy::large_enum_variant)]
409pub enum BuildStep {
410    /// Do a generic build (and copy the outputs to various locations)
411    Generic(GenericBuildStep),
412    /// Do a cargo build (and copy the outputs to various locations)
413    Cargo(CargoBuildStep),
414    /// Do an extra artifact build (and copy the outputs to various locations)
415    Extra(ExtraBuildStep),
416    /// Run rustup to get a toolchain
417    Rustup(RustupStep),
418    /// Copy a file
419    CopyFile(CopyStep),
420    /// Copy a dir
421    CopyDir(CopyStep),
422    /// Copy a file or dir (unknown, don't check which until the last possible second)
423    CopyFileOrDir(CopyStep),
424    /// Zip up a directory
425    Zip(ZipDirStep),
426    /// Generate some kind of installer
427    GenerateInstaller(InstallerImpl),
428    /// Generates a source tarball
429    GenerateSourceTarball(SourceTarballStep),
430    /// Checksum a file
431    Checksum(ChecksumImpl),
432    /// Generate a unified checksum file, containing multiple entries
433    UnifiedChecksum(UnifiedChecksumStep),
434    /// Generate an OmniBOR Artifact ID
435    OmniborArtifactId(OmniborArtifactIdImpl),
436    /// Fetch or build an updater binary
437    Updater(UpdaterStep),
438    // FIXME: For macos universal builds we'll want
439    // Lipo(LipoStep)
440}
441
442/// A cargo build (and copy the outputs to various locations)
443#[derive(Debug)]
444pub struct CargoBuildStep {
445    /// The --target triple to pass
446    pub target_triple: TripleName,
447    /// The feature flags to pass
448    pub features: CargoTargetFeatures,
449    /// What package to build (or "the workspace")
450    pub package: CargoTargetPackages,
451    /// The --profile to pass
452    pub profile: String,
453    /// The value to set for RUSTFLAGS
454    pub rustflags: String,
455    /// Binaries we expect from this build
456    pub expected_binaries: Vec<BinaryIdx>,
457    /// The working directory to run the build in
458    pub working_dir: Utf8PathBuf,
459}
460
/// A wrapper to use instead of `cargo build`, generally used for cross-compilation
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
pub enum CargoBuildWrapper {
    /// Run 'cargo zigbuild' to cross-compile, e.g. from `x86_64-unknown-linux-gnu` to `aarch64-unknown-linux-gnu`
    /// cf. <https://github.com/rust-cross/cargo-zigbuild>
    ZigBuild,

    /// Run 'cargo xwin' to cross-compile, e.g. from `aarch64-apple-darwin` to `x86_64-pc-windows-msvc`
    /// cf. <https://github.com/rust-cross/cargo-xwin>
    Xwin,
}

impl std::fmt::Display for CargoBuildWrapper {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Go through `pad` (rather than `write!`) so callers' width/alignment
        // format specs are honored.
        let name = match self {
            Self::ZigBuild => "cargo-zigbuild",
            Self::Xwin => "cargo-xwin",
        };
        f.pad(name)
    }
}
481
482/// Returns the cargo build wrapper required to perform a certain cross-compilation
483pub fn build_wrapper_for_cross(
484    host: &Triple,
485    target: &Triple,
486) -> DistResult<Option<CargoBuildWrapper>> {
487    if host.operating_system == target.operating_system && host.architecture == target.architecture
488    {
489        // we're not cross-compiling, not really... maybe we're making a GNU binary from a "musl" host but meh.
490        return Ok(None);
491    }
492
493    match target.operating_system {
494        // compiling for macOS (making Mach-O binaries, .dylib files, etc.)
495        OperatingSystem::Darwin(_) => match host.operating_system {
496            OperatingSystem::Darwin(_) => {
497                // from mac to mac, even if we do aarch64 => x86_64, or the other way
498                // around, _all we need_ is to add the target to rustup
499                Ok(None)
500            }
501            _ => {
502                Err(DistError::UnsupportedCrossCompile {
503                    host: host.clone(),
504                    target: target.clone(),
505                    details: "cross-compiling to macOS is a road paved with sadness — we cowardly refuse to walk it.".to_string(),
506                })
507            }
508        },
509        // compiling for Linux (making ELF binaries, .so files, etc.)
510        OperatingSystem::Linux => match host.operating_system {
511            OperatingSystem::Linux | OperatingSystem::Darwin(_) | OperatingSystem::Windows => {
512                // zigbuild works for e.g. x86_64-unknown-linux-gnu => aarch64-unknown-linux-gnu
513                Ok(Some(CargoBuildWrapper::ZigBuild))
514            }
515            _ => {
516                Err(DistError::UnsupportedCrossCompile {
517                    host: host.clone(),
518                    target: target.clone(),
519                    details: format!("no idea how to cross-compile from {host} to linux"),
520                })
521            }
522        },
523        // compiling for Windows (making PE binaries, .dll files, etc.)
524        OperatingSystem::Windows => match host.operating_system {
525            OperatingSystem::Windows => {
526                // from win to win is generally supported.
527                Ok(None)
528            }
529            OperatingSystem::Linux | OperatingSystem::Darwin(_) => {
530                // cargo-xwin is made for that
531                Ok(Some(CargoBuildWrapper::Xwin))
532            }
533            _ => {
534                Err(DistError::UnsupportedCrossCompile {
535                    host: host.clone(),
536                    target: target.clone(),
537                    details: format!("no idea how to cross-compile from {host} to windows with architecture {}", target.architecture),
538                })
539            }
540        },
541        _ => {
542            Err(DistError::UnsupportedCrossCompile {
543                host: host.clone(),
544                target: target.clone(),
545                details: format!("no idea how to cross-compile from anything (including the current host, {host}) to {target}"),
546            })
547        }
548    }
549}
550
551/// A cargo build (and copy the outputs to various locations)
552#[derive(Debug)]
553pub struct GenericBuildStep {
554    /// The --target triple to pass
555    pub target_triple: TripleName,
556    /// Binaries we expect from this build
557    pub expected_binaries: Vec<BinaryIdx>,
558    /// The working directory to run the build in
559    pub working_dir: Utf8PathBuf,
560    /// The output directory to find build outputs in
561    pub out_dir: Utf8PathBuf,
562    /// The command to run to produce the expected binaries
563    pub build_command: Vec<String>,
564}
565
566/// An "extra" build step, producing new sidecar artifacts
567#[derive(Debug)]
568pub struct ExtraBuildStep {
569    /// The dir to run the build_command in
570    pub working_dir: Utf8PathBuf,
571    /// Relative paths (from the working_dir) to binaries we expect to find
572    pub artifact_relpaths: Vec<Utf8PathBuf>,
573    /// The command to run to produce the expected binaries
574    pub build_command: Vec<String>,
575}
576
577/// A cargo build (and copy the outputs to various locations)
578#[derive(Debug)]
579pub struct RustupStep {
580    /// The rustup to invoke (mostly here to prove you Have rustup)
581    pub rustup: Tool,
582    /// The target to install
583    pub target: TripleName,
584}
585
586/// zip/tarball some directory
587#[derive(Debug)]
588pub struct ZipDirStep {
589    /// The directory to zip up
590    pub src_path: Utf8PathBuf,
591    /// The final file path for the output zip
592    pub dest_path: Utf8PathBuf,
593    /// The name of the dir the tarball/zip will contain
594    pub with_root: Option<Utf8PathBuf>,
595    /// The kind of zip/tarball to make
596    pub zip_style: ZipStyle,
597}
598
599/// Copy a file
600#[derive(Debug)]
601pub struct CopyStep {
602    /// from here
603    pub src_path: Utf8PathBuf,
604    /// to here
605    pub dest_path: Utf8PathBuf,
606}
607
608/// Create a checksum
609#[derive(Debug, Clone)]
610pub struct ChecksumImpl {
611    /// the checksumming algorithm
612    pub checksum: ChecksumStyle,
613    /// of this file
614    pub src_path: Utf8PathBuf,
615    /// potentially write it to here
616    pub dest_path: Option<Utf8PathBuf>,
617    /// record it for this artifact in the dist-manifest
618    pub for_artifact: Option<ArtifactId>,
619}
620
621/// Create a unified checksum file, containing sums for
622/// all artifacts, save for the unified checksum itself,
623/// of course.
624///
625/// The result is something like `sha256.sum` which can be
626/// checked by common tools like `sha256sum -c`. Even though
627/// the type system lets each checksum have a different style,
628/// the setting is per-release so in practice they end up being
629/// the same.
630#[derive(Debug, Clone)]
631pub struct UnifiedChecksumStep {
632    /// the checksum style to use
633    pub checksum: ChecksumStyle,
634
635    /// record the unified checksum to this path
636    pub dest_path: Utf8PathBuf,
637}
638
639/// Create a file containing the OmniBOR Artifact ID for a specific file.
640#[derive(Debug, Clone)]
641pub struct OmniborArtifactIdImpl {
642    /// file to generate the Artifact ID for
643    pub src_path: Utf8PathBuf,
644    /// file to write the Artifact ID to
645    pub dest_path: Utf8PathBuf,
646}
647
648/// Create a source tarball
649#[derive(Debug, Clone)]
650pub struct SourceTarballStep {
651    /// the ref/tag/commit/branch/etc. to archive
652    pub committish: String,
653    /// A root directory to nest the archive's contents under
654    // Note: GitHub uses `appname-tag` for this
655    pub prefix: String,
656    /// target filename
657    pub target: Utf8PathBuf,
658    /// The dir to run the git command in
659    pub working_dir: Utf8PathBuf,
660    /// Use an implementation that bundles submodules
661    pub recursive: bool,
662}
663
664/// Fetch or build an updater
665#[derive(Debug, Clone)]
666pub struct UpdaterStep {
667    /// The target triple this updater is for
668    pub target_triple: TripleName,
669    /// The file this should produce
670    pub target_filename: Utf8PathBuf,
671    /// Whether to use the latest release instead of a fixed version
672    pub use_latest: bool,
673}
674
/// A kind of symbols (debuginfo)
#[derive(Copy, Clone, Debug)]
pub enum SymbolKind {
    /// Microsoft pdbs
    Pdb,
    /// Apple dSYMs
    Dsym,
    /// DWARF DWPs
    Dwp,
}

impl SymbolKind {
    /// Get the file extension for the symbol kind
    pub fn ext(self) -> &'static str {
        match self {
            Self::Dsym => "dSYM",
            Self::Dwp => "dwp",
            Self::Pdb => "pdb",
        }
    }
}
696
697/// A distributable artifact we want to build
698#[derive(Clone, Debug)]
699pub struct Artifact {
700    /// Unique id for the Artifact (its file name)
701    ///
702    /// i.e. `cargo-dist-v0.1.0-x86_64-pc-windows-msvc.zip`
703    pub id: ArtifactId,
704    /// The target platform
705    ///
706    /// i.e. `x86_64-pc-windows-msvc`
707    pub target_triples: Vec<TripleName>,
708    /// If constructing this artifact involves creating a directory,
709    /// copying static files into it, and then zip/tarring it, set this
710    /// value to automate those tasks.
711    pub archive: Option<Archive>,
712    /// The path where the final artifact will appear in the dist dir.
713    ///
714    /// i.e. `/.../target/dist/cargo-dist-v0.1.0-x86_64-pc-windows-msvc.zip`
715    pub file_path: Utf8PathBuf,
716    /// The built assets this artifact will contain
717    ///
718    /// i.e. `cargo-dist.exe`
719    pub required_binaries: FastMap<BinaryIdx, Utf8PathBuf>,
720    /// The kind of artifact this is
721    pub kind: ArtifactKind,
722    /// A checksum for this artifact, if any
723    pub checksum: Option<ArtifactIdx>,
724    /// Indicates whether the artifact is local or global
725    pub is_global: bool,
726}
727
728/// Info about an archive (zip/tarball) that should be made. Currently this is always part
729/// of an Artifact, and the final output will be [`Artifact::file_path`][].
730#[derive(Clone, Debug)]
731pub struct Archive {
732    /// An optional prefix path to nest all the archive contents under
733    /// If None then all the archive's contents will be placed in the root
734    pub with_root: Option<Utf8PathBuf>,
735    /// The path of the directory this artifact's contents will be stored in.
736    ///
737    /// i.e. `/.../target/dist/cargo-dist-v0.1.0-x86_64-pc-windows-msvc/`
738    pub dir_path: Utf8PathBuf,
739    /// The style of zip to make
740    pub zip_style: ZipStyle,
741    /// Static assets to copy to the root of the artifact's dir (path is src)
742    ///
743    /// In the future this might add a custom relative dest path
744    pub static_assets: Vec<(StaticAssetKind, Utf8PathBuf)>,
745}
746
747/// A kind of artifact (more specific fields)
748#[derive(Clone, Debug)]
749#[allow(clippy::large_enum_variant)]
750pub enum ArtifactKind {
751    /// An Archive containing binaries (aka ExecutableZip)
752    ExecutableZip(ExecutableZip),
753    /// Symbols
754    Symbols(Symbols),
755    /// An installer
756    Installer(InstallerImpl),
757    /// A checksum
758    Checksum(ChecksumImpl),
759    /// A unified checksum file, like `sha256.sum`
760    UnifiedChecksum(UnifiedChecksumStep),
761    /// A source tarball
762    SourceTarball(SourceTarball),
763    /// An extra artifact specified via config
764    ExtraArtifact(ExtraArtifactImpl),
765    /// An updater executable
766    Updater(UpdaterImpl),
767    /// An existing file representing a Software Bill Of Materials
768    SBOM(SBOMImpl),
769    /// An OmniBOR Artifact ID.
770    OmniborArtifactId(OmniborArtifactIdImpl),
771}
772
/// An Archive containing binaries (aka ExecutableZip)
#[derive(Clone, Debug)]
pub struct ExecutableZip {
    // everything important is already part of Artifact
}
778
779/// A Symbols/Debuginfo Artifact
780#[derive(Clone, Debug)]
781pub struct Symbols {
782    /// The kind of symbols this is
783    kind: SymbolKind,
784}
785
786/// A source tarball artifact
787#[derive(Clone, Debug)]
788pub struct SourceTarball {
789    /// the ref/tag/commit/branch/etc. to archive
790    pub committish: String,
791    /// A root directory to nest the archive's contents under
792    // Note: GitHub uses `appname-tag` for this
793    pub prefix: String,
794    /// target filename
795    pub target: Utf8PathBuf,
796    /// path to the git checkout
797    pub working_dir: Utf8PathBuf,
798    /// Whether submodules should be included
799    pub recursive: bool,
800}
801
/// An extra artifact of some kind, produced by running a user-configured command
#[derive(Clone, Debug)]
pub struct ExtraArtifactImpl {
    /// Working dir to run the command in
    pub working_dir: Utf8PathBuf,
    /// The command to run to produce this artifact
    pub command: Vec<String>,
    /// Relative path to the artifact, from the working_dir
    pub artifact_relpath: Utf8PathBuf,
}
812
/// An updater executable
#[derive(Clone, Debug)]
pub struct UpdaterImpl {
    /// Whether to use the latest updater release, or a specific known-good version
    pub use_latest: bool,
}
819
/// A file containing a Software Bill Of Materials
///
/// Currently carries no extra data of its own; presumably the file's location
/// lives on the containing Artifact, as with other artifact kinds here.
#[derive(Clone, Debug)]
pub struct SBOMImpl {}
823
/// A logical release of an application that artifacts are grouped under
#[derive(Clone, Debug)]
pub struct Release {
    /// The name of the app
    pub app_name: String,
    /// A brief description of the app
    pub app_desc: Option<String>,
    /// The authors of the app
    pub app_authors: Vec<String>,
    /// The license of the app
    pub app_license: Option<String>,
    /// The URL to the app's source repository
    pub app_repository_url: Option<String>,
    /// The URL to the app's homepage
    pub app_homepage_url: Option<String>,
    /// A list of the app's keywords
    pub app_keywords: Option<Vec<String>>,
    /// The package this release is based on
    pub pkg_idx: PackageIdx,
    /// The version of the app
    pub version: Version,
    /// The unique id of the release (e.g. "my-app-v1.0.0")
    pub id: String,
    /// misc app-specific config
    pub config: AppConfig,
    /// Targets this Release has artifacts for
    pub targets: Vec<TripleName>,
    /// Binaries that every variant should ostensibly provide
    ///
    /// The string is the name of the binary under that package (without .exe extension)
    pub bins: Vec<(PackageIdx, String)>,
    /// C dynamic libraries that every variant should ostensibly provide
    ///
    /// The string is the name of the library, without lib prefix, and without platform-specific suffix (.so, .dylib, .dll)
    /// Note: Windows won't include lib prefix in the final lib.
    pub cdylibs: Vec<(PackageIdx, String)>,
    /// C static libraries that every variant should ostensibly provide
    ///
    /// The string is the name of the library, without lib prefix, and without platform-specific suffix (.a, .lib)
    /// Note: Windows won't include lib prefix in the final lib.
    pub cstaticlibs: Vec<(PackageIdx, String)>,
    /// Artifacts that are "global" to the whole Release (as opposed to per-variant).
    ///
    /// They might still be limited to some subset of the targets (e.g. powershell scripts are
    /// windows-only), but conceptually there's only "one" for the Release.
    pub global_artifacts: Vec<ArtifactIdx>,
    /// Variants of this Release (e.g. "the macos build") that can have "local" Artifacts.
    pub variants: Vec<ReleaseVariantIdx>,
    /// The body of the changelog for this release
    pub changelog_body: Option<String>,
    /// The title of the changelog for this release
    pub changelog_title: Option<String>,
    /// Static assets that should be included in bundles like archives
    pub static_assets: Vec<(StaticAssetKind, Utf8PathBuf)>,
    /// Computed support for platforms, gets iteratively refined over time, so check details
    /// as late as possible, if you can!
    pub platform_support: PlatformSupport,
}
880
/// A particular variant of a Release (e.g. "the macos build")
#[derive(Debug)]
pub struct ReleaseVariant {
    /// The target triple this variant is for
    pub target: TripleName,
    /// The unique identifying string used for things related to this variant
    /// (e.g. "my-app-v1.0.0-x86_64-pc-windows-msvc")
    pub id: String,
    /// Binaries included in this Release Variant (indices into the graph's binaries)
    pub binaries: Vec<BinaryIdx>,
    /// Static assets that should be included in bundles like archives
    pub static_assets: Vec<(StaticAssetKind, Utf8PathBuf)>,
    /// Artifacts that are "local" to this variant (binaries, symbols, msi-installer...)
    pub local_artifacts: Vec<ArtifactIdx>,
}
896
/// A particular kind of static asset we're interested in
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
pub enum StaticAssetKind {
    /// A README file
    Readme,
    /// A LICENSE file
    License,
    /// A CHANGELOG or RELEASES file
    Changelog,
    /// Some other miscellaneous file
    Other,
}
909
/// Cargo features a cargo build should use.
#[derive(Debug, Default, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub struct CargoTargetFeatures {
    /// Whether to disable default features
    // NOTE(review): the field name reads as "enable default features" while this
    // doc says "disable" — one of the two is inverted; confirm against the
    // config value that populates it (builds.cargo.default_features).
    pub default_features: bool,
    /// Features to enable
    pub features: CargoTargetFeatureList,
}
918
/// A list of features to build with
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub enum CargoTargetFeatureList {
    /// All of them (every feature the package defines)
    All,
    /// Some of them, listed by name
    List(Vec<String>),
}
927
928impl Default for CargoTargetFeatureList {
929    fn default() -> Self {
930        Self::List(vec![])
931    }
932}
933
/// Whether to build a package or workspace
#[derive(Debug)]
pub enum CargoTargetPackages {
    /// Build the whole workspace
    Workspace,
    /// Just build a single package
    ///
    /// Inner string is [`Binary::pkg_spec`][]
    Package(String),
}
944
/// Builder that incrementally computes the DistGraph (see the module docs for
/// the overall flow: releases, then variants/binaries, then artifacts).
pub(crate) struct DistGraphBuilder<'pkg_graph> {
    /// The graph being built up
    pub(crate) inner: DistGraph,
    /// The machine-readable manifest being built up alongside the graph
    pub(crate) manifest: DistManifest,
    /// Info about the workspaces/packages we're distributing
    pub(crate) workspaces: &'pkg_graph mut WorkspaceGraph,
    /// Which artifacts this invocation is supposed to produce
    artifact_mode: ArtifactMode,
    /// Map from a Binary's id to its index, so the same binary isn't registered twice
    binaries_by_id: FastMap<String, BinaryIdx>,
    /// Merged per-package configs, one per package in workspace package order
    package_configs: Vec<AppConfig>,
}
953
954impl<'pkg_graph> DistGraphBuilder<'pkg_graph> {
    /// Constructs a DistGraphBuilder, doing all workspace-wide validation and
    /// config merging up front.
    ///
    /// This:
    /// * rejects stale `[workspace.metadata.dist]` tables when dist-workspace.toml is in use
    /// * parses and merges workspace- and package-level configs
    /// * decides whether precise (per-package) cargo builds are required
    /// * enforces that all packages agree on homebrew tap, publishers, and prereleases
    /// * probes the build environment and sets up signing + an HTTP client
    ///
    /// # Errors
    ///
    /// Fails on config parse/validation errors, on irreconcilable per-package
    /// settings (features/taps/publishers/prereleases), or if setting up the
    /// HTTP client or signer fails.
    pub(crate) fn new(
        system_id: SystemId,
        tools: Tools,
        workspaces: &'pkg_graph mut WorkspaceGraph,
        artifact_mode: ArtifactMode,
        allow_all_dirty: bool,
        announcement_tag_is_implicit: bool,
    ) -> DistResult<Self> {
        let root_workspace_idx = workspaces.root_workspace_idx();
        let root_workspace = workspaces.workspace(root_workspace_idx);

        // Complain if someone still has [workspace.metadata.dist] in a dist-workspace.toml scenario
        if let Some(dist_manifest_path) = root_workspace.dist_manifest_path.as_deref() {
            for workspace_idx in workspaces.all_workspace_indices() {
                if workspace_idx == root_workspace_idx {
                    continue;
                }
                let workspace = workspaces.workspace(workspace_idx);
                config::reject_metadata_table(
                    &workspace.manifest_path,
                    dist_manifest_path,
                    workspace.cargo_metadata_table.as_ref(),
                )?;
            }
        }

        // Key directories: cargo target dir, workspace root, repo root, dist output dir
        let target_dir = root_workspace.target_dir.clone();
        let workspace_dir = root_workspace.workspace_dir.clone();
        let repo_dir = if let Some(repo) = &workspaces.repo {
            repo.path.to_owned()
        } else {
            // Fallback if we're not building in a git repo
            workspace_dir.clone()
        };
        let dist_dir = target_dir.join(TARGET_DIST);

        let mut workspace_metadata =
            // Read the global config
            config::parse_metadata_table_or_manifest(
                &root_workspace.manifest_path,
                root_workspace.dist_manifest_path.as_deref(),
                root_workspace.cargo_metadata_table.as_ref(),
            )?;

        // Snapshot the workspace-level config layer before paths are rewritten,
        // so the per-package configs below can inherit from it
        let workspace_layer = workspace_metadata.to_toml_layer(true);
        workspace_metadata.make_relative_to(&root_workspace.workspace_dir);

        let config = workspace_config(workspaces, workspace_layer.clone());

        if config.builds.cargo.rust_toolchain_version.is_some() {
            warn!("rust-toolchain-version is deprecated, use rust-toolchain.toml if you want pinned toolchains");
        }

        // "Lies" mode means local builds are faked rather than actually run
        let local_builds_are_lies = artifact_mode == ArtifactMode::Lies;

        // Compute/merge package configs
        // NOTE(review): package_metadatas is accumulated but never read again in
        // this constructor — the loop's lasting effects are package_configs and
        // the validate_install_paths() check; confirm the Vec isn't vestigial.
        let mut package_metadatas = vec![];
        let mut package_configs = vec![];

        for (pkg_idx, package) in workspaces.all_packages() {
            let mut package_metadata = config::parse_metadata_table_or_manifest(
                &package.manifest_path,
                package.dist_manifest_path.as_deref(),
                package.cargo_metadata_table.as_ref(),
            )?;
            package_configs.push(app_config(
                workspaces,
                pkg_idx,
                workspace_layer.clone(),
                package_metadata.to_toml_layer(false),
            ));

            package_metadata.make_relative_to(&package.package_root);
            package_metadata.merge_workspace_config(&workspace_metadata, &package.manifest_path);
            package_metadata.validate_install_paths()?;
            package_metadatas.push(package_metadata);
        }

        // check cargo build settings for precise-builds
        //
        // If any two packages disagree on cargo feature settings we can't do one
        // workspace-wide `cargo build`, so precise (per-package) builds are required.
        let mut global_cargo_build_config = None::<AppCargoBuildConfig>;
        let mut packages_with_mismatched_features = vec![];
        for ((_idx, package), package_config) in workspaces.all_packages().zip(&package_configs) {
            if let Some(cargo_build_config) = &global_cargo_build_config {
                if package_config.builds.cargo.features != cargo_build_config.features
                    || package_config.builds.cargo.all_features != cargo_build_config.all_features
                    || package_config.builds.cargo.default_features
                        != cargo_build_config.default_features
                {
                    packages_with_mismatched_features.push(
                        package
                            .dist_manifest_path
                            .clone()
                            .unwrap_or(package.manifest_path.clone()),
                    );
                }
            } else {
                global_cargo_build_config = Some(package_config.builds.cargo.clone());
                // This package gets to be the archetype, so if there's a mismatch it will
                // always be implicated. So push it to the error list, and only say there's an
                // error if there's two entries in this at the end.
                packages_with_mismatched_features.push(
                    package
                        .dist_manifest_path
                        .clone()
                        .unwrap_or(package.manifest_path.clone()),
                );
            };
        }
        // Only do workspace builds if all the packages agree with the workspace feature settings
        let requires_precise = packages_with_mismatched_features.len() > 1;
        let precise_cargo_builds = if let Some(precise_builds) = config.builds.cargo.precise_builds
        {
            if !precise_builds && requires_precise {
                return Err(DistError::PreciseImpossible {
                    packages: packages_with_mismatched_features,
                });
            }
            precise_builds
        } else {
            // NOTE(review): this logs even when requires_precise is false, in which
            // case nothing is actually being force-enabled — possibly the message
            // should be gated on requires_precise; confirm intent.
            info!("force-enabling precise-builds to handle your build features");
            requires_precise
        };

        // check homebrew taps for global publish jobs
        // FIXME: when we add `dist publish` we can drop this,
        // as we can support granular publish settings
        let mut global_homebrew_tap = None;
        let mut packages_with_mismatched_taps = vec![];
        for ((_idx, package), package_config) in workspaces.all_packages().zip(&package_configs) {
            if let Some(homebrew) = &package_config.installers.homebrew {
                if let Some(new_tap) = &homebrew.tap {
                    if let Some(current_tap) = &global_homebrew_tap {
                        if current_tap != new_tap {
                            packages_with_mismatched_taps.push(
                                package
                                    .dist_manifest_path
                                    .clone()
                                    .unwrap_or(package.manifest_path.clone()),
                            );
                        }
                    } else {
                        // This package gets to be the archetype, so if there's a mismatch it will
                        // always be implicated. So push it to the error list, and only say there's an
                        // error if there's two entries in this at the end.
                        packages_with_mismatched_taps.push(
                            package
                                .dist_manifest_path
                                .clone()
                                .unwrap_or(package.manifest_path.clone()),
                        );
                        global_homebrew_tap = Some(new_tap.clone());
                    }
                }
            }
        }
        if packages_with_mismatched_taps.len() > 1 {
            return Err(DistError::MismatchedTaps {
                packages: packages_with_mismatched_taps,
            });
        }

        // check publish jobs for global publish jobs
        // FIXME: when we add `dist publish` we can drop this,
        // as we can support granular publish settings
        let mut global_publishers = None;
        let mut packages_with_mismatched_publishers = vec![];
        for ((_idx, package), package_config) in workspaces.all_packages().zip(&package_configs) {
            if let Some(cur_publishers) = &global_publishers {
                if cur_publishers != &package_config.publishers {
                    packages_with_mismatched_publishers.push(
                        package
                            .dist_manifest_path
                            .clone()
                            .unwrap_or(package.manifest_path.clone()),
                    );
                }
            } else {
                // This package gets to be the archetype, so if there's a mismatch it will
                // always be implicated. So push it to the error list, and only say there's an
                // error if there's two entries in this at the end.
                packages_with_mismatched_publishers.push(
                    package
                        .dist_manifest_path
                        .clone()
                        .unwrap_or(package.manifest_path.clone()),
                );
                global_publishers = Some(package_config.publishers.clone());
            }
        }
        if packages_with_mismatched_publishers.len() > 1 {
            return Err(DistError::MismatchedPublishers {
                packages: packages_with_mismatched_publishers,
            });
        }
        // All configured publishers must also agree on whether prereleases get published
        let global_publish_prereleases = global_publishers
            .as_ref()
            .map(|p| {
                // until we have `dist publish` we need to enforce everyone agreeing on `prereleases`
                let PublisherConfig {
                    homebrew,
                    npm,
                    user,
                } = p;
                let h_pre = homebrew.as_ref().map(|p| p.prereleases);
                let npm_pre = npm.as_ref().map(|p| p.prereleases);
                let user_pre = user.as_ref().map(|p| p.prereleases);
                let choices = [h_pre, npm_pre, user_pre];
                let mut global_choice = None;
                #[allow(clippy::manual_flatten)]
                for choice in choices {
                    if let Some(choice) = choice {
                        if let Some(cur_choice) = global_choice {
                            if cur_choice != choice {
                                return Err(DistError::MismatchedPrereleases);
                            }
                        } else {
                            global_choice = Some(choice);
                        }
                    }
                }
                Ok(global_choice.unwrap_or(false))
            })
            .transpose()?
            .unwrap_or(false);

        let templates = Templates::new()?;
        let allow_dirty = if allow_all_dirty {
            DirtyMode::AllowAll
        } else {
            DirtyMode::AllowList(config.allow_dirty.clone())
        };
        let cargo_version_line = tools.cargo.as_ref().and_then(|c| c.version_line.to_owned());
        // Don't probe the environment in Lies mode — builds won't actually run
        let build_environment = if local_builds_are_lies {
            BuildEnvironment::Indeterminate
        } else {
            determine_build_environment(&tools.host_target)
        };

        let system = SystemInfo {
            id: system_id.clone(),
            cargo_version_line,
            build_environment,
        };
        let systems = SortedMap::from_iter([(system_id.clone(), system)]);

        let client_settings = ClientSettings::new();
        let axoclient = crate::net::create_axoasset_client(&client_settings)?;

        let signer = Signing::new(
            &axoclient,
            &tools.host_target,
            &dist_dir,
            config.builds.ssldotcom_windows_sign.clone(),
            config.builds.macos_sign,
        )?;
        // GitHub attestation settings all default off/empty when no github host config
        let github_attestations = config
            .hosts
            .github
            .as_ref()
            .map(|g| g.attestations)
            .unwrap_or(false);
        let github_attestations_filters = config
            .hosts
            .github
            .as_ref()
            .map(|g| g.attestations_filters.clone())
            .unwrap_or_default();
        let github_attestations_phase = config
            .hosts
            .github
            .as_ref()
            .map(|g| g.attestations_phase)
            .unwrap_or_default();
        let force_latest = config.hosts.force_latest;
        Ok(Self {
            inner: DistGraph {
                system_id,
                is_init: config.dist_version.is_some(),
                allow_dirty,
                global_homebrew_tap,
                global_publishers,
                precise_cargo_builds,
                target_dir,
                repo_dir,
                workspace_dir,
                dist_dir,
                config,
                signer,
                tools,
                local_builds_are_lies,
                templates,
                local_build_steps: vec![],
                global_build_steps: vec![],
                artifacts: vec![],
                binaries: vec![],
                variants: vec![],
                releases: vec![],
                ci: CiInfo::default(),
                hosting: None,
                client_settings,
                axoclient,
            },
            manifest: DistManifest {
                dist_version: Some(env!("CARGO_PKG_VERSION").to_owned()),
                system_info: None,
                announcement_tag: None,
                announcement_is_prerelease: false,
                announcement_tag_is_implicit,
                announcement_title: None,
                announcement_changelog: None,
                announcement_github_body: None,
                releases: vec![],
                artifacts: Default::default(),
                systems,
                assets: Default::default(),
                publish_prereleases: global_publish_prereleases,
                force_latest,
                ci: None,
                linkage: vec![],
                upload_files: vec![],
                github_attestations,
                github_attestations_filters,
                github_attestations_phase,
            },
            package_configs,
            workspaces,
            binaries_by_id: FastMap::new(),
            artifact_mode,
        })
    }
1285
1286    fn add_release(&mut self, pkg_idx: PackageIdx) -> ReleaseIdx {
1287        let package_info = self.workspaces.package(pkg_idx);
1288        let config = self.package_config(pkg_idx).clone();
1289
1290        let version = package_info.version.as_ref().unwrap().semver().clone();
1291        let app_name = package_info.name.clone();
1292        let app_desc = package_info.description.clone();
1293        let app_authors = package_info.authors.clone();
1294        let app_license = package_info.license.clone();
1295        let app_repository_url = package_info.repository_url.clone();
1296        let app_homepage_url = package_info.homepage_url.clone();
1297        let app_keywords = package_info.keywords.clone();
1298
1299        // Add static assets
1300        let mut static_assets = vec![];
1301        if config.artifacts.archives.auto_includes {
1302            if let Some(readme) = &package_info.readme_file {
1303                static_assets.push((StaticAssetKind::Readme, readme.clone()));
1304            }
1305            if let Some(changelog) = &package_info.changelog_file {
1306                static_assets.push((StaticAssetKind::Changelog, changelog.clone()));
1307            }
1308            for license in &package_info.license_files {
1309                static_assets.push((StaticAssetKind::License, license.clone()));
1310            }
1311        }
1312        for static_asset in &config.artifacts.archives.include {
1313            static_assets.push((StaticAssetKind::Other, static_asset.clone()));
1314        }
1315
1316        let platform_support = PlatformSupport::default();
1317        let idx = ReleaseIdx(self.inner.releases.len());
1318        let id = app_name.clone();
1319        info!("added release {id}");
1320        self.inner.releases.push(Release {
1321            app_name,
1322            app_desc,
1323            app_authors,
1324            app_license,
1325            app_repository_url,
1326            app_homepage_url,
1327            app_keywords,
1328            version,
1329            id,
1330            pkg_idx,
1331            global_artifacts: vec![],
1332            bins: vec![],
1333            cdylibs: vec![],
1334            cstaticlibs: vec![],
1335            targets: vec![],
1336            variants: vec![],
1337            changelog_body: None,
1338            changelog_title: None,
1339            config,
1340            static_assets,
1341            platform_support,
1342        });
1343        idx
1344    }
1345
    /// Creates a ReleaseVariant of a Release for one target triple, and
    /// registers the target-specific Binaries (exes/cdylibs/cstaticlibs) it
    /// should provide.
    ///
    /// Binaries are interned by id via `binaries_by_id`, so a binary wanted by
    /// multiple callers is only added to the graph once.
    ///
    /// # Errors
    ///
    /// Fails with [`DistError::UnrecognizedTarget`][] when a new binary must be
    /// named for a target that isn't windows/linux/darwin (we can't pick the
    /// platform-specific file extensions for it).
    fn add_variant(
        &mut self,
        to_release: ReleaseIdx,
        target: TripleName,
    ) -> DistResult<ReleaseVariantIdx> {
        let idx = ReleaseVariantIdx(self.inner.variants.len());
        // Destructure the Release so we can borrow several of its fields at once
        let Release {
            id: release_id,
            variants,
            targets,
            static_assets,
            bins,
            cdylibs,
            cstaticlibs,
            config,
            pkg_idx,
            ..
        } = self.release_mut(to_release);
        let static_assets = static_assets.clone();
        let variant_id = format!("{release_id}-{target}");
        info!("added variant {variant_id}");
        let binaries_map = &config.artifacts.archives.binaries;

        variants.push(idx);
        targets.push(target.clone());

        // Apply binary list overrides
        // (an exact per-target entry wins over a "*" catch-all entry)
        let mapped_bins = binaries_map
            .get(target.as_str())
            .or_else(|| binaries_map.get("*"));
        let mut packageables: Vec<(PackageIdx, String, BinaryKind)> =
            if let Some(mapped_bins) = mapped_bins {
                mapped_bins
                    .iter()
                    .map(|b| (*pkg_idx, b.to_string(), BinaryKind::Executable))
                    .collect()
            } else {
                bins.clone()
                    .into_iter()
                    .map(|(idx, b)| (idx, b, BinaryKind::Executable))
                    .collect()
            };

        // If we're not packaging libraries here, avoid chaining them
        // into the list we're iterating over
        if config
            .artifacts
            .archives
            .package_libraries
            .contains(&LibraryStyle::CDynamic)
        {
            let all_dylibs = cdylibs
                .clone()
                .into_iter()
                .map(|(idx, l)| (idx, l, BinaryKind::DynamicLibrary));
            packageables = packageables.into_iter().chain(all_dylibs).collect();
        }
        if config
            .artifacts
            .archives
            .package_libraries
            .contains(&LibraryStyle::CStatic)
        {
            let all_cstaticlibs = cstaticlibs
                .clone()
                .into_iter()
                .map(|(idx, l)| (idx, l, BinaryKind::StaticLibrary));
            packageables = packageables.into_iter().chain(all_cstaticlibs).collect();
        }

        // Add all the binaries of the release to this variant
        let mut binaries = vec![];
        for (pkg_idx, binary_name, kind) in packageables {
            let package = self.workspaces.package(pkg_idx);
            let package_config = self.package_config(pkg_idx);
            let pkg_id = package.cargo_package_id.clone();
            // For now we just use the name of the package as its package_spec.
            // I'm not sure if there are situations where this is ambiguous when
            // referring to a package in your workspace that you want to build an app for.
            // If they do exist, that's deeply cursed and I want a user to tell me about it.
            let pkg_spec = package.true_name.clone();
            let kind_label = match kind {
                BinaryKind::Executable => "exe",
                BinaryKind::DynamicLibrary => "cdylib",
                BinaryKind::StaticLibrary => "cstaticlib",
            };
            // FIXME: make this more of a GUID to allow variants to share binaries?
            let bin_id = format!("{variant_id}-{kind_label}-{binary_name}");

            let idx = if let Some(&idx) = self.binaries_by_id.get(&bin_id) {
                // If we already are building this binary we don't need to do it again!
                idx
            } else {
                // Compute the rest of the details and add the binary
                let features = CargoTargetFeatures {
                    default_features: package_config.builds.cargo.default_features,
                    features: if package_config.builds.cargo.all_features {
                        CargoTargetFeatureList::All
                    } else {
                        CargoTargetFeatureList::List(package_config.builds.cargo.features.clone())
                    },
                };

                // Platform-specific naming: ".exe" suffix and no "lib" prefix on windows
                let target_is_windows = target.is_windows();
                let platform_exe_ext;
                let platform_lib_prefix;
                if target_is_windows {
                    platform_exe_ext = ".exe";
                    platform_lib_prefix = "";
                } else {
                    platform_exe_ext = "";
                    platform_lib_prefix = "lib";
                };

                // Library extensions vary per OS; unknown platforms are an error
                let platform_lib_ext;
                let platform_staticlib_ext;
                if target_is_windows {
                    platform_lib_ext = ".dll";
                    platform_staticlib_ext = ".lib";
                } else if target.is_linux() {
                    platform_lib_ext = ".so";
                    platform_staticlib_ext = ".a";
                } else if target.is_darwin() {
                    platform_lib_ext = ".dylib";
                    platform_staticlib_ext = ".a";
                } else {
                    return Err(DistError::UnrecognizedTarget { target });
                };

                let file_name = match kind {
                    BinaryKind::Executable => format!("{binary_name}{platform_exe_ext}"),
                    BinaryKind::DynamicLibrary => {
                        format!("{platform_lib_prefix}{binary_name}{platform_lib_ext}")
                    }
                    BinaryKind::StaticLibrary => {
                        format!("{platform_lib_prefix}{binary_name}{platform_staticlib_ext}")
                    }
                };

                info!("added binary {bin_id}");
                let idx = BinaryIdx(self.inner.binaries.len());
                let binary = Binary {
                    id: bin_id.clone(),
                    pkg_id,
                    pkg_spec,
                    pkg_idx,
                    name: binary_name,
                    file_name,
                    target: target.clone(),
                    copy_exe_to: vec![],
                    copy_symbols_to: vec![],
                    symbols_artifact: None,
                    features,
                    kind,
                };
                self.inner.binaries.push(binary);
                self.binaries_by_id.insert(bin_id, idx);
                idx
            };

            binaries.push(idx);
        }

        self.inner.variants.push(ReleaseVariant {
            target,
            id: variant_id,
            local_artifacts: vec![],
            binaries,
            static_assets,
        });
        Ok(idx)
    }
1518
1519    fn add_binary(&mut self, to_release: ReleaseIdx, pkg_idx: PackageIdx, binary_name: String) {
1520        let release = self.release_mut(to_release);
1521        release.bins.push((pkg_idx, binary_name));
1522    }
1523
1524    fn add_library(&mut self, to_release: ReleaseIdx, pkg_idx: PackageIdx, binary_name: String) {
1525        let release = self.release_mut(to_release);
1526        release.cdylibs.push((pkg_idx, binary_name));
1527    }
1528
1529    fn add_static_library(
1530        &mut self,
1531        to_release: ReleaseIdx,
1532        pkg_idx: PackageIdx,
1533        binary_name: String,
1534    ) {
1535        let release = self.release_mut(to_release);
1536        release.cstaticlibs.push((pkg_idx, binary_name));
1537    }
1538
1539    fn add_executable_zip(&mut self, to_release: ReleaseIdx) {
1540        if !self.local_artifacts_enabled() {
1541            return;
1542        }
1543        info!(
1544            "adding executable zip to release {}",
1545            self.release(to_release).id
1546        );
1547
1548        // Create an archive for each Variant
1549        let release = self.release(to_release);
1550        let variants = release.variants.clone();
1551        let checksum = self.inner.config.artifacts.checksum;
1552        for variant_idx in variants {
1553            let (zip_artifact, built_assets) =
1554                self.make_executable_zip_for_variant(to_release, variant_idx);
1555
1556            let zip_artifact_idx = self.add_local_artifact(variant_idx, zip_artifact);
1557            for (binary, dest_path) in built_assets {
1558                self.require_binary(zip_artifact_idx, variant_idx, binary, dest_path);
1559            }
1560
1561            if checksum != ChecksumStyle::False {
1562                self.add_artifact_checksum(variant_idx, zip_artifact_idx, checksum);
1563            }
1564
1565            if self.inner.config.builds.omnibor {
1566                let omnibor = self.create_omnibor_artifact(zip_artifact_idx, false);
1567                self.add_local_artifact(variant_idx, omnibor);
1568            }
1569        }
1570    }
1571
1572    fn add_extra_artifacts(&mut self, app_config: &AppConfig, to_release: ReleaseIdx) {
1573        if !self.global_artifacts_enabled() {
1574            return;
1575        }
1576        let dist_dir = &self.inner.dist_dir.to_owned();
1577        let artifacts = app_config.artifacts.extra.clone();
1578
1579        for extra in artifacts {
1580            for artifact_relpath in extra.artifact_relpaths {
1581                let artifact_name = ArtifactId::new(
1582                    artifact_relpath
1583                        .file_name()
1584                        .expect("extra artifact had no name!?")
1585                        .to_owned(),
1586                );
1587                let target_path = dist_dir.join(artifact_name.as_str());
1588                let artifact = Artifact {
1589                    id: artifact_name,
1590                    target_triples: vec![],
1591                    file_path: target_path.to_owned(),
1592                    required_binaries: FastMap::new(),
1593                    archive: None,
1594                    kind: ArtifactKind::ExtraArtifact(ExtraArtifactImpl {
1595                        working_dir: extra.working_dir.clone(),
1596                        command: extra.command.clone(),
1597                        artifact_relpath,
1598                    }),
1599                    checksum: None,
1600                    is_global: true,
1601                };
1602
1603                self.add_global_artifact(to_release, artifact);
1604            }
1605        }
1606    }
1607
1608    fn add_cyclonedx_sbom_file(&mut self, to_package: PackageIdx, to_release: ReleaseIdx) {
1609        let release = self.release(to_release);
1610
1611        if !self.global_artifacts_enabled() || !release.config.builds.cargo.cargo_cyclonedx {
1612            return;
1613        }
1614
1615        let package = self.workspaces.package(to_package);
1616
1617        let file_name = format!("{}.cdx.xml", package.true_name);
1618        let file_path = Utf8Path::new("target/distrib/").join(file_name.clone());
1619        self.add_global_artifact(
1620            to_release,
1621            Artifact {
1622                id: ArtifactId::new(file_name),
1623                target_triples: Default::default(),
1624                archive: None,
1625                file_path: file_path.clone(),
1626                required_binaries: Default::default(),
1627                kind: ArtifactKind::SBOM(SBOMImpl {}),
1628                checksum: None,
1629                is_global: true,
1630            },
1631        );
1632    }
1633
1634    fn create_omnibor_artifact(&mut self, artifact_idx: ArtifactIdx, is_global: bool) -> Artifact {
1635        let artifact = self.artifact(artifact_idx);
1636        let id = artifact.id.clone();
1637        let src_path = artifact.file_path.clone();
1638
1639        let extension = src_path
1640            .extension()
1641            .map_or("omnibor".to_string(), |e| format!("{e}.omnibor"));
1642        let dest_path = src_path.with_extension(extension);
1643
1644        let new_id = format!("{}.omnibor", id);
1645
1646        Artifact {
1647            id: ArtifactId::new(new_id),
1648            target_triples: Default::default(),
1649            archive: None,
1650            file_path: dest_path.clone(),
1651            required_binaries: Default::default(),
1652            kind: ArtifactKind::OmniborArtifactId(OmniborArtifactIdImpl {
1653                src_path,
1654                dest_path,
1655            }),
1656            checksum: None,
1657            is_global,
1658        }
1659    }
1660
1661    fn add_unified_checksum_file(&mut self, to_release: ReleaseIdx) {
1662        if !self.global_artifacts_enabled() {
1663            return;
1664        }
1665
1666        let dist_dir = &self.inner.dist_dir;
1667        let checksum = self.inner.config.artifacts.checksum;
1668        let file_name = ArtifactId::new(format!("{}.sum", checksum.ext()));
1669        let file_path = dist_dir.join(file_name.as_str());
1670
1671        self.add_global_artifact(
1672            to_release,
1673            Artifact {
1674                id: file_name,
1675                target_triples: Default::default(),
1676                archive: None,
1677                file_path: file_path.clone(),
1678                required_binaries: Default::default(),
1679                kind: ArtifactKind::UnifiedChecksum(UnifiedChecksumStep {
1680                    checksum,
1681                    dest_path: file_path,
1682                }),
1683                checksum: None, // who checksums the checksummers...
1684                is_global: true,
1685            },
1686        );
1687    }
1688
    /// Add a `source.tar.gz` tarball of the repo (plus optional checksum and
    /// OmniBOR artifacts) as global artifacts of the Release.
    ///
    /// Silently (well, with warnings) bails out if: global artifacts are disabled,
    /// source tarballs are disabled in config, git isn't installed, there's no git
    /// repo, or the repo has no commits yet.
    fn add_source_tarball(&mut self, _tag: &str, to_release: ReleaseIdx) {
        if !self.global_artifacts_enabled() {
            return;
        }

        if !self.inner.config.artifacts.source_tarball {
            return;
        }

        if self.inner.tools.git.is_none() {
            warn!("skipping source tarball; git not installed");
            return;
        }

        let working_dir = self.inner.workspace_dir.clone();

        let workspace_repo = &self.workspaces.repo;

        // We'll be stubbing the actual generation in this case
        let is_git_repo = if self.inner.local_builds_are_lies {
            true
        } else {
            workspace_repo.is_some()
        };

        let has_head = if self.inner.local_builds_are_lies {
            true
        } else if let Some(repo) = workspace_repo {
            repo.head.is_some()
        } else {
            false
        };

        if !is_git_repo {
            warn!(
                "skipping source tarball; no git repo found at {}",
                self.inner.workspace_dir
            );
            return;
        }

        if !has_head {
            warn!(
                "skipping source tarball; git repo at {} has no commits yet",
                self.inner.workspace_dir
            );
            return;
        }

        let release = self.release(to_release);
        let checksum = self.inner.config.artifacts.checksum;
        info!("adding source tarball to release {}", release.id);

        let dist_dir = &self.inner.dist_dir.to_owned();

        let artifact_name = ArtifactId::new("source.tar.gz".to_owned());
        let target_path = dist_dir.join(artifact_name.as_str());
        // Entries in the tarball are prefixed "appname-version/"
        let prefix = format!("{}-{}/", release.app_name, release.version);
        let recursive = self.inner.config.artifacts.recursive_tarball;

        let artifact = Artifact {
            id: artifact_name.to_owned(),
            target_triples: vec![],
            file_path: target_path.to_owned(),
            required_binaries: FastMap::new(),
            archive: None,
            kind: ArtifactKind::SourceTarball(SourceTarball {
                // FIXME: it would be nice to verify that HEAD == tag when it Really Must
                // (as in when cutting a real release), but to make everything work when testing
                // locally or in CI without a tag, we just always use HEAD (since releases will
                // checkout the tag anyway, so HEAD==tag should always be true when it matters).
                committish: "HEAD".to_owned(),
                prefix,
                target: target_path.to_owned(),
                working_dir,
                recursive,
            }),
            checksum: None,
            is_global: true,
        };

        let for_artifact = Some(artifact.id.clone());
        let artifact_idx = self.add_global_artifact(to_release, artifact);

        // Optionally add a standalone checksum file for the tarball, linked back
        // to the tarball artifact via its `checksum` field
        if checksum != ChecksumStyle::False {
            let checksum_id = ArtifactId::new(format!("{artifact_name}.{}", checksum.ext()));
            let checksum_path = dist_dir.join(checksum_id.as_str());
            let checksum = Artifact {
                id: checksum_id.to_owned(),
                target_triples: vec![],
                file_path: checksum_path.to_owned(),
                required_binaries: FastMap::new(),
                archive: None,
                kind: ArtifactKind::Checksum(ChecksumImpl {
                    checksum,
                    src_path: target_path,
                    dest_path: Some(checksum_path),
                    for_artifact,
                }),
                checksum: None,
                is_global: true,
            };

            let checksum_idx = self.add_global_artifact(to_release, checksum);
            self.artifact_mut(artifact_idx).checksum = Some(checksum_idx);
        }

        // Optionally add an OmniBOR Artifact ID for the tarball
        if self.inner.config.builds.omnibor {
            let omnibor = self.create_omnibor_artifact(artifact_idx, true);
            self.add_global_artifact(to_release, omnibor);
        }
    }
1801
1802    fn add_artifact_checksum(
1803        &mut self,
1804        to_variant: ReleaseVariantIdx,
1805        artifact_idx: ArtifactIdx,
1806        checksum: ChecksumStyle,
1807    ) -> ArtifactIdx {
1808        let artifact = self.artifact(artifact_idx);
1809        let checksum_artifact = {
1810            let checksum_ext = checksum.ext();
1811            let checksum_id = ArtifactId::new(format!("{}.{}", artifact.id, checksum_ext));
1812            let checksum_path = artifact
1813                .file_path
1814                .parent()
1815                .unwrap()
1816                .join(checksum_id.as_str());
1817            Artifact {
1818                id: checksum_id,
1819                kind: ArtifactKind::Checksum(ChecksumImpl {
1820                    checksum,
1821                    src_path: artifact.file_path.clone(),
1822                    dest_path: Some(checksum_path.clone()),
1823                    for_artifact: Some(artifact.id.clone()),
1824                }),
1825
1826                target_triples: artifact.target_triples.clone(),
1827                archive: None,
1828                file_path: checksum_path,
1829                required_binaries: Default::default(),
1830                // Who checksums the checksummers...
1831                checksum: None,
1832                is_global: false,
1833            }
1834        };
1835        let checksum_idx = self.add_local_artifact(to_variant, checksum_artifact);
1836        self.artifact_mut(artifact_idx).checksum = Some(checksum_idx);
1837        checksum_idx
1838    }
1839
1840    fn add_updater(&mut self, variant_idx: ReleaseVariantIdx) {
1841        if !self.local_artifacts_enabled() {
1842            return;
1843        }
1844
1845        let artifact = self.make_updater_for_variant(variant_idx);
1846
1847        // This adds an updater per variant (eg one per app per target).
1848        // In the future this could possibly be deduplicated to just one per
1849        // target, but this is fine for now.
1850        self.add_local_artifact(variant_idx, artifact);
1851    }
1852
1853    pub(crate) fn make_updater_for_variant(&self, variant_idx: ReleaseVariantIdx) -> Artifact {
1854        let variant = self.variant(variant_idx);
1855        let filename = ArtifactId::new(format!("{}-update", variant.id));
1856        let target_path = &self.inner.dist_dir.to_owned().join(filename.as_str());
1857
1858        Artifact {
1859            id: filename.to_owned(),
1860            target_triples: vec![variant.target.to_owned()],
1861            file_path: target_path.to_owned(),
1862            required_binaries: FastMap::new(),
1863            archive: None,
1864            kind: ArtifactKind::Updater(UpdaterImpl {
1865                use_latest: self.inner.config.installers.always_use_latest_updater,
1866            }),
1867            checksum: None,
1868            is_global: false,
1869        }
1870    }
1871
1872    /// Make an executable zip for a variant, but don't yet integrate it into the graph
1873    ///
1874    /// This is useful for installers which want to know about *potential* executable zips
1875    pub(crate) fn make_executable_zip_for_variant(
1876        &self,
1877        release_idx: ReleaseIdx,
1878        variant_idx: ReleaseVariantIdx,
1879    ) -> (Artifact, Vec<(BinaryIdx, Utf8PathBuf)>) {
1880        // This is largely just a lot of path/name computation
1881        let dist_dir = &self.inner.dist_dir;
1882        let release = self.release(release_idx);
1883        let variant = self.variant(variant_idx);
1884
1885        let target_is_windows = variant.target.is_windows();
1886        let zip_style = if target_is_windows {
1887            release.config.artifacts.archives.windows_archive
1888        } else {
1889            release.config.artifacts.archives.unix_archive
1890        };
1891
1892        let artifact_dir_name = variant.id.clone();
1893        let artifact_dir_path = dist_dir.join(&artifact_dir_name);
1894        let artifact_ext = zip_style.ext();
1895        let artifact_name = ArtifactId::new(format!("{artifact_dir_name}{artifact_ext}"));
1896        let artifact_path = dist_dir.join(artifact_name.as_str());
1897
1898        let static_assets = variant.static_assets.clone();
1899        let mut built_assets = Vec::new();
1900        for &binary_idx in &variant.binaries {
1901            let binary = self.binary(binary_idx);
1902            built_assets.push((binary_idx, artifact_dir_path.join(&binary.file_name)));
1903        }
1904
1905        // When unpacking we currently rely on zips being flat, but --strip-prefix=1 tarballs.
1906        // This is kinda inconsistent, so maybe we should make both flat?
1907        // (It's hard to strip-prefix zips, so making them both have an extra dir is annoying)
1908        let with_root = if let ZipStyle::Zip = zip_style {
1909            None
1910        } else {
1911            Some(Utf8PathBuf::from(artifact_dir_name.clone()))
1912        };
1913
1914        (
1915            Artifact {
1916                id: artifact_name,
1917                target_triples: vec![variant.target.clone()],
1918                file_path: artifact_path,
1919                required_binaries: FastMap::new(),
1920                archive: Some(Archive {
1921                    with_root,
1922                    dir_path: artifact_dir_path,
1923                    zip_style,
1924                    static_assets,
1925                }),
1926                kind: ArtifactKind::ExecutableZip(ExecutableZip {}),
1927                // May get filled in later
1928                checksum: None,
1929                is_global: false,
1930            },
1931            built_assets,
1932        )
1933    }
1934
    /// Register that `for_artifact` requires `binary_idx` to actually be built for
    /// `for_variant`.
    ///
    /// `dest_path` is the file path to copy the binary to (used for Archives)
    /// as soon as they're built.
    ///
    /// Note that it's important to use `dest_path`, as cargo does not guarantee that
    /// multiple invocations will not overwrite each other's outputs. Since we always
    /// explicitly pass --target and --profile, this is unlikely to be an issue. But if
    /// we ever introduce the notion of "feature variants" (ReleaseVariants that differ
    /// only in the feature flags they take), this will become a problem.
    ///
    /// As a side effect, the first time a binary is required this also tries to
    /// create a Symbols artifact for it (when the target produces symbols).
    fn require_binary(
        &mut self,
        for_artifact: ArtifactIdx,
        for_variant: ReleaseVariantIdx,
        binary_idx: BinaryIdx,
        dest_path: Utf8PathBuf,
    ) {
        let dist_dir = self.inner.dist_dir.clone();
        let binary = self.binary_mut(binary_idx);

        // Tell the binary that it should copy the exe to the given path
        binary.copy_exe_to.push(dest_path.clone());

        // Try to make a symbols artifact for this binary now that we're building it
        // (only once per binary — symbols_artifact acts as the "already done" flag)
        if binary.symbols_artifact.is_none() {
            if let Some(symbol_kind) = target_symbol_kind(&binary.target) {
                // FIXME: For some formats these won't be the same but for now stubbed out

                // FIXME: rustc/cargo has so more complex logic to do platform-specific name remapping
                // (see should_replace_hyphens in src/cargo/core/compiler/build_context/target_info.rs)

                // FIXME: feed info about the expected source symbol name down to build_cargo_target
                // to unhardcode the use of .pdb ...!

                // let src_symbol_ext = symbol_kind.ext();
                let dest_symbol_ext = symbol_kind.ext();
                // let base_name = &binary.name;
                let binary_id = &binary.id;
                // let src_symbol_name = format!("{base_name}.{src_symbol_ext}");
                let dest_symbol_name = ArtifactId::new(format!("{binary_id}.{dest_symbol_ext}"));
                let artifact_path = dist_dir.join(dest_symbol_name.as_str());

                let artifact = Artifact {
                    id: dest_symbol_name,
                    target_triples: vec![binary.target.clone()],
                    archive: None,
                    file_path: artifact_path.clone(),
                    required_binaries: FastMap::new(),
                    kind: ArtifactKind::Symbols(Symbols { kind: symbol_kind }),
                    checksum: None,
                    is_global: false,
                };

                // FIXME: strictly speaking a binary could plausibly be shared between Releases,
                // and in such a situation the artifact should also be shared between the Variants.
                // However this kind of breaks the local-artifact concept, as we require a local
                // artifact to be strictly nested under one Variant.
                //
                // For now we pretend this isn't a thing.
                let sym_artifact = self.add_local_artifact(for_variant, artifact);

                // Record that we've made the symbols artifact for this binary
                // (re-borrow: add_local_artifact above needed &mut self)
                let binary = self.binary_mut(binary_idx);
                binary.symbols_artifact = Some(sym_artifact);
                binary.copy_symbols_to.push(artifact_path);
            }
        }

        // Tell the original requesting artifact that it will get this binary at the given path
        self.artifact_mut(for_artifact)
            .required_binaries
            .insert(binary_idx, dest_path);
    }
2009
    /// Add a `curl | sh` shell-script installer as a global artifact of the Release.
    ///
    /// No-op (Ok) when global artifacts are disabled, when no shell installer is
    /// configured, or when no non-windows-msvc platforms are being built.
    ///
    /// # Errors
    ///
    /// Fails if the installer has no binaries/libraries to install, or if the
    /// install receipt can't be computed from metadata.
    fn add_shell_installer(&mut self, to_release: ReleaseIdx) -> DistResult<()> {
        if !self.global_artifacts_enabled() {
            return Ok(());
        }
        let release = self.release(to_release);
        let Some(config) = &release.config.installers.shell else {
            return Ok(());
        };
        require_nonempty_installer(release, config)?;
        let release_id = &release.id;
        // Look up the already-computed manifest entry for this app to get
        // hosting/env/download-url info
        let schema_release = self
            .manifest
            .release_by_name(&release.app_name)
            .expect("couldn't find the release!?");

        let env_vars = schema_release.env.clone();

        let download_urls = schema_release
            .artifact_download_urls()
            .expect("couldn't compute a URL to download artifacts from!?");
        let hosting = schema_release.hosting.clone();
        let artifact_name = ArtifactId::new(format!("{release_id}-installer.sh"));
        let artifact_path = self.inner.dist_dir.join(artifact_name.as_str());
        let best_download_url = download_urls
            .first()
            .expect("returned empty list of artifact URLs!?");
        // The one-liner we tell users to run
        let installer_url = format!("{best_download_url}/{artifact_name}");
        let hint = format!("curl --proto '=https' --tlsv1.2 -LsSf {installer_url} | sh");
        let desc = "Install prebuilt binaries via shell script".to_owned();

        // Get the artifacts
        // (a shell installer can serve every platform except windows-msvc)
        let artifacts = release
            .platform_support
            .fragments()
            .into_iter()
            .filter(|a| !a.target_triple.is_windows_msvc())
            .collect::<Vec<_>>();
        let target_triples = artifacts
            .iter()
            .map(|a| a.target_triple.clone())
            .collect::<Vec<_>>();

        if artifacts.is_empty() {
            warn!("skipping shell installer: not building any supported platforms (use --artifacts=global)");
            return Ok(());
        };
        let bin_aliases = BinaryAliases(config.bin_aliases.clone()).for_targets(&target_triples);

        let runtime_conditions = release.platform_support.safe_conflated_runtime_conditions();

        let installer_artifact = Artifact {
            id: artifact_name,
            target_triples,
            archive: None,
            file_path: artifact_path.clone(),
            required_binaries: FastMap::new(),
            checksum: None,
            kind: ArtifactKind::Installer(InstallerImpl::Shell(InstallerInfo {
                release: to_release,
                dest_path: artifact_path,
                app_name: release.app_name.clone(),
                app_version: release.version.to_string(),
                install_paths: config
                    .install_path
                    .iter()
                    .map(|p| p.clone().into_jinja())
                    .collect(),
                install_success_msg: config.install_success_msg.to_owned(),
                base_urls: download_urls.to_owned(),
                hosting,
                artifacts,
                hint,
                desc,
                receipt: InstallReceipt::from_metadata(&self.inner, release)?,
                bin_aliases,
                install_libraries: config.install_libraries.clone(),
                runtime_conditions,
                platform_support: None,
                env_vars,
            })),
            is_global: true,
        };

        self.add_global_artifact(to_release, installer_artifact);
        Ok(())
    }
2096
    /// Add a Homebrew formula (`{formula}.rb`) as a global artifact of the Release.
    ///
    /// No-op (Ok) when global artifacts are disabled, when no Homebrew installer
    /// is configured, or when no supported (non-windows-msvc) platforms are built.
    ///
    /// # Errors
    ///
    /// Fails if the installer has no binaries/libraries to install.
    fn add_homebrew_installer(&mut self, to_release: ReleaseIdx) -> DistResult<()> {
        if !self.global_artifacts_enabled() {
            return Ok(());
        }
        let release = self.release(to_release);
        let Some(config) = &release.config.installers.homebrew else {
            return Ok(());
        };
        require_nonempty_installer(release, config)?;
        // Formula name defaults to the release id unless overridden in config
        let formula = if let Some(formula) = &config.formula {
            formula
        } else {
            &release.id
        };
        // NOTE(review): this looks up by `release.id` while the shell/powershell
        // installers look up by `release.app_name` — presumably equivalent here,
        // but confirm this is intentional.
        let schema_release = self
            .manifest
            .release_by_name(&release.id)
            .expect("couldn't find the release!?");
        let download_urls = schema_release
            .artifact_download_urls()
            .expect("couldn't compute a URL to download artifacts from!?");
        let hosting = schema_release.hosting.clone();

        let artifact_name = ArtifactId::new(format!("{formula}.rb"));
        let artifact_path = self.inner.dist_dir.join(artifact_name.as_str());

        // If tap is specified, include that in the `brew install` message
        let install_target = if let Some(tap) = &self.inner.global_homebrew_tap {
            // So that, for example, axodotdev/homebrew-tap becomes axodotdev/tap
            let tap = tap.replace("/homebrew-", "/");
            format!("{tap}/{formula}")
        } else {
            formula.clone()
        };

        let hint = format!("brew install {}", install_target);
        let desc = "Install prebuilt binaries via Homebrew".to_owned();

        // Homebrew can serve every platform except windows-msvc
        let artifacts = release
            .platform_support
            .fragments()
            .into_iter()
            .filter(|a| !a.target_triple.is_windows_msvc())
            .collect::<Vec<_>>();
        if artifacts.is_empty() {
            warn!("skipping Homebrew installer: not building any supported platforms (use --artifacts=global)");
            return Ok(());
        };

        let target_triples = artifacts
            .iter()
            .map(|a| a.target_triple.clone())
            .collect::<Vec<_>>();

        // Map the generic fragment list onto the four platforms a formula names
        let find_fragment = |triple: &TripleNameRef| -> Option<ExecutableZipFragment> {
            artifacts
                .iter()
                .find(|a| a.target_triple == triple)
                .cloned()
        };
        let fragments = HomebrewFragments {
            x86_64_macos: find_fragment(TARGET_X64_MAC),
            arm64_macos: find_fragment(TARGET_ARM64_MAC),
            x86_64_linux: find_fragment(TARGET_X64_LINUX_GNU),
            arm64_linux: find_fragment(TARGET_ARM64_LINUX_GNU),
        };

        let release = self.release(to_release);
        let app_name = release.app_name.clone();
        // Homebrew requires a description; warn and synthesize one if missing
        let app_desc = release.app_desc.clone().unwrap_or_else(|| {
            warn!("The Homebrew publish job is enabled but no description was specified\n  consider adding `description = ` to package in Cargo.toml");
            format!("The {} application", release.app_name)
        });
        let app_license = release.app_license.clone();
        let homebrew_dsl_license = app_license.as_ref().map(|app_license| {
            // Parse SPDX license expression and convert to Homebrew's Ruby license DSL.
            // If expression is malformed, fall back to plain input license string.
            to_homebrew_license_format(app_license).unwrap_or(format!("\"{app_license}\""))
        });
        // Homebrew also wants a homepage; fall back to the repository URL
        let app_homepage_url = if release.app_homepage_url.is_none() {
            warn!("The Homebrew publish job is enabled but no homepage was specified\n  consider adding `homepage = ` to package in Cargo.toml");
            release.app_repository_url.clone()
        } else {
            release.app_homepage_url.clone()
        };
        let tap = config.tap.clone();

        // Warn on the two halves of a tap/publish misconfiguration
        if tap.is_some() && release.config.publishers.homebrew.is_none() {
            warn!("A Homebrew tap was specified but the Homebrew publish job is disabled\n  consider adding \"homebrew\" to publish-jobs in Cargo.toml");
        }
        if release.config.publishers.homebrew.is_some() && tap.is_none() {
            warn!("The Homebrew publish job is enabled but no tap was specified\n  consider setting the tap field in Cargo.toml");
        }

        let runtime_conditions = release.platform_support.safe_conflated_runtime_conditions();

        // Only runtime-stage system dependencies become formula `depends_on` entries
        let dependencies: Vec<HomebrewPackageName> = release
            .config
            .builds
            .system_dependencies
            .homebrew
            .clone()
            .into_iter()
            .filter(|(_, package)| package.0.stage_wanted(&DependencyKind::Run))
            .map(|(name, _)| name)
            .collect();
        let bin_aliases = BinaryAliases(config.bin_aliases.clone()).for_targets(&target_triples);

        let inner = InstallerInfo {
            release: to_release,
            dest_path: artifact_path.clone(),
            app_name: release.app_name.clone(),
            app_version: release.version.to_string(),
            install_paths: config
                .install_path
                .iter()
                .map(|p| p.clone().into_jinja())
                .collect(),
            install_success_msg: config.install_success_msg.to_owned(),
            base_urls: download_urls,
            hosting,
            artifacts,
            hint,
            desc,
            receipt: None,
            bin_aliases,
            install_libraries: config.install_libraries.clone(),
            runtime_conditions,
            platform_support: None,
            // Not actually needed for this installer type
            env_vars: None,
        };

        let installer_artifact = Artifact {
            id: artifact_name,
            target_triples,
            archive: None,
            file_path: artifact_path,
            required_binaries: Default::default(),
            checksum: None,
            kind: ArtifactKind::Installer(InstallerImpl::Homebrew(HomebrewImpl {
                info: HomebrewInstallerInfo {
                    name: app_name,
                    formula_class: to_class_case(formula),
                    desc: app_desc,
                    license: homebrew_dsl_license,
                    homepage: app_homepage_url,
                    tap,
                    dependencies,
                    inner,
                    install_libraries: config.install_libraries.clone(),
                },
                fragments,
            })),
            is_global: true,
        };

        self.add_global_artifact(to_release, installer_artifact);
        Ok(())
    }
2257
2258    fn add_powershell_installer(&mut self, to_release: ReleaseIdx) -> DistResult<()> {
2259        if !self.global_artifacts_enabled() {
2260            return Ok(());
2261        }
2262
2263        // Get the basic info about the installer
2264        let release = self.release(to_release);
2265        let Some(config) = &release.config.installers.powershell else {
2266            return Ok(());
2267        };
2268        require_nonempty_installer(release, config)?;
2269        let release_id = &release.id;
2270        let schema_release = self
2271            .manifest
2272            .release_by_name(&release.app_name)
2273            .expect("couldn't find the release!?");
2274
2275        let env_vars = schema_release.env.clone();
2276
2277        let download_urls = schema_release
2278            .artifact_download_urls()
2279            .expect("couldn't compute a URL to download artifacts from!?");
2280        let hosting = schema_release.hosting.clone();
2281        let artifact_name = ArtifactId::new(format!("{release_id}-installer.ps1"));
2282        let artifact_path = self.inner.dist_dir.join(artifact_name.as_str());
2283        let best_download_url = download_urls
2284            .first()
2285            .expect("returned empty list of artifact URLs!?");
2286        let installer_url = format!("{best_download_url}/{artifact_name}");
2287        let hint = format!(r#"powershell -ExecutionPolicy Bypass -c "irm {installer_url} | iex""#);
2288        let desc = "Install prebuilt binaries via powershell script".to_owned();
2289
2290        // Gather up the bundles the installer supports
2291        let artifacts = release
2292            .platform_support
2293            .fragments()
2294            .into_iter()
2295            .filter(|a| a.target_triple.is_windows())
2296            .collect::<Vec<_>>();
2297        let target_triples = artifacts
2298            .iter()
2299            .map(|a| a.target_triple.clone())
2300            .collect::<Vec<_>>();
2301        if artifacts.is_empty() {
2302            warn!("skipping powershell installer: not building any supported platforms (use --artifacts=global)");
2303            return Ok(());
2304        };
2305        let bin_aliases = BinaryAliases(config.bin_aliases.clone()).for_targets(&target_triples);
2306        let installer_artifact = Artifact {
2307            id: artifact_name,
2308            target_triples,
2309            file_path: artifact_path.clone(),
2310            required_binaries: FastMap::new(),
2311            archive: None,
2312            checksum: None,
2313            kind: ArtifactKind::Installer(InstallerImpl::Powershell(InstallerInfo {
2314                release: to_release,
2315                dest_path: artifact_path,
2316                app_name: release.app_name.clone(),
2317                app_version: release.version.to_string(),
2318                install_paths: config
2319                    .install_path
2320                    .iter()
2321                    .map(|p| p.clone().into_jinja())
2322                    .collect(),
2323                install_success_msg: config.install_success_msg.to_owned(),
2324                base_urls: download_urls,
2325                hosting,
2326                artifacts,
2327                hint,
2328                desc,
2329                receipt: InstallReceipt::from_metadata(&self.inner, release)?,
2330                bin_aliases,
2331                install_libraries: config.install_libraries.clone(),
2332                runtime_conditions: RuntimeConditions::default(),
2333                platform_support: None,
2334                env_vars,
2335            })),
2336            is_global: true,
2337        };
2338
2339        self.add_global_artifact(to_release, installer_artifact);
2340        Ok(())
2341    }
2342
2343    fn add_npm_installer(&mut self, to_release: ReleaseIdx) -> DistResult<()> {
2344        if !self.global_artifacts_enabled() {
2345            return Ok(());
2346        }
2347        let release = self.release(to_release);
2348        let Some(config) = &release.config.installers.npm else {
2349            return Ok(());
2350        };
2351        require_nonempty_installer(release, config)?;
2352        let release_id = &release.id;
2353        let schema_release = self
2354            .manifest
2355            .release_by_name(&release.app_name)
2356            .expect("couldn't find the release!?");
2357        let download_urls = schema_release
2358            .artifact_download_urls()
2359            .expect("couldn't compute a URL to download artifacts from!?");
2360        let hosting = schema_release.hosting.clone();
2361
2362        let app_name = config.package.clone();
2363        let npm_package_name = if let Some(scope) = &config.scope {
2364            if scope.to_ascii_lowercase() != *scope {
2365                return Err(DistError::ScopeMustBeLowercase {
2366                    scope: scope.to_owned(),
2367                });
2368            }
2369
2370            format!("{scope}/{}", app_name)
2371        } else {
2372            app_name.clone()
2373        };
2374        let npm_package_version = release.version.to_string();
2375        let npm_package_desc = release.app_desc.clone();
2376        let npm_package_authors = release.app_authors.clone();
2377        let npm_package_license = release.app_license.clone();
2378        let npm_package_repository_url = release.app_repository_url.clone();
2379        let npm_package_homepage_url = release.app_homepage_url.clone();
2380        let npm_package_keywords = release.app_keywords.clone();
2381
2382        let static_assets = release.static_assets.clone();
2383        let dir_name = format!("{release_id}-npm-package");
2384        let dir_path = self.inner.dist_dir.join(&dir_name);
2385        let zip_style = ZipStyle::Tar(CompressionImpl::Gzip);
2386        let zip_ext = zip_style.ext();
2387        let artifact_name = ArtifactId::new(format!("{dir_name}{zip_ext}"));
2388        let artifact_path = self.inner.dist_dir.join(artifact_name.as_str());
2389        // let installer_url = format!("{download_url}/{artifact_name}");
2390        let hint = format!("npm install {npm_package_name}@{npm_package_version}");
2391        let desc = "Install prebuilt binaries into your npm project".to_owned();
2392
2393        let artifacts = release.platform_support.fragments();
2394        let target_triples = artifacts
2395            .iter()
2396            .map(|a| a.target_triple.clone())
2397            .collect::<Vec<_>>();
2398
2399        if artifacts.is_empty() {
2400            warn!("skipping npm installer: not building any supported platforms (use --artifacts=global)");
2401            return Ok(());
2402        };
2403        let bin_aliases = BinaryAliases(config.bin_aliases.clone()).for_targets(&target_triples);
2404
2405        let runtime_conditions = release.platform_support.safe_conflated_runtime_conditions();
2406
2407        let installer_artifact = Artifact {
2408            id: artifact_name,
2409            target_triples,
2410            archive: Some(Archive {
2411                // npm specifically expects the dir inside the tarball to be called "package"
2412                with_root: Some("package".into()),
2413                dir_path: dir_path.clone(),
2414                zip_style,
2415                static_assets,
2416            }),
2417            file_path: artifact_path.clone(),
2418            required_binaries: FastMap::new(),
2419            checksum: None,
2420            kind: ArtifactKind::Installer(InstallerImpl::Npm(NpmInstallerInfo {
2421                npm_package_name,
2422                npm_package_version,
2423                npm_package_desc,
2424                npm_package_authors,
2425                npm_package_license,
2426                npm_package_repository_url,
2427                npm_package_homepage_url,
2428                npm_package_keywords,
2429                create_shrinkwrap: config.shrinkwrap,
2430                package_dir: dir_path,
2431                inner: InstallerInfo {
2432                    release: to_release,
2433                    dest_path: artifact_path,
2434                    app_name,
2435                    app_version: release.version.to_string(),
2436                    install_paths: config
2437                        .install_path
2438                        .iter()
2439                        .map(|p| p.clone().into_jinja())
2440                        .collect(),
2441                    install_success_msg: config.install_success_msg.to_owned(),
2442                    base_urls: download_urls,
2443                    hosting,
2444                    artifacts,
2445                    hint,
2446                    desc,
2447                    receipt: None,
2448                    bin_aliases,
2449                    install_libraries: config.install_libraries.clone(),
2450                    runtime_conditions,
2451                    platform_support: None,
2452                    // Not actually needed for this installer type
2453                    env_vars: None,
2454                },
2455            })),
2456            is_global: true,
2457        };
2458
2459        self.add_global_artifact(to_release, installer_artifact);
2460        Ok(())
2461    }
2462
    /// Add a windows `.msi` installer for every windows variant of the given Release.
    ///
    /// These are local (per-variant) artifacts; each windows variant gets its own
    /// msi, built via cargo-wix from a `wix/main.wxs` next to the package's
    /// Cargo.toml. All binaries in a variant must come from the same package
    /// (cargo-wix doesn't clearly support multi-package bundles).
    fn add_msi_installer(&mut self, to_release: ReleaseIdx) -> DistResult<()> {
        if !self.local_artifacts_enabled() {
            return Ok(());
        }

        // Clone info we need from the release to avoid borrowing across the loop
        let release = self.release(to_release);
        // FIXME: because we use cargo-wix and cargo-wix's config,
        // msi installers really don't respect any of our own config!
        // (We still look it up because it determines whether enabled or not.)
        let Some(_config) = &release.config.installers.msi else {
            return Ok(());
        };
        // FIXME: MSI installer contents don't actually respect this
        // require_nonempty_installer(release, config)?;
        let variants = release.variants.clone();
        let checksum = self.inner.config.artifacts.checksum;

        // Make an msi for every windows platform
        for variant_idx in variants {
            let variant = self.variant(variant_idx);
            let binaries = variant.binaries.clone();
            let target = &variant.target;
            if !target.is_windows() {
                continue;
            }

            // Per-variant names/paths: the msi itself and a temp dir to build it in
            let variant_id = &variant.id;
            let artifact_name = ArtifactId::new(format!("{variant_id}.msi"));
            let artifact_path = self.inner.dist_dir.join(artifact_name.as_str());
            let dir_name = format!("{variant_id}_msi");
            let dir_path = self.inner.dist_dir.join(&dir_name);

            // Compute which package we're actually building, based on the binaries
            let mut package_info: Option<(String, PackageIdx)> = None;
            for &binary_idx in &binaries {
                let binary = self.binary(binary_idx);
                if let Some((existing_spec, _)) = &package_info {
                    // cargo-wix doesn't clearly support multi-package, so bail
                    if existing_spec != &binary.pkg_spec {
                        return Err(DistError::MultiPackage {
                            artifact_name,
                            spec1: existing_spec.clone(),
                            spec2: binary.pkg_spec.clone(),
                        })?;
                    }
                } else {
                    package_info = Some((binary.pkg_spec.clone(), binary.pkg_idx));
                }
            }
            // No binaries at all means there's no package to make an msi for
            let Some((pkg_spec, pkg_idx)) = package_info else {
                return Err(DistError::NoPackage { artifact_name })?;
            };
            // cargo-wix expects its config at wix/main.wxs next to the Cargo.toml
            let manifest_path = self.workspaces.package(pkg_idx).manifest_path.clone();
            let wxs_path = manifest_path
                .parent()
                .expect("Cargo.toml had no parent dir!?")
                .join("wix")
                .join("main.wxs");

            // Gather up the bundles the installer supports
            let installer_artifact = Artifact {
                id: artifact_name,
                target_triples: vec![target.clone()],
                file_path: artifact_path.clone(),
                required_binaries: FastMap::new(),
                archive: Some(Archive {
                    with_root: None,
                    dir_path: dir_path.clone(),
                    // TempDir: the build dir is scratch space, not itself zipped up
                    zip_style: ZipStyle::TempDir,
                    static_assets: vec![],
                }),
                checksum: None,
                kind: ArtifactKind::Installer(InstallerImpl::Msi(MsiInstallerInfo {
                    package_dir: dir_path.clone(),
                    pkg_spec,
                    target: target.clone(),
                    file_path: artifact_path.clone(),
                    wxs_path,
                    manifest_path,
                })),
                is_global: false,
            };

            // Register the artifact to various things
            let installer_idx = self.add_local_artifact(variant_idx, installer_artifact);
            // The msi needs all the variant's binaries copied into its build dir
            for binary_idx in binaries {
                let binary = self.binary(binary_idx);
                self.require_binary(
                    installer_idx,
                    variant_idx,
                    binary_idx,
                    dir_path.join(&binary.file_name),
                );
            }
            // Optionally checksum the msi and emit an omnibor artifact-id for it
            if checksum != ChecksumStyle::False {
                self.add_artifact_checksum(variant_idx, installer_idx, checksum);
            }
            if self.inner.config.builds.omnibor {
                let omnibor = self.create_omnibor_artifact(installer_idx, false);
                self.add_local_artifact(variant_idx, omnibor);
            }
        }

        Ok(())
    }
2569
2570    fn add_pkg_installer(&mut self, to_release: ReleaseIdx) -> DistResult<()> {
2571        if !self.local_artifacts_enabled() {
2572            return Ok(());
2573        }
2574
2575        // Clone info we need from the release to avoid borrowing across the loop
2576        let release = self.release(to_release);
2577        let Some(config) = release.config.installers.pkg.clone() else {
2578            return Ok(());
2579        };
2580        require_nonempty_installer(release, &config)?;
2581        let version = release.version.clone();
2582        let fragments = release.platform_support.fragments();
2583
2584        let variants = release.variants.clone();
2585        let checksum = self.inner.config.artifacts.checksum;
2586
2587        // Make a pkg for every darwin platform
2588        for variant_idx in variants {
2589            let variant = self.variant(variant_idx);
2590            let binaries = variant.binaries.clone();
2591            let bin_aliases = BinaryAliases(config.bin_aliases.clone());
2592            let target = &variant.target;
2593            if !target.is_darwin() {
2594                continue;
2595            }
2596
2597            let variant_id = &variant.id;
2598            let artifact_name = ArtifactId::new(format!("{variant_id}.pkg"));
2599            let artifact_path = self.inner.dist_dir.join(artifact_name.as_str());
2600            let dir_name = format!("{variant_id}_pkg");
2601            let dir_path = self.inner.dist_dir.join(&dir_name);
2602
2603            // Compute which package we're actually building, based on the binaries
2604            let mut package_info: Option<(String, PackageIdx)> = None;
2605            for &binary_idx in &binaries {
2606                let binary = self.binary(binary_idx);
2607                if let Some((existing_spec, _)) = &package_info {
2608                    // we haven't set ourselves up to bundle multiple packages yet
2609                    if existing_spec != &binary.pkg_spec {
2610                        return Err(DistError::MultiPackage {
2611                            artifact_name,
2612                            spec1: existing_spec.clone(),
2613                            spec2: binary.pkg_spec.clone(),
2614                        })?;
2615                    }
2616                } else {
2617                    package_info = Some((binary.pkg_spec.clone(), binary.pkg_idx));
2618                }
2619            }
2620
2621            let Some(artifact) = fragments
2622                .clone()
2623                .into_iter()
2624                .find(|a| a.target_triple == variant.target)
2625            else {
2626                return Err(DistError::NoPackage { artifact_name })?;
2627            };
2628
2629            let bin_aliases = bin_aliases.for_target(&variant.target);
2630
2631            let identifier = if let Some(id) = &config.identifier {
2632                id.to_owned()
2633            } else {
2634                return Err(DistError::MacPkgBundleIdentifierMissing {});
2635            };
2636
2637            // Gather up the bundles the installer supports
2638            let installer_artifact = Artifact {
2639                id: artifact_name,
2640                target_triples: vec![target.clone()],
2641                file_path: artifact_path.clone(),
2642                required_binaries: FastMap::new(),
2643                archive: Some(Archive {
2644                    with_root: None,
2645                    dir_path: dir_path.clone(),
2646                    zip_style: ZipStyle::TempDir,
2647                    static_assets: vec![],
2648                }),
2649                checksum: None,
2650                kind: ArtifactKind::Installer(InstallerImpl::Pkg(PkgInstallerInfo {
2651                    file_path: artifact_path.clone(),
2652                    artifact,
2653                    package_dir: dir_path.clone(),
2654                    identifier,
2655                    install_location: config.install_location.clone(),
2656                    version: version.to_string(),
2657                    bin_aliases,
2658                })),
2659                is_global: false,
2660            };
2661
2662            // Register the artifact to various things
2663            let installer_idx = self.add_local_artifact(variant_idx, installer_artifact);
2664            for binary_idx in binaries {
2665                let binary = self.binary(binary_idx);
2666                self.require_binary(
2667                    installer_idx,
2668                    variant_idx,
2669                    binary_idx,
2670                    dir_path.join(&binary.file_name),
2671                );
2672            }
2673            if checksum != ChecksumStyle::False {
2674                self.add_artifact_checksum(variant_idx, installer_idx, checksum);
2675            }
2676            if self.inner.config.builds.omnibor {
2677                let omnibor = self.create_omnibor_artifact(installer_idx, false);
2678                self.add_local_artifact(variant_idx, omnibor);
2679            }
2680        }
2681
2682        Ok(())
2683    }
2684
2685    fn add_local_artifact(
2686        &mut self,
2687        to_variant: ReleaseVariantIdx,
2688        artifact: Artifact,
2689    ) -> ArtifactIdx {
2690        assert!(self.local_artifacts_enabled());
2691        assert!(!artifact.is_global);
2692
2693        let idx = ArtifactIdx(self.inner.artifacts.len());
2694        let ReleaseVariant {
2695            local_artifacts, ..
2696        } = self.variant_mut(to_variant);
2697        local_artifacts.push(idx);
2698
2699        self.inner.artifacts.push(artifact);
2700        idx
2701    }
2702
2703    fn add_global_artifact(&mut self, to_release: ReleaseIdx, artifact: Artifact) -> ArtifactIdx {
2704        assert!(self.global_artifacts_enabled());
2705        assert!(artifact.is_global);
2706
2707        let idx = ArtifactIdx(self.inner.artifacts.len());
2708        let Release {
2709            global_artifacts, ..
2710        } = self.release_mut(to_release);
2711        global_artifacts.push(idx);
2712
2713        self.inner.artifacts.push(artifact);
2714        idx
2715    }
2716
2717    fn compute_build_steps(&mut self) -> DistResult<()> {
2718        // FIXME: more intelligently schedule these in a proper graph?
2719
2720        let mut local_build_steps = vec![];
2721        let mut global_build_steps = vec![];
2722
2723        for workspace_idx in self.workspaces.all_workspace_indices() {
2724            let workspace_kind = self.workspaces.workspace(workspace_idx).kind;
2725            let builds = match workspace_kind {
2726                axoproject::WorkspaceKind::Javascript => {
2727                    self.compute_generic_builds(workspace_idx)?
2728                }
2729                axoproject::WorkspaceKind::Generic => self.compute_generic_builds(workspace_idx)?,
2730                axoproject::WorkspaceKind::Rust => self.compute_cargo_builds(workspace_idx)?,
2731            };
2732            local_build_steps.extend(builds);
2733        }
2734        global_build_steps.extend(self.compute_extra_builds());
2735
2736        Self::add_build_steps_for_artifacts(
2737            &self
2738                .inner
2739                .artifacts
2740                .iter()
2741                .filter(|a| !a.is_global)
2742                .collect(),
2743            &mut local_build_steps,
2744        );
2745        Self::add_build_steps_for_artifacts(
2746            &self
2747                .inner
2748                .artifacts
2749                .iter()
2750                .filter(|a| a.is_global)
2751                .collect(),
2752            &mut global_build_steps,
2753        );
2754
2755        self.inner.local_build_steps = local_build_steps;
2756        self.inner.global_build_steps = global_build_steps;
2757
2758        Ok(())
2759    }
2760
    /// Append the BuildSteps needed to produce the given artifacts to `build_steps`.
    ///
    /// Most artifact kinds map to zero or one step (installer generation,
    /// checksums, source tarballs, updater fetches, omnibor ids); zips, symbols,
    /// extra artifacts and SBOMs are handled elsewhere. If an artifact has an
    /// `archive`, extra steps are appended to copy its static assets into the
    /// archive dir, zip the dir up, and record a sha256 in the metadata.
    ///
    /// NOTE(review): `&Vec<&Artifact>` would more idiomatically be `&[&Artifact]`,
    /// but the call sites rely on this type to infer their `collect()` target.
    fn add_build_steps_for_artifacts(artifacts: &Vec<&Artifact>, build_steps: &mut Vec<BuildStep>) {
        for artifact in artifacts {
            match &artifact.kind {
                ArtifactKind::ExecutableZip(_zip) => {
                    // compute_cargo_builds and artifact.archive handle everything
                }
                ArtifactKind::Symbols(symbols) => {
                    match symbols.kind {
                        SymbolKind::Pdb => {
                            // No additional steps needed, the file is PERFECT (for now)
                        }
                        SymbolKind::Dsym => {
                            // FIXME: compress the dSYM in a .tar.xz, it's a actually a directory!
                        }
                        SymbolKind::Dwp => {
                            // No additional steps needed?
                        }
                    }
                }
                ArtifactKind::Installer(installer) => {
                    // Installer generation is complex enough that they just get monolithic impls
                    build_steps.push(BuildStep::GenerateInstaller(installer.clone()));
                }
                ArtifactKind::Checksum(checksum) => {
                    build_steps.push(BuildStep::Checksum(checksum.clone()));
                }
                ArtifactKind::UnifiedChecksum(unified_checksum) => {
                    build_steps.push(BuildStep::UnifiedChecksum(unified_checksum.clone()));
                }
                ArtifactKind::SourceTarball(tarball) => {
                    build_steps.push(BuildStep::GenerateSourceTarball(SourceTarballStep {
                        committish: tarball.committish.to_owned(),
                        prefix: tarball.prefix.to_owned(),
                        target: tarball.target.to_owned(),
                        working_dir: tarball.working_dir.to_owned(),
                        recursive: tarball.recursive,
                    }));
                }
                ArtifactKind::ExtraArtifact(_) => {
                    // compute_extra_builds handles this
                }
                ArtifactKind::Updater(UpdaterImpl { use_latest }) => {
                    build_steps.push(BuildStep::Updater(UpdaterStep {
                        // There should only be one triple per artifact
                        target_triple: artifact.target_triples.first().unwrap().to_owned(),
                        target_filename: artifact.file_path.to_owned(),
                        use_latest: *use_latest,
                    }))
                }
                ArtifactKind::SBOM(_) => {
                    // The SBOM is already generated.
                }
                ArtifactKind::OmniborArtifactId(src) => {
                    // The omnibor id lands next to the source file, with
                    // ".omnibor" appended to its extension (foo.tar.gz -> foo.gz.omnibor)
                    let src_path = src.src_path.clone();
                    let old_extension = src_path.extension().unwrap_or("");
                    let dest_path = src_path.with_extension(format!("{}.omnibor", old_extension));

                    build_steps.push(BuildStep::OmniborArtifactId(OmniborArtifactIdImpl {
                        src_path,
                        dest_path,
                    }));
                }
            }

            if let Some(archive) = &artifact.archive {
                let artifact_dir = &archive.dir_path;
                // Copy all the static assets
                for (_, src_path) in &archive.static_assets {
                    let src_path = src_path.clone();
                    let file_name = src_path.file_name().unwrap();
                    let dest_path = artifact_dir.join(file_name);
                    // We want to let this path be created by build.rs, so we defer
                    // checking if it's a file or a dir until the last possible second
                    build_steps.push(BuildStep::CopyFileOrDir(CopyStep {
                        src_path,
                        dest_path,
                    }))
                }

                // Zip up the artifact
                build_steps.push(BuildStep::Zip(ZipDirStep {
                    src_path: artifact_dir.to_owned(),
                    dest_path: artifact.file_path.clone(),
                    with_root: archive.with_root.clone(),
                    zip_style: archive.zip_style,
                }));
                // and get its sha256 checksum into the metadata
                build_steps.push(BuildStep::Checksum(ChecksumImpl {
                    checksum: ChecksumStyle::Sha256,
                    src_path: artifact.file_path.clone(),
                    dest_path: None,
                    for_artifact: Some(artifact.id.clone()),
                }))
            }
        }
    }
2857
2858    fn validate_distable_packages(&self, announcing: &AnnouncementTag) -> DistResult<()> {
2859        for release in &announcing.rust_releases {
2860            let package = self.workspaces.package(release.package_idx);
2861            let workspace_idx = self.workspaces.workspace_for_package(release.package_idx);
2862            let package_workspace = self.workspaces.workspace(workspace_idx);
2863            let package_kind = package_workspace.kind;
2864            if announcing.package.is_none() {
2865                match package_kind {
2866                    axoproject::WorkspaceKind::Generic | axoproject::WorkspaceKind::Javascript => {
2867                        if let Some(build_command) = &package.build_command {
2868                            if build_command.len() == 1
2869                                && build_command.first().unwrap().contains(' ')
2870                            {
2871                                return Err(DistError::SusBuildCommand {
2872                                    manifest: package
2873                                        .dist_manifest_path
2874                                        .clone()
2875                                        .unwrap_or_else(|| package.manifest_path.clone()),
2876                                    command: build_command[0].clone(),
2877                                });
2878                            } else if build_command.is_empty() {
2879                                return Err(DistError::NoBuildCommand {
2880                                    manifest: package
2881                                        .dist_manifest_path
2882                                        .clone()
2883                                        .unwrap_or_else(|| package.manifest_path.clone()),
2884                                });
2885                            }
2886                        } else if package_kind == axoproject::WorkspaceKind::Javascript {
2887                            return Err(DistError::NoDistScript {
2888                                manifest: package.manifest_path.clone(),
2889                            });
2890                        } else {
2891                            return Err(DistError::NoBuildCommand {
2892                                manifest: package
2893                                    .dist_manifest_path
2894                                    .clone()
2895                                    .unwrap_or_else(|| package.manifest_path.clone()),
2896                            });
2897                        }
2898                    }
2899                    axoproject::WorkspaceKind::Rust => {
2900                        if package.build_command.is_some() {
2901                            return Err(DistError::UnexpectedBuildCommand {
2902                                manifest: package
2903                                    .dist_manifest_path
2904                                    .clone()
2905                                    .unwrap_or_else(|| package.manifest_path.clone()),
2906                            });
2907                        }
2908                    }
2909                }
2910            }
2911        }
2912        Ok(())
2913    }
2914
2915    fn compute_releases(
2916        &mut self,
2917        cfg: &Config,
2918        announcing: &AnnouncementTag,
2919        triples: &[TripleName],
2920        bypass_package_target_prefs: bool,
2921    ) -> DistResult<()> {
2922        // Create a Release for each package
2923        for info in &announcing.rust_releases {
2924            // FIXME: this clone is hacky but I'm in the middle of a nasty refactor
2925            let app_config = self.package_config(info.package_idx).clone();
2926
2927            // Create a Release for this binary
2928            let release = self.add_release(info.package_idx);
2929
2930            // Don't bother with any of this without binaries
2931            // or C libraries
2932            // (releases a Rust library, nothing to Build)
2933            if info.executables.is_empty() && info.cdylibs.is_empty() && info.cstaticlibs.is_empty()
2934            {
2935                continue;
2936            }
2937
2938            // Tell the Release to include these binaries
2939            for binary in &info.executables {
2940                self.add_binary(release, info.package_idx, binary.to_owned());
2941            }
2942
2943            for lib in &info.cdylibs {
2944                self.add_library(release, info.package_idx, lib.to_owned());
2945            }
2946
2947            for lib in &info.cstaticlibs {
2948                self.add_static_library(release, info.package_idx, lib.to_owned());
2949            }
2950
2951            // Create variants for this Release for each target
2952            for target in triples {
2953                // This logic ensures that (outside of host mode) we only select targets that are a
2954                // subset of the ones the package claims to support
2955                let use_target =
2956                    bypass_package_target_prefs || app_config.targets.iter().any(|t| t == target);
2957                if !use_target {
2958                    continue;
2959                }
2960
2961                // Create the variant
2962                let variant = self.add_variant(release, target.clone())?;
2963
2964                if self.inner.config.installers.updater {
2965                    self.add_updater(variant);
2966                }
2967            }
2968            // Add executable zips to the Release
2969            self.add_executable_zip(release);
2970
2971            // Get initial platform support for installers to use
2972            self.compute_platform_support(release);
2973
2974            // Add the source tarball if appropriate
2975            self.add_source_tarball(&announcing.tag, release);
2976
2977            // Add any extra artifacts defined in the config
2978            self.add_extra_artifacts(&app_config, release);
2979
2980            // Add installers to the Release
2981            // Prefer the CLI's choices (`cfg`) if they're non-empty
2982            let installers = if cfg.installers.is_empty() {
2983                &[
2984                    InstallerStyle::Shell,
2985                    InstallerStyle::Powershell,
2986                    InstallerStyle::Homebrew,
2987                    InstallerStyle::Npm,
2988                    InstallerStyle::Msi,
2989                    InstallerStyle::Pkg,
2990                ]
2991            } else {
2992                &cfg.installers[..]
2993            };
2994
2995            for installer in installers {
2996                match installer {
2997                    InstallerStyle::Shell => self.add_shell_installer(release)?,
2998                    InstallerStyle::Powershell => self.add_powershell_installer(release)?,
2999                    InstallerStyle::Homebrew => self.add_homebrew_installer(release)?,
3000                    InstallerStyle::Npm => self.add_npm_installer(release)?,
3001                    InstallerStyle::Msi => self.add_msi_installer(release)?,
3002                    InstallerStyle::Pkg => self.add_pkg_installer(release)?,
3003                }
3004            }
3005
3006            // Add SBOM file, if it exists.
3007            self.add_cyclonedx_sbom_file(info.package_idx, release);
3008
3009            // Add the unified checksum file
3010            if self.inner.config.artifacts.checksum != ChecksumStyle::False {
3011                self.add_unified_checksum_file(release);
3012            }
3013        }
3014
3015        // Translate the result to DistManifest
3016        crate::manifest::add_releases_to_manifest(cfg, &self.inner, &mut self.manifest)?;
3017
3018        Ok(())
3019    }
3020
3021    fn compute_ci(&mut self) -> DistResult<()> {
3022        let CiConfig { github } = &self.inner.config.ci;
3023
3024        let mut has_ci = false;
3025        if let Some(github_config) = github {
3026            has_ci = true;
3027            self.inner.ci.github = Some(GithubCiInfo::new(&self.inner, github_config)?);
3028        }
3029
3030        // apply to manifest
3031        if has_ci {
3032            let CiInfo { github } = &self.inner.ci;
3033            let github = github.as_ref().map(|info| {
3034                let external_repo_commit = info
3035                    .github_release
3036                    .as_ref()
3037                    .and_then(|r| r.external_repo_commit.clone());
3038                cargo_dist_schema::GithubCiInfo {
3039                    artifacts_matrix: Some(info.artifacts_matrix.clone()),
3040                    pr_run_mode: Some(info.pr_run_mode),
3041                    external_repo_commit,
3042                }
3043            });
3044
3045            self.manifest.ci = Some(cargo_dist_schema::CiInfo { github });
3046        }
3047
3048        Ok(())
3049    }
3050
    /// Compute the initial platform support for a Release and store it on the
    /// Release itself, so later steps (e.g. installers) can read it back.
    fn compute_platform_support(&mut self, release: ReleaseIdx) {
        let support = PlatformSupport::new(self, release);
        self.release_mut(release).platform_support = support;
    }
3055
    /// Get a binary
    pub(crate) fn binary(&self, idx: BinaryIdx) -> &Binary {
        &self.inner.binaries[idx.0]
    }
    /// Get a mutable binary
    pub(crate) fn binary_mut(&mut self, idx: BinaryIdx) -> &mut Binary {
        &mut self.inner.binaries[idx.0]
    }
    /// Get an artifact
    pub(crate) fn artifact(&self, idx: ArtifactIdx) -> &Artifact {
        &self.inner.artifacts[idx.0]
    }
    /// Get a mutable artifact
    pub(crate) fn artifact_mut(&mut self, idx: ArtifactIdx) -> &mut Artifact {
        &mut self.inner.artifacts[idx.0]
    }
    /// Get a release
    pub(crate) fn release(&self, idx: ReleaseIdx) -> &Release {
        &self.inner.releases[idx.0]
    }
    /// Get a mutable release
    pub(crate) fn release_mut(&mut self, idx: ReleaseIdx) -> &mut Release {
        &mut self.inner.releases[idx.0]
    }
    /// Get a release variant
    pub(crate) fn variant(&self, idx: ReleaseVariantIdx) -> &ReleaseVariant {
        &self.inner.variants[idx.0]
    }
    /// Get a mutable release variant
    pub(crate) fn variant_mut(&mut self, idx: ReleaseVariantIdx) -> &mut ReleaseVariant {
        &mut self.inner.variants[idx.0]
    }
3080    pub(crate) fn local_artifacts_enabled(&self) -> bool {
3081        match self.artifact_mode {
3082            ArtifactMode::Local => true,
3083            ArtifactMode::Global => false,
3084            ArtifactMode::Host => true,
3085            ArtifactMode::All => true,
3086            ArtifactMode::Lies => true,
3087        }
3088    }
3089    pub(crate) fn global_artifacts_enabled(&self) -> bool {
3090        match self.artifact_mode {
3091            ArtifactMode::Local => false,
3092            ArtifactMode::Global => true,
3093            ArtifactMode::Host => true,
3094            ArtifactMode::All => true,
3095            ArtifactMode::Lies => true,
3096        }
3097    }
3098
    /// Get the resolved app-level config for a package
    pub(crate) fn package_config(&self, pkg_idx: PackageIdx) -> &AppConfig {
        &self.package_configs[pkg_idx.0]
    }
3102}
3103
impl DistGraph {
    /// Get a binary
    pub fn binary(&self, idx: BinaryIdx) -> &Binary {
        &self.binaries[idx.0]
    }
    /// Get an artifact
    pub fn artifact(&self, idx: ArtifactIdx) -> &Artifact {
        &self.artifacts[idx.0]
    }
    /// Get a release
    pub fn release(&self, idx: ReleaseIdx) -> &Release {
        &self.releases[idx.0]
    }
    /// Get a variant
    pub fn variant(&self, idx: ReleaseVariantIdx) -> &ReleaseVariant {
        &self.variants[idx.0]
    }
}
3122
/// Precompute all the work this invocation will need to do
///
/// On success, yields the finished [`DistGraph`][] and the [`DistManifest`][]
/// describing it.
///
/// # Errors
///
/// Returns [`DistError::CliMissingTargets`][] when no targets are specified on
/// the CLI and the packages declare none; also propagates failures from
/// workspace analysis and from each computation step below.
pub fn gather_work(cfg: &Config) -> DistResult<(DistGraph, DistManifest)> {
    info!("analyzing workspace:");
    let tools = tool_info()?;
    let mut workspaces = crate::config::get_project()?;
    // An identifier for this invocation: root command + artifact mode + targets
    let system_id = format!(
        "{}:{}:{}",
        cfg.root_cmd,
        cfg.artifact_mode,
        cfg.targets.join(",")
    );
    let mut graph = DistGraphBuilder::new(
        system_id,
        tools,
        &mut workspaces,
        cfg.artifact_mode,
        cfg.allow_all_dirty,
        matches!(cfg.tag_settings.tag, TagMode::Infer),
    )?;

    // If no targets were specified, just use the host target
    let host_target_triple = [graph.inner.tools.host_target.clone()];
    // If all targets specified, union together the targets our packages support
    // Note that this uses BTreeSet as an intermediate to make the order stable
    let all_target_triples = graph
        .workspaces
        .all_packages()
        .flat_map(|(id, _)| &graph.package_config(id).targets)
        .collect::<SortedSet<_>>()
        .into_iter()
        .cloned()
        .collect::<Vec<_>>();

    // Choose which set of target triples we're building for
    let mut bypass_package_target_prefs = false;
    let triples = if cfg.targets.is_empty() {
        if matches!(cfg.artifact_mode, ArtifactMode::Host) {
            info!("using host target-triple");
            // In "host" mode we want to build for the host arch regardless of what the
            // packages claim they support.
            //
            // FIXME: may cause sadness for "truly platform-specific" bins like a windows-only util
            // FIXME: it would be nice to do "easy" crosses like x64 mac => arm64 + universal2
            bypass_package_target_prefs = true;
            &host_target_triple
        } else if all_target_triples.is_empty() {
            // Nothing on the CLI and nothing in package configs: we can't guess
            return Err(DistError::CliMissingTargets {
                host_target: graph.inner.tools.host_target.clone(),
            });
        } else {
            info!("using all target-triples");
            // Otherwise assume the user wants all targets (desirable for --artifacts=global)
            &all_target_triples[..]
        }
    } else {
        info!("using explicit target-triples");
        // If the CLI has explicit targets, only use those!
        &cfg.targets[..]
    };
    info!("selected triples: {:?}", triples);

    // Figure out what packages we're announcing
    let announcing = announce::select_tag(&mut graph, &cfg.tag_settings)?;

    graph.validate_distable_packages(&announcing)?;

    // Immediately check if there's other manifests kicking around that provide info
    // we don't want to recompute (lets us move towards more of an architecture where
    // `plan` figures out what to do and subsequent steps Simply Obey).
    crate::manifest::load_and_merge_manifests(
        &graph.inner.dist_dir,
        &mut graph.manifest,
        &announcing,
    )?;

    // Figure out how artifacts should be hosted
    graph.compute_hosting(cfg, &announcing)?;

    // Figure out what we're releasing/building
    graph.compute_releases(cfg, &announcing, triples, bypass_package_target_prefs)?;

    // Prep the announcement's release notes and whatnot
    graph.compute_announcement_info(&announcing);

    // Finally compute all the build steps!
    graph.compute_build_steps()?;

    // And now figure out how to orchestrate the result in CI
    graph.compute_ci()?;

    Ok((graph.inner, graph.manifest))
}
3215
3216/// Get the path/command to invoke Cargo
3217pub fn cargo() -> DistResult<String> {
3218    let cargo = std::env::var("CARGO").unwrap_or_else(|_| "cargo".to_owned());
3219    Ok(cargo)
3220}
3221
3222/// Get the host target triple from cargo
3223pub fn get_cargo_info(cargo: String) -> DistResult<CargoInfo> {
3224    let mut command = Cmd::new(&cargo, "get your Rust toolchain's version");
3225    command.arg("-vV");
3226    let output = command.output()?;
3227    let output = String::from_utf8(output.stdout).map_err(|_| DistError::FailedCargoVersion)?;
3228    let mut lines = output.lines();
3229    let version_line = lines.next().map(|s| s.to_owned());
3230    for line in lines {
3231        if let Some(target) = line.strip_prefix("host: ") {
3232            info!("host target is {target}");
3233            return Ok(CargoInfo {
3234                cmd: cargo,
3235                version_line,
3236                host_target: TripleName::new(target.to_owned()),
3237            });
3238        }
3239    }
3240    Err(DistError::FailedCargoVersion)
3241}
3242
/// Which debug-symbol artifact (if any) a target triple can produce.
///
/// Currently every branch returns `None` — symbol support is disabled on all
/// platforms pending a redesign — but the branch structure and the
/// commented-out values are kept to record what each platform *would* produce
/// once re-enabled.
fn target_symbol_kind(target: &TripleNameRef) -> Option<SymbolKind> {
    // All branches intentionally return the same value right now, hence the allow
    #[allow(clippy::if_same_then_else)]
    if target.is_windows_msvc() {
        // Temporary disabled pending redesign of symbol handling!

        // Some(SymbolKind::Pdb)
        None
    } else if target.is_apple() {
        // Macos dSYM files are real and work but things
        // freak out because it turns out they're directories
        // and not "real" files? Temporarily disabling this
        // until I have time to figure out what to do

        // Some(SymbolKind::Dsym)
        None
    } else {
        // Linux has DWPs but cargo doesn't properly uplift them
        // See: https://github.com/rust-lang/cargo/pull/11384

        // Some(SymbolKind::Dwp)
        None
    }
}
3266
3267fn tool_info() -> DistResult<Tools> {
3268    let cargo = if let Ok(cargo_cmd) = cargo() {
3269        get_cargo_info(cargo_cmd).ok()
3270    } else {
3271        None
3272    };
3273    Ok(Tools {
3274        host_target: TripleName::new(current_platform::CURRENT_PLATFORM.to_owned()),
3275        cargo,
3276        rustup: find_tool("rustup", "-V"),
3277        brew: find_tool("brew", "--version"),
3278        git: find_tool("git", "--version"),
3279        omnibor: find_tool("omnibor", "--version"),
3280        // Computed later if needed
3281        code_sign_tool: None,
3282
3283        // NOTE: This doesn't actually give us cargo-auditable's version info,
3284        // but it does confirm it's installed, which is what we care about.
3285        cargo_auditable: find_cargo_subcommand("cargo", "auditable", "--version"),
3286
3287        cargo_cyclonedx: find_cargo_subcommand("cargo", "cyclonedx", "--version"),
3288        cargo_xwin: find_cargo_subcommand("cargo", "xwin", "--version"),
3289        cargo_zigbuild: find_tool("cargo-zigbuild", "--version"),
3290    })
3291}
3292
3293fn find_cargo_subcommand(name: &str, arg: &str, test_flag: &str) -> Option<Tool> {
3294    let output = Cmd::new(name, "detect tool")
3295        .arg(arg)
3296        .arg(test_flag)
3297        .check(false)
3298        .output()
3299        .ok()?;
3300    let string_output = String::from_utf8(output.stdout).ok()?;
3301    let version = string_output.lines().next()?;
3302    Some(Tool {
3303        cmd: format!("{} {}", name, arg),
3304        version: version.to_owned(),
3305    })
3306}
3307
3308fn find_tool(name: &str, test_flag: &str) -> Option<Tool> {
3309    let output = Cmd::new(name, "detect tool")
3310        .arg(test_flag)
3311        .check(false)
3312        .output()
3313        .ok()?;
3314    let string_output = String::from_utf8(output.stdout).ok()?;
3315    let version = string_output.lines().next()?;
3316    Some(Tool {
3317        cmd: name.to_owned(),
3318        version: version.to_owned(),
3319    })
3320}
3321
/// Represents the source for the canonical form of this app's releases
///
/// Serialized in lowercase (`"github"` / `"axo"`) per the serde attribute.
#[derive(Clone, Debug, Serialize)]
#[serde(rename_all = "lowercase")]
pub enum ReleaseSourceType {
    /// GitHub Releases
    GitHub,
    /// Axo releases
    Axo,
}
3331
/// Where to look up releases for this app
#[derive(Clone, Debug, Serialize)]
pub struct ReleaseSource {
    /// Which type of remote resource to look up
    pub release_type: ReleaseSourceType,
    /// The owner, from the owner/name format (e.g. the GitHub org/user)
    pub owner: String,
    /// The name, from the owner/name format (e.g. the GitHub repo name)
    pub name: String,
    /// The app's name (a repo may host several apps, so this can differ from `name`)
    pub app_name: String,
}
3344
/// The software which installed this receipt
///
/// Serialized in kebab-case (`"cargo-dist"`) per the serde attribute.
#[derive(Clone, Debug, Serialize)]
#[serde(rename_all = "kebab-case")]
pub enum ProviderSource {
    /// cargo-dist
    CargoDist,
}
3352
/// Information about the source of this receipt
#[derive(Clone, Debug, Serialize)]
pub struct Provider {
    /// The software this receipt was installed via
    pub source: ProviderSource,
    /// The version of the above software (for CargoDist, dist's own crate version)
    pub version: String,
}
3361
/// Which style of installation layout this app uses
#[derive(Clone, Debug, Serialize)]
#[serde(rename_all = "kebab-case")]
pub enum InstallLayout {
    /// Not specified; will be determined later
    /// (used as the placeholder value when a receipt is first generated)
    Unspecified,
    /// All files are in a single directory
    Flat,
    /// Separated into file type-specific directories
    Hierarchical,
    /// Like Hierarchical, but with only a bin subdirectory
    CargoHome,
}
3375
/// Struct representing an install receipt
///
/// Note: when generated via [`InstallReceipt::from_metadata`][], several fields
/// (prefix, layout, binaries, libraries) hold placeholder tokens that the
/// installer substitutes at install time.
#[derive(Clone, Debug, Serialize)]
pub struct InstallReceipt {
    /// The location on disk where this app was installed
    pub install_prefix: String,
    /// The layout within the above prefix
    pub install_layout: InstallLayout,
    /// A list of all binaries installed by this app
    pub binaries: Vec<String>,
    /// A list of all C dynamic libraries installed by this app
    pub cdylibs: Vec<String>,
    /// A list of all C static libraries installed by this app
    pub cstaticlibs: Vec<String>,
    /// Information about where to request information on new releases
    pub source: ReleaseSource,
    /// The version that was installed
    pub version: String,
    /// The software which installed this receipt
    pub provider: Provider,
    /// A list of aliases binaries were installed under
    pub binary_aliases: BTreeMap<String, Vec<String>>,
    /// Whether or not to modify system paths when installing
    pub modify_path: bool,
}
3400
3401impl InstallReceipt {
3402    /// Produces an install receipt for the given DistGraph.
3403    pub fn from_metadata(
3404        manifest: &DistGraph,
3405        release: &Release,
3406    ) -> DistResult<Option<InstallReceipt>> {
3407        let hosting = if let Some(hosting) = &manifest.hosting {
3408            hosting
3409        } else {
3410            return Ok(None);
3411        };
3412        let source_type = if hosting.hosts.contains(&HostingStyle::Github) {
3413            ReleaseSourceType::GitHub
3414        } else {
3415            return Err(DistError::NoGitHubHosting {});
3416        };
3417
3418        Ok(Some(InstallReceipt {
3419            // These first five are placeholder values which the installer will update
3420            install_prefix: "AXO_INSTALL_PREFIX".to_owned(),
3421            install_layout: InstallLayout::Unspecified,
3422            binaries: vec!["CARGO_DIST_BINS".to_owned()],
3423            cdylibs: vec!["CARGO_DIST_DYLIBS".to_owned()],
3424            cstaticlibs: vec!["CARGO_DIST_STATICLIBS".to_owned()],
3425            version: release.version.to_string(),
3426            source: ReleaseSource {
3427                release_type: source_type,
3428                owner: hosting.owner.to_owned(),
3429                name: hosting.project.to_owned(),
3430                app_name: release.app_name.to_owned(),
3431            },
3432            provider: Provider {
3433                source: ProviderSource::CargoDist,
3434                version: env!("CARGO_PKG_VERSION").to_owned(),
3435            },
3436            binary_aliases: BTreeMap::default(),
3437            modify_path: true,
3438        }))
3439    }
3440}
3441
3442fn require_nonempty_installer(release: &Release, config: &CommonInstallerConfig) -> DistResult<()> {
3443    if config.install_libraries.is_empty() && release.bins.is_empty() {
3444        Err(DistError::EmptyInstaller {})
3445    } else {
3446        Ok(())
3447    }
3448}