use super::make_client;
use crate::progress::InstallProgress;
use crate::state;
use aube_lockfile::DriftStatus;
use aube_lockfile::dep_path_filename::dep_path_to_filename;
use miette::{Context, IntoDiagnostic, miette};
use rayon::prelude::*;
use std::collections::BTreeMap;
mod frozen;
mod settings;
mod side_effects_cache;
pub use frozen::{FrozenMode, GlobalFrozenFlags, GlobalVirtualStoreFlags};
pub(crate) use settings::PeerDependencyRules;
pub(crate) use side_effects_cache::{SideEffectsCacheConfig, side_effects_cache_root};
use settings::{
ResolverConfigInputs, configure_resolver, default_lockfile_network_concurrency,
default_streaming_network_concurrency, find_gvs_incompatible_trigger,
maybe_cleanup_unused_catalogs, resolve_dedupe_peer_dependents, resolve_dedupe_peers,
resolve_git_shallow_hosts, resolve_link_concurrency, resolve_network_concurrency,
resolve_peers_from_workspace_root, resolve_peers_suffix_max_length, resolve_side_effects_cache,
resolve_side_effects_cache_readonly, resolve_strict_peer_dependencies,
resolve_strict_store_pkg_content_check, resolve_symlink, resolve_use_running_store_server,
resolve_verify_store_integrity, warn_unmet_peers,
};
use side_effects_cache::{SideEffectsCacheEntry, SideEffectsCacheRestore};
/// Flags accepted by the `install` command.
///
/// Paired on/off flags (`--side-effects-cache`/`--no-side-effects-cache`,
/// `--verify-store-integrity`/`--no-verify-store-integrity`) use clap's
/// `overrides_with` so the last occurrence on the command line wins;
/// genuinely incompatible flags use `conflicts_with`.
#[derive(Debug, clap::Args)]
pub struct InstallArgs {
    // Dev-dependency mode; forwarded to `InstallOptions::dev`.
    #[arg(short = 'D', long, conflicts_with = "prod")]
    pub dev: bool,
    // Production mode; forwarded to `InstallOptions::prod`.
    #[arg(short = 'P', long, visible_alias = "production")]
    pub prod: bool,
    // Allow every dependency's build scripts; feeds the BuildPolicy.
    #[arg(long)]
    pub dangerously_allow_all_builds: bool,
    // Selects FrozenMode::Fix; incompatible with all frozen-lockfile flags.
    #[arg(long, conflicts_with_all = ["frozen_lockfile", "no_frozen_lockfile", "prefer_frozen_lockfile"])]
    pub fix_lockfile: bool,
    // Clears cached install state and (unless a frozen flag was given
    // explicitly) disables frozen mode — see `into_options`.
    #[arg(long)]
    pub force: bool,
    #[arg(long)]
    pub ignore_pnpmfile: bool,
    // Skip all lifecycle scripts (root hooks, dep hooks, git `prepare`).
    #[arg(long)]
    pub ignore_scripts: bool,
    // Write the lockfile without materializing node_modules.
    #[arg(long, conflicts_with = "frozen_lockfile")]
    pub lockfile_only: bool,
    // Force a branch-lockfile merge even if the branch pattern doesn't match.
    #[arg(long)]
    pub merge_git_branch_lockfiles: bool,
    // Overrides the resolved network-concurrency setting.
    #[arg(long, value_name = "N")]
    pub network_concurrency: Option<u64>,
    #[arg(long)]
    pub no_optional: bool,
    #[arg(long, overrides_with = "side_effects_cache")]
    pub no_side_effects_cache: bool,
    #[arg(long, overrides_with = "verify_store_integrity")]
    pub no_verify_store_integrity: bool,
    // Forwarded as the `node-linker` setting (string, validated downstream).
    #[arg(long, value_name = "MODE")]
    pub node_linker: Option<String>,
    // NetworkMode::Offline; mutually exclusive with --prefer-offline.
    #[arg(long, conflicts_with = "prefer_offline")]
    pub offline: bool,
    // Forwarded as `package-import-method`; parsed in resolve_link_strategy.
    #[arg(long, value_name = "METHOD")]
    pub package_import_method: Option<String>,
    #[arg(long, conflicts_with = "offline")]
    pub prefer_offline: bool,
    // Comma-separated glob list; each value becomes its own settings entry.
    #[arg(long, value_name = "GLOB", value_delimiter = ',')]
    pub public_hoist_pattern: Vec<String>,
    #[arg(long, value_name = "MODE")]
    pub resolution_mode: Option<String>,
    #[arg(long)]
    pub shamefully_hoist: bool,
    #[arg(long, overrides_with = "no_side_effects_cache")]
    pub side_effects_cache: bool,
    #[arg(long, overrides_with = "no_verify_store_integrity")]
    pub verify_store_integrity: bool,
    // Hidden pnpm-compat `-w` flag; handled by the caller — TODO confirm
    // its exact effect, it is not consumed anywhere in this module.
    #[arg(short = 'w', hide = true)]
    pub workspace_root_short: bool,
}
impl InstallArgs {
    /// Flatten the install flags into (setting-key, value) pairs, merging
    /// in the shared frozen-lockfile and global-virtual-store flags so the
    /// settings resolver sees one uniform CLI bag.
    pub fn to_cli_flag_bag(
        &self,
        global: GlobalFrozenFlags,
        global_gvs: GlobalVirtualStoreFlags,
    ) -> Vec<(String, String)> {
        // Tiny constructor so every entry below reads as a key/value pair.
        fn kv(key: &str, value: impl ToString) -> (String, String) {
            (key.to_string(), value.to_string())
        }
        let mut bag: Vec<(String, String)> = Vec::new();
        if let Some(mode) = self.resolution_mode.as_deref() {
            bag.push(kv("resolution-mode", mode));
        }
        if let Some(linker) = self.node_linker.as_deref() {
            bag.push(kv("node-linker", linker));
        }
        if let Some(method) = self.package_import_method.as_deref() {
            bag.push(kv("package-import-method", method));
        }
        for pattern in &self.public_hoist_pattern {
            bag.push(kv("public-hoist-pattern", pattern));
        }
        if self.shamefully_hoist {
            bag.push(kv("shamefully-hoist", "true"));
        }
        bag.extend(global_gvs.to_cli_flag_bag());
        if global.frozen {
            bag.push(kv("frozen-lockfile", "true"));
        }
        if global.no_frozen {
            bag.push(kv("frozen-lockfile", "false"));
        }
        if global.prefer_frozen {
            bag.push(kv("prefer-frozen-lockfile", "true"));
        }
        if let Some(n) = self.network_concurrency {
            bag.push(kv("network-concurrency", n));
        }
        if self.verify_store_integrity {
            bag.push(kv("verify-store-integrity", "true"));
        }
        if self.no_verify_store_integrity {
            bag.push(kv("verify-store-integrity", "false"));
        }
        if self.side_effects_cache {
            bag.push(kv("side-effects-cache", "true"));
        }
        if self.no_side_effects_cache {
            bag.push(kv("side-effects-cache", "false"));
        }
        bag
    }

    /// Combine parsed flags, global frozen flags, and pre-captured CLI/env
    /// snapshots into a fully-populated [`InstallOptions`].
    pub fn into_options(
        self,
        global: GlobalFrozenFlags,
        yaml_prefer_frozen: Option<bool>,
        cli_flags: Vec<(String, String)>,
        env_snapshot: Vec<(String, String)>,
    ) -> InstallOptions {
        let force = self.force;
        let any_frozen_flag = global.frozen || global.no_frozen || global.prefer_frozen;
        // --fix-lockfile wins outright; --force disables frozen behavior
        // unless the user asked for a frozen mode explicitly.
        let mode = if self.fix_lockfile {
            FrozenMode::Fix
        } else if force && !any_frozen_flag {
            FrozenMode::No
        } else {
            FrozenMode::from_flags(
                global.frozen,
                global.no_frozen,
                global.prefer_frozen,
                yaml_prefer_frozen,
            )
        };
        // clap marks --offline and --prefer-offline as conflicting, so at
        // most one arm below is reachable with both true.
        let network_mode = match (self.offline, self.prefer_offline) {
            (true, _) => aube_registry::NetworkMode::Offline,
            (false, true) => aube_registry::NetworkMode::PreferOffline,
            (false, false) => aube_registry::NetworkMode::Online,
        };
        InstallOptions {
            project_dir: None,
            mode,
            prod: self.prod,
            dev: self.dev,
            no_optional: self.no_optional,
            ignore_pnpmfile: self.ignore_pnpmfile,
            ignore_scripts: self.ignore_scripts,
            lockfile_only: self.lockfile_only,
            merge_git_branch_lockfiles: self.merge_git_branch_lockfiles,
            dangerously_allow_all_builds: self.dangerously_allow_all_builds,
            network_mode,
            minimum_release_age_override: None,
            // --frozen-lockfile also means "error when no lockfile exists".
            strict_no_lockfile: global.frozen,
            force,
            cli_flags,
            env_snapshot,
            git_prepare_depth: 0,
            workspace_filter: aube_workspace::selector::EffectiveFilter::default(),
        }
    }
}
/// Fully-resolved options driving one `install` run.
///
/// Built from CLI flags via [`InstallArgs::into_options`], or
/// programmatically via [`InstallOptions::with_mode`] / `From<FrozenMode>`.
#[derive(Debug, Clone)]
pub struct InstallOptions {
    // Explicit project directory; when None, `run` walks up from the cwd
    // to the nearest package.json.
    pub project_dir: Option<std::path::PathBuf>,
    // Frozen-lockfile behavior for this run.
    pub mode: FrozenMode,
    pub prod: bool,
    pub dev: bool,
    pub no_optional: bool,
    pub ignore_pnpmfile: bool,
    // Skips root hooks, dep hooks, and git-dep `prepare` scripts.
    pub ignore_scripts: bool,
    // Resolve + write the lockfile without materializing node_modules.
    pub lockfile_only: bool,
    // Force merging aube-lock.*.yaml branch lockfiles (also triggered by
    // branch-pattern settings — see `run`).
    pub merge_git_branch_lockfiles: bool,
    pub dangerously_allow_all_builds: bool,
    // Online / PreferOffline / Offline registry access.
    pub network_mode: aube_registry::NetworkMode,
    pub minimum_release_age_override: Option<u64>,
    // Set by --frozen-lockfile: a missing lockfile is an error, and the
    // flag is rejected when lockfile=false (see `run`).
    pub strict_no_lockfile: bool,
    // Wipes cached install state before running (see `run`).
    pub force: bool,
    // CLI settings bag fed into aube_settings::ResolveCtx.
    pub cli_flags: Vec<(String, String)>,
    // Environment snapshot fed into aube_settings::ResolveCtx.
    pub env_snapshot: Vec<(String, String)>,
    // Current nesting depth of git-dep `prepare` installs; bounded by
    // GIT_PREPARE_MAX_DEPTH in run_git_dep_prepare.
    pub git_prepare_depth: u32,
    pub workspace_filter: aube_workspace::selector::EffectiveFilter,
}
impl InstallOptions {
    /// Build a default option set for the given frozen-lockfile mode.
    ///
    /// Every boolean toggle is off, networking is online, and the
    /// environment snapshot is captured eagerly from the live process
    /// environment; callers that already hold a snapshot should build the
    /// struct directly instead.
    pub fn with_mode(mode: FrozenMode) -> Self {
        Self {
            project_dir: None,
            mode,
            prod: false,
            dev: false,
            no_optional: false,
            ignore_pnpmfile: false,
            ignore_scripts: false,
            lockfile_only: false,
            merge_git_branch_lockfiles: false,
            dangerously_allow_all_builds: false,
            network_mode: aube_registry::NetworkMode::Online,
            minimum_release_age_override: None,
            strict_no_lockfile: false,
            force: false,
            cli_flags: Vec::new(),
            // Captured here, not deferred — nested installs rely on this.
            env_snapshot: aube_settings::values::capture_env(),
            git_prepare_depth: 0,
            workspace_filter: aube_workspace::selector::EffectiveFilter::default(),
        }
    }
}
/// Convenience conversion: a bare `FrozenMode` becomes an `InstallOptions`
/// with all other fields at their `with_mode` defaults.
impl From<FrozenMode> for InstallOptions {
    fn from(mode: FrozenMode) -> Self {
        Self::with_mode(mode)
    }
}
/// Run one root-project lifecycle hook, but only when the manifest
/// actually declares the corresponding script; otherwise this is a no-op.
async fn run_root_lifecycle(
    project_dir: &std::path::Path,
    modules_dir_name: &str,
    manifest: &aube_manifest::PackageJson,
    hook: aube_scripts::LifecycleHook,
) -> miette::Result<()> {
    if manifest.scripts.contains_key(hook.script_name()) {
        tracing::debug!("Running {} script...", hook.script_name());
        aube_scripts::run_root_hook(project_dir, modules_dir_name, manifest, hook)
            .await
            .map_err(|e| miette!("{}", e))?;
    }
    Ok(())
}
/// Merge build-permission configuration from `package.json` and the
/// workspace config into a single [`aube_scripts::BuildPolicy`].
///
/// Workspace `allowBuilds` entries override same-key manifest entries;
/// the only-built / never-built lists are concatenated (manifest first).
pub(crate) fn build_policy_from_sources(
    manifest: &aube_manifest::PackageJson,
    workspace: &aube_manifest::WorkspaceConfig,
    dangerously_allow_all_builds: bool,
) -> (
    aube_scripts::BuildPolicy,
    Vec<aube_scripts::BuildPolicyError>,
) {
    let mut allow = manifest.pnpm_allow_builds();
    allow.extend(workspace.allow_builds_raw());
    let mut only_built = manifest.pnpm_only_built_dependencies();
    only_built.extend(workspace.only_built_dependencies.clone());
    let mut never_built = manifest.pnpm_never_built_dependencies();
    never_built.extend(workspace.never_built_dependencies.clone());
    aube_scripts::BuildPolicy::from_config(
        &allow,
        &only_built,
        &never_built,
        dangerously_allow_all_builds,
    )
}
/// Decide how store files are materialized into node_modules (hardlink,
/// copy, or reflink).
///
/// An explicit `--package-import-method` CLI value takes precedence over
/// the resolved settings; `auto` (or an empty value) probes the filesystem
/// at `cwd` via `Linker::detect_strategy`. Unknown CLI values error;
/// settings values are already validated into the enum.
pub(crate) fn resolve_link_strategy(
    cwd: &std::path::Path,
    ctx: &aube_settings::ResolveCtx<'_>,
) -> miette::Result<aube_linker::LinkStrategy> {
    let package_import_method_cli =
        aube_settings::values::string_from_cli("packageImportMethod", ctx.cli);
    let strategy = if let Some(cli) = package_import_method_cli.as_deref() {
        // CLI path: normalize and match the raw string.
        match cli.trim().to_ascii_lowercase().as_str() {
            "" | "auto" => aube_linker::Linker::detect_strategy(cwd),
            "hardlink" => aube_linker::LinkStrategy::Hardlink,
            "copy" => aube_linker::LinkStrategy::Copy,
            "clone-or-copy" => aube_linker::LinkStrategy::Reflink,
            "clone" => {
                // `clone` maps to Reflink, which falls back to copy rather
                // than failing on unsupported filesystems.
                tracing::warn!(
                    "package-import-method=clone: reflink will silently fall back to copy \
 if the filesystem does not support it (strict enforcement is a known TODO)"
                );
                aube_linker::LinkStrategy::Reflink
            }
            other => {
                return Err(miette!(
                    "unknown --package-import-method value `{other}`; expected `auto`, `hardlink`, `copy`, `clone`, or `clone-or-copy`"
                ));
            }
        }
    } else {
        // Settings path: the value is already a typed enum.
        match aube_settings::resolved::package_import_method(ctx) {
            aube_settings::resolved::PackageImportMethod::Auto => {
                aube_linker::Linker::detect_strategy(cwd)
            }
            aube_settings::resolved::PackageImportMethod::Hardlink => {
                aube_linker::LinkStrategy::Hardlink
            }
            aube_settings::resolved::PackageImportMethod::Copy => aube_linker::LinkStrategy::Copy,
            aube_settings::resolved::PackageImportMethod::CloneOrCopy => {
                aube_linker::LinkStrategy::Reflink
            }
            aube_settings::resolved::PackageImportMethod::Clone => {
                tracing::warn!(
                    "package-import-method=clone: reflink will silently fall back to copy \
 if the filesystem does not support it (strict enforcement is a known TODO)"
                );
                aube_linker::LinkStrategy::Reflink
            }
        }
    };
    Ok(strategy)
}
/// Run allowed dependencies' lifecycle scripts after linking.
///
/// For every lockfile package whose (name, version) the build policy
/// explicitly allows, locate the materialized package directory, skip
/// packages that are not on disk or have no lifecycle work, then run each
/// hook in `DEP_LIFECYCLE_HOOKS` with bounded concurrency. When a
/// side-effects cache is configured, a cache hit short-circuits the
/// scripts; a successful run is saved back (save failures are logged and
/// ignored). Returns the number of individual hook scripts that ran.
#[allow(clippy::too_many_arguments)]
pub(crate) async fn run_dep_lifecycle_scripts(
    project_dir: &std::path::Path,
    modules_dir_name: &str,
    aube_dir: &std::path::Path,
    graph: &aube_lockfile::LockfileGraph,
    policy: &aube_scripts::BuildPolicy,
    virtual_store_dir_max_length: usize,
    child_concurrency: usize,
    placements: Option<&aube_linker::HoistedPlacements>,
    side_effects_cache: SideEffectsCacheConfig<'_>,
) -> miette::Result<usize> {
    // One unit of work: a package whose lifecycle scripts will be run.
    #[derive(Clone)]
    struct BuildJob {
        name: String,
        version: String,
        package_dir: std::path::PathBuf,
        dep_modules_dir: std::path::PathBuf,
        manifest: aube_manifest::PackageJson,
        // Present only when a side-effects cache root is configured.
        cache_entry: Option<SideEffectsCacheEntry>,
    }
    // Phase 1 (synchronous): collect jobs via cheap fs checks + manifest
    // parses, so the async phase only does script execution.
    let mut jobs: Vec<BuildJob> = Vec::new();
    for (dep_path, pkg) in &graph.packages {
        match policy.decide(&pkg.name, &pkg.version) {
            aube_scripts::AllowDecision::Allow => {}
            // Deny and Unspecified are both skipped here; Unspecified
            // packages are surfaced separately by `unreviewed_dep_builds`.
            aube_scripts::AllowDecision::Deny | aube_scripts::AllowDecision::Unspecified => {
                continue;
            }
        }
        let package_dir = materialized_pkg_dir(
            aube_dir,
            dep_path,
            &pkg.name,
            virtual_store_dir_max_length,
            placements,
        );
        if !package_dir.exists() {
            tracing::debug!(
                "allowBuilds: skipping {} — {} not on disk",
                pkg.name,
                package_dir.display()
            );
            continue;
        }
        let pkg_json_path = package_dir.join("package.json");
        let pkg_json_content = match std::fs::read_to_string(&pkg_json_path) {
            Ok(s) => s,
            // A vanished package.json is treated like a missing package.
            Err(e) if e.kind() == std::io::ErrorKind::NotFound => continue,
            Err(e) => {
                return Err(miette!(
                    "failed to read package.json for {} at {}: {}",
                    pkg.name,
                    pkg_json_path.display(),
                    e
                ));
            }
        };
        let dep_manifest: aube_manifest::PackageJson = serde_json::from_str(&pkg_json_content)
            .into_diagnostic()
            .wrap_err_with(|| format!("failed to parse package.json for {}", pkg.name))?;
        // Nothing to run — no job.
        if !aube_scripts::has_dep_lifecycle_work(&package_dir, &dep_manifest) {
            continue;
        }
        let cache_entry = side_effects_cache
            .root()
            .map(|root| SideEffectsCacheEntry::new(root, &pkg.name, &pkg.version, &package_dir))
            .transpose()?;
        let dep_modules_dir = dep_modules_dir_for(&package_dir, &pkg.name);
        jobs.push(BuildJob {
            name: pkg.name.clone(),
            version: pkg.version.clone(),
            package_dir,
            dep_modules_dir,
            manifest: dep_manifest,
            cache_entry,
        });
    }
    if jobs.is_empty() {
        return Ok(0);
    }
    // Phase 2 (async): run jobs concurrently; the semaphore bounds how many
    // child script processes run at once.
    let concurrency = child_concurrency.max(1);
    let semaphore = std::sync::Arc::new(tokio::sync::Semaphore::new(concurrency));
    let project_dir = project_dir.to_path_buf();
    let modules_dir_name = modules_dir_name.to_string();
    let should_restore_side_effects_cache = side_effects_cache.should_restore();
    let should_save_side_effects_cache = side_effects_cache.should_save();
    let overwrite_side_effects_cache = side_effects_cache.overwrite_existing();
    let mut set: tokio::task::JoinSet<miette::Result<usize>> = tokio::task::JoinSet::new();
    for job in jobs {
        let sem = semaphore.clone();
        let project_dir = project_dir.clone();
        let modules_dir_name = modules_dir_name.clone();
        set.spawn(async move {
            let _permit = sem.acquire().await.unwrap();
            // Cache restore first: a hit means the scripts' effects are
            // already in place, so the job contributes 0 runs.
            if should_restore_side_effects_cache && let Some(cache_entry) = job.cache_entry.clone()
            {
                let package_dir = job.package_dir.clone();
                let restore_result = tokio::task::spawn_blocking(move || {
                    cache_entry.restore_if_available(&package_dir)
                })
                .await
                .map_err(|e| {
                    miette!(
                        "side-effects-cache restore task panicked for {}@{}: {e}",
                        job.name,
                        job.version
                    )
                })?;
                match restore_result? {
                    SideEffectsCacheRestore::Restored | SideEffectsCacheRestore::AlreadyApplied => {
                        return Ok(0);
                    }
                    SideEffectsCacheRestore::Miss => {}
                }
            }
            // Hooks run strictly in DEP_LIFECYCLE_HOOKS order, one at a
            // time within a job; any failure aborts the whole install.
            let mut ran_here = 0usize;
            for hook in aube_scripts::DEP_LIFECYCLE_HOOKS {
                let did_run = aube_scripts::run_dep_hook(
                    &job.package_dir,
                    &job.dep_modules_dir,
                    &project_dir,
                    &modules_dir_name,
                    &job.manifest,
                    hook,
                )
                .await
                .map_err(|e| {
                    miette!(
                        "lifecycle script {} failed for {}@{}: {}",
                        hook.script_name(),
                        job.name,
                        job.version,
                        e
                    )
                })?;
                if did_run {
                    tracing::debug!(
                        "ran {} for {}@{}",
                        hook.script_name(),
                        job.name,
                        job.version
                    );
                    ran_here += 1;
                }
            }
            // Save back to the cache only when something actually ran;
            // save failures are deliberately non-fatal (debug-logged).
            if should_save_side_effects_cache
                && ran_here > 0
                && let Some(cache_entry) = job.cache_entry.clone()
            {
                let package_dir = job.package_dir.clone();
                let save_result = tokio::task::spawn_blocking(move || {
                    cache_entry.save(&package_dir, overwrite_side_effects_cache)
                })
                .await
                .map_err(|e| {
                    miette!(
                        "side-effects-cache save task panicked for {}@{}: {e}",
                        job.name,
                        job.version
                    )
                })
                // Flatten JoinError-or-save-error into one Result.
                .and_then(|r| r);
                if let Err(e) = save_result {
                    tracing::debug!(
                        "side-effects-cache: ignoring cache save error for {}@{}: {e}",
                        job.name,
                        job.version
                    );
                }
            }
            Ok(ran_here)
        });
    }
    // Sum per-job script counts; the first task error or panic aborts.
    let mut ran = 0usize;
    while let Some(res) = set.join_next().await {
        ran += res.into_diagnostic()??;
    }
    Ok(ran)
}
/// Verify that the root manifest and every workspace package declare all
/// scripts listed in the `requiredScripts` setting.
///
/// The root package is labeled `"."`; workspace packages are labeled by
/// their manifest name, falling back to their path relative to the root.
/// Returns one aggregated error listing every missing (package, script).
fn validate_required_scripts(
    project_dir: &std::path::Path,
    manifest: &aube_manifest::PackageJson,
    required: &[String],
) -> miette::Result<()> {
    if required.is_empty() {
        return Ok(());
    }
    let mut missing: Vec<(String, String)> = Vec::new();
    collect_missing_required_scripts(".", manifest, required, &mut missing);
    let workspace_dirs = aube_workspace::find_workspace_packages(project_dir)
        .map_err(|e| miette!("failed to discover workspace packages: {e}"))?;
    for pkg_dir in workspace_dirs {
        let manifest_path = pkg_dir.join("package.json");
        let pkg_manifest = aube_manifest::PackageJson::from_path(&manifest_path)
            .into_diagnostic()
            .wrap_err_with(|| format!("failed to read {}", manifest_path.display()))?;
        let label = match pkg_manifest.name.as_deref() {
            Some(name) => name.to_string(),
            None => pkg_dir
                .strip_prefix(project_dir)
                .unwrap_or(&pkg_dir)
                .display()
                .to_string(),
        };
        collect_missing_required_scripts(&label, &pkg_manifest, required, &mut missing);
    }
    if !missing.is_empty() {
        let details: Vec<String> = missing
            .into_iter()
            .map(|(pkg, script)| format!(" - {pkg} is missing `{script}`"))
            .collect();
        return Err(miette!(
            "requiredScripts check failed:\n{}",
            details.join("\n")
        ));
    }
    Ok(())
}
/// Append one `(package label, script name)` pair to `missing` for every
/// required script absent from `manifest.scripts`.
fn collect_missing_required_scripts(
    label: &str,
    manifest: &aube_manifest::PackageJson,
    required: &[String],
    missing: &mut Vec<(String, String)>,
) {
    missing.extend(
        required
            .iter()
            .filter(|script| !manifest.scripts.contains_key(*script))
            .map(|script| (label.to_string(), script.clone())),
    );
}
/// List `name@version` strings for on-disk packages that have lifecycle
/// work to do but whose build permission is `Unspecified` — i.e. the user
/// has neither allowed nor denied their scripts.
///
/// Mirrors the scan in `run_dep_lifecycle_scripts` but never executes
/// anything. The result is sorted and deduplicated for stable reporting.
fn unreviewed_dep_builds(
    aube_dir: &std::path::Path,
    graph: &aube_lockfile::LockfileGraph,
    policy: &aube_scripts::BuildPolicy,
    virtual_store_dir_max_length: usize,
    placements: Option<&aube_linker::HoistedPlacements>,
) -> miette::Result<Vec<String>> {
    let mut unreviewed = Vec::new();
    for (dep_path, pkg) in &graph.packages {
        // Only packages with no explicit Allow/Deny decision matter here.
        if !matches!(
            policy.decide(&pkg.name, &pkg.version),
            aube_scripts::AllowDecision::Unspecified
        ) {
            continue;
        }
        let package_dir = materialized_pkg_dir(
            aube_dir,
            dep_path,
            &pkg.name,
            virtual_store_dir_max_length,
            placements,
        );
        if !package_dir.exists() {
            continue;
        }
        let pkg_json_path = package_dir.join("package.json");
        let pkg_json_content = match std::fs::read_to_string(&pkg_json_path) {
            Ok(s) => s,
            // A vanished package.json is treated like a missing package.
            Err(e) if e.kind() == std::io::ErrorKind::NotFound => continue,
            Err(e) => {
                return Err(miette!(
                    "failed to read package.json for {} at {}: {}",
                    pkg.name,
                    pkg_json_path.display(),
                    e
                ));
            }
        };
        let dep_manifest: aube_manifest::PackageJson = serde_json::from_str(&pkg_json_content)
            .into_diagnostic()
            .wrap_err_with(|| format!("failed to parse package.json for {}", pkg.name))?;
        if aube_scripts::has_dep_lifecycle_work(&package_dir, &dep_manifest) {
            unreviewed.push(format!("{}@{}", pkg.name, pkg.version));
        }
    }
    unreviewed.sort();
    unreviewed.dedup();
    Ok(unreviewed)
}
/// Temporary directory that is best-effort deleted on drop (RAII cleanup
/// for git-dep `prepare` scratch copies).
struct ScratchDir(std::path::PathBuf);
impl ScratchDir {
    /// Path of the scratch directory.
    fn path(&self) -> &std::path::Path {
        &self.0
    }
}
impl Drop for ScratchDir {
    fn drop(&mut self) {
        // Cleanup is best-effort; removal errors are deliberately ignored.
        let _ = std::fs::remove_dir_all(&self.0);
    }
}
/// Copy `src` into a fresh temp directory so a git dep's `prepare` script
/// can run without mutating the shared clone.
///
/// The destination name hashes (src path, pid, current time) to avoid
/// collisions between concurrent installs; the `.git` directory is
/// stripped from the copy so packing never picks it up.
///
/// NOTE(review): this shells out to `cp -a`, which assumes a Unix-like
/// platform — confirm Windows is out of scope for this code path.
fn prepare_scratch_copy(src: &std::path::Path, spec: &str) -> miette::Result<ScratchDir> {
    let mut hasher = std::collections::hash_map::DefaultHasher::new();
    use std::hash::{Hash, Hasher};
    src.hash(&mut hasher);
    std::process::id().hash(&mut hasher);
    // Wall-clock nanos make the name unique across runs; clock errors
    // degrade to a constant rather than failing.
    std::time::SystemTime::now()
        .duration_since(std::time::UNIX_EPOCH)
        .map(|d| d.as_nanos())
        .unwrap_or(0)
        .hash(&mut hasher);
    let dst = std::env::temp_dir().join(format!("aube-git-prep-{:x}", hasher.finish()));
    if dst.exists() {
        let _ = std::fs::remove_dir_all(&dst);
    }
    std::fs::create_dir_all(&dst)
        .map_err(|e| miette!("git dep {spec}: create scratch dir {}: {e}", dst.display()))?;
    // Wrap in ScratchDir early so a failed copy still cleans up on drop.
    let scratch = ScratchDir(dst);
    // `src/.` copies the directory *contents* including dotfiles.
    let out = std::process::Command::new("cp")
        .arg("-a")
        .arg(format!("{}/.", src.display()))
        .arg(scratch.path())
        .output()
        .map_err(|e| miette!("git dep {spec}: spawn cp for scratch copy: {e}"))?;
    if !out.status.success() {
        return Err(miette!(
            "git dep {spec}: scratch copy failed: {}",
            String::from_utf8_lossy(&out.stderr).trim()
        ));
    }
    let _ = std::fs::remove_dir_all(scratch.path().join(".git"));
    Ok(scratch)
}
// Upper bound on recursive `prepare` installs, so a cycle of git deps
// whose prepare scripts install each other cannot recurse forever.
const GIT_PREPARE_MAX_DEPTH: u32 = 4;
/// Run a nested `aube install` inside a git dependency's scratch clone so
/// its `prepare` script can see its own dependencies.
///
/// The nested install executes `run()` on a fresh current-thread runtime
/// inside `spawn_blocking` — presumably to avoid re-entering the outer
/// runtime with a recursive async call (confirm with the runtime owners).
/// `depth` tracks the nesting level and is rejected at
/// `GIT_PREPARE_MAX_DEPTH`.
async fn run_git_dep_prepare(
    clone_dir: &std::path::Path,
    spec: &str,
    ignore_scripts: bool,
    depth: u32,
) -> miette::Result<()> {
    if depth >= GIT_PREPARE_MAX_DEPTH {
        return Err(miette!(
            "git dep {spec}: `prepare` nesting exceeded {GIT_PREPARE_MAX_DEPTH} levels"
        ));
    }
    let mut opts = InstallOptions::with_mode(super::chained_frozen_mode(FrozenMode::Prefer));
    opts.project_dir = Some(clone_dir.to_path_buf());
    opts.ignore_scripts = ignore_scripts;
    // Children inherit depth + 1 so the bound above eventually trips.
    opts.git_prepare_depth = depth + 1;
    let spec = spec.to_string();
    tokio::task::spawn_blocking(move || {
        let runtime = tokio::runtime::Builder::new_current_thread()
            .enable_all()
            .build()
            .into_diagnostic()
            .wrap_err("failed to build nested git prepare runtime")?;
        runtime.block_on(run(opts))
    })
    .await
    .into_diagnostic()
    .wrap_err_with(|| format!("git dep {spec}: nested install task failed"))?
    .wrap_err_with(|| format!("git dep {spec}: nested install for `prepare` failed"))
}
/// Import a non-registry ("local") dependency source into the content store.
///
/// `link:` dependencies return `Ok(None)` — they are wired up elsewhere
/// and never enter the store. Directory, tarball, git, and remote-tarball
/// sources return `Ok(Some(index))`. `client` is only needed for
/// `RemoteTarball` sources; passing `None` for one is an internal error.
pub(super) async fn import_local_source(
    store: &std::sync::Arc<aube_store::Store>,
    project_root: &std::path::Path,
    local: &aube_lockfile::LocalSource,
    client: Option<&std::sync::Arc<aube_registry::client::RegistryClient>>,
    ignore_scripts: bool,
    git_prepare_depth: u32,
    git_shallow_hosts: &[String],
) -> miette::Result<Option<aube_store::PackageIndex>> {
    use aube_lockfile::LocalSource;
    match local {
        // Symlinked workspace links are never imported into the store.
        LocalSource::Link(_) => Ok(None),
        LocalSource::Directory(rel) => {
            // `rel` is relative to the project root.
            let abs = project_root.join(rel);
            if !abs.is_dir() {
                return Err(miette!(
                    "local dependency {}: {} is not a directory",
                    local.specifier(),
                    abs.display()
                ));
            }
            let index = store
                .import_directory(&abs)
                .map_err(|e| miette!("failed to import {}: {e}", local.specifier()))?;
            Ok(Some(index))
        }
        LocalSource::Tarball(rel) => {
            let abs = project_root.join(rel);
            let bytes = std::fs::read(&abs)
                .into_diagnostic()
                .wrap_err_with(|| format!("read {}", abs.display()))?;
            let index = store
                .import_tarball(&bytes)
                .map_err(|e| miette!("failed to import {}: {e}", local.specifier()))?;
            Ok(Some(index))
        }
        LocalSource::Git(g) => {
            let url = g.url.clone();
            let resolved = g.resolved.clone();
            let spec = local.specifier();
            // Shallow cloning is opt-in per host via the shallow-hosts list.
            let shallow = aube_store::git_host_in_list(&url, git_shallow_hosts);
            // Cloning is blocking process/fs work — keep it off the runtime.
            let clone_dir = tokio::task::spawn_blocking(move || {
                aube_store::git_shallow_clone(&url, &resolved, shallow)
            })
            .await
            .map_err(|e| miette!("git clone task panicked: {e}"))?
            .map_err(|e| miette!("failed to clone {spec}: {e}"))?;
            let manifest_path = clone_dir.join("package.json");
            // An unreadable manifest means "no prepare script" here.
            let needs_prepare = !ignore_scripts
                && aube_manifest::PackageJson::from_path(&manifest_path)
                    .ok()
                    .is_some_and(|pj| pj.scripts.contains_key("prepare"));
            if needs_prepare {
                // Run `prepare` in a scratch copy so the shared clone stays
                // pristine, then pack and import the prepared tree.
                let scratch = prepare_scratch_copy(&clone_dir, &spec)?;
                run_git_dep_prepare(scratch.path(), &spec, ignore_scripts, git_prepare_depth)
                    .await?;
                let archive = crate::commands::pack::build_archive(scratch.path())
                    .wrap_err_with(|| format!("failed to pack prepared git dep {spec}"))?;
                let index = store
                    .import_tarball(&archive.tarball)
                    .map_err(|e| miette!("failed to import prepared {spec}: {e}"))?;
                return Ok(Some(index));
            }
            let index = store
                .import_directory(&clone_dir)
                .map_err(|e| miette!("failed to import {}: {e}", local.specifier()))?;
            Ok(Some(index))
        }
        LocalSource::RemoteTarball(t) => {
            let client = client.ok_or_else(|| {
                miette!(
                    "internal: import_local_source called without a registry client for {}",
                    local.specifier()
                )
            })?;
            let bytes = client
                .fetch_tarball_bytes(&t.url)
                .await
                .map_err(|e| miette!("failed to fetch {}: {e}", t.url))?;
            // An empty integrity string means "unknown" — skip verification.
            if !t.integrity.is_empty() {
                aube_store::verify_integrity(&bytes, &t.integrity)
                    .map_err(|e| miette!("{}: {e}", t.url))?;
            }
            let index = store
                .import_tarball(&bytes)
                .map_err(|e| miette!("failed to import {}: {e}", local.specifier()))?;
            Ok(Some(index))
        }
    }
}
/// Fetch every lockfile package into the store, resolving settings
/// (network concurrency, integrity checks, virtual-store layout) from the
/// current project's npmrc, workspace yaml, and environment.
///
/// Thin wrapper over [`fetch_packages_with_root`] with
/// `skip_already_linked_shortcut = true`.
pub(super) async fn fetch_packages(
    packages: &BTreeMap<String, aube_lockfile::LockedPackage>,
    store: &std::sync::Arc<aube_store::Store>,
    client: std::sync::Arc<aube_registry::client::RegistryClient>,
    progress: Option<&InstallProgress>,
    ignore_scripts: bool,
    git_prepare_depth: u32,
    git_shallow_hosts: Vec<String>,
) -> miette::Result<(BTreeMap<String, aube_store::PackageIndex>, usize, usize)> {
    let project_root = crate::dirs::project_root_or_cwd()?;
    // This code path carries no CLI flags, so settings resolution sees
    // npmrc + workspace yaml + live environment only.
    let npmrc = aube_registry::config::load_npmrc_entries(&project_root);
    let workspace_yaml = aube_manifest::workspace::load_both(&project_root)
        .map(|(_, raw)| raw)
        .unwrap_or_default();
    let env = aube_settings::values::capture_env();
    let ctx = aube_settings::ResolveCtx {
        npmrc: &npmrc,
        workspace_yaml: &workspace_yaml,
        env: &env,
        cli: &[],
    };
    let network_concurrency = resolve_network_concurrency(&ctx);
    let verify_integrity = resolve_verify_store_integrity(&ctx);
    let strict_content_check = resolve_strict_store_pkg_content_check(&ctx);
    let virtual_store_dir_max_length = super::resolve_virtual_store_dir_max_length(&ctx);
    let virtual_store_dir = super::resolve_virtual_store_dir(&ctx, &project_root);
    fetch_packages_with_root(
        packages,
        store,
        || client,
        progress,
        &project_root,
        &virtual_store_dir,
        true,
        virtual_store_dir_max_length,
        ignore_scripts,
        network_concurrency,
        verify_integrity,
        strict_content_check,
        git_prepare_depth,
        git_shallow_hosts,
    )
    .await
}
/// Ensure every package in `packages` has a store index, downloading and
/// importing whatever is missing.
///
/// Returns `(dep_path -> index, cached_count, fetch_count)`, where
/// `cached_count` covers both store-index hits and entries skipped because
/// their virtual-store directory already exists on disk.
///
/// `client` is a lazy constructor: it is invoked at most once, and only if
/// a remote-tarball local source or at least one registry fetch needs it.
#[allow(clippy::too_many_arguments)]
pub(super) async fn fetch_packages_with_root<F>(
    packages: &BTreeMap<String, aube_lockfile::LockedPackage>,
    store: &std::sync::Arc<aube_store::Store>,
    client: F,
    progress: Option<&InstallProgress>,
    project_root: &std::path::Path,
    aube_dir: &std::path::Path,
    skip_already_linked_shortcut: bool,
    virtual_store_dir_max_length: usize,
    ignore_scripts: bool,
    network_concurrency: Option<usize>,
    verify_integrity: bool,
    strict_pkg_content_check: bool,
    git_prepare_depth: u32,
    git_shallow_hosts: Vec<String>,
) -> miette::Result<(BTreeMap<String, aube_store::PackageIndex>, usize, usize)>
where
    F: FnOnce() -> std::sync::Arc<aube_registry::client::RegistryClient>,
{
    // Outcome of the cheap, offline availability check per package.
    enum CheckResult {
        AlreadyLinked,
        Cached(aube_store::PackageIndex),
        NeedsFetch,
    }
    // Phase 1 (rayon, fs-bound): classify every registry package without
    // touching the network. Local sources are excluded here and handled
    // in phase 2.
    let check_results: Vec<_> = packages
        .par_iter()
        .filter(|(_, pkg)| pkg.local_source.is_none())
        .map(|(dep_path, pkg)| {
            if !skip_already_linked_shortcut {
                // If the virtual-store entry already exists on disk we can
                // skip even the store-index lookup.
                let entry_name = dep_path_to_filename(dep_path, virtual_store_dir_max_length);
                if aube_dir.join(&entry_name).exists() {
                    return (dep_path.clone(), pkg, CheckResult::AlreadyLinked);
                }
            }
            match store.load_index(pkg.registry_name(), &pkg.version) {
                Some(index) => (dep_path.clone(), pkg, CheckResult::Cached(index)),
                None => (dep_path.clone(), pkg, CheckResult::NeedsFetch),
            }
        })
        .collect();
    let mut indices: BTreeMap<String, aube_store::PackageIndex> = BTreeMap::new();
    // Remote-tarball sources need a client during phase 2 below, so build
    // it eagerly in that case only; otherwise defer until phase 3.
    let has_remote_tarball = packages.values().any(|p| {
        matches!(
            p.local_source,
            Some(aube_lockfile::LocalSource::RemoteTarball(_))
        )
    });
    let mut client_slot: Option<std::sync::Arc<aube_registry::client::RegistryClient>> = None;
    let mut client_builder = Some(client);
    if has_remote_tarball {
        client_slot = Some((client_builder.take().unwrap())());
    }
    // Phase 2 (sequential): import local sources (dir / tarball / git /
    // remote tarball). Each one counts as "reused" for progress purposes.
    for (dep_path, pkg) in packages {
        let Some(ref local) = pkg.local_source else {
            continue;
        };
        if let Some(index) = import_local_source(
            store,
            project_root,
            local,
            client_slot.as_ref(),
            ignore_scripts,
            git_prepare_depth,
            &git_shallow_hosts,
        )
        .await?
        {
            indices.insert(dep_path.clone(), index);
        }
        if let Some(p) = progress {
            p.inc_reused(1);
        }
    }
    // Partition the phase-1 results into cache hits and pending fetches.
    let mut to_fetch = Vec::new();
    let mut cached_count = 0usize;
    for (dep_path, pkg, result) in check_results {
        match result {
            CheckResult::AlreadyLinked => {
                cached_count += 1;
            }
            CheckResult::Cached(index) => {
                indices.insert(dep_path, index);
                cached_count += 1;
            }
            CheckResult::NeedsFetch => {
                to_fetch.push((
                    dep_path,
                    pkg.name.clone(),
                    pkg.registry_name().to_string(),
                    pkg.version.clone(),
                    pkg.tarball_url.clone(),
                    pkg.integrity.clone(),
                ));
            }
        }
    }
    if let Some(p) = progress {
        p.inc_reused(cached_count);
    }
    let fetch_count = to_fetch.len();
    // Phase 3 (network-bound): one tokio task per tarball, bounded by a
    // semaphore. The permit is held through download *and* import.
    if !to_fetch.is_empty() {
        let client = match client_slot.take() {
            Some(c) => c,
            None => (client_builder.take().unwrap())(),
        };
        let sem_permits = network_concurrency.unwrap_or_else(default_lockfile_network_concurrency);
        let semaphore = std::sync::Arc::new(tokio::sync::Semaphore::new(sem_permits));
        let mut handles = Vec::new();
        for (dep_path, display_name, registry_name, version, tarball_url_override, integrity) in
            to_fetch
        {
            let sem = semaphore.clone();
            let store = store.clone();
            let client = client.clone();
            let row = progress.map(|p| p.start_fetch(&display_name, &version));
            let bytes_progress = progress.cloned();
            let handle = tokio::spawn(async move {
                // Keep the progress row alive for the task's duration.
                let _row = row;
                let task_start = std::time::Instant::now();
                let permit = sem.acquire().await.unwrap();
                let wait_time = task_start.elapsed();
                // Lockfile-pinned URL wins over the registry-derived one.
                let url = tarball_url_override
                    .clone()
                    .unwrap_or_else(|| client.tarball_url(&registry_name, &version));
                let dl_start = std::time::Instant::now();
                let bytes = client
                    .fetch_tarball_bytes(&url)
                    .await
                    .map_err(|e| miette!("failed to fetch {display_name}@{version}: {e}"))?;
                let dl_time = dl_start.elapsed();
                if let Some(p) = bytes_progress.as_ref() {
                    p.inc_downloaded_bytes(bytes.len() as u64);
                }
                let bytes_len = bytes.len();
                // Integrity verification, import, content validation, and
                // index caching are all blocking work.
                let (index, import_time) = tokio::task::spawn_blocking({
                    let store = store.clone();
                    let display_name = display_name.clone();
                    let registry_name = registry_name.clone();
                    let version = version.clone();
                    move || -> miette::Result<_> {
                        if verify_integrity && let Some(ref expected) = integrity {
                            aube_store::verify_integrity(&bytes, expected)
                                .map_err(|e| miette!("{display_name}@{version}: {e}"))?;
                        }
                        let import_start = std::time::Instant::now();
                        let index = store.import_tarball(&bytes).map_err(|e| {
                            miette!("failed to import {display_name}@{version}: {e}")
                        })?;
                        let import_time = import_start.elapsed();
                        if strict_pkg_content_check {
                            aube_store::validate_pkg_content(&index, &registry_name, &version)
                                .map_err(|e| miette!("{display_name}@{version}: {e}"))?;
                        }
                        // Index caching is best-effort; failure only warns.
                        if let Err(e) = store.save_index(&registry_name, &version, &index) {
                            tracing::warn!(
                                "Failed to cache index for {display_name}@{version}: {e}"
                            );
                        }
                        Ok((index, import_time))
                    }
                })
                .await
                .into_diagnostic()??;
                tracing::trace!(
                    "fetch {display_name}@{version}: wait={:.0?} dl={:.0?} ({} bytes) import={:.0?}",
                    wait_time,
                    dl_time,
                    bytes_len,
                    import_time
                );
                drop(permit);
                Ok::<_, miette::Report>((dep_path, index))
            });
            handles.push(handle);
        }
        // First task error (or panic) aborts; successes land in `indices`.
        for handle in handles {
            let (dep_path, index) = handle.await.into_diagnostic()??;
            indices.insert(dep_path, index);
        }
    }
    Ok((indices, cached_count, fetch_count))
}
/// Extract the bare version from a lockfile dep path such as
/// `foo@1.2.3` or `@scope/foo@1.2.3(react@18.2.0)`.
///
/// Strips the leading `<name>@` when present, then drops any
/// parenthesized peer-dependency suffix. If `dep_path` does not start
/// with `<name>@`, the whole path (minus any `(` suffix) is returned,
/// matching the previous behavior.
///
/// Previously this allocated a `format!("{name}@")` string on every call
/// just to strip a prefix, and chained a redundant `unwrap_or` onto
/// `split('(').next()` (which is always `Some`); both are gone.
pub(super) fn version_from_dep_path(dep_path: &str, name: &str) -> String {
    // `strip_prefix(name)` + `strip_prefix('@')` is equivalent to the old
    // `strip_prefix(&format!("{name}@"))` without the allocation; both
    // steps must succeed or we fall back to the full path.
    let tail = dep_path
        .strip_prefix(name)
        .and_then(|rest| rest.strip_prefix('@'))
        .unwrap_or(dep_path);
    // Cut off a peer suffix like `(react@18.2.0)` if one is present.
    match tail.split_once('(') {
        Some((version, _)) => version.to_string(),
        None => tail.to_string(),
    }
}
/// Re-key store indices from canonical `name@version` keys (or plain dep
/// paths) onto the graph's contextualized dep paths.
///
/// Graph entries with no matching index are silently omitted.
fn remap_indices_to_contextualized(
    canonical_indices: &BTreeMap<String, aube_store::PackageIndex>,
    graph: &aube_lockfile::LockfileGraph,
) -> BTreeMap<String, aube_store::PackageIndex> {
    graph
        .packages
        .iter()
        .filter_map(|(dep_path, pkg)| {
            // Prefer an exact dep-path hit, then fall back to the
            // canonical name@version key.
            let canonical_key = format!("{}@{}", pkg.name, pkg.version);
            canonical_indices
                .get(dep_path)
                .or_else(|| canonical_indices.get(&canonical_key))
                .map(|idx| (dep_path.clone(), idx.clone()))
        })
        .collect()
}
pub async fn run(opts: InstallOptions) -> miette::Result<()> {
let mode = opts.mode;
let cwd = if let Some(project_dir) = &opts.project_dir {
project_dir.clone()
} else {
let initial_cwd = crate::dirs::cwd()?;
match crate::dirs::find_project_root(&initial_cwd) {
Some(root) => root,
None => {
return Err(miette!(
"no package.json found in {} or any parent directory",
initial_cwd.display()
));
}
}
};
let _lock = super::take_project_lock(&cwd)?;
let start = std::time::Instant::now();
if opts.force {
let _ = state::remove_state(&cwd);
}
let manifest = aube_manifest::PackageJson::from_path(&cwd.join("package.json"))
.into_diagnostic()
.wrap_err("failed to read package.json")?;
let project_name = manifest.name.as_deref().unwrap_or("(unnamed)");
let npmrc_entries = aube_registry::config::load_npmrc_entries(&cwd);
let (ws_config_shared, raw_workspace) = aube_manifest::workspace::load_both(&cwd)
.into_diagnostic()
.wrap_err("failed to load workspace config")?;
let workspace_catalogs = super::discover_catalogs(&cwd)?;
let settings_ctx = aube_settings::ResolveCtx {
npmrc: &npmrc_entries,
workspace_yaml: &raw_workspace,
env: &opts.env_snapshot,
cli: &opts.cli_flags,
};
super::configure_script_settings(&settings_ctx);
let modules_dir_name = aube_settings::resolved::modules_dir(&settings_ctx);
let aube_dir = super::resolve_virtual_store_dir(&settings_ctx, &cwd);
let lockfile_enabled = aube_settings::resolved::lockfile(&settings_ctx);
let modules_dir_enabled = aube_settings::resolved::enable_modules_dir(&settings_ctx);
let lockfile_only_effective = opts.lockfile_only || !modules_dir_enabled;
if !lockfile_enabled && opts.lockfile_only {
return Err(miette!(
"--lockfile-only is incompatible with lockfile=false; \
remove one or the other"
));
}
if !lockfile_enabled && !modules_dir_enabled {
return Err(miette!(
"enableModulesDir=false is incompatible with lockfile=false; \
remove one or the other"
));
}
if !lockfile_enabled && opts.strict_no_lockfile {
return Err(miette!(
"--frozen-lockfile is incompatible with lockfile=false; \
remove one or the other"
));
}
let lockfile_include_tarball_url =
aube_settings::resolved::lockfile_include_tarball_url(&settings_ctx);
tracing::debug!(
"lockfile: enabled={lockfile_enabled}, include-tarball-url={lockfile_include_tarball_url}"
);
// Optionally merge per-branch lockfiles back into the main aube-lock.yaml,
// either on explicit --merge-git-branch-lockfiles or when the current git
// branch matches a configured pattern.
if lockfile_enabled {
let patterns =
aube_settings::resolved::merge_git_branch_lockfiles_branch_pattern(&settings_ctx)
.unwrap_or_default();
let should_merge = opts.merge_git_branch_lockfiles
|| aube_lockfile::merge::current_branch_matches(&cwd, &patterns);
if should_merge {
match aube_lockfile::merge_branch_lockfiles(&cwd, &manifest) {
Ok(report) => {
if !report.merged_files.is_empty() {
// Report only the file names, not full paths, in the log line.
let filenames: Vec<String> = report
.merged_files
.iter()
.filter_map(|p| {
p.file_name()
.and_then(|n| n.to_str())
.map(|s| s.to_string())
})
.collect();
tracing::info!(
"merged {} branch lockfile(s) into aube-lock.yaml: {}",
report.merged_files.len(),
filenames.join(", ")
);
if !report.conflicts.is_empty() {
tracing::warn!(
"{} conflict(s) during branch-lockfile merge; see --verbose for details",
report.conflicts.len()
);
}
} else {
tracing::debug!(
"branch-lockfile merge triggered but no aube-lock.*.yaml files were found"
);
}
}
Err(err) => {
return Err(miette!("failed to merge branch lockfiles: {err}"));
}
}
}
}
// Resolve tunables once; several are reused by both the streaming fetch path
// and the catch-up fetch further down.
let network_concurrency_setting = resolve_network_concurrency(&settings_ctx);
let link_concurrency_setting = resolve_link_concurrency(&settings_ctx);
let verify_store_integrity_setting = resolve_verify_store_integrity(&settings_ctx);
let strict_store_pkg_content_check_setting =
resolve_strict_store_pkg_content_check(&settings_ctx);
let side_effects_cache_setting = resolve_side_effects_cache(&settings_ctx);
let side_effects_cache_readonly_setting = resolve_side_effects_cache_readonly(&settings_ctx);
let strict_dep_builds_setting = aube_settings::resolved::strict_dep_builds(&settings_ctx);
let required_scripts =
aube_settings::resolved::required_scripts(&settings_ctx).unwrap_or_default();
validate_required_scripts(&cwd, &manifest, &required_scripts)?;
// pnpm-compatibility settings that aube accepts but does not implement:
// warn instead of erroring so shared configs keep working.
if resolve_use_running_store_server(&settings_ctx) {
eprintln!(
"warning: aube has no store server; useRunningStoreServer=true is accepted but has no effect"
);
}
if !resolve_symlink(&settings_ctx) {
eprintln!(
"warning: aube's isolated layout requires symlinks; symlink=false is accepted but has no effect"
);
}
// Value intentionally discarded — presumably resolved only so the setting is
// recognized/validated; confirm against aube_settings.
let _ = aube_settings::resolved::dlx_cache_max_age(&settings_ctx);
tracing::debug!(
"settings: network-concurrency={:?}, link-concurrency={:?}, verify-store-integrity={}, strict-store-pkg-content-check={}, side-effects-cache={}, side-effects-cache-readonly={}, strict-dep-builds={}",
network_concurrency_setting,
link_concurrency_setting,
verify_store_integrity_setting,
strict_store_pkg_content_check_setting,
side_effects_cache_setting,
side_effects_cache_readonly_setting,
strict_dep_builds_setting,
);
let virtual_store_dir_max_length = super::resolve_virtual_store_dir_max_length(&settings_ctx);
// Root preinstall hook runs unless scripts are disabled or this is a
// lockfile-only install.
if !opts.ignore_scripts && !lockfile_only_effective {
run_root_lifecycle(
&cwd,
&modules_dir_name,
&manifest,
aube_scripts::LifecycleHook::PreInstall,
)
.await?;
}
let prog = InstallProgress::try_new();
let prog_ref = prog.as_ref();
// Discover workspace member packages; the root manifest is always importer ".".
let workspace_packages = aube_workspace::find_workspace_packages(&cwd)
.into_diagnostic()
.wrap_err("failed to discover workspace packages")?;
let recursive_install = aube_settings::resolved::recursive_install(&settings_ctx);
let has_workspace = !workspace_packages.is_empty();
let link_all_workspace_importers =
has_workspace && (recursive_install || !opts.workspace_filter.is_empty());
let mut manifests: Vec<(String, aube_manifest::PackageJson)> =
vec![(".".to_string(), manifest.clone())];
// name → version and name → dir maps used for workspace: protocol resolution
// and for workspace linking further down.
let mut ws_package_versions: std::collections::HashMap<String, String> =
std::collections::HashMap::new();
let mut ws_dirs: BTreeMap<String, std::path::PathBuf> = BTreeMap::new();
if has_workspace {
tracing::debug!(
"Workspace: {} packages for {project_name}",
workspace_packages.len()
);
for pkg_dir in &workspace_packages {
let pkg_manifest = aube_manifest::PackageJson::from_path(&pkg_dir.join("package.json"))
.into_diagnostic()
.wrap_err_with(|| format!("failed to read {}/package.json", pkg_dir.display()))?;
let rel_path = pkg_dir
.strip_prefix(&cwd)
.unwrap_or(pkg_dir)
.to_string_lossy()
.to_string();
// Unnamed members are still resolved as importers (below) but cannot be
// targeted by name.
if let Some(ref name) = pkg_manifest.name {
let version = pkg_manifest.version.as_deref().unwrap_or("0.0.0");
ws_package_versions.insert(name.clone(), version.to_string());
ws_dirs.insert(name.clone(), pkg_dir.clone());
tracing::debug!(" {name}@{version} ({rel_path})");
}
if !rel_path.is_empty() {
manifests.push((rel_path, pkg_manifest));
}
}
}
let gvs_triggers =
aube_settings::resolved::disable_global_virtual_store_for_packages(&settings_ctx);
let explicit_global_virtual_store =
aube_settings::resolved::enable_global_virtual_store(&settings_ctx);
// An explicit enableGlobalVirtualStore setting wins; otherwise auto-disable
// the global virtual store when a known-incompatible package is present
// (auto-detection is skipped in CI mode or when virtualStoreOnly is set).
let use_global_virtual_store_override = explicit_global_virtual_store.or_else(|| {
let triggered_by = find_gvs_incompatible_trigger(&manifests, &gvs_triggers);
// NOTE(review): this checks only that a CI env var EXISTS, not its value —
// CI="" or CI="false" still counts as CI mode; confirm this is intended.
let ci_mode = opts.env_snapshot.iter().any(|(k, _)| k == "CI");
let virtual_store_only_setting = aube_settings::resolved::virtual_store_only(&settings_ctx);
if let Some(name) = triggered_by
&& !ci_mode
&& !virtual_store_only_setting
{
tracing::warn!(
"`{name}` isn't compatible with aube's global virtual store — \
installing per-project instead. Install still succeeds; repeat \
installs of this project just won't share materialized packages \
across projects. Fixing this requires an upstream change in \
`{name}` itself (please file it with that project, not aube). \
To silence this warning, add `enableGlobalVirtualStore=false` to \
.npmrc — or set `disableGlobalVirtualStoreForPackages=[]` to opt \
out of this auto-detection entirely. \
Details: https://aube.en.dev/package-manager/node-modules#global-virtual-store"
);
Some(false)
} else {
None
}
});
// Remember the pre-existing lockfile flavor so writes preserve it.
let source_kind_before = if lockfile_enabled {
aube_lockfile::detect_existing_lockfile_kind(&cwd)
} else {
None
};
// --fix-lockfile: parse the existing lockfile leniently (errors ignored) and
// feed it to the resolver as a starting point.
let fix_mode_parse: Option<(aube_lockfile::LockfileGraph, aube_lockfile::LockfileKind)> =
if mode == FrozenMode::Fix && lockfile_enabled {
aube_lockfile::parse_lockfile_with_kind(&cwd, &manifest).ok()
} else {
None
};
let existing_for_resolver: Option<&aube_lockfile::LockfileGraph> =
fix_mode_parse.as_ref().map(|(g, _)| g);
// --lockfile-only (or enableModulesDir=false): resolve and write the lockfile,
// then return early without touching node_modules.
if lockfile_only_effective {
let force_resolve = matches!(mode, FrozenMode::No);
// Deferred-init binding lets the borrowed fix-mode parse and a fresh parse
// share one `Result<&Graph, &Error>` without cloning.
let parsed_owned;
let parsed: Result<
(&aube_lockfile::LockfileGraph, aube_lockfile::LockfileKind),
&aube_lockfile::Error,
> = if let Some((g, k)) = fix_mode_parse.as_ref() {
Ok((g, *k))
} else {
parsed_owned = aube_lockfile::parse_lockfile_with_kind(&cwd, &manifest);
match &parsed_owned {
Ok((g, k)) => Ok((g, *k)),
Err(e) => Err(e),
}
};
// A missing lockfile is fine (we are about to create one); any other parse
// error is fatal.
if let Err(e) = parsed
&& !matches!(e, aube_lockfile::Error::NotFound(_))
{
return Err(miette!("failed to parse lockfile: {e}"));
}
// Skip resolution entirely when the lockfile is fresh w.r.t. both the
// workspace manifests and the catalogs.
let fresh = !force_resolve
&& matches!(
parsed,
Ok((g, _))
if matches!(g.check_drift_workspace(&manifests), DriftStatus::Fresh)
&& matches!(g.check_catalogs_drift(&workspace_catalogs), DriftStatus::Fresh)
);
if fresh {
tracing::debug!("--lockfile-only: lockfile already up to date");
if let Some(p) = prog_ref {
p.finish(true);
}
eprintln!("Lockfile is up to date, resolution step is skipped");
return Ok(());
}
if let Some(p) = prog_ref {
p.set_phase("resolving");
}
let client = std::sync::Arc::new(make_client(&cwd).with_network_mode(opts.network_mode));
// Optional pnpmfile readPackage hook runs in a spawned host process.
let pnpmfile_path = (!opts.ignore_pnpmfile)
.then(|| crate::pnpmfile::detect(&cwd))
.flatten();
let read_package_host = match pnpmfile_path.as_deref() {
Some(p) => crate::pnpmfile::ReadPackageHost::spawn(p)
.await
.wrap_err("failed to start pnpmfile readPackage host")?,
None => None,
};
let read_package_hook: Option<Box<dyn aube_resolver::ReadPackageHook>> =
read_package_host.map(|h| Box::new(h) as Box<dyn aube_resolver::ReadPackageHook>);
let mut resolver = configure_resolver(
aube_resolver::Resolver::new(client),
&cwd,
&manifest,
ResolverConfigInputs {
settings_ctx: &settings_ctx,
workspace_catalogs: &workspace_catalogs,
opts: &opts,
target_lockfile_kind: lockfile_enabled
.then(|| source_kind_before.unwrap_or(aube_lockfile::LockfileKind::Aube)),
},
read_package_hook,
);
let mut graph = if has_workspace {
resolver
.resolve_workspace(&manifests, existing_for_resolver, &ws_package_versions)
.await
} else {
resolver.resolve(&manifest, existing_for_resolver).await
}
.into_diagnostic()
.wrap_err("failed to resolve dependencies")?;
drop(resolver);
if let Some(pnpmfile_path) = pnpmfile_path.as_deref() {
crate::pnpmfile::run_after_all_resolved(pnpmfile_path, &mut graph)
.await
.wrap_err("pnpmfile afterAllResolved hook failed")?;
}
// Backfill tarball URLs for registry packages when the lockfile is
// configured to embed them.
if lockfile_include_tarball_url {
let lo_client = make_client(&cwd);
graph.settings.lockfile_include_tarball_url = true;
for pkg in graph.packages.values_mut() {
if pkg.local_source.is_some() {
continue;
}
if pkg.tarball_url.is_none() {
pkg.tarball_url =
Some(lo_client.tarball_url(pkg.registry_name(), &pkg.version));
}
}
}
// Preserve the pre-existing lockfile flavor (default: aube's own).
let lo_write_kind = source_kind_before.unwrap_or(aube_lockfile::LockfileKind::Aube);
let lo_written = aube_lockfile::write_lockfile_as(&cwd, &graph, &manifest, lo_write_kind)
.into_diagnostic()
.wrap_err("failed to write lockfile")?;
tracing::debug!(
"--lockfile-only: wrote {}",
lo_written
.file_name()
.map(|n| n.to_string_lossy().into_owned())
.unwrap_or_else(|| lo_written.display().to_string())
);
maybe_cleanup_unused_catalogs(&cwd, &settings_ctx, &workspace_catalogs, &graph.catalogs)?;
if let Some(p) = prog_ref {
p.finish(true);
}
eprintln!(
"Lockfile written ({} packages); skipped node_modules linking",
graph.packages.len()
);
return Ok(());
}
let phase_start = std::time::Instant::now();
let store = std::sync::Arc::new(super::open_store(&cwd)?);
// Best effort: pre-create store shard directories; failures are tolerated
// because the slow path creates them on demand.
if let Err(e) = store.ensure_shards_exist() {
tracing::debug!("ensure_shards_exist failed (slow path will cover): {e}");
}
// Decide whether to install from the lockfile or re-resolve. Err(NotFound)
// doubles as the sentinel meaning "fall through to fresh resolution".
let lockfile_result = if !lockfile_enabled {
tracing::debug!("lockfile=false: skipping lockfile parse, re-resolving");
Err(aube_lockfile::Error::NotFound(cwd.clone()))
} else {
match mode {
FrozenMode::No => {
Err(aube_lockfile::Error::NotFound(cwd.clone()))
}
FrozenMode::Fix => {
Err(aube_lockfile::Error::NotFound(cwd.clone()))
}
FrozenMode::Frozen => {
// Frozen: any drift between lockfile and catalogs/manifests is fatal.
let parsed = aube_lockfile::parse_lockfile_with_kind(&cwd, &manifest);
if let Ok((ref graph, _)) = parsed {
if let DriftStatus::Stale { reason } =
graph.check_catalogs_drift(&workspace_catalogs)
{
return Err(miette!(
"lockfile is out of date with pnpm-workspace.yaml: {reason}\n\
help: run without --frozen-lockfile to update the lockfile"
));
}
if let DriftStatus::Stale { reason } = graph.check_drift_workspace(&manifests) {
return Err(miette!(
"lockfile is out of date with package.json: {reason}\n\
help: run without --frozen-lockfile to update the lockfile, \
or run `aube install --no-frozen-lockfile` to regenerate it"
));
}
}
parsed
}
FrozenMode::Prefer => {
// Prefer: use the lockfile when fresh, quietly re-resolve when stale.
match aube_lockfile::parse_lockfile_with_kind(&cwd, &manifest) {
Ok((graph, kind)) => {
if let DriftStatus::Stale { reason } =
graph.check_catalogs_drift(&workspace_catalogs)
{
tracing::debug!(
"Lockfile out of date with workspace catalogs ({reason}), re-resolving..."
);
Err(aube_lockfile::Error::NotFound(cwd.clone()))
} else {
match graph.check_drift_workspace(&manifests) {
DriftStatus::Fresh => Ok((graph, kind)),
DriftStatus::Stale { reason } => {
tracing::debug!(
"Lockfile out of date ({reason}), re-resolving..."
);
Err(aube_lockfile::Error::NotFound(cwd.clone()))
}
}
}
}
other => other,
}
}
}
};
let (graph, package_indices, cached_count, fetch_count) = match lockfile_result {
// Fast path: install straight from a fresh (or imported) lockfile.
Ok((mut graph, kind)) => {
let (sup_os, sup_cpu, sup_libc) = manifest.pnpm_supported_architectures();
let supported_architectures = aube_resolver::SupportedArchitectures {
os: sup_os,
cpu: sup_cpu,
libc: sup_libc,
..Default::default()
};
let ignored_optional_deps: std::collections::BTreeSet<String> = manifest
.pnpm_ignored_optional_dependencies()
.into_iter()
.collect();
// Drop packages that don't match this platform or are explicitly ignored.
aube_resolver::platform::filter_graph(
&mut graph,
&supported_architectures,
&ignored_optional_deps,
);
// npm-family lockfiles lack aube's peer-context encoding, so the peer
// hoist + context pass is recomputed for them here.
if matches!(
kind,
aube_lockfile::LockfileKind::Npm | aube_lockfile::LockfileKind::NpmShrinkwrap
) {
let peer_pass_start = std::time::Instant::now();
let pkgs_before = graph.packages.len();
graph = aube_resolver::hoist_auto_installed_peers(graph);
let peer_options = aube_resolver::PeerContextOptions {
dedupe_peer_dependents: resolve_dedupe_peer_dependents(&settings_ctx),
dedupe_peers: resolve_dedupe_peers(&settings_ctx),
resolve_from_workspace_root: resolve_peers_from_workspace_root(&settings_ctx),
peers_suffix_max_length: resolve_peers_suffix_max_length(&settings_ctx),
};
graph = aube_resolver::apply_peer_contexts(graph, &peer_options);
tracing::debug!(
"peer-context pass (lockfile={:?}) {} → {} packages in {:.1?}",
kind,
pkgs_before,
graph.packages.len(),
peer_pass_start.elapsed()
);
}
let source_label = match kind {
aube_lockfile::LockfileKind::Aube => "Lockfile",
aube_lockfile::LockfileKind::Pnpm => "pnpm-lock.yaml",
aube_lockfile::LockfileKind::Yarn | aube_lockfile::LockfileKind::YarnBerry => {
"yarn.lock"
}
aube_lockfile::LockfileKind::Npm => "package-lock.json",
aube_lockfile::LockfileKind::NpmShrinkwrap => "npm-shrinkwrap.json",
aube_lockfile::LockfileKind::Bun => "bun.lock",
};
tracing::debug!(
"{source_label}: {} packages for {project_name}",
graph.packages.len()
);
tracing::debug!(
"phase:resolve (from lockfile) {:.1?}",
phase_start.elapsed()
);
if let Some(p) = prog_ref {
p.set_total(graph.packages.len());
p.set_phase("fetching");
}
// Shadows the outer phase timer for the fetch phase.
let phase_start = std::time::Instant::now();
let network_mode = opts.network_mode;
let cwd_for_client = cwd.clone();
let (indices, cached, fetched) = fetch_packages_with_root(
&graph.packages,
&store,
|| {
std::sync::Arc::new(
make_client(&cwd_for_client).with_network_mode(network_mode),
)
},
prog_ref,
&cwd,
&aube_dir,
has_workspace,
virtual_store_dir_max_length,
opts.ignore_scripts,
network_concurrency_setting,
verify_store_integrity_setting,
strict_store_pkg_content_check_setting,
opts.git_prepare_depth,
resolve_git_shallow_hosts(&settings_ctx),
)
.await?;
tracing::debug!(
"phase:fetch {:.1?} ({fetched} packages)",
phase_start.elapsed()
);
(graph, indices, cached, fetched)
}
// Slow path: no usable lockfile — resolve from the registry while fetching
// tarballs concurrently as resolutions stream in.
Err(aube_lockfile::Error::NotFound(_))
if !(matches!(mode, FrozenMode::Frozen) && opts.strict_no_lockfile) =>
{
tracing::debug!("No lockfile found, resolving dependencies for {project_name}...");
if let Some(p) = prog_ref {
p.set_phase("resolving");
}
let node_version_for_prewarm = {
let override_ = aube_settings::resolved::node_version(&settings_ctx);
crate::engines::resolve_node_version(override_.as_deref())
};
// Warnings are surfaced by the later build_policy_from_sources call, not here.
let (build_policy_for_prewarm, _policy_warnings_unused) = build_policy_from_sources(
&manifest,
&ws_config_shared,
opts.dangerously_allow_all_builds,
);
let build_policy_for_prewarm = std::sync::Arc::new(build_policy_for_prewarm);
let client =
std::sync::Arc::new(make_client(&cwd).with_network_mode(opts.network_mode));
let tarball_client = client.clone();
// The streaming resolver emits each resolved package over `resolved_rx`.
let (resolver, mut resolved_rx) = aube_resolver::Resolver::with_stream(client);
let pnpmfile_path = (!opts.ignore_pnpmfile)
.then(|| crate::pnpmfile::detect(&cwd))
.flatten();
let read_package_host = match pnpmfile_path.as_deref() {
Some(p) => crate::pnpmfile::ReadPackageHost::spawn(p)
.await
.wrap_err("failed to start pnpmfile readPackage host")?,
None => None,
};
let read_package_hook: Option<Box<dyn aube_resolver::ReadPackageHook>> =
read_package_host.map(|h| Box::new(h) as Box<dyn aube_resolver::ReadPackageHook>);
let mut resolver = configure_resolver(
resolver,
&cwd,
&manifest,
ResolverConfigInputs {
settings_ctx: &settings_ctx,
workspace_catalogs: &workspace_catalogs,
opts: &opts,
target_lockfile_kind: lockfile_enabled
.then(|| source_kind_before.unwrap_or(aube_lockfile::LockfileKind::Aube)),
},
read_package_hook,
);
// Owned clones moved into the concurrent fetch task spawned below.
let post_fetch_client = tarball_client.clone();
let fetch_store = store.clone();
let fetch_progress = prog.clone();
let fetch_project_root = cwd.clone();
let fetch_local_client = tarball_client.clone();
let fetch_ignore_scripts = opts.ignore_scripts;
let fetch_git_prepare_depth = opts.git_prepare_depth;
let fetch_network_concurrency =
network_concurrency_setting.unwrap_or_else(default_streaming_network_concurrency);
let fetch_verify_integrity = verify_store_integrity_setting;
let fetch_strict_pkg_content_check = strict_store_pkg_content_check_setting;
let fetch_git_shallow_hosts = resolve_git_shallow_hosts(&settings_ctx);
let (fetch_sup_os, fetch_sup_cpu, fetch_sup_libc) =
manifest.pnpm_supported_architectures();
let fetch_supported_arch = aube_resolver::SupportedArchitectures {
os: fetch_sup_os,
cpu: fetch_sup_cpu,
libc: fetch_sup_libc,
..Default::default()
};
// Fetched (dep_path, index) pairs flow to the GVS prewarm task over this channel.
let (materialize_tx, materialize_rx) =
tokio::sync::mpsc::unbounded_channel::<(String, aube_store::PackageIndex)>();
// Streaming fetch task: consumes resolved packages as they arrive, importing
// local sources inline and downloading tarballs with bounded concurrency.
let fetch_handle = tokio::spawn(async move {
let semaphore =
std::sync::Arc::new(tokio::sync::Semaphore::new(fetch_network_concurrency));
let mut handles = Vec::new();
let mut indices: BTreeMap<String, aube_store::PackageIndex> = BTreeMap::new();
let mut cached_count = 0usize;
while let Some(pkg) = resolved_rx.recv().await {
// Platform-mismatched registry packages are deferred here; if
// filter_graph keeps them anyway, the catch-up fetch below handles them.
if pkg.local_source.is_none()
&& !aube_resolver::is_supported(
&pkg.os,
&pkg.cpu,
&pkg.libc,
&fetch_supported_arch,
)
{
tracing::debug!(
"deferring tarball fetch for {}@{}: platform mismatch (catch-up will cover survivors)",
pkg.name,
pkg.version
);
continue;
}
if let Some(p) = fetch_progress.as_ref() {
p.inc_total(1);
}
// Local sources (presumably path/git/tarball dependencies — see
// import_local_source) are imported inline rather than downloaded.
if let Some(ref local) = pkg.local_source {
match import_local_source(
&fetch_store,
&fetch_project_root,
local,
Some(&fetch_local_client),
fetch_ignore_scripts,
fetch_git_prepare_depth,
&fetch_git_shallow_hosts,
)
.await
{
Ok(Some(index)) => {
let _ = materialize_tx.send((pkg.dep_path.clone(), index.clone()));
indices.insert(pkg.dep_path, index);
cached_count += 1;
if let Some(p) = fetch_progress.as_ref() {
p.inc_reused(1);
}
}
Ok(None) => {
if let Some(p) = fetch_progress.as_ref() {
p.inc_reused(1);
}
}
Err(e) => return Err(e),
}
continue;
}
let pkg_registry_name = pkg.registry_name().to_string();
// Store hit: reuse the cached index, no network round-trip.
if let Some(index) = fetch_store.load_index(&pkg_registry_name, &pkg.version) {
let _ = materialize_tx.send((pkg.dep_path.clone(), index.clone()));
indices.insert(pkg.dep_path, index);
cached_count += 1;
if let Some(p) = fetch_progress.as_ref() {
p.inc_reused(1);
}
continue;
}
let sem = semaphore.clone();
let store = fetch_store.clone();
let client = tarball_client.clone();
let row = fetch_progress
.as_ref()
.map(|p| p.start_fetch(&pkg.name, &pkg.version));
let bytes_progress = fetch_progress.clone();
handles.push(tokio::spawn(async move {
let _row = row;
// The permit bounds concurrent downloads; it is released (drop below)
// before the CPU-bound import so it doesn't hold a network slot.
let permit = sem.acquire().await.unwrap();
let url = pkg
.tarball_url
.clone()
.unwrap_or_else(|| client.tarball_url(&pkg_registry_name, &pkg.version));
tracing::trace!("Fetching {}@{}", pkg.name, pkg.version);
let bytes = client.fetch_tarball_bytes(&url).await.map_err(|e| {
miette!("failed to fetch {}@{}: {e}", pkg.name, pkg.version)
})?;
if let Some(p) = bytes_progress.as_ref() {
p.inc_downloaded_bytes(bytes.len() as u64);
}
drop(permit);
let pkg_display_name = pkg.name.clone();
let pkg_version = pkg.version.clone();
let dep_path = pkg.dep_path.clone();
let integrity = pkg.integrity.clone();
// Integrity check + tar import are blocking work — off the async pool.
let index = tokio::task::spawn_blocking(move || -> miette::Result<_> {
if fetch_verify_integrity && let Some(ref expected) = integrity {
aube_store::verify_integrity(&bytes, expected).map_err(|e| {
miette!("{pkg_display_name}@{pkg_version}: {e}")
})?;
}
let index = store.import_tarball(&bytes).map_err(|e| {
miette!("failed to import {pkg_display_name}@{pkg_version}: {e}")
})?;
if fetch_strict_pkg_content_check {
aube_store::validate_pkg_content(
&index,
&pkg_registry_name,
&pkg_version,
)
.map_err(|e| miette!("{pkg_display_name}@{pkg_version}: {e}"))?;
}
// Index caching is best-effort; failure only costs a re-import later.
if let Err(e) =
store.save_index(&pkg_registry_name, &pkg_version, &index)
{
tracing::warn!(
"Failed to cache index for {pkg_display_name}@{pkg_version}: {e}"
);
}
Ok(index)
})
.await
.into_diagnostic()??;
Ok::<_, miette::Report>((dep_path, index))
}));
}
let fetch_count = handles.len();
for handle in handles {
let (dep_path, index) = handle.await.into_diagnostic()??;
let _ = materialize_tx.send((dep_path.clone(), index.clone()));
indices.insert(dep_path, index);
}
// Closing the channel lets the prewarm task finish its drain loop.
drop(materialize_tx);
Ok::<_, miette::Report>((indices, cached_count, fetch_count))
});
let resolve_result = if has_workspace {
resolver
.resolve_workspace(&manifests, existing_for_resolver, &ws_package_versions)
.await
} else {
resolver.resolve(&manifest, existing_for_resolver).await
}
.into_diagnostic()
.wrap_err("failed to resolve dependencies");
if resolve_result.is_err() {
// Resolution failed: stop the in-flight fetch task and propagate.
fetch_handle.abort();
// `map(|_| unreachable!())` re-types the known-Err result for `return`
// without unwrapping; the closure never runs because is_err() held.
return resolve_result.map(|_| unreachable!());
}
let mut graph = resolve_result.unwrap();
if let Some(pnpmfile_path) = pnpmfile_path.as_deref() {
crate::pnpmfile::run_after_all_resolved(pnpmfile_path, &mut graph)
.await
.wrap_err("pnpmfile afterAllResolved hook failed")?;
}
// Carry metadata forward from any previous lockfile (best effort; parse
// errors are ignored).
if let Ok(prior) = aube_lockfile::parse_lockfile(&cwd, &manifest) {
graph.overlay_metadata_from(&prior);
}
tracing::debug!("Resolved {} packages", graph.packages.len());
if let Some(p) = prog_ref {
p.set_phase("fetching");
}
tracing::debug!("phase:resolve (fresh) {:.1?}", phase_start.elapsed());
// Dropping the resolver presumably closes the stream feeding the fetch
// task, letting its recv loop end — confirm against Resolver::with_stream.
drop(resolver);
// GVS prewarm task: materialize packages into the global virtual store as
// their indices arrive, overlapping linking with the remaining fetches.
let materialize_phase_start = std::time::Instant::now();
let materialize_graph = std::sync::Arc::new(graph.clone());
let materialize_store = store.clone();
let materialize_virtual_store_dir_max_length = virtual_store_dir_max_length;
let materialize_strategy = resolve_link_strategy(&cwd, &settings_ctx)?;
let materialize_link_concurrency = link_concurrency_setting;
let materialize_patches_vec = crate::patches::load_patches(&cwd)?;
let materialize_patches: aube_linker::Patches = materialize_patches_vec
.values()
.map(|p| (p.key.clone(), p.content.clone()))
.collect();
let materialize_patch_hashes: std::collections::BTreeMap<String, String> =
materialize_patches_vec
.values()
.map(|p| (p.key.clone(), p.content_hash()))
.collect();
let materialize_node_version = node_version_for_prewarm.clone();
// Closure answering "may this package run build scripts?" — feeds the
// graph-hash computation below.
let materialize_allow = {
let build_policy = build_policy_for_prewarm.clone();
move |name: &str, version: &str| {
matches!(
build_policy.decide(name, version),
aube_scripts::AllowDecision::Allow
)
}
};
let materialize_handle: tokio::task::JoinHandle<
miette::Result<aube_linker::LinkStats>,
> = tokio::spawn(async move {
let engine = materialize_node_version
.as_deref()
.map(aube_lockfile::graph_hash::engine_name_default);
let patch_hash_fn = |name: &str, version: &str| -> Option<String> {
let key = format!("{name}@{version}");
materialize_patch_hashes.get(&key).cloned()
};
let graph_hashes = aube_lockfile::graph_hash::compute_graph_hashes_with_patches(
&materialize_graph,
&materialize_allow,
engine.as_ref(),
&patch_hash_fn,
);
let mut linker =
aube_linker::Linker::new(materialize_store.as_ref(), materialize_strategy)
.with_graph_hashes(graph_hashes)
.with_virtual_store_dir_max_length(
materialize_virtual_store_dir_max_length,
);
if !materialize_patches.is_empty() {
linker = linker.with_patches(materialize_patches);
}
if let Some(enabled) = use_global_virtual_store_override {
linker = linker.with_use_global_virtual_store(enabled);
}
// Not using the global virtual store: nothing to prewarm, but the channel
// is still drained so the fetch side is never affected.
if !linker.uses_global_virtual_store() {
let mut rx = materialize_rx;
while rx.recv().await.is_some() {}
return Ok(aube_linker::LinkStats::default());
}
let linker = std::sync::Arc::new(linker);
let graph = materialize_graph;
// Channel messages are keyed by dep_path (see the sends in the fetch task);
// this map also indexes by canonical name@version so either key resolves to
// every contextualized dep_path that needs the package.
let mut canonical_to_contextualized: std::collections::HashMap<
String,
std::collections::HashSet<String>,
> = std::collections::HashMap::new();
for (dep_path, pkg) in &graph.packages {
if pkg.local_source.is_some() {
continue;
}
let canonical = format!("{}@{}", pkg.name, pkg.version);
canonical_to_contextualized
.entry(canonical)
.or_default()
.insert(dep_path.clone());
canonical_to_contextualized
.entry(dep_path.clone())
.or_default()
.insert(dep_path.clone());
}
// Default concurrency is lower on macOS — presumably due to slower
// per-file syscall behavior there; confirm the original rationale.
let permits = materialize_link_concurrency
.unwrap_or(if cfg!(target_os = "macos") { 4 } else { 16 });
let sem = std::sync::Arc::new(tokio::sync::Semaphore::new(permits));
let mut in_flight: Vec<
tokio::task::JoinHandle<miette::Result<aube_linker::LinkStats>>,
> = Vec::new();
let mut rx = materialize_rx;
while let Some((key, index)) = rx.recv().await {
let Some(dep_paths) = canonical_to_contextualized.get(&key).cloned() else {
continue;
};
let index = std::sync::Arc::new(index);
for dep_path in dep_paths {
let Some(pkg) = graph.packages.get(&dep_path).cloned() else {
continue;
};
if pkg.local_source.is_some() {
continue;
}
let linker = linker.clone();
let sem = sem.clone();
let index = index.clone();
in_flight.push(tokio::spawn(async move {
let _permit = sem.acquire().await.unwrap();
let dep_path_for_err = dep_path.clone();
// Filesystem-heavy linking runs on the blocking pool.
tokio::task::spawn_blocking(move || -> miette::Result<_> {
let mut stats = aube_linker::LinkStats::default();
linker
.ensure_in_virtual_store(&dep_path, &pkg, &index, &mut stats)
.map_err(|e| {
miette!("prewarm GVS for {dep_path_for_err}: {e}")
})?;
Ok(stats)
})
.await
.into_diagnostic()?
}));
}
}
// Aggregate stats from every spawned link task.
let mut total = aube_linker::LinkStats::default();
for handle in in_flight {
let s = handle.await.into_diagnostic()??;
total.packages_linked += s.packages_linked;
total.packages_cached += s.packages_cached;
total.files_linked += s.files_linked;
}
Ok(total)
});
let fetch_phase_start = std::time::Instant::now();
// Join the fetch task first; on failure, stop the prewarm task too.
let fetch_result = match fetch_handle.await.into_diagnostic()? {
Ok(v) => v,
Err(e) => {
materialize_handle.abort();
return Err(e);
}
};
let (canonical_indices, mut cached, mut fetched) = fetch_result;
tracing::debug!(
"phase:fetch {:.1?} ({fetched} packages, {cached} cached)",
fetch_phase_start.elapsed()
);
let prewarm_stats = materialize_handle.await.into_diagnostic()??;
tracing::debug!(
"phase:prewarm-gvs {:.1?} ({} packages, {} files)",
materialize_phase_start.elapsed(),
prewarm_stats.packages_linked,
prewarm_stats.files_linked,
);
// Re-key fetched indices from canonical ids to contextualized dep paths.
let mut indices = remap_indices_to_contextualized(&canonical_indices, &graph);
if lockfile_enabled {
if lockfile_include_tarball_url {
graph.settings.lockfile_include_tarball_url = true;
for pkg in graph.packages.values_mut() {
if pkg.local_source.is_some() {
continue;
}
if pkg.tarball_url.is_none() {
pkg.tarball_url = Some(
post_fetch_client.tarball_url(pkg.registry_name(), &pkg.version),
);
}
}
}
// Write back in the same flavor found on disk (default: aube's own).
let write_kind = source_kind_before.unwrap_or(aube_lockfile::LockfileKind::Aube);
let written_path =
aube_lockfile::write_lockfile_as(&cwd, &graph, &manifest, write_kind)
.into_diagnostic()
.wrap_err("failed to write lockfile")?;
tracing::debug!(
"Wrote {}",
written_path
.file_name()
.map(|n| n.to_string_lossy().into_owned())
.unwrap_or_else(|| written_path.display().to_string())
);
} else {
tracing::debug!("lockfile=false: skipping lockfile write");
}
let (sup_os, sup_cpu, sup_libc) = manifest.pnpm_supported_architectures();
let install_supported_architectures = aube_resolver::SupportedArchitectures {
os: sup_os,
cpu: sup_cpu,
libc: sup_libc,
..Default::default()
};
let install_ignored_optional: std::collections::BTreeSet<String> = manifest
.pnpm_ignored_optional_dependencies()
.into_iter()
.collect();
// Platform-filter AFTER the lockfile write above — presumably so the
// written lockfile stays platform-agnostic; confirm.
aube_resolver::platform::filter_graph(
&mut graph,
&install_supported_architectures,
&install_ignored_optional,
);
// Catch-up: fetch anything filter_graph kept but the streaming fetch deferred.
let missing_packages: BTreeMap<String, aube_lockfile::LockedPackage> = graph
.packages
.iter()
.filter(|(dep_path, _)| !indices.contains_key(*dep_path))
.map(|(k, v)| (k.clone(), v.clone()))
.collect();
if !missing_packages.is_empty() {
tracing::debug!(
"catch-up fetch for {} package(s) deferred by the streaming filter but kept by filter_graph",
missing_packages.len()
);
let cwd_for_catchup_client = cwd.clone();
let catchup_network_mode = opts.network_mode;
let (catchup_indices, catchup_cached, catchup_fetched) = fetch_packages_with_root(
&missing_packages,
&store,
|| {
std::sync::Arc::new(
make_client(&cwd_for_catchup_client)
.with_network_mode(catchup_network_mode),
)
},
prog_ref,
&cwd,
&aube_dir,
has_workspace,
virtual_store_dir_max_length,
opts.ignore_scripts,
network_concurrency_setting,
verify_store_integrity_setting,
strict_store_pkg_content_check_setting,
opts.git_prepare_depth,
resolve_git_shallow_hosts(&settings_ctx),
)
.await?;
indices.extend(catchup_indices);
cached += catchup_cached;
fetched += catchup_fetched;
}
(graph, indices, cached, fetched)
}
// Missing lockfile while --frozen-lockfile is strictly enforced: hard error.
Err(aube_lockfile::Error::NotFound(_)) => {
return Err(miette!(
"no lockfile found and --frozen-lockfile is set\n\
help: commit pnpm-lock.yaml to your repository, or run \
`aube install --no-frozen-lockfile` to generate one"
));
}
Err(e) => {
return Err(e)
.into_diagnostic()
.wrap_err("failed to parse lockfile");
}
};
tracing::debug!("Packages: {cached_count} cached, {fetch_count} fetched");
maybe_cleanup_unused_catalogs(&cwd, &settings_ctx, &workspace_catalogs, &graph.catalogs)?;
let strict_peer_deps = resolve_strict_peer_dependencies(&settings_ctx);
let peer_rules = PeerDependencyRules::resolve(&manifest, &settings_ctx);
warn_unmet_peers(&graph, strict_peer_deps, &peer_rules)?;
// Honor --prod / --dev / --no-optional by filtering dependency edges before
// linking; the unfiltered `graph` already went into the lockfile above.
let mut graph_for_link = if opts.prod || opts.dev || opts.no_optional {
let before = graph.packages.len();
let prod = opts.prod;
let dev = opts.dev;
let no_optional = opts.no_optional;
let filtered = graph.filter_deps(|d| {
if prod && d.dep_type == aube_lockfile::DepType::Dev {
return false;
}
if dev && d.dep_type != aube_lockfile::DepType::Dev {
return false;
}
if no_optional && d.dep_type == aube_lockfile::DepType::Optional {
return false;
}
true
});
let dropped = before - filtered.packages.len();
if dropped > 0 {
// Build a human-readable label for the active flag combination;
// --prod and --dev are mutually exclusive (clap conflicts_with).
let label = if opts.dev && opts.no_optional {
"--dev --no-optional"
} else if opts.dev {
"--dev"
} else if opts.prod && opts.no_optional {
"--prod --no-optional"
} else if opts.prod {
"--prod"
} else if opts.no_optional {
"--no-optional"
} else {
unreachable!()
};
tracing::debug!("{label}: skipping {dropped} packages");
}
filtered
} else {
graph.clone()
};
// Narrow further to the selected workspace members, or to just the root
// importer when not linking every workspace importer.
if !opts.workspace_filter.is_empty() {
graph_for_link = filter_graph_to_workspace_selection(
&cwd,
&workspace_packages,
&graph_for_link,
&opts.workspace_filter,
)?;
} else if has_workspace && !link_all_workspace_importers {
graph_for_link = filter_graph_to_importers(&graph_for_link, ["."]);
}
// Engine compatibility checks for every package about to be linked.
let engine_strict = aube_settings::resolved::engine_strict(&settings_ctx);
// NOTE(review): child_concurrency is not used within this excerpt —
// presumably consumed by the script-running phase past this view.
let child_concurrency = aube_settings::resolved::child_concurrency(&settings_ctx) as usize;
let node_version_override = aube_settings::resolved::node_version(&settings_ctx);
let node_version = crate::engines::resolve_node_version(node_version_override.as_deref());
crate::engines::run_checks(
&aube_dir,
&manifest,
&graph_for_link,
&package_indices,
node_version.as_deref(),
engine_strict,
virtual_store_dir_max_length,
)?;
// Build policy decides which packages may run install scripts; warnings are
// user-facing.
let (build_policy, policy_warnings) = build_policy_from_sources(
&manifest,
&ws_config_shared,
opts.dangerously_allow_all_builds,
);
for w in &policy_warnings {
eprintln!("warn: {w}");
}
let phase_start = std::time::Instant::now();
let strategy = resolve_link_strategy(&cwd, &settings_ctx)?;
if let Some(p) = prog_ref {
p.set_phase("linking");
}
tracing::debug!("Link strategy: {strategy:?}");
let shamefully_hoist = aube_settings::resolved::shamefully_hoist(&settings_ctx);
let public_hoist_pattern = aube_settings::resolved::public_hoist_pattern(&settings_ctx);
let hoist = aube_settings::resolved::hoist(&settings_ctx);
let hoist_pattern = aube_settings::resolved::hoist_pattern(&settings_ctx);
let hoist_workspace_packages = aube_settings::resolved::hoist_workspace_packages(&settings_ctx);
let dedupe_direct_deps = aube_settings::resolved::dedupe_direct_deps(&settings_ctx);
let virtual_store_only = aube_settings::resolved::virtual_store_only(&settings_ctx);
// node-linker=pnp is rejected outright; a CLI value overrides the settings
// stack and the pnp check is case-insensitive on the trimmed input.
let reject_pnp =
miette!("node-linker=pnp is not supported by aube; use `isolated` (default) or `hoisted`");
let node_linker_cli = aube_settings::values::string_from_cli("nodeLinker", settings_ctx.cli);
let node_linker = if let Some(cli) = node_linker_cli.as_deref() {
let trimmed = cli.trim();
if trimmed.eq_ignore_ascii_case("pnp") {
return Err(reject_pnp);
}
trimmed.parse::<aube_linker::NodeLinker>().map_err(|_| {
miette!("unknown --node-linker value `{cli}`; expected `isolated` or `hoisted`")
})?
} else {
match aube_settings::resolved::node_linker(&settings_ctx) {
aube_settings::resolved::NodeLinker::Pnp => return Err(reject_pnp),
aube_settings::resolved::NodeLinker::Hoisted => aube_linker::NodeLinker::Hoisted,
aube_settings::resolved::NodeLinker::Isolated => aube_linker::NodeLinker::Isolated,
}
};
tracing::debug!("node-linker: {:?}", node_linker);
// Assemble the final linker from all resolved layout settings.
let mut linker = aube_linker::Linker::new(store.as_ref(), strategy)
.with_shamefully_hoist(shamefully_hoist)
.with_public_hoist_pattern(&public_hoist_pattern)
.with_hoist(hoist)
.with_hoist_pattern(&hoist_pattern)
.with_hoist_workspace_packages(hoist_workspace_packages)
.with_dedupe_direct_deps(dedupe_direct_deps)
.with_virtual_store_dir_max_length(virtual_store_dir_max_length)
.with_node_linker(node_linker)
.with_link_concurrency(link_concurrency_setting)
.with_virtual_store_only(virtual_store_only)
.with_modules_dir_name(modules_dir_name.clone())
.with_aube_dir_override(aube_dir.clone());
if let Some(enabled) = use_global_virtual_store_override {
linker = linker.with_use_global_virtual_store(enabled);
}
let resolved_patches = crate::patches::load_patches(&cwd)?;
let patch_hashes: std::collections::BTreeMap<String, String> = resolved_patches
.values()
.map(|p| (p.key.clone(), p.content_hash()))
.collect();
let patches_for_linker: aube_linker::Patches = resolved_patches
.values()
.map(|p| (p.key.clone(), p.content.clone()))
.collect();
let patch_hash_fn = |name: &str, version: &str| -> Option<String> {
let key = format!("{name}@{version}");
patch_hashes.get(&key).cloned()
};
// Graph hashes (derived from build allowance, engine name and patch hashes)
// are only computed when the global virtual store is in play.
if linker.uses_global_virtual_store() {
let engine = node_version
.as_deref()
.map(aube_lockfile::graph_hash::engine_name_default);
let allow = |name: &str, version: &str| {
matches!(
build_policy.decide(name, version),
aube_scripts::AllowDecision::Allow
)
};
let graph_hashes = aube_lockfile::graph_hash::compute_graph_hashes_with_patches(
&graph_for_link,
&allow,
engine.as_ref(),
&patch_hash_fn,
);
linker = linker.with_graph_hashes(graph_hashes);
}
if !patches_for_linker.is_empty() {
linker = linker.with_patches(patches_for_linker);
}
// Link node_modules (workspace-aware when members exist). NOTE: this
// `let stats = ...` expression continues beyond this excerpt.
let stats = if has_workspace {
linker
.link_workspace(&cwd, &graph_for_link, &package_indices, &ws_dirs)
.into_diagnostic()
.wrap_err("failed to link workspace node_modules")?
} else {
linker
.link_all(&cwd, &graph_for_link, &package_indices)
.into_diagnostic()
.wrap_err("failed to link node_modules")?
};
tracing::debug!(
"phase:link {:.1?} ({} files)",
phase_start.elapsed(),
stats.files_linked
);
if has_workspace
&& matches!(node_linker, aube_linker::NodeLinker::Isolated)
&& !virtual_store_only
{
let inject_start = std::time::Instant::now();
let injected_count = super::inject::apply_injected(
&cwd,
&modules_dir_name,
&aube_dir,
virtual_store_dir_max_length,
&graph_for_link,
&manifests,
&ws_dirs,
)?;
if injected_count > 0 {
tracing::debug!(
"phase:inject {:.1?} ({injected_count} workspace deps injected)",
inject_start.elapsed()
);
}
}
let placements_ref = stats.hoisted_placements.as_ref();
let phase_start = std::time::Instant::now();
let extend_node_path = aube_settings::resolved::extend_node_path(&settings_ctx);
let prefer_symlinked_executables =
aube_settings::resolved::prefer_symlinked_executables(&settings_ctx);
let shim_opts = aube_linker::BinShimOptions {
extend_node_path,
prefer_symlinked_executables,
};
if !virtual_store_only {
link_bins(
&cwd,
&modules_dir_name,
&aube_dir,
&graph_for_link,
virtual_store_dir_max_length,
placements_ref,
shim_opts,
)?;
if has_workspace {
for (importer_path, deps) in &graph_for_link.importers {
if importer_path == "." {
continue;
}
let pkg_dir = cwd.join(importer_path);
let bin_dir = pkg_dir.join(&modules_dir_name).join(".bin");
std::fs::create_dir_all(&bin_dir).into_diagnostic()?;
for dep in deps {
link_bins_for_dep(
&aube_dir,
&bin_dir,
&graph_for_link,
&dep.dep_path,
&dep.name,
virtual_store_dir_max_length,
placements_ref,
shim_opts,
)?;
}
}
}
link_dep_bins(
&aube_dir,
&graph_for_link,
virtual_store_dir_max_length,
placements_ref,
shim_opts,
)?;
tracing::debug!("phase:link_bins {:.1?}", phase_start.elapsed());
}
let install_is_noop = stats.packages_linked == 0 && stats.top_level_linked == 0;
if let Some(p) = prog_ref {
p.finish(!install_is_noop);
}
if !opts.ignore_scripts && strict_dep_builds_setting && !virtual_store_only {
let unreviewed = unreviewed_dep_builds(
&aube_dir,
&graph_for_link,
&build_policy,
virtual_store_dir_max_length,
placements_ref,
)?;
if !unreviewed.is_empty() {
return Err(miette!(
"dependencies with build scripts must be reviewed before install:\n{}\nhelp: add them to `allowBuilds` / `onlyBuiltDependencies`, set `neverBuiltDependencies`, or set `strictDepBuilds=false`",
unreviewed
.into_iter()
.map(|pkg| format!(" - {pkg}"))
.collect::<Vec<_>>()
.join("\n")
));
}
}
if !opts.ignore_scripts && build_policy.has_any_allow_rule() && !virtual_store_only {
let side_effects_cache_root =
side_effects_cache_setting.then(|| side_effects_cache_root(store.as_ref()));
let side_effects_cache = side_effects_cache_root
.as_deref()
.map(|root| {
if side_effects_cache_readonly_setting {
SideEffectsCacheConfig::RestoreOnly(root)
} else {
SideEffectsCacheConfig::RestoreAndSave(root)
}
})
.unwrap_or(SideEffectsCacheConfig::Disabled);
let ran = run_dep_lifecycle_scripts(
&cwd,
&modules_dir_name,
&aube_dir,
&graph_for_link,
&build_policy,
virtual_store_dir_max_length,
child_concurrency,
placements_ref,
side_effects_cache,
)
.await?;
if ran > 0 {
tracing::debug!("allowBuilds: ran {ran} dep lifecycle script(s)");
}
}
if !opts.ignore_scripts && !virtual_store_only {
for hook in [
aube_scripts::LifecycleHook::Install,
aube_scripts::LifecycleHook::PostInstall,
aube_scripts::LifecycleHook::Prepare,
] {
run_root_lifecycle(&cwd, &modules_dir_name, &manifest, hook).await?;
}
}
if !virtual_store_only {
state::write_state(&cwd, opts.prod || opts.dev)
.into_diagnostic()
.wrap_err("failed to write install state")?;
}
let modules_cache_max_age_minutes =
aube_settings::resolved::modules_cache_max_age(&settings_ctx);
if modules_cache_max_age_minutes > 0 && !virtual_store_only {
let removed = sweep_orphaned_aube_entries(
&aube_dir,
&graph,
virtual_store_dir_max_length,
std::time::Duration::from_secs(modules_cache_max_age_minutes.saturating_mul(60)),
);
if removed > 0 {
tracing::debug!("modulesCacheMaxAge: swept {removed} orphaned .aube entry/entries");
}
}
let elapsed = start.elapsed();
tracing::debug!(
"Done in {:.0?}: {} packages ({} cached), {} files linked, {} top-level",
elapsed,
stats.packages_linked + stats.packages_cached,
stats.packages_cached,
stats.files_linked,
stats.top_level_linked
);
if stats.packages_linked == 0
&& stats.packages_cached == 0
&& graph_for_link
.packages
.values()
.any(|p| p.local_source.is_none())
{
return Err(miette!("no packages were linked — something went wrong"));
}
if let Some(p) = prog_ref {
p.print_install_summary(
stats.packages_linked,
stats.top_level_linked,
graph_for_link.packages.len(),
elapsed,
);
}
if !opts.ignore_scripts && !strict_dep_builds_setting && !virtual_store_only {
let unreviewed = unreviewed_dep_builds(
&aube_dir,
&graph_for_link,
&build_policy,
virtual_store_dir_max_length,
placements_ref,
)?;
if !unreviewed.is_empty() {
const MAX_INLINE: usize = 5;
let list = if unreviewed.len() <= MAX_INLINE {
unreviewed.join(", ")
} else {
format!(
"{}, and {} more",
unreviewed[..MAX_INLINE].join(", "),
unreviewed.len() - MAX_INLINE
)
};
tracing::warn!(
"ignored build scripts for {} package(s): {}. Run `aube approve-builds` to review and enable them, or set `strictDepBuilds=true` to fail installs that have unreviewed builds.",
unreviewed.len(),
list
);
}
}
Ok(())
}
fn sweep_orphaned_aube_entries(
aube_dir: &std::path::Path,
graph: &aube_lockfile::LockfileGraph,
virtual_store_dir_max_length: usize,
max_age: std::time::Duration,
) -> usize {
use aube_lockfile::dep_path_filename::dep_path_to_filename;
let entries = match std::fs::read_dir(aube_dir) {
Ok(e) => e,
Err(e) if e.kind() == std::io::ErrorKind::NotFound => return 0,
Err(e) => {
tracing::debug!(
"modulesCacheMaxAge: cannot read {}: {e}; skipping sweep",
aube_dir.display()
);
return 0;
}
};
let in_use: std::collections::HashSet<String> = graph
.packages
.keys()
.map(|dep_path| dep_path_to_filename(dep_path, virtual_store_dir_max_length))
.collect();
let now = std::time::SystemTime::now();
let mut removed = 0usize;
for entry in entries.flatten() {
let name = entry.file_name();
let name_str = name.to_string_lossy();
if name_str.starts_with('.') {
continue;
}
if name_str == "node_modules" {
continue;
}
if in_use.contains(name_str.as_ref()) {
continue;
}
let metadata = match entry.path().symlink_metadata() {
Ok(m) => m,
Err(e) => {
tracing::debug!(
"modulesCacheMaxAge: cannot stat {}: {e}",
entry.path().display()
);
continue;
}
};
let modified = match metadata.modified() {
Ok(t) => t,
Err(_) => continue, };
let age = now.duration_since(modified).unwrap_or_default();
if age < max_age {
continue;
}
let path = entry.path();
let file_type = metadata.file_type();
let result = if file_type.is_symlink() {
std::fs::remove_file(&path)
} else {
std::fs::remove_dir_all(&path).or_else(|_| std::fs::remove_file(&path))
};
match result {
Ok(()) => removed += 1,
Err(e) => tracing::debug!(
"modulesCacheMaxAge: failed to remove {}: {e}",
path.display()
),
}
}
removed
}
/// Narrows `graph` to the workspace packages matched by `filters`.
///
/// Resolves the `--filter` selectors against the workspace layout, keeps
/// only the importer entries for the selected packages, and re-runs
/// `filter_deps` over the result. Errors if the selector is invalid or
/// matches no workspace package at all.
fn filter_graph_to_workspace_selection(
    workspace_root: &std::path::Path,
    workspace_packages: &[std::path::PathBuf],
    graph: &aube_lockfile::LockfileGraph,
    filters: &aube_workspace::selector::EffectiveFilter,
) -> miette::Result<aube_lockfile::LockfileGraph> {
    let selected = aube_workspace::selector::select_workspace_packages(
        workspace_root,
        workspace_packages,
        filters,
    )
    .map_err(|e| miette!("invalid --filter selector: {e}"))?;
    if selected.is_empty() {
        return Err(miette!(
            "aube install: filter {filters:?} did not match any workspace package"
        ));
    }
    // Importer paths (relative to the workspace root) that survive the filter.
    let mut retained = std::collections::BTreeSet::new();
    for pkg in &selected {
        retained.insert(super::workspace_importer_path(workspace_root, &pkg.dir)?);
    }
    let mut importers = std::collections::BTreeMap::new();
    for (importer, deps) in &graph.importers {
        if retained.contains(importer) {
            importers.insert(importer.clone(), deps.clone());
        }
    }
    let narrowed = aube_lockfile::LockfileGraph {
        importers,
        ..graph.clone()
    };
    // Tautological predicate: filter_deps presumably re-derives the set of
    // packages reachable from the remaining importers — TODO confirm.
    Ok(narrowed.filter_deps(|_| true))
}
/// Restricts `graph` to a fixed, compile-time-known list of importer paths,
/// then re-runs `filter_deps` over the narrowed graph.
fn filter_graph_to_importers<const N: usize>(
    graph: &aube_lockfile::LockfileGraph,
    keep_importers: [&str; N],
) -> aube_lockfile::LockfileGraph {
    let wanted: std::collections::BTreeSet<&str> = keep_importers.into_iter().collect();
    let mut importers = std::collections::BTreeMap::new();
    for (importer, deps) in &graph.importers {
        if wanted.contains(importer.as_str()) {
            importers.insert(importer.clone(), deps.clone());
        }
    }
    let narrowed = aube_lockfile::LockfileGraph {
        importers,
        ..graph.clone()
    };
    // Tautological predicate: filter_deps presumably prunes packages no
    // longer reachable from the kept importers — TODO confirm.
    narrowed.filter_deps(|_| true)
}
/// Resolves the on-disk directory where a package has been materialized.
///
/// With hoisted placements available, the recorded placement wins;
/// otherwise falls back to the virtual-store layout
/// `<aube_dir>/<encoded dep path>/node_modules/<name>`.
fn materialized_pkg_dir(
    aube_dir: &std::path::Path,
    dep_path: &str,
    name: &str,
    virtual_store_dir_max_length: usize,
    placements: Option<&aube_linker::HoistedPlacements>,
) -> std::path::PathBuf {
    match placements.and_then(|p| p.package_dir(dep_path)) {
        Some(placed) => placed.to_path_buf(),
        None => aube_dir
            .join(dep_path_to_filename(dep_path, virtual_store_dir_max_length))
            .join("node_modules")
            .join(name),
    }
}
/// Returns the `node_modules` directory that contains `package_dir`.
///
/// Scoped packages (`@scope/name`) live one directory deeper than unscoped
/// ones, so they climb two parent levels instead of one. If the required
/// parents do not exist, `package_dir` itself is returned unchanged.
fn dep_modules_dir_for(package_dir: &std::path::Path, name: &str) -> std::path::PathBuf {
    let levels = if name.starts_with('@') { 2 } else { 1 };
    let mut dir = package_dir;
    for _ in 0..levels {
        match dir.parent() {
            Some(parent) => dir = parent,
            None => return package_dir.to_path_buf(),
        }
    }
    dir.to_path_buf()
}
/// Reads and parses the `package.json` of a materialized package.
///
/// Returns `Ok(None)` when the manifest file does not exist (the package
/// may simply not be materialized at that location). Read errors other
/// than not-found, and parse failures, are reported as diagnostics.
fn read_materialized_pkg_json(
    aube_dir: &std::path::Path,
    dep_path: &str,
    name: &str,
    virtual_store_dir_max_length: usize,
    placements: Option<&aube_linker::HoistedPlacements>,
) -> miette::Result<Option<serde_json::Value>> {
    let pkg_json_path = materialized_pkg_dir(
        aube_dir,
        dep_path,
        name,
        virtual_store_dir_max_length,
        placements,
    )
    .join("package.json");
    let content = match std::fs::read_to_string(&pkg_json_path) {
        Ok(text) => text,
        Err(e) if e.kind() == std::io::ErrorKind::NotFound => return Ok(None),
        Err(e) => {
            return Err(miette!(
                "failed to read package.json for {name} at {}: {e}",
                pkg_json_path.display()
            ));
        }
    };
    serde_json::from_str(&content)
        .into_diagnostic()
        .wrap_err_with(|| format!("failed to parse package.json for {name}"))
        .map(Some)
}
/// Creates `.bin` shims for one dependency's declared `bin` entries.
///
/// Reads the materialized package's manifest; a string `bin` maps the
/// package's unscoped name to that path, while an object maps each key to
/// its own entry point. Bundled dependencies' bins are linked afterwards.
/// Packages without a manifest or without a `bin` field are skipped.
#[allow(clippy::too_many_arguments)]
fn link_bins_for_dep(
    aube_dir: &std::path::Path,
    bin_dir: &std::path::Path,
    graph: &aube_lockfile::LockfileGraph,
    dep_path: &str,
    name: &str,
    virtual_store_dir_max_length: usize,
    placements: Option<&aube_linker::HoistedPlacements>,
    shim_opts: aube_linker::BinShimOptions,
) -> miette::Result<()> {
    let pkg_dir = materialized_pkg_dir(
        aube_dir,
        dep_path,
        name,
        virtual_store_dir_max_length,
        placements,
    );
    let manifest = read_materialized_pkg_json(
        aube_dir,
        dep_path,
        name,
        virtual_store_dir_max_length,
        placements,
    )?;
    match manifest.as_ref().and_then(|m| m.get("bin")) {
        Some(serde_json::Value::String(rel)) => {
            // String form: the shim takes the package's unscoped name.
            let bin_name = name.rsplit('/').next().unwrap_or(name);
            create_bin_link(bin_dir, bin_name, &pkg_dir.join(rel), shim_opts)?;
        }
        Some(serde_json::Value::Object(entries)) => {
            for (bin_name, rel) in entries {
                if let Some(rel) = rel.as_str() {
                    create_bin_link(bin_dir, bin_name, &pkg_dir.join(rel), shim_opts)?;
                }
            }
        }
        // No manifest, no `bin` field, or an unexpected JSON type: skip.
        _ => {}
    }
    link_bundled_bins(bin_dir, &pkg_dir, graph, dep_path, shim_opts)?;
    Ok(())
}
/// Links `.bin` shims for the root importer's direct dependencies into
/// `<project>/<modules_dir_name>/.bin`, creating the directory first.
fn link_bins(
    project_dir: &std::path::Path,
    modules_dir_name: &str,
    aube_dir: &std::path::Path,
    graph: &aube_lockfile::LockfileGraph,
    virtual_store_dir_max_length: usize,
    placements: Option<&aube_linker::HoistedPlacements>,
    shim_opts: aube_linker::BinShimOptions,
) -> miette::Result<()> {
    let bin_dir = project_dir.join(modules_dir_name).join(".bin");
    std::fs::create_dir_all(&bin_dir).into_diagnostic()?;
    for direct in graph.root_deps() {
        link_bins_for_dep(
            aube_dir,
            &bin_dir,
            graph,
            &direct.dep_path,
            &direct.name,
            virtual_store_dir_max_length,
            placements,
            shim_opts,
        )?;
    }
    Ok(())
}
/// Links `.bin` shims for every package's own dependencies inside the
/// virtual store, so lifecycle scripts can find their tools.
///
/// No-op when hoisted placements are in effect: that layout is handled
/// elsewhere. Packages that are not materialized on disk, or that declare
/// no dependencies, are skipped, as is a package's self-edge.
pub(crate) fn link_dep_bins(
    aube_dir: &std::path::Path,
    graph: &aube_lockfile::LockfileGraph,
    virtual_store_dir_max_length: usize,
    placements: Option<&aube_linker::HoistedPlacements>,
    shim_opts: aube_linker::BinShimOptions,
) -> miette::Result<()> {
    if placements.is_some() {
        return Ok(());
    }
    for (dep_path, pkg) in &graph.packages {
        if pkg.dependencies.is_empty() {
            continue;
        }
        let pkg_dir = materialized_pkg_dir(
            aube_dir,
            dep_path,
            &pkg.name,
            virtual_store_dir_max_length,
            placements,
        );
        if !pkg_dir.exists() {
            continue;
        }
        // Sibling bins go next to the package, under its node_modules/.bin.
        let bin_dir = dep_modules_dir_for(&pkg_dir, &pkg.name).join(".bin");
        for (child_name, child_version) in &pkg.dependencies {
            let child_dep_path = format!("{child_name}@{child_version}");
            // Skip a dependency edge pointing back at the package itself.
            if child_name == &pkg.name && child_dep_path == *dep_path {
                continue;
            }
            link_bins_for_dep(
                aube_dir,
                &bin_dir,
                graph,
                &child_dep_path,
                child_name,
                virtual_store_dir_max_length,
                placements,
                shim_opts,
            )?;
        }
    }
    Ok(())
}
/// Links `.bin` shims for a package's bundled dependencies.
///
/// Bundled deps ship inside the package's own `node_modules`; each one with
/// a parseable manifest and a `bin` field gets shims in `bin_dir`. This is
/// best-effort: missing or malformed manifests are silently skipped.
fn link_bundled_bins(
    bin_dir: &std::path::Path,
    pkg_dir: &std::path::Path,
    graph: &aube_lockfile::LockfileGraph,
    dep_path: &str,
    shim_opts: aube_linker::BinShimOptions,
) -> miette::Result<()> {
    let bundled_names = match graph.get_package(dep_path) {
        Some(locked) => &locked.bundled_dependencies,
        None => return Ok(()),
    };
    for bundled in bundled_names {
        let bundled_dir = pkg_dir.join("node_modules").join(bundled);
        // Unreadable or unparseable package.json: skip this bundled dep.
        let manifest: serde_json::Value =
            match std::fs::read_to_string(bundled_dir.join("package.json"))
                .ok()
                .and_then(|text| serde_json::from_str(&text).ok())
            {
                Some(value) => value,
                None => continue,
            };
        match manifest.get("bin") {
            Some(serde_json::Value::String(rel)) => {
                // String form: the shim takes the package's unscoped name.
                let bin_name = bundled.rsplit('/').next().unwrap_or(bundled);
                create_bin_link(bin_dir, bin_name, &bundled_dir.join(rel), shim_opts)?;
            }
            Some(serde_json::Value::Object(entries)) => {
                for (bin_name, rel) in entries {
                    if let Some(rel) = rel.as_str() {
                        create_bin_link(bin_dir, bin_name, &bundled_dir.join(rel), shim_opts)?;
                    }
                }
            }
            // No `bin` field or an unexpected JSON type: nothing to link.
            _ => {}
        }
    }
    Ok(())
}
/// Ensures `bin_dir` exists, then writes a shim named `name` that invokes
/// `target` with the given shim options.
fn create_bin_link(
    bin_dir: &std::path::Path,
    name: &str,
    target: &std::path::Path,
    shim_opts: aube_linker::BinShimOptions,
) -> miette::Result<()> {
    std::fs::create_dir_all(bin_dir).into_diagnostic()?;
    aube_linker::create_bin_shim(bin_dir, name, target, shim_opts)
        .into_diagnostic()
        .map(|_| ())
}