use super::{make_client, packument_cache_dir, packument_full_cache_dir};
use crate::progress::InstallProgress;
use crate::state;
use aube_lockfile::DriftStatus;
use aube_lockfile::dep_path_filename::dep_path_to_filename;
use miette::{Context, IntoDiagnostic, miette};
use rayon::prelude::*;
use sha2::Digest;
use std::collections::BTreeMap;
/// How an install run treats the existing lockfile.
///
/// Selected from CLI flags / workspace YAML / environment by
/// [`FrozenMode::from_flags`], with two overrides applied in
/// `InstallArgs::into_options` (`--fix-lockfile` and bare `--force`).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum FrozenMode {
    /// `--frozen-lockfile` (also the default when the `CI` env var is set).
    Frozen,
    /// `--prefer-frozen-lockfile` (default outside CI).
    Prefer,
    /// `--no-frozen-lockfile`, or a bare `--force` with no frozen flag given.
    No,
    /// `--fix-lockfile`.
    Fix,
}
/// Raw frozen-lockfile flags as parsed at the top-level CLI, before they are
/// collapsed into a single [`FrozenMode`] by [`FrozenMode::from_flags`].
#[derive(Debug, Clone, Copy, Default)]
pub struct GlobalFrozenFlags {
    /// `--frozen-lockfile`
    pub frozen: bool,
    /// `--no-frozen-lockfile`
    pub no_frozen: bool,
    /// `--prefer-frozen-lockfile`
    pub prefer_frozen: bool,
}
impl FrozenMode {
    /// Collapse the explicit CLI flags and the optional workspace-YAML
    /// `preferFrozenLockfile` setting into one mode.
    ///
    /// Precedence (highest first): `--frozen-lockfile`, `--no-frozen-lockfile`,
    /// `--prefer-frozen-lockfile`, the YAML setting, then the environment
    /// default from [`Self::default_for_env`].
    pub fn from_flags(
        frozen: bool,
        no_frozen: bool,
        prefer_frozen: bool,
        yaml_prefer_frozen: Option<bool>,
    ) -> Self {
        if frozen {
            return Self::Frozen;
        }
        if no_frozen {
            return Self::No;
        }
        if prefer_frozen {
            return Self::Prefer;
        }
        match yaml_prefer_frozen {
            Some(true) => Self::Prefer,
            Some(false) => Self::No,
            None => Self::default_for_env(),
        }
    }

    /// Default when nothing was requested anywhere: strict `Frozen` when the
    /// `CI` environment variable is set, lenient `Prefer` otherwise.
    fn default_for_env() -> Self {
        match std::env::var_os("CI") {
            Some(_) => Self::Frozen,
            None => Self::Prefer,
        }
    }
}
// `aube install` flags (clap derive).
//
// Plain `//` comments are used on purpose throughout this struct: clap turns
// `///` doc comments on fields into `--help` text, so adding them would
// change the CLI's observable output.
//
// NOTE(review): several `conflicts_with` targets ("frozen_lockfile",
// "no_frozen_lockfile", "prefer_frozen_lockfile") are not fields of this
// struct — presumably they are global args defined on the parent command
// (see `GlobalFrozenFlags`); confirm the arg ids match.
#[derive(Debug, clap::Args)]
pub struct InstallArgs {
    // -D/--dev; mutually exclusive with --prod.
    #[arg(short = 'D', long, conflicts_with = "prod")]
    pub dev: bool,
    // -P/--prod (alias --production).
    #[arg(short = 'P', long, visible_alias = "production")]
    pub prod: bool,
    // Allow every dependency's build scripts to run (bypasses the allow-list).
    #[arg(long)]
    pub dangerously_allow_all_builds: bool,
    // Rewrite/repair the lockfile; incompatible with any frozen-lockfile flag.
    #[arg(long, conflicts_with_all = ["frozen_lockfile", "no_frozen_lockfile", "prefer_frozen_lockfile"])]
    pub fix_lockfile: bool,
    #[arg(long)]
    pub force: bool,
    #[arg(long)]
    pub ignore_pnpmfile: bool,
    #[arg(long)]
    pub ignore_scripts: bool,
    // Only update the lockfile; do not materialize node_modules.
    #[arg(long, conflicts_with = "frozen_lockfile")]
    pub lockfile_only: bool,
    #[arg(long)]
    pub merge_git_branch_lockfiles: bool,
    // Forwarded to settings as `network-concurrency`.
    #[arg(long, value_name = "N")]
    pub network_concurrency: Option<u64>,
    #[arg(long)]
    pub no_optional: bool,
    // --side-effects-cache / --no-side-effects-cache: last one wins.
    #[arg(long, overrides_with = "side_effects_cache")]
    pub no_side_effects_cache: bool,
    // --verify-store-integrity / --no-verify-store-integrity: last one wins.
    #[arg(long, overrides_with = "verify_store_integrity")]
    pub no_verify_store_integrity: bool,
    #[arg(long, value_name = "MODE")]
    pub node_linker: Option<String>,
    // --offline and --prefer-offline are mutually exclusive.
    #[arg(long, conflicts_with = "prefer_offline")]
    pub offline: bool,
    // Parsed later by `resolve_link_strategy` (auto/hardlink/copy/clone/clone-or-copy).
    #[arg(long, value_name = "METHOD")]
    pub package_import_method: Option<String>,
    #[arg(long, conflicts_with = "offline")]
    pub prefer_offline: bool,
    // Repeatable, comma-separable glob list.
    #[arg(long, value_name = "GLOB", value_delimiter = ',')]
    pub public_hoist_pattern: Vec<String>,
    // Parsed later by `parse_resolution_mode`.
    #[arg(long, value_name = "MODE")]
    pub resolution_mode: Option<String>,
    #[arg(long)]
    pub shamefully_hoist: bool,
    #[arg(long, overrides_with = "no_side_effects_cache")]
    pub side_effects_cache: bool,
    #[arg(long, overrides_with = "no_verify_store_integrity")]
    pub verify_store_integrity: bool,
    // Hidden -w shorthand; semantics handled by the caller.
    #[arg(short = 'w', hide = true)]
    pub workspace_root_short: bool,
}
impl InstallArgs {
    /// Collect the settings-relevant CLI flags into `(key, value)` string
    /// pairs so the settings layer can treat them like any other
    /// configuration source. Pair order is significant and preserved.
    pub fn to_cli_flag_bag(&self, global: GlobalFrozenFlags) -> Vec<(String, String)> {
        let mut bag: Vec<(String, String)> = Vec::new();
        {
            let mut push = |key: &str, value: String| bag.push((key.to_string(), value));
            if let Some(mode) = self.resolution_mode.as_deref() {
                push("resolution-mode", mode.to_string());
            }
            if let Some(linker) = self.node_linker.as_deref() {
                push("node-linker", linker.to_string());
            }
            if let Some(method) = self.package_import_method.as_deref() {
                push("package-import-method", method.to_string());
            }
            for pattern in &self.public_hoist_pattern {
                push("public-hoist-pattern", pattern.to_string());
            }
            if self.shamefully_hoist {
                push("shamefully-hoist", "true".to_string());
            }
            // --frozen-lockfile and --no-frozen-lockfile map onto the same key.
            if global.frozen {
                push("frozen-lockfile", "true".to_string());
            }
            if global.no_frozen {
                push("frozen-lockfile", "false".to_string());
            }
            if global.prefer_frozen {
                push("prefer-frozen-lockfile", "true".to_string());
            }
            if let Some(n) = self.network_concurrency {
                push("network-concurrency", n.to_string());
            }
            if self.verify_store_integrity {
                push("verify-store-integrity", "true".to_string());
            }
            if self.no_verify_store_integrity {
                push("verify-store-integrity", "false".to_string());
            }
            if self.side_effects_cache {
                push("side-effects-cache", "true".to_string());
            }
            if self.no_side_effects_cache {
                push("side-effects-cache", "false".to_string());
            }
        }
        bag
    }

    /// Consume the parsed flags and build the full [`InstallOptions`].
    ///
    /// `--fix-lockfile` wins outright; a bare `--force` (no explicit frozen
    /// flag at all) disables frozen mode; otherwise the normal precedence of
    /// [`FrozenMode::from_flags`] applies.
    pub fn into_options(
        self,
        global: GlobalFrozenFlags,
        yaml_prefer_frozen: Option<bool>,
        cli_flags: Vec<(String, String)>,
        env_snapshot: Vec<(String, String)>,
    ) -> InstallOptions {
        let force = self.force;
        let any_frozen_flag = global.frozen || global.no_frozen || global.prefer_frozen;
        let mode = if self.fix_lockfile {
            FrozenMode::Fix
        } else if force && !any_frozen_flag {
            FrozenMode::No
        } else {
            FrozenMode::from_flags(
                global.frozen,
                global.no_frozen,
                global.prefer_frozen,
                yaml_prefer_frozen,
            )
        };
        // --offline conflicts with --prefer-offline at the clap level, so the
        // (true, true) combination cannot occur in practice.
        let network_mode = match (self.offline, self.prefer_offline) {
            (true, _) => aube_registry::NetworkMode::Offline,
            (false, true) => aube_registry::NetworkMode::PreferOffline,
            (false, false) => aube_registry::NetworkMode::Online,
        };
        InstallOptions {
            project_dir: None,
            mode,
            prod: self.prod,
            dev: self.dev,
            no_optional: self.no_optional,
            ignore_pnpmfile: self.ignore_pnpmfile,
            ignore_scripts: self.ignore_scripts,
            lockfile_only: self.lockfile_only,
            merge_git_branch_lockfiles: self.merge_git_branch_lockfiles,
            dangerously_allow_all_builds: self.dangerously_allow_all_builds,
            network_mode,
            minimum_release_age_override: None,
            // --frozen-lockfile also means "fail when no lockfile exists".
            strict_no_lockfile: global.frozen,
            force,
            cli_flags,
            env_snapshot,
            git_prepare_depth: 0,
            workspace_filter: aube_workspace::selector::EffectiveFilter::default(),
        }
    }
}
/// Parse a user-supplied `--resolution-mode` value (case-insensitive,
/// surrounding whitespace ignored); returns `None` for unknown values.
///
/// `lowest-direct` is accepted as an alias for time-based resolution.
pub(crate) fn parse_resolution_mode(s: &str) -> Option<aube_resolver::ResolutionMode> {
    let normalized = s.trim().to_ascii_lowercase();
    match normalized.as_str() {
        "highest" => Some(aube_resolver::ResolutionMode::Highest),
        "time" | "time-based" | "lowest-direct" => {
            Some(aube_resolver::ResolutionMode::TimeBased)
        }
        _ => None,
    }
}
/// Fully resolved configuration for one install run, built either from CLI
/// flags ([`InstallArgs::into_options`]) or programmatically
/// ([`InstallOptions::with_mode`] / `From<FrozenMode>`).
#[derive(Debug, Clone)]
pub struct InstallOptions {
    /// Explicit project directory override (set for nested git-`prepare`
    /// installs); `None` lets the installer pick its own root.
    pub project_dir: Option<std::path::PathBuf>,
    /// How the existing lockfile is treated.
    pub mode: FrozenMode,
    pub prod: bool,
    pub dev: bool,
    pub no_optional: bool,
    pub ignore_pnpmfile: bool,
    pub ignore_scripts: bool,
    /// Update the lockfile only; skip materializing node_modules.
    pub lockfile_only: bool,
    pub merge_git_branch_lockfiles: bool,
    /// Run every dependency's build scripts, bypassing the allow-list.
    pub dangerously_allow_all_builds: bool,
    pub network_mode: aube_registry::NetworkMode,
    pub minimum_release_age_override: Option<u64>,
    /// Set when `--frozen-lockfile` was given: a missing lockfile is an error.
    pub strict_no_lockfile: bool,
    pub force: bool,
    /// `(key, value)` pairs from [`InstallArgs::to_cli_flag_bag`], fed to the
    /// settings layer as the highest-priority config source.
    pub cli_flags: Vec<(String, String)>,
    /// Snapshot of the process environment for the settings layer.
    pub env_snapshot: Vec<(String, String)>,
    /// Current git-`prepare` nesting level (bounded by `GIT_PREPARE_MAX_DEPTH`).
    pub git_prepare_depth: u32,
    pub workspace_filter: aube_workspace::selector::EffectiveFilter,
}
impl InstallOptions {
    /// Baseline options for the given frozen-lockfile mode: every feature
    /// flag off, online network access, no project-dir override, and a
    /// freshly captured environment snapshot.
    pub fn with_mode(mode: FrozenMode) -> Self {
        Self {
            mode,
            project_dir: None,
            prod: false,
            dev: false,
            no_optional: false,
            ignore_pnpmfile: false,
            ignore_scripts: false,
            lockfile_only: false,
            merge_git_branch_lockfiles: false,
            dangerously_allow_all_builds: false,
            network_mode: aube_registry::NetworkMode::Online,
            minimum_release_age_override: None,
            strict_no_lockfile: false,
            force: false,
            cli_flags: Vec::new(),
            // Snapshot the current process environment for the settings layer.
            env_snapshot: aube_settings::values::capture_env(),
            git_prepare_depth: 0,
            workspace_filter: aube_workspace::selector::EffectiveFilter::default(),
        }
    }
}
impl From<FrozenMode> for InstallOptions {
fn from(mode: FrozenMode) -> Self {
Self::with_mode(mode)
}
}
/// Run the given lifecycle hook's script from the root manifest, if the
/// manifest declares one; a manifest without the script is a silent no-op.
async fn run_root_lifecycle(
    project_dir: &std::path::Path,
    modules_dir_name: &str,
    manifest: &aube_manifest::PackageJson,
    hook: aube_scripts::LifecycleHook,
) -> miette::Result<()> {
    if manifest.scripts.contains_key(hook.script_name()) {
        tracing::debug!("Running {} script...", hook.script_name());
        aube_scripts::run_root_hook(project_dir, modules_dir_name, manifest, hook)
            .await
            .map_err(|e| miette!("{}", e))?;
    }
    Ok(())
}
/// Merge build-policy configuration from the root manifest and the workspace
/// config into one [`aube_scripts::BuildPolicy`] (plus any config errors).
///
/// Workspace-level `allowBuilds` entries are inserted last, so they override
/// manifest-level entries for the same package.
pub(crate) fn build_policy_from_sources(
    manifest: &aube_manifest::PackageJson,
    workspace: &aube_manifest::WorkspaceConfig,
    dangerously_allow_all_builds: bool,
) -> (
    aube_scripts::BuildPolicy,
    Vec<aube_scripts::BuildPolicyError>,
) {
    let mut allow_builds = manifest.pnpm_allow_builds();
    for (pkg, allowed) in workspace.allow_builds_raw() {
        allow_builds.insert(pkg, allowed);
    }
    // The list-valued settings are concatenated, manifest entries first.
    let mut only_built = manifest.pnpm_only_built_dependencies();
    only_built.extend(workspace.only_built_dependencies.iter().cloned());
    let mut never_built = manifest.pnpm_never_built_dependencies();
    never_built.extend(workspace.never_built_dependencies.iter().cloned());
    aube_scripts::BuildPolicy::from_config(
        &allow_builds,
        &only_built,
        &never_built,
        dangerously_allow_all_builds,
    )
}
/// Resolve the package-linking strategy, preferring an explicit
/// `--package-import-method` CLI value over the settings layer.
///
/// `auto` (or an empty CLI value) probes the filesystem via
/// `Linker::detect_strategy`; `clone` and `clone-or-copy` both map to
/// `Reflink`, but bare `clone` additionally warns that strict enforcement is
/// not implemented. Unknown CLI values are an error; settings values are
/// already a closed enum.
pub(crate) fn resolve_link_strategy(
    cwd: &std::path::Path,
    ctx: &aube_settings::ResolveCtx<'_>,
) -> miette::Result<aube_linker::LinkStrategy> {
    // Shared by the CLI and settings paths so the warning text cannot drift.
    fn clone_with_warning() -> aube_linker::LinkStrategy {
        tracing::warn!(
            "package-import-method=clone: reflink will silently fall back to copy \
             if the filesystem does not support it (strict enforcement is a known TODO)"
        );
        aube_linker::LinkStrategy::Reflink
    }
    let package_import_method_cli =
        aube_settings::values::string_from_cli("packageImportMethod", ctx.cli);
    let strategy = if let Some(cli) = package_import_method_cli.as_deref() {
        match cli.trim().to_ascii_lowercase().as_str() {
            "" | "auto" => aube_linker::Linker::detect_strategy(cwd),
            "hardlink" => aube_linker::LinkStrategy::Hardlink,
            "copy" => aube_linker::LinkStrategy::Copy,
            "clone-or-copy" => aube_linker::LinkStrategy::Reflink,
            "clone" => clone_with_warning(),
            other => {
                return Err(miette!(
                    "unknown --package-import-method value `{other}`; expected `auto`, `hardlink`, `copy`, `clone`, or `clone-or-copy`"
                ));
            }
        }
    } else {
        use aube_settings::resolved::PackageImportMethod as Pim;
        match aube_settings::resolved::package_import_method(ctx) {
            Pim::Auto => aube_linker::Linker::detect_strategy(cwd),
            Pim::Hardlink => aube_linker::LinkStrategy::Hardlink,
            Pim::Copy => aube_linker::LinkStrategy::Copy,
            Pim::CloneOrCopy => aube_linker::LinkStrategy::Reflink,
            Pim::Clone => clone_with_warning(),
        }
    };
    Ok(strategy)
}
/// Run dependency lifecycle scripts for every package in `graph` that the
/// build policy explicitly allows and that is materialized on disk.
///
/// Phase 1 scans the graph sequentially and collects `BuildJob`s; phase 2
/// runs them on a `JoinSet`, throttled to `child_concurrency` concurrent
/// children via a semaphore. When a side-effects cache is configured,
/// matching cached output is restored instead of running scripts, and fresh
/// output is saved back afterwards (save failures are logged and ignored).
///
/// Returns the total number of hook scripts that actually ran.
#[allow(clippy::too_many_arguments)]
pub(crate) async fn run_dep_lifecycle_scripts(
    project_dir: &std::path::Path,
    modules_dir_name: &str,
    aube_dir: &std::path::Path,
    graph: &aube_lockfile::LockfileGraph,
    policy: &aube_scripts::BuildPolicy,
    virtual_store_dir_max_length: usize,
    child_concurrency: usize,
    placements: Option<&aube_linker::HoistedPlacements>,
    side_effects_cache: SideEffectsCacheConfig<'_>,
) -> miette::Result<usize> {
    // One package whose scripts are cleared to run.
    #[derive(Clone)]
    struct BuildJob {
        name: String,
        version: String,
        package_dir: std::path::PathBuf,
        manifest: aube_manifest::PackageJson,
        // Present only when a side-effects cache root is configured.
        cache_entry: Option<SideEffectsCacheEntry>,
    }
    let mut jobs: Vec<BuildJob> = Vec::new();
    for (dep_path, pkg) in &graph.packages {
        // Only explicitly allowed packages build; Deny and Unspecified are
        // both skipped here (unreviewed packages are surfaced elsewhere).
        match policy.decide(&pkg.name, &pkg.version) {
            aube_scripts::AllowDecision::Allow => {}
            aube_scripts::AllowDecision::Deny | aube_scripts::AllowDecision::Unspecified => {
                continue;
            }
        }
        let package_dir = materialized_pkg_dir(
            aube_dir,
            dep_path,
            &pkg.name,
            virtual_store_dir_max_length,
            placements,
        );
        if !package_dir.exists() {
            tracing::debug!(
                "allowBuilds: skipping {} — {} not on disk",
                pkg.name,
                package_dir.display()
            );
            continue;
        }
        let pkg_json_path = package_dir.join("package.json");
        let pkg_json_content = match std::fs::read_to_string(&pkg_json_path) {
            Ok(s) => s,
            // A missing package.json is treated like a missing package.
            Err(e) if e.kind() == std::io::ErrorKind::NotFound => continue,
            Err(e) => {
                return Err(miette!(
                    "failed to read package.json for {} at {}: {}",
                    pkg.name,
                    pkg_json_path.display(),
                    e
                ));
            }
        };
        let dep_manifest: aube_manifest::PackageJson = serde_json::from_str(&pkg_json_content)
            .into_diagnostic()
            .wrap_err_with(|| format!("failed to parse package.json for {}", pkg.name))?;
        // Nothing to run for this package.
        if !aube_scripts::has_dep_lifecycle_work(&package_dir, &dep_manifest) {
            continue;
        }
        // Building the cache entry may hash the whole package dir, so only
        // do it when a cache root is actually configured.
        let cache_entry = side_effects_cache
            .root()
            .map(|root| SideEffectsCacheEntry::new(root, &pkg.name, &pkg.version, &package_dir))
            .transpose()?;
        jobs.push(BuildJob {
            name: pkg.name.clone(),
            version: pkg.version.clone(),
            package_dir,
            manifest: dep_manifest,
            cache_entry,
        });
    }
    if jobs.is_empty() {
        return Ok(0);
    }
    // At least one permit even if the caller passed 0.
    let concurrency = child_concurrency.max(1);
    let semaphore = std::sync::Arc::new(tokio::sync::Semaphore::new(concurrency));
    // Owned copies so each spawned task can capture them with 'static lifetime.
    let project_dir = project_dir.to_path_buf();
    let modules_dir_name = modules_dir_name.to_string();
    let should_restore_side_effects_cache = side_effects_cache.should_restore();
    let should_save_side_effects_cache = side_effects_cache.should_save();
    let overwrite_side_effects_cache = side_effects_cache.overwrite_existing();
    let mut set: tokio::task::JoinSet<miette::Result<usize>> = tokio::task::JoinSet::new();
    for job in jobs {
        let sem = semaphore.clone();
        let project_dir = project_dir.clone();
        let modules_dir_name = modules_dir_name.clone();
        set.spawn(async move {
            // The semaphore is never closed, so acquire() cannot fail here.
            let _permit = sem.acquire().await.unwrap();
            if should_restore_side_effects_cache && let Some(cache_entry) = job.cache_entry.clone()
            {
                // Restore does blocking filesystem work; keep it off the
                // async worker threads.
                let package_dir = job.package_dir.clone();
                let restore_result = tokio::task::spawn_blocking(move || {
                    cache_entry.restore_if_available(&package_dir)
                })
                .await
                .map_err(|e| {
                    miette!(
                        "side-effects-cache restore task panicked for {}@{}: {e}",
                        job.name,
                        job.version
                    )
                })?;
                match restore_result? {
                    // Cache hit: skip running scripts for this package.
                    SideEffectsCacheRestore::Restored | SideEffectsCacheRestore::AlreadyApplied => {
                        return Ok(0);
                    }
                    SideEffectsCacheRestore::Miss => {}
                }
            }
            let mut ran_here = 0usize;
            // Hooks run sequentially, in their defined order, per package.
            for hook in aube_scripts::DEP_LIFECYCLE_HOOKS {
                let did_run = aube_scripts::run_dep_hook(
                    &job.package_dir,
                    &project_dir,
                    &modules_dir_name,
                    &job.manifest,
                    hook,
                )
                .await
                .map_err(|e| {
                    miette!(
                        "lifecycle script {} failed for {}@{}: {}",
                        hook.script_name(),
                        job.name,
                        job.version,
                        e
                    )
                })?;
                if did_run {
                    tracing::debug!(
                        "ran {} for {}@{}",
                        hook.script_name(),
                        job.name,
                        job.version
                    );
                    ran_here += 1;
                }
            }
            // Only save when something actually ran; otherwise the tree is
            // unchanged and there is nothing worth caching.
            if should_save_side_effects_cache
                && ran_here > 0
                && let Some(cache_entry) = job.cache_entry.clone()
            {
                let package_dir = job.package_dir.clone();
                let save_result = tokio::task::spawn_blocking(move || {
                    cache_entry.save(&package_dir, overwrite_side_effects_cache)
                })
                .await
                .map_err(|e| {
                    miette!(
                        "side-effects-cache save task panicked for {}@{}: {e}",
                        job.name,
                        job.version
                    )
                })
                .and_then(|r| r);
                // Cache writes are best-effort: a failed save never fails
                // the install.
                if let Err(e) = save_result {
                    tracing::debug!(
                        "side-effects-cache: ignoring cache save error for {}@{}: {e}",
                        job.name,
                        job.version
                    );
                }
            }
            Ok(ran_here)
        });
    }
    let mut ran = 0usize;
    while let Some(res) = set.join_next().await {
        // First `?` surfaces join errors (panics); second the task's own error.
        ran += res.into_diagnostic()??;
    }
    Ok(ran)
}
/// Verify that every `requiredScripts` entry exists in the root manifest and
/// in each workspace package; all offenders are collected before failing.
fn validate_required_scripts(
    project_dir: &std::path::Path,
    manifest: &aube_manifest::PackageJson,
    required: &[String],
) -> miette::Result<()> {
    if required.is_empty() {
        return Ok(());
    }
    let mut missing: Vec<(String, String)> = Vec::new();
    // The root manifest is reported under the label ".".
    collect_missing_required_scripts(".", manifest, required, &mut missing);
    let package_dirs = aube_workspace::find_workspace_packages(project_dir)
        .map_err(|e| miette!("failed to discover workspace packages: {e}"))?;
    for pkg_dir in package_dirs {
        let manifest_path = pkg_dir.join("package.json");
        let pkg_manifest = aube_manifest::PackageJson::from_path(&manifest_path)
            .into_diagnostic()
            .wrap_err_with(|| format!("failed to read {}", manifest_path.display()))?;
        // Prefer the package name; fall back to its path relative to the root.
        let label = match pkg_manifest.name.as_deref() {
            Some(name) => name.to_string(),
            None => pkg_dir
                .strip_prefix(project_dir)
                .unwrap_or(&pkg_dir)
                .display()
                .to_string(),
        };
        collect_missing_required_scripts(&label, &pkg_manifest, required, &mut missing);
    }
    if missing.is_empty() {
        return Ok(());
    }
    let details = missing
        .into_iter()
        .map(|(pkg, script)| format!(" - {pkg} is missing `{script}`"))
        .collect::<Vec<_>>()
        .join("\n");
    Err(miette!("requiredScripts check failed:\n{}", details))
}
/// Append a `(label, script)` pair to `missing` for each required script the
/// manifest does not declare.
fn collect_missing_required_scripts(
    label: &str,
    manifest: &aube_manifest::PackageJson,
    required: &[String],
    missing: &mut Vec<(String, String)>,
) {
    for script in required {
        if manifest.scripts.contains_key(script) {
            continue;
        }
        missing.push((label.to_string(), script.clone()));
    }
}
/// Find packages that have lifecycle work to do but were never reviewed in
/// the build policy (neither allowed nor denied). Returns a sorted, deduped
/// list of `name@version` strings for reporting.
fn unreviewed_dep_builds(
    aube_dir: &std::path::Path,
    graph: &aube_lockfile::LockfileGraph,
    policy: &aube_scripts::BuildPolicy,
    virtual_store_dir_max_length: usize,
    placements: Option<&aube_linker::HoistedPlacements>,
) -> miette::Result<Vec<String>> {
    let mut found = Vec::new();
    for (dep_path, pkg) in &graph.packages {
        // Explicit Allow/Deny decisions are not "unreviewed".
        let decision = policy.decide(&pkg.name, &pkg.version);
        if !matches!(decision, aube_scripts::AllowDecision::Unspecified) {
            continue;
        }
        let pkg_dir = materialized_pkg_dir(
            aube_dir,
            dep_path,
            &pkg.name,
            virtual_store_dir_max_length,
            placements,
        );
        if !pkg_dir.exists() {
            continue;
        }
        let manifest_path = pkg_dir.join("package.json");
        let contents = match std::fs::read_to_string(&manifest_path) {
            Ok(contents) => contents,
            // A vanished package.json is treated like a missing package.
            Err(e) if e.kind() == std::io::ErrorKind::NotFound => continue,
            Err(e) => {
                return Err(miette!(
                    "failed to read package.json for {} at {}: {}",
                    pkg.name,
                    manifest_path.display(),
                    e
                ));
            }
        };
        let dep_manifest: aube_manifest::PackageJson = serde_json::from_str(&contents)
            .into_diagnostic()
            .wrap_err_with(|| format!("failed to parse package.json for {}", pkg.name))?;
        if aube_scripts::has_dep_lifecycle_work(&pkg_dir, &dep_manifest) {
            found.push(format!("{}@{}", pkg.name, pkg.version));
        }
    }
    found.sort();
    found.dedup();
    Ok(found)
}
/// Marker file written into a package dir recording the input hash the
/// side-effects cache entry was keyed on.
const SIDE_EFFECTS_CACHE_MARKER: &str = ".aube-side-effects-cache";
/// Prefix for in-progress cache publish dirs (atomically renamed into place).
const SIDE_EFFECTS_CACHE_TMP_PREFIX: &str = ".tmp-side-effects-";
/// Temp dirs older than this are assumed abandoned by a crashed run and swept.
const SIDE_EFFECTS_CACHE_TMP_STALE_AFTER: std::time::Duration =
    std::time::Duration::from_secs(60 * 60);
/// Whether — and at which root — the side-effects cache participates in an
/// install run.
#[derive(Debug, Clone, Copy)]
pub(crate) enum SideEffectsCacheConfig<'a> {
    /// Cache fully disabled.
    Disabled,
    /// Read existing entries, never write new ones.
    RestoreOnly(&'a std::path::Path),
    /// Normal operation: read existing entries and write missing ones.
    RestoreAndSave(&'a std::path::Path),
    /// Rebuild mode: never read; always (re)write entries.
    SaveOnlyOverwrite(&'a std::path::Path),
}
impl<'a> SideEffectsCacheConfig<'a> {
    /// Cache root directory, or `None` when the cache is disabled.
    fn root(self) -> Option<&'a std::path::Path> {
        match self {
            Self::Disabled => None,
            Self::RestoreOnly(root)
            | Self::RestoreAndSave(root)
            | Self::SaveOnlyOverwrite(root) => Some(root),
        }
    }
    fn should_restore(self) -> bool {
        match self {
            Self::RestoreOnly(_) | Self::RestoreAndSave(_) => true,
            Self::Disabled | Self::SaveOnlyOverwrite(_) => false,
        }
    }
    fn overwrite_existing(self) -> bool {
        match self {
            Self::SaveOnlyOverwrite(_) => true,
            Self::Disabled | Self::RestoreOnly(_) | Self::RestoreAndSave(_) => false,
        }
    }
    fn should_save(self) -> bool {
        match self {
            Self::RestoreAndSave(_) | Self::SaveOnlyOverwrite(_) => true,
            Self::Disabled | Self::RestoreOnly(_) => false,
        }
    }
}
/// One package's slot in the side-effects cache, keyed by a hash of the
/// package's pre-build contents.
#[derive(Debug, Clone)]
struct SideEffectsCacheEntry {
    // Hex SHA-512 over the package dir contents (marker file excluded).
    input_hash: String,
    // Final location: <root>/<name>@<version>/<input_hash>
    path: std::path::PathBuf,
}
/// Outcome of trying to restore a package from the side-effects cache.
enum SideEffectsCacheRestore {
    /// No cache entry exists for this input hash.
    Miss,
    /// Cached build output was copied into the package dir.
    Restored,
    /// The package dir already carries this entry's marker; nothing to do.
    AlreadyApplied,
}
impl SideEffectsCacheEntry {
    /// Build the cache slot for a package directory.
    ///
    /// The key hash is taken from an existing valid marker file when present
    /// (avoiding a re-hash of the whole tree); otherwise the directory
    /// contents are hashed. `/` in scoped package names is replaced with
    /// `__` so the name is a single path component.
    fn new(
        root: &std::path::Path,
        name: &str,
        version: &str,
        package_dir: &std::path::Path,
    ) -> miette::Result<Self> {
        let input_hash = match read_valid_side_effects_marker(package_dir) {
            Some(hash) => hash,
            None => hash_dir_for_side_effects_cache(package_dir)?,
        };
        let safe_name = name.replace('/', "__");
        Ok(Self {
            // Layout: <root>/<name>@<version>/<input_hash>
            path: root
                .join(format!("{safe_name}@{version}"))
                .join(&input_hash),
            input_hash,
        })
    }
    /// Copy cached build output into `package_dir` if a matching entry exists.
    ///
    /// Short-circuits with `AlreadyApplied` when the package dir already
    /// carries this entry's marker (the cached tree includes the marker file
    /// — see `save` — so a restored dir passes this check on the next run).
    fn restore_if_available(
        &self,
        package_dir: &std::path::Path,
    ) -> miette::Result<SideEffectsCacheRestore> {
        if marker_matches(package_dir, &self.input_hash) && self.path.is_dir() {
            tracing::debug!(
                "side-effects-cache: already applied {}",
                self.path.display()
            );
            return Ok(SideEffectsCacheRestore::AlreadyApplied);
        }
        if !self.path.is_dir() {
            return Ok(SideEffectsCacheRestore::Miss);
        }
        // Hardlinks where possible; falls back to copying per file.
        copy_dir(&self.path, package_dir, CopyMode::HardlinkOrCopy).wrap_err_with(|| {
            format!(
                "failed to restore side effects cache from {}",
                self.path.display()
            )
        })?;
        tracing::debug!("side-effects-cache: restored {}", self.path.display());
        Ok(SideEffectsCacheRestore::Restored)
    }
    /// Snapshot `package_dir` (after its build ran) into the cache.
    ///
    /// Publication is atomic: the tree is copied into a process-unique temp
    /// dir next to the final location, then renamed into place. Losing the
    /// rename race to a concurrent writer is treated as success.
    fn save(&self, package_dir: &std::path::Path, overwrite_existing: bool) -> miette::Result<()> {
        if self.path.is_dir() {
            if overwrite_existing {
                std::fs::remove_dir_all(&self.path)
                    .into_diagnostic()
                    .wrap_err_with(|| format!("failed to remove {}", self.path.display()))?;
            } else {
                // Entry already published: just stamp the package dir so
                // future installs can short-circuit.
                write_side_effects_marker(package_dir, &self.input_hash)?;
                return Ok(());
            }
        }
        let parent = self.path.parent().ok_or_else(|| {
            miette!(
                "invalid side effects cache path has no parent: {}",
                self.path.display()
            )
        })?;
        std::fs::create_dir_all(parent)
            .into_diagnostic()
            .wrap_err_with(|| format!("failed to create {}", parent.display()))?;
        // Opportunistically clean temp dirs abandoned by crashed runs.
        sweep_stale_side_effects_tmp_dirs(parent);
        // Write the marker BEFORE copying so it is captured in the snapshot.
        write_side_effects_marker(package_dir, &self.input_hash)?;
        // PID + nanosecond timestamp make the temp name process-unique.
        let tmp = parent.join(format!(
            "{SIDE_EFFECTS_CACHE_TMP_PREFIX}{}-{}",
            std::process::id(),
            std::time::SystemTime::now()
                .duration_since(std::time::UNIX_EPOCH)
                .map(|d| d.as_nanos())
                .unwrap_or(0)
        ));
        if tmp.exists() {
            std::fs::remove_dir_all(&tmp)
                .into_diagnostic()
                .wrap_err_with(|| format!("failed to remove {}", tmp.display()))?;
        }
        copy_dir(package_dir, &tmp, CopyMode::Copy).wrap_err_with(|| {
            format!(
                "failed to write side effects cache into {}",
                self.path.display()
            )
        })?;
        match std::fs::rename(&tmp, &self.path) {
            Ok(()) => {
                tracing::debug!("side-effects-cache: saved {}", self.path.display());
                Ok(())
            }
            // Another process published the same entry first — that's fine.
            Err(e) if self.path.is_dir() => {
                tracing::debug!(
                    "side-effects-cache: cache appeared while saving {}: {e}",
                    self.path.display()
                );
                let _ = std::fs::remove_dir_all(&tmp);
                Ok(())
            }
            Err(e) => {
                let _ = std::fs::remove_dir_all(&tmp);
                Err(e)
                    .into_diagnostic()
                    .wrap_err_with(|| format!("failed to publish {}", self.path.display()))
            }
        }
    }
}
/// Best-effort removal of stale side-effects temp dirs under `parent`; all
/// errors (unreadable dir, failed removals) are silently ignored.
fn sweep_stale_side_effects_tmp_dirs(parent: &std::path::Path) {
    if let Ok(entries) = std::fs::read_dir(parent) {
        for entry in entries.flatten() {
            if should_remove_side_effects_tmp_dir(&entry) {
                let _ = std::fs::remove_dir_all(entry.path());
            }
        }
    }
}
/// A directory entry is sweepable when its name carries the temp prefix and
/// its mtime is at least the staleness threshold in the past.
fn should_remove_side_effects_tmp_dir(entry: &std::fs::DirEntry) -> bool {
    let name = entry.file_name();
    if !name.to_string_lossy().starts_with(SIDE_EFFECTS_CACHE_TMP_PREFIX) {
        return false;
    }
    match entry.metadata().and_then(|m| m.modified()) {
        Ok(modified) => match modified.elapsed() {
            Ok(age) => age >= SIDE_EFFECTS_CACHE_TMP_STALE_AFTER,
            // mtime in the future (clock skew): not considered stale.
            Err(_) => false,
        },
        Err(_) => false,
    }
}
/// Location of the side-effects cache: a `side-effects-v1` directory next to
/// the store's virtual store dir (or under the store root as a fallback).
pub(crate) fn side_effects_cache_root(store: &aube_store::Store) -> std::path::PathBuf {
    let base = store
        .virtual_store_dir()
        .parent()
        .unwrap_or_else(|| store.root());
    base.join("side-effects-v1")
}
/// True when the package dir carries a valid marker equal to `input_hash`.
fn marker_matches(package_dir: &std::path::Path, input_hash: &str) -> bool {
    match read_valid_side_effects_marker(package_dir) {
        Some(marker) => marker == input_hash,
        None => false,
    }
}
/// Read the marker file from a package dir and return its (lowercased) hash,
/// or `None` when the file is missing/unreadable or not a well-formed hash.
fn read_valid_side_effects_marker(package_dir: &std::path::Path) -> Option<String> {
    let raw = std::fs::read_to_string(package_dir.join(SIDE_EFFECTS_CACHE_MARKER)).ok()?;
    let trimmed = raw.trim();
    if is_side_effects_cache_hash(trimmed) {
        Some(trimmed.to_ascii_lowercase())
    } else {
        None
    }
}
/// A valid marker value is exactly the hex form of a SHA-512 digest:
/// 128 ASCII hex digits, nothing else.
fn is_side_effects_cache_hash(value: &str) -> bool {
    if value.len() != 128 {
        return false;
    }
    value.chars().all(|c| c.is_ascii_hexdigit())
}
/// Write (or overwrite) the marker file in `package_dir` with `input_hash`.
fn write_side_effects_marker(
    package_dir: &std::path::Path,
    input_hash: &str,
) -> miette::Result<()> {
    let marker_path = package_dir.join(SIDE_EFFECTS_CACHE_MARKER);
    std::fs::write(&marker_path, input_hash)
        .into_diagnostic()
        .wrap_err_with(|| {
            format!(
                "failed to write side effects cache marker in {}",
                package_dir.display()
            )
        })
}
/// Hex-encoded SHA-512 over the package directory's contents (layout defined
/// by `hash_dir_inner`); this is the side-effects cache key.
fn hash_dir_for_side_effects_cache(package_dir: &std::path::Path) -> miette::Result<String> {
    let mut hasher = sha2::Sha512::new();
    hash_dir_inner(package_dir, package_dir, &mut hasher)?;
    let digest = hasher.finalize();
    Ok(hex::encode(digest))
}
/// Recursively fold the tree at `current` into `hasher`.
///
/// Entries are visited in sorted path order so the digest is deterministic.
/// Each entry contributes its `/`-separated path relative to `base`, a
/// NUL-delimited type tag (`\0symlink\0`, `\0dir\0`, `\0file\0`), then the
/// link target (symlinks) or, for files, the Unix permission bits followed
/// by the full contents. Entries named like the cache marker are skipped at
/// any depth, and types other than file/dir/symlink are silently ignored.
fn hash_dir_inner(
    base: &std::path::Path,
    current: &std::path::Path,
    hasher: &mut sha2::Sha512,
) -> miette::Result<()> {
    let mut entries: Vec<_> = std::fs::read_dir(current)
        .into_diagnostic()
        .wrap_err_with(|| format!("failed to read {}", current.display()))?
        .collect::<Result<Vec<_>, _>>()
        .into_diagnostic()
        .wrap_err_with(|| format!("failed to read {}", current.display()))?;
    // Stable order => stable digest regardless of directory iteration order.
    entries.sort_by_key(|e| e.path());
    for entry in entries {
        let path = entry.path();
        // The marker itself must not influence the hash it records.
        if path.file_name().and_then(|n| n.to_str()) == Some(SIDE_EFFECTS_CACHE_MARKER) {
            continue;
        }
        // Normalize separators so Windows and Unix hash identically.
        let rel = path
            .strip_prefix(base)
            .into_diagnostic()
            .wrap_err_with(|| format!("failed to relativize {}", path.display()))?
            .to_string_lossy()
            .replace('\\', "/");
        // symlink_metadata: classify the link itself, not its target.
        let meta = std::fs::symlink_metadata(&path)
            .into_diagnostic()
            .wrap_err_with(|| format!("failed to stat {}", path.display()))?;
        hasher.update(rel.as_bytes());
        if meta.file_type().is_symlink() {
            hasher.update(b"\0symlink\0");
            let target = std::fs::read_link(&path)
                .into_diagnostic()
                .wrap_err_with(|| format!("failed to read symlink {}", path.display()))?;
            hasher.update(target.to_string_lossy().as_bytes());
        } else if meta.is_dir() {
            hasher.update(b"\0dir\0");
            hash_dir_inner(base, &path, hasher)?;
        } else if meta.is_file() {
            hasher.update(b"\0file\0");
            #[cfg(unix)]
            {
                use std::os::unix::fs::PermissionsExt;
                // Permission bits (incl. setuid/setgid/sticky) change the key,
                // so chmod-ing a script invalidates the cached build output.
                hasher.update((meta.permissions().mode() & 0o7777).to_le_bytes());
            }
            let bytes = std::fs::read(&path)
                .into_diagnostic()
                .wrap_err_with(|| format!("failed to read {}", path.display()))?;
            hasher.update(bytes);
        }
    }
    Ok(())
}
/// How `copy_dir` materializes regular files at the destination.
#[derive(Clone, Copy)]
enum CopyMode {
    /// Always byte-copy files.
    Copy,
    /// Try a hardlink first; fall back to a byte-copy when linking fails.
    HardlinkOrCopy,
}
/// Recursively copy `src` into `dst`, replacing whatever is already there.
/// Symlinks are recreated as symlinks; regular files follow `mode`.
fn copy_dir(src: &std::path::Path, dst: &std::path::Path, mode: CopyMode) -> miette::Result<()> {
    // symlink_metadata (not exists/metadata) so a dangling symlink at dst is
    // also detected and removed.
    if dst.symlink_metadata().is_ok() {
        remove_path(dst)?;
    }
    std::fs::create_dir_all(dst)
        .into_diagnostic()
        .wrap_err_with(|| format!("failed to create {}", dst.display()))?;
    copy_dir_inner(src, src, dst, mode)
}
/// Walk `current` (a directory under `base`) and materialize each entry at
/// the mirrored relative path under `dst_root`.
///
/// Symlinks are recreated as symlinks, directories are created, and regular
/// files are copied or hardlinked per `mode`; other entry types (FIFOs,
/// sockets, …) are silently skipped. Entries are processed in sorted order.
fn copy_dir_inner(
    base: &std::path::Path,
    current: &std::path::Path,
    dst_root: &std::path::Path,
    mode: CopyMode,
) -> miette::Result<()> {
    let mut entries: Vec<_> = std::fs::read_dir(current)
        .into_diagnostic()
        .wrap_err_with(|| format!("failed to read {}", current.display()))?
        .collect::<Result<Vec<_>, _>>()
        .into_diagnostic()
        .wrap_err_with(|| format!("failed to read {}", current.display()))?;
    entries.sort_by_key(|e| e.path());
    for entry in entries {
        let path = entry.path();
        let rel = path
            .strip_prefix(base)
            .into_diagnostic()
            .wrap_err_with(|| format!("failed to relativize {}", path.display()))?;
        let dst = dst_root.join(rel);
        // symlink_metadata: classify the link itself, not its target.
        let meta = std::fs::symlink_metadata(&path)
            .into_diagnostic()
            .wrap_err_with(|| format!("failed to stat {}", path.display()))?;
        if meta.file_type().is_symlink() {
            if let Some(parent) = dst.parent() {
                std::fs::create_dir_all(parent)
                    .into_diagnostic()
                    .wrap_err_with(|| format!("failed to create {}", parent.display()))?;
            }
            create_symlink_like(&path, &dst, meta.file_type())?;
        } else if meta.is_dir() {
            std::fs::create_dir_all(&dst)
                .into_diagnostic()
                .wrap_err_with(|| format!("failed to create {}", dst.display()))?;
            copy_dir_inner(base, &path, dst_root, mode)?;
        } else if meta.is_file() {
            if let Some(parent) = dst.parent() {
                std::fs::create_dir_all(parent)
                    .into_diagnostic()
                    .wrap_err_with(|| format!("failed to create {}", parent.display()))?;
            }
            match mode {
                CopyMode::Copy => {
                    std::fs::copy(&path, &dst)
                        .into_diagnostic()
                        .wrap_err_with(|| format!("failed to copy {}", dst.display()))?;
                }
                CopyMode::HardlinkOrCopy => {
                    // Hardlink failures (cross-device, FS without link
                    // support, …) degrade to a plain copy.
                    if let Err(e) = std::fs::hard_link(&path, &dst) {
                        tracing::debug!(
                            "side-effects-cache: hardlink failed for {} -> {}: {e}; copying",
                            path.display(),
                            dst.display()
                        );
                        std::fs::copy(&path, &dst)
                            .into_diagnostic()
                            .wrap_err_with(|| format!("failed to copy {}", dst.display()))?;
                    }
                }
            }
        }
    }
    Ok(())
}
/// Remove a filesystem entry of any kind: real directories are removed
/// recursively; files and symlinks (including symlinks to directories) go
/// through `remove_file`.
fn remove_path(path: &std::path::Path) -> miette::Result<()> {
    let meta = std::fs::symlink_metadata(path)
        .into_diagnostic()
        .wrap_err_with(|| format!("failed to stat {}", path.display()))?;
    let is_real_dir = meta.is_dir() && !meta.file_type().is_symlink();
    let removed = if is_real_dir {
        std::fs::remove_dir_all(path)
    } else {
        std::fs::remove_file(path)
    };
    removed
        .into_diagnostic()
        .wrap_err_with(|| format!("failed to remove {}", path.display()))
}
/// Recreate the symlink at `src` as a new symlink at `dst` (Unix).
///
/// A single `symlink()` covers both file and directory links on Unix, so the
/// file-type hint is unused here (it matters on Windows).
#[cfg(unix)]
fn create_symlink_like(
    src: &std::path::Path,
    dst: &std::path::Path,
    _file_type: std::fs::FileType,
) -> miette::Result<()> {
    let target = std::fs::read_link(src)
        .into_diagnostic()
        .wrap_err_with(|| format!("failed to read symlink {}", src.display()))?;
    std::os::unix::fs::symlink(&target, dst)
        .into_diagnostic()
        .wrap_err_with(|| format!("failed to symlink {}", dst.display()))
}
/// Recreate the symlink at `src` as a new link at `dst` (Windows).
///
/// Windows distinguishes file vs directory symlinks, so the original link's
/// file type picks the creation API.
#[cfg(windows)]
fn create_symlink_like(
    src: &std::path::Path,
    dst: &std::path::Path,
    file_type: std::fs::FileType,
) -> miette::Result<()> {
    use std::os::windows::fs::FileTypeExt;
    let target = std::fs::read_link(src)
        .into_diagnostic()
        .wrap_err_with(|| format!("failed to read symlink {}", src.display()))?;
    if file_type.is_symlink_dir() {
        // NOTE(review): presumably creates a junction/dir link that does not
        // require elevation — confirm in aube_linker.
        aube_linker::create_dir_link(&target, dst)
    } else {
        std::os::windows::fs::symlink_file(&target, dst)
    }
    .into_diagnostic()
    .wrap_err_with(|| format!("failed to symlink {}", dst.display()))
}
/// Temporary directory that removes itself (best effort) when dropped.
struct ScratchDir(std::path::PathBuf);
impl ScratchDir {
    fn path(&self) -> &std::path::Path {
        &self.0
    }
}
impl Drop for ScratchDir {
    fn drop(&mut self) {
        // Best-effort cleanup; failures are deliberately ignored.
        let _ = std::fs::remove_dir_all(&self.0);
    }
}
/// Copy a git clone into a throwaway temp directory (minus `.git`) so that
/// running `prepare` cannot mutate the shared clone.
///
/// The scratch dir name mixes the source path, the PID, and the current time
/// to keep concurrent installs from colliding.
// NOTE(review): this shells out to `cp -a`, so it only works where a POSIX
// `cp` exists (not stock Windows) — confirm whether git deps are expected to
// work there before relying on this path.
fn prepare_scratch_copy(src: &std::path::Path, spec: &str) -> miette::Result<ScratchDir> {
    let mut hasher = std::collections::hash_map::DefaultHasher::new();
    use std::hash::{Hash, Hasher};
    src.hash(&mut hasher);
    std::process::id().hash(&mut hasher);
    std::time::SystemTime::now()
        .duration_since(std::time::UNIX_EPOCH)
        .map(|d| d.as_nanos())
        .unwrap_or(0)
        .hash(&mut hasher);
    let dst = std::env::temp_dir().join(format!("aube-git-prep-{:x}", hasher.finish()));
    if dst.exists() {
        let _ = std::fs::remove_dir_all(&dst);
    }
    std::fs::create_dir_all(&dst)
        .map_err(|e| miette!("git dep {spec}: create scratch dir {}: {e}", dst.display()))?;
    // Wrap in ScratchDir now so Drop cleans up even if the copy below fails.
    let scratch = ScratchDir(dst);
    // `cp -a src/. dst` copies the directory *contents*, preserving
    // attributes and symlinks.
    let out = std::process::Command::new("cp")
        .arg("-a")
        .arg(format!("{}/.", src.display()))
        .arg(scratch.path())
        .output()
        .map_err(|e| miette!("git dep {spec}: spawn cp for scratch copy: {e}"))?;
    if !out.status.success() {
        return Err(miette!(
            "git dep {spec}: scratch copy failed: {}",
            String::from_utf8_lossy(&out.stderr).trim()
        ));
    }
    // Drop repo metadata so it cannot end up in the packed tarball.
    let _ = std::fs::remove_dir_all(scratch.path().join(".git"));
    Ok(scratch)
}
// Hard cap on recursion: a git dep's `prepare` may itself install git deps
// with `prepare` scripts; beyond this depth the install fails instead.
const GIT_PREPARE_MAX_DEPTH: u32 = 4;
/// Run a full nested install inside `clone_dir` so a git dependency's
/// `prepare` script can execute with its own dependencies in place.
///
/// The nested install runs on a dedicated blocking thread with its own
/// current-thread runtime (presumably to avoid re-entering the outer async
/// runtime / unbounded future recursion). `depth` tracks `prepare` nesting
/// and is bounded by [`GIT_PREPARE_MAX_DEPTH`].
async fn run_git_dep_prepare(
    clone_dir: &std::path::Path,
    spec: &str,
    ignore_scripts: bool,
    depth: u32,
) -> miette::Result<()> {
    if depth >= GIT_PREPARE_MAX_DEPTH {
        return Err(miette!(
            "git dep {spec}: `prepare` nesting exceeded {GIT_PREPARE_MAX_DEPTH} levels"
        ));
    }
    // NOTE(review): `chained_frozen_mode` presumably lets an outer frozen
    // setting influence nested installs — confirm in the parent module.
    let mut opts = InstallOptions::with_mode(super::chained_frozen_mode(FrozenMode::Prefer));
    opts.project_dir = Some(clone_dir.to_path_buf());
    opts.ignore_scripts = ignore_scripts;
    opts.git_prepare_depth = depth + 1;
    let spec = spec.to_string();
    tokio::task::spawn_blocking(move || {
        let runtime = tokio::runtime::Builder::new_current_thread()
            .enable_all()
            .build()
            .into_diagnostic()
            .wrap_err("failed to build nested git prepare runtime")?;
        runtime.block_on(run(opts))
    })
    .await
    .into_diagnostic()
    .wrap_err_with(|| format!("git dep {spec}: nested install task failed"))?
    .wrap_err_with(|| format!("git dep {spec}: nested install for `prepare` failed"))
}
/// Bring a non-registry ("local") dependency source into the store and
/// return its package index; `link:` deps return `None` (they are linked in
/// place, never imported).
///
/// Handles project-relative directories and tarballs, git clones (running
/// `prepare` in a scratch copy when needed), and remote tarball URLs, which
/// require `client`.
pub(super) async fn import_local_source(
    store: &std::sync::Arc<aube_store::Store>,
    project_root: &std::path::Path,
    local: &aube_lockfile::LocalSource,
    client: Option<&std::sync::Arc<aube_registry::client::RegistryClient>>,
    ignore_scripts: bool,
    git_prepare_depth: u32,
    git_shallow_hosts: &[String],
) -> miette::Result<Option<aube_store::PackageIndex>> {
    use aube_lockfile::LocalSource;
    match local {
        // `link:` dependencies are never copied into the store.
        LocalSource::Link(_) => Ok(None),
        LocalSource::Directory(rel) => {
            let abs = project_root.join(rel);
            if !abs.is_dir() {
                return Err(miette!(
                    "local dependency {}: {} is not a directory",
                    local.specifier(),
                    abs.display()
                ));
            }
            let index = store
                .import_directory(&abs)
                .map_err(|e| miette!("failed to import {}: {e}", local.specifier()))?;
            Ok(Some(index))
        }
        LocalSource::Tarball(rel) => {
            let abs = project_root.join(rel);
            let bytes = std::fs::read(&abs)
                .into_diagnostic()
                .wrap_err_with(|| format!("read {}", abs.display()))?;
            let index = store
                .import_tarball(&bytes)
                .map_err(|e| miette!("failed to import {}: {e}", local.specifier()))?;
            Ok(Some(index))
        }
        LocalSource::Git(g) => {
            let url = g.url.clone();
            let resolved = g.resolved.clone();
            let spec = local.specifier();
            // Shallow clone only for hosts in the configured shallow list.
            let shallow = aube_store::git_host_in_list(&url, git_shallow_hosts);
            // git does synchronous I/O; keep it off the async workers.
            let clone_dir = tokio::task::spawn_blocking(move || {
                aube_store::git_shallow_clone(&url, &resolved, shallow)
            })
            .await
            .map_err(|e| miette!("git clone task panicked: {e}"))?
            .map_err(|e| miette!("failed to clone {spec}: {e}"))?;
            let manifest_path = clone_dir.join("package.json");
            // `prepare` runs unless scripts are disabled or no (readable)
            // manifest declares one.
            let needs_prepare = !ignore_scripts
                && aube_manifest::PackageJson::from_path(&manifest_path)
                    .ok()
                    .is_some_and(|pj| pj.scripts.contains_key("prepare"));
            if needs_prepare {
                // Build in a scratch copy so the shared clone stays pristine,
                // then pack the prepared tree and import that tarball.
                let scratch = prepare_scratch_copy(&clone_dir, &spec)?;
                run_git_dep_prepare(scratch.path(), &spec, ignore_scripts, git_prepare_depth)
                    .await?;
                let archive = crate::commands::pack::build_archive(scratch.path())
                    .wrap_err_with(|| format!("failed to pack prepared git dep {spec}"))?;
                let index = store
                    .import_tarball(&archive.tarball)
                    .map_err(|e| miette!("failed to import prepared {spec}: {e}"))?;
                return Ok(Some(index));
            }
            let index = store
                .import_directory(&clone_dir)
                .map_err(|e| miette!("failed to import {}: {e}", local.specifier()))?;
            Ok(Some(index))
        }
        LocalSource::RemoteTarball(t) => {
            let client = client.ok_or_else(|| {
                miette!(
                    "internal: import_local_source called without a registry client for {}",
                    local.specifier()
                )
            })?;
            let bytes = client
                .fetch_tarball_bytes(&t.url)
                .await
                .map_err(|e| miette!("failed to fetch {}: {e}", t.url))?;
            // Integrity is only verified when the lockfile recorded one; an
            // empty string skips the check.
            if !t.integrity.is_empty() {
                aube_store::verify_integrity(&bytes, &t.integrity)
                    .map_err(|e| miette!("{}: {e}", t.url))?;
            }
            let index = store
                .import_tarball(&bytes)
                .map_err(|e| miette!("failed to import {}: {e}", local.specifier()))?;
            Ok(Some(index))
        }
    }
}
/// Fetch all locked packages for an install rooted at the current project.
///
/// Resolves network/verification settings from the project's npmrc, workspace
/// yaml, and captured environment, then delegates to
/// [`fetch_packages_with_root`], passing `true` for
/// `skip_already_linked_shortcut` (so the virtual-store "already linked"
/// check is not performed here).
pub(super) async fn fetch_packages(
    packages: &BTreeMap<String, aube_lockfile::LockedPackage>,
    store: &std::sync::Arc<aube_store::Store>,
    client: std::sync::Arc<aube_registry::client::RegistryClient>,
    progress: Option<&InstallProgress>,
    ignore_scripts: bool,
    git_prepare_depth: u32,
    git_shallow_hosts: Vec<String>,
) -> miette::Result<(BTreeMap<String, aube_store::PackageIndex>, usize, usize)> {
    let cwd = crate::dirs::project_root_or_cwd()?;
    // Build a settings-resolution context from the project's configuration
    // sources (no CLI flags at this call site).
    let npmrc_entries = aube_registry::config::load_npmrc_entries(&cwd);
    let raw_workspace = aube_manifest::workspace::load_both(&cwd)
        .map(|(_, raw)| raw)
        .unwrap_or_default();
    let env = aube_settings::values::capture_env();
    let ctx = aube_settings::ResolveCtx {
        npmrc: &npmrc_entries,
        workspace_yaml: &raw_workspace,
        env: &env,
        cli: &[],
    };
    let network_concurrency = resolve_network_concurrency(&ctx);
    let verify_integrity = resolve_verify_store_integrity(&ctx);
    let strict_pkg_content_check = resolve_strict_store_pkg_content_check(&ctx);
    let virtual_store_dir_max_length = super::resolve_virtual_store_dir_max_length(&ctx);
    let aube_dir = super::resolve_virtual_store_dir(&ctx, &cwd);
    fetch_packages_with_root(
        packages,
        store,
        // The client is already built; hand it over as a trivial builder.
        || client,
        progress,
        &cwd,
        &aube_dir,
        true,
        virtual_store_dir_max_length,
        ignore_scripts,
        network_concurrency,
        verify_integrity,
        strict_pkg_content_check,
        git_prepare_depth,
        git_shallow_hosts,
    )
    .await
}
/// Ensure every locked package is present in the store.
///
/// Returns `(indices, cached_count, fetch_count)` where `indices` maps dep
/// paths to store package indices, `cached_count` is the number of packages
/// reused (already linked in the virtual store or already cached), and
/// `fetch_count` is the number downloaded from the registry.
///
/// `client` is a lazily-invoked `FnOnce` builder: the registry client is only
/// constructed if a remote-tarball local source or a registry fetch actually
/// needs it.
///
/// Fix: restores `&registry_name` in three places where the source had been
/// corrupted to `®istry_name` (an `&reg` → `®` HTML-entity mangling), which
/// does not compile.
#[allow(clippy::too_many_arguments)]
pub(super) async fn fetch_packages_with_root<F>(
    packages: &BTreeMap<String, aube_lockfile::LockedPackage>,
    store: &std::sync::Arc<aube_store::Store>,
    client: F,
    progress: Option<&InstallProgress>,
    project_root: &std::path::Path,
    aube_dir: &std::path::Path,
    skip_already_linked_shortcut: bool,
    virtual_store_dir_max_length: usize,
    ignore_scripts: bool,
    network_concurrency: Option<usize>,
    verify_integrity: bool,
    strict_pkg_content_check: bool,
    git_prepare_depth: u32,
    git_shallow_hosts: Vec<String>,
) -> miette::Result<(BTreeMap<String, aube_store::PackageIndex>, usize, usize)>
where
    F: FnOnce() -> std::sync::Arc<aube_registry::client::RegistryClient>,
{
    /// Outcome of the per-package local availability check.
    enum CheckResult {
        AlreadyLinked,
        Cached(aube_store::PackageIndex),
        NeedsFetch,
    }
    // Check all registry-sourced packages in parallel: already linked in the
    // virtual store? already in the content store? otherwise needs a fetch.
    let check_results: Vec<_> = packages
        .par_iter()
        .filter(|(_, pkg)| pkg.local_source.is_none())
        .map(|(dep_path, pkg)| {
            if !skip_already_linked_shortcut {
                let entry_name = dep_path_to_filename(dep_path, virtual_store_dir_max_length);
                if aube_dir.join(&entry_name).exists() {
                    return (dep_path.clone(), pkg, CheckResult::AlreadyLinked);
                }
            }
            match store.load_index(pkg.registry_name(), &pkg.version) {
                Some(index) => (dep_path.clone(), pkg, CheckResult::Cached(index)),
                None => (dep_path.clone(), pkg, CheckResult::NeedsFetch),
            }
        })
        .collect();
    let mut indices: BTreeMap<String, aube_store::PackageIndex> = BTreeMap::new();
    // The client builder is FnOnce; build it eagerly when any remote-tarball
    // local source will need it, and stash it so the registry-fetch phase can
    // reuse the same client.
    let has_remote_tarball = packages.values().any(|p| {
        matches!(
            p.local_source,
            Some(aube_lockfile::LocalSource::RemoteTarball(_))
        )
    });
    let mut client_slot: Option<std::sync::Arc<aube_registry::client::RegistryClient>> = None;
    let mut client_builder = Some(client);
    if has_remote_tarball {
        client_slot = Some((client_builder.take().unwrap())());
    }
    // Import local sources (dirs, tarballs, git, remote tarballs) one by one
    // before the parallel registry fetch phase.
    for (dep_path, pkg) in packages {
        let Some(ref local) = pkg.local_source else {
            continue;
        };
        if let Some(index) = import_local_source(
            store,
            project_root,
            local,
            client_slot.as_ref(),
            ignore_scripts,
            git_prepare_depth,
            &git_shallow_hosts,
        )
        .await?
        {
            indices.insert(dep_path.clone(), index);
        }
        if let Some(p) = progress {
            p.inc_reused(1);
        }
    }
    let mut to_fetch = Vec::new();
    let mut cached_count = 0usize;
    for (dep_path, pkg, result) in check_results {
        match result {
            CheckResult::AlreadyLinked => {
                cached_count += 1;
            }
            CheckResult::Cached(index) => {
                indices.insert(dep_path, index);
                cached_count += 1;
            }
            CheckResult::NeedsFetch => {
                to_fetch.push((
                    dep_path,
                    pkg.name.clone(),
                    pkg.registry_name().to_string(),
                    pkg.version.clone(),
                    pkg.tarball_url.clone(),
                    pkg.integrity.clone(),
                ));
            }
        }
    }
    if let Some(p) = progress {
        p.inc_reused(cached_count);
    }
    let fetch_count = to_fetch.len();
    if !to_fetch.is_empty() {
        // Build the registry client now if the local-source phase didn't.
        let client = match client_slot.take() {
            Some(c) => c,
            None => (client_builder.take().unwrap())(),
        };
        // Bound concurrent downloads with a semaphore; permits are acquired
        // inside each task so spawning stays cheap.
        let sem_permits = network_concurrency.unwrap_or_else(default_lockfile_network_concurrency);
        let semaphore = std::sync::Arc::new(tokio::sync::Semaphore::new(sem_permits));
        let mut handles = Vec::new();
        for (dep_path, display_name, registry_name, version, tarball_url_override, integrity) in
            to_fetch
        {
            let sem = semaphore.clone();
            let store = store.clone();
            let client = client.clone();
            let row = progress.map(|p| p.start_fetch(&display_name, &version));
            let bytes_progress = progress.cloned();
            let handle = tokio::spawn(async move {
                let _row = row;
                let task_start = std::time::Instant::now();
                let permit = sem.acquire().await.unwrap();
                let wait_time = task_start.elapsed();
                // Lockfile-pinned tarball URL wins over the default registry URL.
                let url = tarball_url_override
                    .clone()
                    .unwrap_or_else(|| client.tarball_url(&registry_name, &version));
                let dl_start = std::time::Instant::now();
                let bytes = client
                    .fetch_tarball_bytes(&url)
                    .await
                    .map_err(|e| miette!("failed to fetch {display_name}@{version}: {e}"))?;
                let dl_time = dl_start.elapsed();
                if let Some(p) = bytes_progress.as_ref() {
                    p.inc_downloaded_bytes(bytes.len() as u64);
                }
                let bytes_len = bytes.len();
                // Integrity check + tarball import are CPU/IO heavy; run them
                // on the blocking pool.
                let (index, import_time) = tokio::task::spawn_blocking({
                    let store = store.clone();
                    let display_name = display_name.clone();
                    let registry_name = registry_name.clone();
                    let version = version.clone();
                    move || -> miette::Result<_> {
                        if verify_integrity && let Some(ref expected) = integrity {
                            aube_store::verify_integrity(&bytes, expected)
                                .map_err(|e| miette!("{display_name}@{version}: {e}"))?;
                        }
                        let import_start = std::time::Instant::now();
                        let index = store.import_tarball(&bytes).map_err(|e| {
                            miette!("failed to import {display_name}@{version}: {e}")
                        })?;
                        let import_time = import_start.elapsed();
                        if strict_pkg_content_check {
                            aube_store::validate_pkg_content(&index, &registry_name, &version)
                                .map_err(|e| miette!("{display_name}@{version}: {e}"))?;
                        }
                        // A failed index-cache write is non-fatal: the import succeeded.
                        if let Err(e) = store.save_index(&registry_name, &version, &index) {
                            tracing::warn!(
                                "Failed to cache index for {display_name}@{version}: {e}"
                            );
                        }
                        Ok((index, import_time))
                    }
                })
                .await
                .into_diagnostic()??;
                tracing::trace!(
                    "fetch {display_name}@{version}: wait={:.0?} dl={:.0?} ({} bytes) import={:.0?}",
                    wait_time,
                    dl_time,
                    bytes_len,
                    import_time
                );
                drop(permit);
                Ok::<_, miette::Report>((dep_path, index))
            });
            handles.push(handle);
        }
        for handle in handles {
            let (dep_path, index) = handle.await.into_diagnostic()??;
            indices.insert(dep_path, index);
        }
    }
    Ok((indices, cached_count, fetch_count))
}
fn resolve_resolution_mode(ctx: &aube_settings::ResolveCtx<'_>) -> aube_resolver::ResolutionMode {
let raw = aube_settings::values::string_from_cli("resolutionMode", ctx.cli)
.or_else(|| aube_settings::values::string_from_env("resolutionMode", ctx.env))
.or_else(|| aube_settings::values::string_from_npmrc("resolutionMode", ctx.npmrc))
.or_else(|| {
aube_settings::values::string_from_workspace_yaml("resolutionMode", ctx.workspace_yaml)
});
if let Some(raw) = raw
&& let Some(m) = parse_resolution_mode(&raw)
{
return m;
}
map_resolution_mode(aube_settings::resolved::resolution_mode(ctx))
}
fn map_resolution_mode(
m: aube_settings::resolved::ResolutionMode,
) -> aube_resolver::ResolutionMode {
match m {
aube_settings::resolved::ResolutionMode::Highest => aube_resolver::ResolutionMode::Highest,
aube_settings::resolved::ResolutionMode::TimeBased
| aube_settings::resolved::ResolutionMode::LowestDirect => {
aube_resolver::ResolutionMode::TimeBased
}
}
}
/// Resolve the minimum-release-age gate. A CLI override beats the resolved
/// setting; a value of 0 minutes disables the gate (returns `None`).
fn resolve_minimum_release_age(
    ctx: &aube_settings::ResolveCtx<'_>,
    cli_minutes: Option<u64>,
) -> Option<aube_resolver::MinimumReleaseAge> {
    let minutes = match cli_minutes {
        Some(m) => m,
        None => aube_settings::resolved::minimum_release_age(ctx),
    };
    // Exclusions and strictness are only looked up when the gate is active.
    (minutes > 0).then(|| aube_resolver::MinimumReleaseAge {
        minutes,
        exclude: aube_settings::resolved::minimum_release_age_exclude(ctx)
            .unwrap_or_default()
            .into_iter()
            .collect::<std::collections::HashSet<String>>(),
        strict: aube_settings::resolved::minimum_release_age_strict(ctx),
    })
}
/// Resolved `autoInstallPeers` setting.
fn resolve_auto_install_peers(ctx: &aube_settings::ResolveCtx<'_>) -> bool {
    aube_settings::resolved::auto_install_peers(ctx)
}
/// Resolved `excludeLinksFromLockfile` setting.
fn resolve_exclude_links_from_lockfile(ctx: &aube_settings::ResolveCtx<'_>) -> bool {
    aube_settings::resolved::exclude_links_from_lockfile(ctx)
}
/// Return the first trigger package name that appears in any manifest's
/// dependencies, devDependencies, or optionalDependencies, or `None` if no
/// manifest declares one.
fn find_gvs_incompatible_trigger<'a>(
    manifests: &[(String, aube_manifest::PackageJson)],
    triggers: &'a [String],
) -> Option<&'a str> {
    manifests.iter().find_map(|(_, manifest)| {
        triggers
            .iter()
            .find(|name| {
                manifest.dependencies.contains_key(name.as_str())
                    || manifest.dev_dependencies.contains_key(name.as_str())
                    || manifest.optional_dependencies.contains_key(name.as_str())
            })
            .map(String::as_str)
    })
}
/// Prune catalog entries that are declared in the workspace yaml but unused
/// by the lockfile, when the `cleanupUnusedCatalogs` setting is enabled.
///
/// No-ops when the setting is off, no catalogs are declared, or the workspace
/// yaml file cannot be located.
///
/// Fix: the log message printed a literal placeholder instead of the computed
/// `filename`, which was otherwise unused (and would warn as such).
fn maybe_cleanup_unused_catalogs(
    cwd: &std::path::Path,
    ctx: &aube_settings::ResolveCtx<'_>,
    declared: &std::collections::BTreeMap<String, std::collections::BTreeMap<String, String>>,
    used: &std::collections::BTreeMap<
        String,
        std::collections::BTreeMap<String, aube_lockfile::CatalogEntry>,
    >,
) -> miette::Result<()> {
    if !aube_settings::resolved::cleanup_unused_catalogs(ctx) {
        return Ok(());
    }
    if declared.is_empty() {
        return Ok(());
    }
    let Some(ws_path) = super::catalogs::workspace_yaml_path(cwd) else {
        return Ok(());
    };
    let dropped = super::catalogs::prune_unused_catalog_entries(&ws_path, declared, used)?;
    if !dropped.is_empty() {
        // Prefer the bare file name; fall back to the full path display.
        let filename = ws_path
            .file_name()
            .map(|n| n.to_string_lossy().into_owned())
            .unwrap_or_else(|| ws_path.display().to_string());
        tracing::info!(
            "cleanupUnusedCatalogs: pruned {} entr{} from {filename}",
            dropped.len(),
            if dropped.len() == 1 { "y" } else { "ies" }
        );
    }
    Ok(())
}
/// Resolved `networkConcurrency` setting; `0` is rejected with a warning and
/// treated as unset.
fn resolve_network_concurrency(ctx: &aube_settings::ResolveCtx<'_>) -> Option<usize> {
    let raw = aube_settings::resolved::network_concurrency(ctx)?;
    if raw == 0 {
        tracing::warn!("ignoring network-concurrency=0 (must be >= 1)");
        return None;
    }
    Some(raw as usize)
}
/// Resolved `linkConcurrency` setting; `0` is rejected with a warning and
/// treated as unset.
fn resolve_link_concurrency(ctx: &aube_settings::ResolveCtx<'_>) -> Option<usize> {
    let raw = aube_settings::resolved::link_concurrency(ctx)?;
    if raw == 0 {
        tracing::warn!("ignoring link-concurrency=0 (must be >= 1)");
        return None;
    }
    Some(raw as usize)
}
/// Default parallel-download limit for the lockfile fetch path.
/// A lower cap is used on macOS than elsewhere.
fn default_lockfile_network_concurrency() -> usize {
    match cfg!(target_os = "macos") {
        true => 24,
        false => 128,
    }
}
/// Default parallel-download limit for the streaming fetch path.
/// A lower cap is used on macOS than elsewhere.
fn default_streaming_network_concurrency() -> usize {
    match cfg!(target_os = "macos") {
        true => 24,
        false => 64,
    }
}
/// Resolved `verifyStoreIntegrity` setting.
fn resolve_verify_store_integrity(ctx: &aube_settings::ResolveCtx<'_>) -> bool {
    aube_settings::resolved::verify_store_integrity(ctx)
}
/// Resolved `strictStorePkgContentCheck` setting.
fn resolve_strict_store_pkg_content_check(ctx: &aube_settings::ResolveCtx<'_>) -> bool {
    aube_settings::resolved::strict_store_pkg_content_check(ctx)
}
/// Resolved `useRunningStoreServer` setting (accepted for compatibility;
/// the caller warns that it has no effect).
fn resolve_use_running_store_server(ctx: &aube_settings::ResolveCtx<'_>) -> bool {
    aube_settings::resolved::use_running_store_server(ctx)
}
/// Resolved `symlink` setting (accepted for compatibility; the caller warns
/// that `false` has no effect).
fn resolve_symlink(ctx: &aube_settings::ResolveCtx<'_>) -> bool {
    aube_settings::resolved::symlink(ctx)
}
/// Resolved `gitShallowHosts` setting.
fn resolve_git_shallow_hosts(ctx: &aube_settings::ResolveCtx<'_>) -> Vec<String> {
    aube_settings::resolved::git_shallow_hosts(ctx)
}
/// Resolved `sideEffectsCache` setting.
fn resolve_side_effects_cache(ctx: &aube_settings::ResolveCtx<'_>) -> bool {
    aube_settings::resolved::side_effects_cache(ctx)
}
/// Resolved `sideEffectsCacheReadonly` setting.
fn resolve_side_effects_cache_readonly(ctx: &aube_settings::ResolveCtx<'_>) -> bool {
    aube_settings::resolved::side_effects_cache_readonly(ctx)
}
/// Resolved `strictPeerDependencies` setting.
fn resolve_strict_peer_dependencies(ctx: &aube_settings::ResolveCtx<'_>) -> bool {
    aube_settings::resolved::strict_peer_dependencies(ctx)
}
/// Resolved `peersSuffixMaxLength` setting, saturating to `usize::MAX` when
/// the raw value does not fit in a `usize`.
fn resolve_peers_suffix_max_length(ctx: &aube_settings::ResolveCtx<'_>) -> usize {
    let raw = aube_settings::resolved::peers_suffix_max_length(ctx);
    usize::try_from(raw).unwrap_or(usize::MAX)
}
/// Resolved `dedupePeerDependents` setting.
fn resolve_dedupe_peer_dependents(ctx: &aube_settings::ResolveCtx<'_>) -> bool {
    aube_settings::resolved::dedupe_peer_dependents(ctx)
}
/// Resolved `dedupePeers` setting.
fn resolve_dedupe_peers(ctx: &aube_settings::ResolveCtx<'_>) -> bool {
    aube_settings::resolved::dedupe_peers(ctx)
}
/// Resolved `resolvePeersFromWorkspaceRoot` setting.
fn resolve_peers_from_workspace_root(ctx: &aube_settings::ResolveCtx<'_>) -> bool {
    aube_settings::resolved::resolve_peers_from_workspace_root(ctx)
}
/// Resolved `registrySupportsTimeField` setting.
fn resolve_registry_supports_time_field(ctx: &aube_settings::ResolveCtx<'_>) -> bool {
    aube_settings::resolved::registry_supports_time_field(ctx)
}
/// Build the resolver's dependency policy from the manifest plus settings.
/// Settings-level entries are merged on top of the manifest's own
/// `packageExtensions` and `allowedDeprecatedVersions`.
fn resolve_dependency_policy(
    manifest: &aube_manifest::PackageJson,
    ctx: &aube_settings::ResolveCtx<'_>,
) -> aube_resolver::DependencyPolicy {
    let mut raw_extensions = manifest.package_extensions();
    merge_json_object_setting(ctx, "packageExtensions", &mut raw_extensions);
    let mut deprecated_allowlist = manifest.allowed_deprecated_versions();
    merge_string_map_setting(ctx, "allowedDeprecatedVersions", &mut deprecated_allowlist);
    let mut policy = aube_resolver::DependencyPolicy::default();
    policy.package_extensions = parse_package_extensions(raw_extensions);
    policy.allowed_deprecated_versions = deprecated_allowlist;
    // Translate the settings-layer trust policy into the resolver's enum.
    policy.trust_policy = match aube_settings::resolved::trust_policy(ctx) {
        aube_settings::resolved::TrustPolicy::NoDowngrade => {
            aube_resolver::TrustPolicy::NoDowngrade
        }
        aube_settings::resolved::TrustPolicy::Off => aube_resolver::TrustPolicy::Off,
    };
    policy.trust_policy_exclude = aube_settings::resolved::trust_policy_exclude(ctx)
        .into_iter()
        .collect();
    policy.trust_policy_ignore_after = aube_settings::resolved::trust_policy_ignore_after(ctx);
    policy.block_exotic_subdeps = aube_settings::resolved::block_exotic_subdeps(ctx);
    policy
}
/// Merge a JSON-object-valued setting from all configuration layers into
/// `out`. Later layers override earlier keys: workspace yaml, then .npmrc,
/// then environment.
fn merge_json_object_setting(
    ctx: &aube_settings::ResolveCtx<'_>,
    setting: &str,
    out: &mut BTreeMap<String, serde_json::Value>,
) {
    let layers = [
        object_setting_from_workspace_yaml(setting, ctx.workspace_yaml),
        object_setting_from_npmrc(setting, ctx.npmrc),
        object_setting_from_env(setting, ctx.env),
    ];
    for layer in layers.into_iter().flatten() {
        out.extend(layer);
    }
}
/// Merge a string-map-valued setting from all configuration layers into
/// `out`, dropping non-string JSON values. Later layers override earlier
/// keys: workspace yaml, then .npmrc, then environment.
fn merge_string_map_setting(
    ctx: &aube_settings::ResolveCtx<'_>,
    setting: &str,
    out: &mut BTreeMap<String, String>,
) {
    let layers = [
        object_setting_from_workspace_yaml(setting, ctx.workspace_yaml),
        object_setting_from_npmrc(setting, ctx.npmrc),
        object_setting_from_env(setting, ctx.env),
    ];
    for layer in layers.into_iter().flatten() {
        out.extend(json_string_map(layer));
    }
}
/// Look up a JSON-object-valued setting in .npmrc entries. The last matching
/// key wins (entries are scanned in reverse); its raw value is parsed as a
/// JSON object, returning `None` on parse failure.
fn object_setting_from_npmrc(
    setting: &str,
    entries: &[(String, String)],
) -> Option<BTreeMap<String, serde_json::Value>> {
    let meta = aube_settings::find(setting)?;
    let (_, raw) = entries
        .iter()
        .rev()
        .find(|(key, _)| meta.npmrc_keys.contains(&key.as_str()))?;
    parse_json_object(raw)
}
/// Look up a JSON-object-valued setting in environment variables. The last
/// matching variable wins (scanned in reverse); its raw value is parsed as a
/// JSON object, returning `None` on parse failure.
fn object_setting_from_env(
    setting: &str,
    env: &[(String, String)],
) -> Option<BTreeMap<String, serde_json::Value>> {
    let meta = aube_settings::find(setting)?;
    let (_, raw) = env
        .iter()
        .rev()
        .find(|(key, _)| meta.env_vars.contains(&key.as_str()))?;
    parse_json_object(raw)
}
/// Look up a JSON-object-valued setting in the workspace yaml. Candidate
/// keys are tried in order; a key whose value is not a JSON object is
/// skipped in favor of the next candidate.
fn object_setting_from_workspace_yaml(
    setting: &str,
    raw: &BTreeMap<String, serde_yaml::Value>,
) -> Option<BTreeMap<String, serde_json::Value>> {
    let meta = aube_settings::find(setting)?;
    meta.workspace_yaml_keys.iter().find_map(|key| {
        let value = aube_settings::workspace_yaml_value(raw, key)?;
        match serde_json::to_value(value) {
            Ok(serde_json::Value::Object(obj)) => Some(obj.into_iter().collect()),
            _ => None,
        }
    })
}
/// Parse `raw` as a JSON object; returns `None` for invalid JSON or any
/// non-object JSON value.
fn parse_json_object(raw: &str) -> Option<BTreeMap<String, serde_json::Value>> {
    match serde_json::from_str(raw) {
        Ok(serde_json::Value::Object(obj)) => Some(obj.into_iter().collect()),
        _ => None,
    }
}
/// Keep only the string-valued entries of a JSON map, converting them to an
/// owned string map.
fn json_string_map(map: BTreeMap<String, serde_json::Value>) -> BTreeMap<String, String> {
    let mut out = BTreeMap::new();
    for (key, value) in map {
        if let Some(s) = value.as_str() {
            out.insert(key, s.to_string());
        }
    }
    out
}
/// Convert raw `packageExtensions` JSON into resolver extension entries.
/// Selectors whose value is not a JSON object are silently skipped.
fn parse_package_extensions(
    raw: BTreeMap<String, serde_json::Value>,
) -> Vec<aube_resolver::PackageExtension> {
    let mut extensions = Vec::new();
    for (selector, value) in raw {
        let Some(obj) = value.as_object() else {
            continue;
        };
        extensions.push(aube_resolver::PackageExtension {
            selector,
            dependencies: read_json_string_map(obj.get("dependencies")),
            optional_dependencies: read_json_string_map(obj.get("optionalDependencies")),
            peer_dependencies: read_json_string_map(obj.get("peerDependencies")),
            peer_dependencies_meta: read_peer_dependencies_meta(
                obj.get("peerDependenciesMeta"),
            ),
        });
    }
    extensions
}
fn read_json_string_map(value: Option<&serde_json::Value>) -> BTreeMap<String, String> {
value
.and_then(|v| v.as_object())
.map(|obj| {
obj.iter()
.filter_map(|(k, v)| v.as_str().map(|s| (k.clone(), s.to_string())))
.collect()
})
.unwrap_or_default()
}
/// Read an optional JSON value as `peerDependenciesMeta`. Non-objects yield
/// an empty map; `optional` defaults to `false` when absent or non-boolean.
fn read_peer_dependencies_meta(
    value: Option<&serde_json::Value>,
) -> BTreeMap<String, aube_registry::PeerDepMeta> {
    let Some(obj) = value.and_then(serde_json::Value::as_object) else {
        return BTreeMap::new();
    };
    obj.iter()
        .map(|(name, meta)| {
            let optional = matches!(
                meta.get("optional"),
                Some(serde_json::Value::Bool(true))
            );
            (name.clone(), aube_registry::PeerDepMeta { optional })
        })
        .collect()
}
/// Bundle of read-only inputs for [`configure_resolver`], grouped to keep its
/// signature manageable.
struct ResolverConfigInputs<'a> {
    // Settings-resolution context (npmrc, workspace yaml, env, CLI flags).
    settings_ctx: &'a aube_settings::ResolveCtx<'a>,
    // Catalog name -> (package -> version spec), discovered from the workspace.
    workspace_catalogs:
        &'a std::collections::BTreeMap<String, std::collections::BTreeMap<String, String>>,
    opts: &'a InstallOptions,
}
/// Apply all resolution-affecting settings from the manifest and the settings
/// context to `resolver`, returning the configured resolver.
///
/// Settings-level `overrides` are merged on top of the manifest's own
/// overrides map. The optional `read_package_hook` is installed last.
///
/// Fix: collapses a redundant immediate rebinding
/// (`let x = …; let mut x = x;`) of `effective_overrides`.
fn configure_resolver(
    resolver: aube_resolver::Resolver,
    cwd: &std::path::Path,
    manifest: &aube_manifest::PackageJson,
    inputs: ResolverConfigInputs<'_>,
    read_package_hook: Option<Box<dyn aube_resolver::ReadPackageHook>>,
) -> aube_resolver::Resolver {
    let ResolverConfigInputs {
        settings_ctx,
        workspace_catalogs,
        opts,
    } = inputs;
    let auto_install_peers = resolve_auto_install_peers(settings_ctx);
    let exclude_links_from_lockfile = resolve_exclude_links_from_lockfile(settings_ctx);
    let peers_suffix_max_length = resolve_peers_suffix_max_length(settings_ctx);
    let dedupe_peer_dependents = resolve_dedupe_peer_dependents(settings_ctx);
    let dedupe_peers = resolve_dedupe_peers(settings_ctx);
    let resolve_peers_from_workspace_root_opt = resolve_peers_from_workspace_root(settings_ctx);
    let registry_supports_time_field = resolve_registry_supports_time_field(settings_ctx);
    let (sup_os, sup_cpu, sup_libc) = manifest.pnpm_supported_architectures();
    let supported_architectures = aube_resolver::SupportedArchitectures {
        os: sup_os,
        cpu: sup_cpu,
        libc: sup_libc,
    };
    // Manifest overrides first, then settings-level overrides on top.
    let mut effective_overrides = manifest.overrides_map();
    merge_string_map_setting(settings_ctx, "overrides", &mut effective_overrides);
    if !effective_overrides.is_empty() {
        tracing::debug!("applying {} overrides", effective_overrides.len());
    }
    let dependency_policy = resolve_dependency_policy(manifest, settings_ctx);
    if !dependency_policy.package_extensions.is_empty() {
        tracing::debug!(
            "applying {} packageExtensions",
            dependency_policy.package_extensions.len()
        );
    }
    let ignored_optional = manifest.pnpm_ignored_optional_dependencies();
    if !ignored_optional.is_empty() {
        tracing::debug!(
            "ignoring {} optional dependencies (pnpm.ignoredOptionalDependencies)",
            ignored_optional.len()
        );
    }
    let resolution_mode = resolve_resolution_mode(settings_ctx);
    let minimum_release_age =
        resolve_minimum_release_age(settings_ctx, opts.minimum_release_age_override);
    if let Some(ref mra) = minimum_release_age {
        tracing::debug!(
            "minimumReleaseAge: {} min, {} excluded, strict={}",
            mra.minutes,
            mra.exclude.len(),
            mra.strict
        );
    }
    let git_shallow_hosts = resolve_git_shallow_hosts(settings_ctx);
    let mut resolver = resolver
        .with_packument_cache(packument_cache_dir())
        .with_packument_full_cache(packument_full_cache_dir())
        .with_auto_install_peers(auto_install_peers)
        .with_peers_suffix_max_length(peers_suffix_max_length)
        .with_exclude_links_from_lockfile(exclude_links_from_lockfile)
        .with_dedupe_peer_dependents(dedupe_peer_dependents)
        .with_dedupe_peers(dedupe_peers)
        .with_resolve_peers_from_workspace_root(resolve_peers_from_workspace_root_opt)
        .with_registry_supports_time_field(registry_supports_time_field)
        .with_supported_architectures(supported_architectures)
        .with_overrides(effective_overrides)
        .with_ignored_optional_dependencies(ignored_optional)
        .with_resolution_mode(resolution_mode)
        .with_minimum_release_age(minimum_release_age)
        .with_catalogs(workspace_catalogs.clone())
        .with_project_root(cwd.to_path_buf())
        .with_dependency_policy(dependency_policy)
        .with_git_shallow_hosts(git_shallow_hosts);
    if let Some(hook) = read_package_hook {
        resolver = resolver.with_read_package_hook(hook);
    }
    resolver
}
/// Report unmet peer dependencies (after applying the silencing `rules`) to
/// stderr. In strict mode the report is an error and `Err` is returned;
/// otherwise it is a warning and `Ok(())` is returned.
fn warn_unmet_peers(
    graph: &aube_lockfile::LockfileGraph,
    strict: bool,
    rules: &PeerDependencyRules,
) -> miette::Result<()> {
    let unmet: Vec<_> = aube_resolver::detect_unmet_peers(graph)
        .into_iter()
        .filter(|u| !rules.silences(u))
        .collect();
    if unmet.is_empty() {
        return Ok(());
    }
    let (header, prefix) = if strict {
        ("error: Issues with peer dependencies found", "error:")
    } else {
        ("warn: Issues with peer dependencies found", "warn:")
    };
    eprintln!("{header}");
    for u in &unmet {
        let from_version = version_from_dep_path(&u.from_dep_path, &u.from_name);
        if let Some(found) = &u.found {
            eprintln!(
                "{prefix} {}@{}: expected peer {}@{}, found {}",
                u.from_name, from_version, u.peer_name, u.declared, found,
            );
        } else {
            eprintln!(
                "{prefix} {}@{}: missing required peer {}@{}",
                u.from_name, from_version, u.peer_name, u.declared,
            );
        }
    }
    if !strict {
        return Ok(());
    }
    Err(miette!(
        "{} unmet peer dependenc{} (strict-peer-dependencies is enabled)",
        unmet.len(),
        if unmet.len() == 1 { "y" } else { "ies" }
    ))
}
/// Configured exceptions for unmet-peer reporting (see `silences`).
#[derive(Debug, Default)]
pub(crate) struct PeerDependencyRules {
    // Peer-name globs whose *missing* peers are not reported.
    ignore_missing: Vec<glob::Pattern>,
    // Peer-name globs for which any found version is accepted.
    allow_any: Vec<glob::Pattern>,
    // Range allow-list keyed by "parent>peer" or bare peer name.
    allowed_versions: BTreeMap<String, String>,
}
impl PeerDependencyRules {
pub(crate) fn resolve(
manifest: &aube_manifest::PackageJson,
ctx: &aube_settings::ResolveCtx<'_>,
) -> Self {
let ignore_missing_raw = aube_settings::resolved::peer_dependency_rules_ignore_missing(ctx)
.unwrap_or_else(|| manifest.pnpm_peer_dependency_rules_ignore_missing());
let allow_any_raw = aube_settings::resolved::peer_dependency_rules_allow_any(ctx)
.unwrap_or_else(|| manifest.pnpm_peer_dependency_rules_allow_any());
let mut allowed_versions = manifest.pnpm_peer_dependency_rules_allowed_versions();
merge_string_map_setting(
ctx,
"peerDependencyRules.allowedVersions",
&mut allowed_versions,
);
Self {
ignore_missing: compile_peer_patterns("ignoreMissing", &ignore_missing_raw),
allow_any: compile_peer_patterns("allowAny", &allow_any_raw),
allowed_versions,
}
}
pub(crate) fn silences(&self, u: &aube_resolver::UnmetPeer) -> bool {
if u.found.is_none() && self.ignore_missing.iter().any(|p| p.matches(&u.peer_name)) {
return true;
}
if self.allow_any.iter().any(|p| p.matches(&u.peer_name)) {
return true;
}
if let Some(found) = u.found.as_deref()
&& self.allowed_versions_permit(&u.from_name, &u.peer_name, found)
{
return true;
}
false
}
fn allowed_versions_permit(&self, parent: &str, peer: &str, found: &str) -> bool {
let scoped_key = format!("{parent}>{peer}");
let candidates = [
self.allowed_versions.get(&scoped_key),
self.allowed_versions.get(peer),
];
let Ok(found_v) = node_semver::Version::parse(found) else {
return false;
};
candidates
.into_iter()
.flatten()
.any(|range| matches_range(range, &found_v))
}
}
/// Whether `found` satisfies the semver `range`; an unparseable range never
/// matches.
fn matches_range(range: &str, found: &node_semver::Version) -> bool {
    node_semver::Range::parse(range).is_ok_and(|r| r.satisfies(found))
}
/// Compile glob patterns for a `peerDependencyRules` list, warning about and
/// dropping any invalid pattern.
fn compile_peer_patterns(field: &str, raw: &[String]) -> Vec<glob::Pattern> {
    let mut patterns = Vec::new();
    for p in raw {
        match glob::Pattern::new(p) {
            Ok(pat) => patterns.push(pat),
            Err(err) => {
                tracing::warn!("ignoring invalid peerDependencyRules.{field} pattern {p:?}: {err}");
            }
        }
    }
    patterns
}
/// Extract the version from a dep path like `name@1.2.3(peer@1.0)`: strip the
/// leading `name@` (when present) and anything from the first `(` onward.
/// Returns the remaining path unchanged if the `name@` prefix does not match.
///
/// Improvement: uses chained `strip_prefix` calls instead of allocating a
/// `format!("{name}@")` probe string on every call, and drops the dead
/// `unwrap_or` (the first element of `split` always exists).
fn version_from_dep_path(dep_path: &str, name: &str) -> String {
    let tail = dep_path
        .strip_prefix(name)
        .and_then(|rest| rest.strip_prefix('@'))
        .unwrap_or(dep_path);
    // Cut off any peer-suffix such as "(react@18.0.0)".
    tail.split('(').next().unwrap_or(tail).to_string()
}
/// Re-key store indices by the graph's (possibly peer-contextualized) dep
/// paths. For each graph package, the index is looked up first by the dep
/// path itself, then by the canonical `name@version` key; packages with no
/// index under either key are omitted.
fn remap_indices_to_contextualized(
    canonical_indices: &BTreeMap<String, aube_store::PackageIndex>,
    graph: &aube_lockfile::LockfileGraph,
) -> BTreeMap<String, aube_store::PackageIndex> {
    graph
        .packages
        .iter()
        .filter_map(|(dep_path, pkg)| {
            let canonical_key = format!("{}@{}", pkg.name, pkg.version);
            canonical_indices
                .get(dep_path)
                .or_else(|| canonical_indices.get(&canonical_key))
                .map(|idx| (dep_path.clone(), idx.clone()))
        })
        .collect()
}
pub async fn run(opts: InstallOptions) -> miette::Result<()> {
let mode = opts.mode;
let cwd = if let Some(project_dir) = &opts.project_dir {
project_dir.clone()
} else {
let initial_cwd = crate::dirs::cwd()?;
match crate::dirs::find_project_root(&initial_cwd) {
Some(root) => root,
None => {
return Err(miette!(
"no package.json found in {} or any parent directory",
initial_cwd.display()
));
}
}
};
let _lock = super::take_project_lock(&cwd)?;
let start = std::time::Instant::now();
if opts.force {
let _ = state::remove_state(&cwd);
}
let manifest = aube_manifest::PackageJson::from_path(&cwd.join("package.json"))
.into_diagnostic()
.wrap_err("failed to read package.json")?;
let project_name = manifest.name.as_deref().unwrap_or("(unnamed)");
let npmrc_entries = aube_registry::config::load_npmrc_entries(&cwd);
let (ws_config_shared, raw_workspace) = aube_manifest::workspace::load_both(&cwd)
.into_diagnostic()
.wrap_err("failed to load workspace config")?;
let workspace_catalogs = super::discover_catalogs(&cwd)?;
let settings_ctx = aube_settings::ResolveCtx {
npmrc: &npmrc_entries,
workspace_yaml: &raw_workspace,
env: &opts.env_snapshot,
cli: &opts.cli_flags,
};
super::configure_script_settings(&settings_ctx);
let modules_dir_name = aube_settings::resolved::modules_dir(&settings_ctx);
let aube_dir = super::resolve_virtual_store_dir(&settings_ctx, &cwd);
let lockfile_enabled = aube_settings::resolved::lockfile(&settings_ctx);
let modules_dir_enabled = aube_settings::resolved::enable_modules_dir(&settings_ctx);
let lockfile_only_effective = opts.lockfile_only || !modules_dir_enabled;
if !lockfile_enabled && opts.lockfile_only {
return Err(miette!(
"--lockfile-only is incompatible with lockfile=false; \
remove one or the other"
));
}
if !lockfile_enabled && !modules_dir_enabled {
return Err(miette!(
"enableModulesDir=false is incompatible with lockfile=false; \
remove one or the other"
));
}
if !lockfile_enabled && opts.strict_no_lockfile {
return Err(miette!(
"--frozen-lockfile is incompatible with lockfile=false; \
remove one or the other"
));
}
let lockfile_include_tarball_url =
aube_settings::resolved::lockfile_include_tarball_url(&settings_ctx);
tracing::debug!(
"lockfile: enabled={lockfile_enabled}, include-tarball-url={lockfile_include_tarball_url}"
);
if lockfile_enabled {
let patterns =
aube_settings::resolved::merge_git_branch_lockfiles_branch_pattern(&settings_ctx)
.unwrap_or_default();
let should_merge = opts.merge_git_branch_lockfiles
|| aube_lockfile::merge::current_branch_matches(&cwd, &patterns);
if should_merge {
match aube_lockfile::merge_branch_lockfiles(&cwd, &manifest) {
Ok(report) => {
if !report.merged_files.is_empty() {
let filenames: Vec<String> = report
.merged_files
.iter()
.filter_map(|p| {
p.file_name()
.and_then(|n| n.to_str())
.map(|s| s.to_string())
})
.collect();
tracing::info!(
"merged {} branch lockfile(s) into aube-lock.yaml: {}",
report.merged_files.len(),
filenames.join(", ")
);
if !report.conflicts.is_empty() {
tracing::warn!(
"{} conflict(s) during branch-lockfile merge; see --verbose for details",
report.conflicts.len()
);
}
} else {
tracing::debug!(
"branch-lockfile merge triggered but no aube-lock.*.yaml files were found"
);
}
}
Err(err) => {
return Err(miette!("failed to merge branch lockfiles: {err}"));
}
}
}
}
let network_concurrency_setting = resolve_network_concurrency(&settings_ctx);
let link_concurrency_setting = resolve_link_concurrency(&settings_ctx);
let verify_store_integrity_setting = resolve_verify_store_integrity(&settings_ctx);
let strict_store_pkg_content_check_setting =
resolve_strict_store_pkg_content_check(&settings_ctx);
let side_effects_cache_setting = resolve_side_effects_cache(&settings_ctx);
let side_effects_cache_readonly_setting = resolve_side_effects_cache_readonly(&settings_ctx);
let strict_dep_builds_setting = aube_settings::resolved::strict_dep_builds(&settings_ctx);
let required_scripts =
aube_settings::resolved::required_scripts(&settings_ctx).unwrap_or_default();
validate_required_scripts(&cwd, &manifest, &required_scripts)?;
if resolve_use_running_store_server(&settings_ctx) {
eprintln!(
"warning: aube has no store server; useRunningStoreServer=true is accepted but has no effect"
);
}
if !resolve_symlink(&settings_ctx) {
eprintln!(
"warning: aube's isolated layout requires symlinks; symlink=false is accepted but has no effect"
);
}
let _ = aube_settings::resolved::dlx_cache_max_age(&settings_ctx);
tracing::debug!(
"settings: network-concurrency={:?}, link-concurrency={:?}, verify-store-integrity={}, strict-store-pkg-content-check={}, side-effects-cache={}, side-effects-cache-readonly={}, strict-dep-builds={}",
network_concurrency_setting,
link_concurrency_setting,
verify_store_integrity_setting,
strict_store_pkg_content_check_setting,
side_effects_cache_setting,
side_effects_cache_readonly_setting,
strict_dep_builds_setting,
);
let virtual_store_dir_max_length = super::resolve_virtual_store_dir_max_length(&settings_ctx);
if !opts.ignore_scripts && !lockfile_only_effective {
run_root_lifecycle(
&cwd,
&modules_dir_name,
&manifest,
aube_scripts::LifecycleHook::PreInstall,
)
.await?;
}
let prog = InstallProgress::try_new();
let prog_ref = prog.as_ref();
let workspace_packages = aube_workspace::find_workspace_packages(&cwd)
.into_diagnostic()
.wrap_err("failed to discover workspace packages")?;
let recursive_install = aube_settings::resolved::recursive_install(&settings_ctx);
let has_workspace = !workspace_packages.is_empty();
let link_all_workspace_importers =
has_workspace && (recursive_install || !opts.workspace_filter.is_empty());
let mut manifests: Vec<(String, aube_manifest::PackageJson)> =
vec![(".".to_string(), manifest.clone())];
let mut ws_package_versions: std::collections::HashMap<String, String> =
std::collections::HashMap::new();
let mut ws_dirs: BTreeMap<String, std::path::PathBuf> = BTreeMap::new();
if has_workspace {
tracing::debug!(
"Workspace: {} packages for {project_name}",
workspace_packages.len()
);
for pkg_dir in &workspace_packages {
let pkg_manifest = aube_manifest::PackageJson::from_path(&pkg_dir.join("package.json"))
.into_diagnostic()
.wrap_err_with(|| format!("failed to read {}/package.json", pkg_dir.display()))?;
let rel_path = pkg_dir
.strip_prefix(&cwd)
.unwrap_or(pkg_dir)
.to_string_lossy()
.to_string();
if let Some(ref name) = pkg_manifest.name {
let version = pkg_manifest.version.as_deref().unwrap_or("0.0.0");
ws_package_versions.insert(name.clone(), version.to_string());
ws_dirs.insert(name.clone(), pkg_dir.clone());
tracing::debug!(" {name}@{version} ({rel_path})");
}
manifests.push((rel_path, pkg_manifest));
}
}
let gvs_triggers =
aube_settings::resolved::disable_global_virtual_store_for_packages(&settings_ctx);
let use_global_virtual_store_override = {
let triggered_by = find_gvs_incompatible_trigger(&manifests, &gvs_triggers);
let ci_mode = opts.env_snapshot.iter().any(|(k, _)| k == "CI");
let virtual_store_only_setting = aube_settings::resolved::virtual_store_only(&settings_ctx);
if let Some(name) = triggered_by
&& !ci_mode
&& !virtual_store_only_setting
{
tracing::warn!(
"disabling global virtual store: `{name}` is in disableGlobalVirtualStoreForPackages (packages in that list are known to be incompatible with gvs-linked node_modules, e.g. Next.js's Turbopack rejects symlinks that escape the project root). Set the list to `[]` in .npmrc to opt out."
);
Some(false)
} else {
None
}
};
let source_kind_before = if lockfile_enabled {
aube_lockfile::detect_existing_lockfile_kind(&cwd)
} else {
None
};
let fix_mode_parse: Option<(aube_lockfile::LockfileGraph, aube_lockfile::LockfileKind)> =
if mode == FrozenMode::Fix && lockfile_enabled {
aube_lockfile::parse_lockfile_with_kind(&cwd, &manifest).ok()
} else {
None
};
let existing_for_resolver: Option<&aube_lockfile::LockfileGraph> =
fix_mode_parse.as_ref().map(|(g, _)| g);
if lockfile_only_effective {
let force_resolve = matches!(mode, FrozenMode::No);
let parsed_owned;
let parsed: Result<
(&aube_lockfile::LockfileGraph, aube_lockfile::LockfileKind),
&aube_lockfile::Error,
> = if let Some((g, k)) = fix_mode_parse.as_ref() {
Ok((g, *k))
} else {
parsed_owned = aube_lockfile::parse_lockfile_with_kind(&cwd, &manifest);
match &parsed_owned {
Ok((g, k)) => Ok((g, *k)),
Err(e) => Err(e),
}
};
if let Err(e) = parsed
&& !matches!(e, aube_lockfile::Error::NotFound(_))
{
return Err(miette!("failed to parse lockfile: {e}"));
}
let fresh = !force_resolve
&& matches!(
parsed,
Ok((g, _))
if matches!(g.check_drift_workspace(&manifests), DriftStatus::Fresh)
&& matches!(g.check_catalogs_drift(&workspace_catalogs), DriftStatus::Fresh)
);
if fresh {
tracing::debug!("--lockfile-only: lockfile already up to date");
if let Some(p) = prog_ref {
p.finish(true);
}
eprintln!("Lockfile is up to date, resolution step is skipped");
return Ok(());
}
if let Some(p) = prog_ref {
p.set_phase("resolving");
}
let client = std::sync::Arc::new(make_client(&cwd).with_network_mode(opts.network_mode));
let pnpmfile_path = (!opts.ignore_pnpmfile)
.then(|| crate::pnpmfile::detect(&cwd))
.flatten();
let read_package_host = match pnpmfile_path.as_deref() {
Some(p) => crate::pnpmfile::ReadPackageHost::spawn(p)
.await
.wrap_err("failed to start pnpmfile readPackage host")?,
None => None,
};
let read_package_hook: Option<Box<dyn aube_resolver::ReadPackageHook>> =
read_package_host.map(|h| Box::new(h) as Box<dyn aube_resolver::ReadPackageHook>);
let mut resolver = configure_resolver(
aube_resolver::Resolver::new(client),
&cwd,
&manifest,
ResolverConfigInputs {
settings_ctx: &settings_ctx,
workspace_catalogs: &workspace_catalogs,
opts: &opts,
},
read_package_hook,
);
let mut graph = if has_workspace {
resolver
.resolve_workspace(&manifests, existing_for_resolver, &ws_package_versions)
.await
} else {
resolver.resolve(&manifest, existing_for_resolver).await
}
.into_diagnostic()
.wrap_err("failed to resolve dependencies")?;
drop(resolver);
if let Some(pnpmfile_path) = pnpmfile_path.as_deref() {
crate::pnpmfile::run_after_all_resolved(pnpmfile_path, &mut graph)
.await
.wrap_err("pnpmfile afterAllResolved hook failed")?;
}
if lockfile_include_tarball_url {
let lo_client = make_client(&cwd);
graph.settings.lockfile_include_tarball_url = true;
for pkg in graph.packages.values_mut() {
if pkg.local_source.is_some() {
continue;
}
if pkg.tarball_url.is_none() {
pkg.tarball_url =
Some(lo_client.tarball_url(pkg.registry_name(), &pkg.version));
}
}
}
let lo_write_kind = source_kind_before.unwrap_or(aube_lockfile::LockfileKind::Aube);
let lo_written = aube_lockfile::write_lockfile_as(&cwd, &graph, &manifest, lo_write_kind)
.into_diagnostic()
.wrap_err("failed to write lockfile")?;
tracing::debug!(
"--lockfile-only: wrote {}",
lo_written
.file_name()
.map(|n| n.to_string_lossy().into_owned())
.unwrap_or_else(|| lo_written.display().to_string())
);
maybe_cleanup_unused_catalogs(&cwd, &settings_ctx, &workspace_catalogs, &graph.catalogs)?;
if let Some(p) = prog_ref {
p.finish(true);
}
eprintln!(
"Lockfile written ({} packages); skipped node_modules linking",
graph.packages.len()
);
return Ok(());
}
let phase_start = std::time::Instant::now();
let store = std::sync::Arc::new(super::open_store(&cwd)?);
if let Err(e) = store.ensure_shards_exist() {
tracing::debug!("ensure_shards_exist failed (slow path will cover): {e}");
}
let lockfile_result = if !lockfile_enabled {
tracing::debug!("lockfile=false: skipping lockfile parse, re-resolving");
Err(aube_lockfile::Error::NotFound(cwd.clone()))
} else {
match mode {
FrozenMode::No => {
Err(aube_lockfile::Error::NotFound(cwd.clone()))
}
FrozenMode::Fix => {
Err(aube_lockfile::Error::NotFound(cwd.clone()))
}
FrozenMode::Frozen => {
let parsed = aube_lockfile::parse_lockfile_with_kind(&cwd, &manifest);
if let Ok((ref graph, _)) = parsed {
if let DriftStatus::Stale { reason } =
graph.check_catalogs_drift(&workspace_catalogs)
{
return Err(miette!(
"lockfile is out of date with pnpm-workspace.yaml: {reason}\n\
help: run without --frozen-lockfile to update the lockfile"
));
}
if let DriftStatus::Stale { reason } = graph.check_drift_workspace(&manifests) {
return Err(miette!(
"lockfile is out of date with package.json: {reason}\n\
help: run without --frozen-lockfile to update the lockfile, \
or run `aube install --no-frozen-lockfile` to regenerate it"
));
}
}
parsed
}
FrozenMode::Prefer => {
match aube_lockfile::parse_lockfile_with_kind(&cwd, &manifest) {
Ok((graph, kind)) => {
if let DriftStatus::Stale { reason } =
graph.check_catalogs_drift(&workspace_catalogs)
{
tracing::debug!(
"Lockfile out of date with workspace catalogs ({reason}), re-resolving..."
);
Err(aube_lockfile::Error::NotFound(cwd.clone()))
} else {
match graph.check_drift_workspace(&manifests) {
DriftStatus::Fresh => Ok((graph, kind)),
DriftStatus::Stale { reason } => {
tracing::debug!(
"Lockfile out of date ({reason}), re-resolving..."
);
Err(aube_lockfile::Error::NotFound(cwd.clone()))
}
}
}
}
other => other,
}
}
}
};
let (graph, package_indices, cached_count, fetch_count) = match lockfile_result {
Ok((mut graph, kind)) => {
let (sup_os, sup_cpu, sup_libc) = manifest.pnpm_supported_architectures();
let supported_architectures = aube_resolver::SupportedArchitectures {
os: sup_os,
cpu: sup_cpu,
libc: sup_libc,
};
let ignored_optional_deps: std::collections::BTreeSet<String> = manifest
.pnpm_ignored_optional_dependencies()
.into_iter()
.collect();
aube_resolver::platform::filter_graph(
&mut graph,
&supported_architectures,
&ignored_optional_deps,
);
if matches!(
kind,
aube_lockfile::LockfileKind::Npm | aube_lockfile::LockfileKind::NpmShrinkwrap
) {
let peer_pass_start = std::time::Instant::now();
let pkgs_before = graph.packages.len();
graph = aube_resolver::hoist_auto_installed_peers(graph);
let peer_options = aube_resolver::PeerContextOptions {
dedupe_peer_dependents: resolve_dedupe_peer_dependents(&settings_ctx),
dedupe_peers: resolve_dedupe_peers(&settings_ctx),
resolve_from_workspace_root: resolve_peers_from_workspace_root(&settings_ctx),
peers_suffix_max_length: resolve_peers_suffix_max_length(&settings_ctx),
};
graph = aube_resolver::apply_peer_contexts(graph, &peer_options);
tracing::debug!(
"peer-context pass (lockfile={:?}) {} → {} packages in {:.1?}",
kind,
pkgs_before,
graph.packages.len(),
peer_pass_start.elapsed()
);
}
let source_label = match kind {
aube_lockfile::LockfileKind::Aube => "Lockfile",
aube_lockfile::LockfileKind::Pnpm => "pnpm-lock.yaml",
aube_lockfile::LockfileKind::Yarn | aube_lockfile::LockfileKind::YarnBerry => {
"yarn.lock"
}
aube_lockfile::LockfileKind::Npm => "package-lock.json",
aube_lockfile::LockfileKind::NpmShrinkwrap => "npm-shrinkwrap.json",
aube_lockfile::LockfileKind::Bun => "bun.lock",
};
tracing::debug!(
"{source_label}: {} packages for {project_name}",
graph.packages.len()
);
tracing::debug!(
"phase:resolve (from lockfile) {:.1?}",
phase_start.elapsed()
);
if let Some(p) = prog_ref {
p.set_total(graph.packages.len());
p.set_phase("fetching");
}
let phase_start = std::time::Instant::now();
let network_mode = opts.network_mode;
let cwd_for_client = cwd.clone();
let (indices, cached, fetched) = fetch_packages_with_root(
&graph.packages,
&store,
|| {
std::sync::Arc::new(
make_client(&cwd_for_client).with_network_mode(network_mode),
)
},
prog_ref,
&cwd,
&aube_dir,
has_workspace,
virtual_store_dir_max_length,
opts.ignore_scripts,
network_concurrency_setting,
verify_store_integrity_setting,
strict_store_pkg_content_check_setting,
opts.git_prepare_depth,
resolve_git_shallow_hosts(&settings_ctx),
)
.await?;
tracing::debug!(
"phase:fetch {:.1?} ({fetched} packages)",
phase_start.elapsed()
);
(graph, indices, cached, fetched)
}
Err(aube_lockfile::Error::NotFound(_))
if !(matches!(mode, FrozenMode::Frozen) && opts.strict_no_lockfile) =>
{
tracing::debug!("No lockfile found, resolving dependencies for {project_name}...");
if let Some(p) = prog_ref {
p.set_phase("resolving");
}
let node_version_for_prewarm = {
let override_ = aube_settings::resolved::node_version(&settings_ctx);
crate::engines::resolve_node_version(override_.as_deref())
};
let (build_policy_for_prewarm, _policy_warnings_unused) = build_policy_from_sources(
&manifest,
&ws_config_shared,
opts.dangerously_allow_all_builds,
);
let build_policy_for_prewarm = std::sync::Arc::new(build_policy_for_prewarm);
let client =
std::sync::Arc::new(make_client(&cwd).with_network_mode(opts.network_mode));
let tarball_client = client.clone();
let (resolver, mut resolved_rx) = aube_resolver::Resolver::with_stream(client);
let pnpmfile_path = (!opts.ignore_pnpmfile)
.then(|| crate::pnpmfile::detect(&cwd))
.flatten();
let read_package_host = match pnpmfile_path.as_deref() {
Some(p) => crate::pnpmfile::ReadPackageHost::spawn(p)
.await
.wrap_err("failed to start pnpmfile readPackage host")?,
None => None,
};
let read_package_hook: Option<Box<dyn aube_resolver::ReadPackageHook>> =
read_package_host.map(|h| Box::new(h) as Box<dyn aube_resolver::ReadPackageHook>);
let mut resolver = configure_resolver(
resolver,
&cwd,
&manifest,
ResolverConfigInputs {
settings_ctx: &settings_ctx,
workspace_catalogs: &workspace_catalogs,
opts: &opts,
},
read_package_hook,
);
let post_fetch_client = tarball_client.clone();
let fetch_store = store.clone();
let fetch_progress = prog.clone();
let fetch_project_root = cwd.clone();
let fetch_local_client = tarball_client.clone();
let fetch_ignore_scripts = opts.ignore_scripts;
let fetch_git_prepare_depth = opts.git_prepare_depth;
let fetch_network_concurrency =
network_concurrency_setting.unwrap_or_else(default_streaming_network_concurrency);
let fetch_verify_integrity = verify_store_integrity_setting;
let fetch_strict_pkg_content_check = strict_store_pkg_content_check_setting;
let fetch_git_shallow_hosts = resolve_git_shallow_hosts(&settings_ctx);
let (materialize_tx, materialize_rx) =
tokio::sync::mpsc::unbounded_channel::<(String, aube_store::PackageIndex)>();
let fetch_handle = tokio::spawn(async move {
let semaphore =
std::sync::Arc::new(tokio::sync::Semaphore::new(fetch_network_concurrency));
let mut handles = Vec::new();
let mut indices: BTreeMap<String, aube_store::PackageIndex> = BTreeMap::new();
let mut cached_count = 0usize;
while let Some(pkg) = resolved_rx.recv().await {
if let Some(p) = fetch_progress.as_ref() {
p.inc_total(1);
}
if let Some(ref local) = pkg.local_source {
match import_local_source(
&fetch_store,
&fetch_project_root,
local,
Some(&fetch_local_client),
fetch_ignore_scripts,
fetch_git_prepare_depth,
&fetch_git_shallow_hosts,
)
.await
{
Ok(Some(index)) => {
let _ = materialize_tx.send((pkg.dep_path.clone(), index.clone()));
indices.insert(pkg.dep_path, index);
cached_count += 1;
if let Some(p) = fetch_progress.as_ref() {
p.inc_reused(1);
}
}
Ok(None) => {
if let Some(p) = fetch_progress.as_ref() {
p.inc_reused(1);
}
}
Err(e) => return Err(e),
}
continue;
}
let pkg_registry_name = pkg.registry_name().to_string();
if let Some(index) = fetch_store.load_index(&pkg_registry_name, &pkg.version) {
let _ = materialize_tx.send((pkg.dep_path.clone(), index.clone()));
indices.insert(pkg.dep_path, index);
cached_count += 1;
if let Some(p) = fetch_progress.as_ref() {
p.inc_reused(1);
}
continue;
}
let sem = semaphore.clone();
let store = fetch_store.clone();
let client = tarball_client.clone();
let row = fetch_progress
.as_ref()
.map(|p| p.start_fetch(&pkg.name, &pkg.version));
let bytes_progress = fetch_progress.clone();
handles.push(tokio::spawn(async move {
let _row = row;
let permit = sem.acquire().await.unwrap();
let url = pkg
.tarball_url
.clone()
.unwrap_or_else(|| client.tarball_url(&pkg_registry_name, &pkg.version));
tracing::trace!("Fetching {}@{}", pkg.name, pkg.version);
let bytes = client.fetch_tarball_bytes(&url).await.map_err(|e| {
miette!("failed to fetch {}@{}: {e}", pkg.name, pkg.version)
})?;
if let Some(p) = bytes_progress.as_ref() {
p.inc_downloaded_bytes(bytes.len() as u64);
}
drop(permit);
let pkg_display_name = pkg.name.clone();
let pkg_version = pkg.version.clone();
let dep_path = pkg.dep_path.clone();
let integrity = pkg.integrity.clone();
let index = tokio::task::spawn_blocking(move || -> miette::Result<_> {
if fetch_verify_integrity && let Some(ref expected) = integrity {
aube_store::verify_integrity(&bytes, expected).map_err(|e| {
miette!("{pkg_display_name}@{pkg_version}: {e}")
})?;
}
let index = store.import_tarball(&bytes).map_err(|e| {
miette!("failed to import {pkg_display_name}@{pkg_version}: {e}")
})?;
if fetch_strict_pkg_content_check {
aube_store::validate_pkg_content(
&index,
&pkg_registry_name,
&pkg_version,
)
.map_err(|e| miette!("{pkg_display_name}@{pkg_version}: {e}"))?;
}
if let Err(e) =
store.save_index(&pkg_registry_name, &pkg_version, &index)
{
tracing::warn!(
"Failed to cache index for {pkg_display_name}@{pkg_version}: {e}"
);
}
Ok(index)
})
.await
.into_diagnostic()??;
Ok::<_, miette::Report>((dep_path, index))
}));
}
let fetch_count = handles.len();
for handle in handles {
let (dep_path, index) = handle.await.into_diagnostic()??;
let _ = materialize_tx.send((dep_path.clone(), index.clone()));
indices.insert(dep_path, index);
}
drop(materialize_tx);
Ok::<_, miette::Report>((indices, cached_count, fetch_count))
});
let resolve_result = if has_workspace {
resolver
.resolve_workspace(&manifests, existing_for_resolver, &ws_package_versions)
.await
} else {
resolver.resolve(&manifest, existing_for_resolver).await
}
.into_diagnostic()
.wrap_err("failed to resolve dependencies");
if resolve_result.is_err() {
fetch_handle.abort();
return resolve_result.map(|_| unreachable!());
}
let mut graph = resolve_result.unwrap();
if let Some(pnpmfile_path) = pnpmfile_path.as_deref() {
crate::pnpmfile::run_after_all_resolved(pnpmfile_path, &mut graph)
.await
.wrap_err("pnpmfile afterAllResolved hook failed")?;
}
tracing::debug!("Resolved {} packages", graph.packages.len());
if let Some(p) = prog_ref {
p.set_phase("fetching");
}
tracing::debug!("phase:resolve (fresh) {:.1?}", phase_start.elapsed());
drop(resolver);
let materialize_phase_start = std::time::Instant::now();
let materialize_graph = std::sync::Arc::new(graph.clone());
let materialize_store = store.clone();
let materialize_virtual_store_dir_max_length = virtual_store_dir_max_length;
let materialize_strategy = resolve_link_strategy(&cwd, &settings_ctx)?;
let materialize_link_concurrency = link_concurrency_setting;
let materialize_patches_vec = crate::patches::load_patches(&cwd)?;
let materialize_patches: aube_linker::Patches = materialize_patches_vec
.values()
.map(|p| (p.key.clone(), p.content.clone()))
.collect();
let materialize_patch_hashes: std::collections::BTreeMap<String, String> =
materialize_patches_vec
.values()
.map(|p| (p.key.clone(), p.content_hash()))
.collect();
let materialize_node_version = node_version_for_prewarm.clone();
let materialize_allow = {
let build_policy = build_policy_for_prewarm.clone();
move |name: &str, version: &str| {
matches!(
build_policy.decide(name, version),
aube_scripts::AllowDecision::Allow
)
}
};
let materialize_handle: tokio::task::JoinHandle<
miette::Result<aube_linker::LinkStats>,
> = tokio::spawn(async move {
let engine = materialize_node_version
.as_deref()
.map(aube_lockfile::graph_hash::engine_name_default);
let patch_hash_fn = |name: &str, version: &str| -> Option<String> {
let key = format!("{name}@{version}");
materialize_patch_hashes.get(&key).cloned()
};
let graph_hashes = aube_lockfile::graph_hash::compute_graph_hashes_with_patches(
&materialize_graph,
&materialize_allow,
engine.as_ref(),
&patch_hash_fn,
);
let mut linker =
aube_linker::Linker::new(materialize_store.as_ref(), materialize_strategy)
.with_graph_hashes(graph_hashes)
.with_virtual_store_dir_max_length(
materialize_virtual_store_dir_max_length,
);
if !materialize_patches.is_empty() {
linker = linker.with_patches(materialize_patches);
}
if let Some(enabled) = use_global_virtual_store_override {
linker = linker.with_use_global_virtual_store(enabled);
}
if !linker.uses_global_virtual_store() {
let mut rx = materialize_rx;
while rx.recv().await.is_some() {}
return Ok(aube_linker::LinkStats::default());
}
let linker = std::sync::Arc::new(linker);
let graph = materialize_graph;
let mut canonical_to_contextualized: std::collections::HashMap<
String,
std::collections::HashSet<String>,
> = std::collections::HashMap::new();
for (dep_path, pkg) in &graph.packages {
if pkg.local_source.is_some() {
continue;
}
let canonical = format!("{}@{}", pkg.name, pkg.version);
canonical_to_contextualized
.entry(canonical)
.or_default()
.insert(dep_path.clone());
canonical_to_contextualized
.entry(dep_path.clone())
.or_default()
.insert(dep_path.clone());
}
let permits = materialize_link_concurrency
.unwrap_or(if cfg!(target_os = "macos") { 4 } else { 16 });
let sem = std::sync::Arc::new(tokio::sync::Semaphore::new(permits));
let mut in_flight: Vec<
tokio::task::JoinHandle<miette::Result<aube_linker::LinkStats>>,
> = Vec::new();
let mut rx = materialize_rx;
while let Some((key, index)) = rx.recv().await {
let Some(dep_paths) = canonical_to_contextualized.get(&key).cloned() else {
continue;
};
let index = std::sync::Arc::new(index);
for dep_path in dep_paths {
let Some(pkg) = graph.packages.get(&dep_path).cloned() else {
continue;
};
if pkg.local_source.is_some() {
continue;
}
let linker = linker.clone();
let sem = sem.clone();
let index = index.clone();
in_flight.push(tokio::spawn(async move {
let _permit = sem.acquire().await.unwrap();
let dep_path_for_err = dep_path.clone();
tokio::task::spawn_blocking(move || -> miette::Result<_> {
let mut stats = aube_linker::LinkStats::default();
linker
.ensure_in_virtual_store(&dep_path, &pkg, &index, &mut stats)
.map_err(|e| {
miette!("prewarm GVS for {dep_path_for_err}: {e}")
})?;
Ok(stats)
})
.await
.into_diagnostic()?
}));
}
}
let mut total = aube_linker::LinkStats::default();
for handle in in_flight {
let s = handle.await.into_diagnostic()??;
total.packages_linked += s.packages_linked;
total.packages_cached += s.packages_cached;
total.files_linked += s.files_linked;
}
Ok(total)
});
let fetch_phase_start = std::time::Instant::now();
let fetch_result = match fetch_handle.await.into_diagnostic()? {
Ok(v) => v,
Err(e) => {
materialize_handle.abort();
return Err(e);
}
};
let (canonical_indices, cached, fetched) = fetch_result;
tracing::debug!(
"phase:fetch {:.1?} ({fetched} packages, {cached} cached)",
fetch_phase_start.elapsed()
);
let prewarm_stats = materialize_handle.await.into_diagnostic()??;
tracing::debug!(
"phase:prewarm-gvs {:.1?} ({} packages, {} files)",
materialize_phase_start.elapsed(),
prewarm_stats.packages_linked,
prewarm_stats.files_linked,
);
let indices = remap_indices_to_contextualized(&canonical_indices, &graph);
if lockfile_enabled {
if lockfile_include_tarball_url {
graph.settings.lockfile_include_tarball_url = true;
for pkg in graph.packages.values_mut() {
if pkg.local_source.is_some() {
continue;
}
if pkg.tarball_url.is_none() {
pkg.tarball_url = Some(
post_fetch_client.tarball_url(pkg.registry_name(), &pkg.version),
);
}
}
}
let write_kind = source_kind_before.unwrap_or(aube_lockfile::LockfileKind::Aube);
let written_path =
aube_lockfile::write_lockfile_as(&cwd, &graph, &manifest, write_kind)
.into_diagnostic()
.wrap_err("failed to write lockfile")?;
tracing::debug!(
"Wrote {}",
written_path
.file_name()
.map(|n| n.to_string_lossy().into_owned())
.unwrap_or_else(|| written_path.display().to_string())
);
} else {
tracing::debug!("lockfile=false: skipping lockfile write");
}
(graph, indices, cached, fetched)
}
Err(aube_lockfile::Error::NotFound(_)) => {
return Err(miette!(
"no lockfile found and --frozen-lockfile is set\n\
help: commit pnpm-lock.yaml to your repository, or run \
`aube install --no-frozen-lockfile` to generate one"
));
}
Err(e) => {
return Err(e)
.into_diagnostic()
.wrap_err("failed to parse lockfile");
}
};
tracing::debug!("Packages: {cached_count} cached, {fetch_count} fetched");
maybe_cleanup_unused_catalogs(&cwd, &settings_ctx, &workspace_catalogs, &graph.catalogs)?;
let strict_peer_deps = resolve_strict_peer_dependencies(&settings_ctx);
let peer_rules = PeerDependencyRules::resolve(&manifest, &settings_ctx);
warn_unmet_peers(&graph, strict_peer_deps, &peer_rules)?;
let mut graph_for_link = if opts.prod || opts.dev || opts.no_optional {
let before = graph.packages.len();
let prod = opts.prod;
let dev = opts.dev;
let no_optional = opts.no_optional;
let filtered = graph.filter_deps(|d| {
if prod && d.dep_type == aube_lockfile::DepType::Dev {
return false;
}
if dev && d.dep_type != aube_lockfile::DepType::Dev {
return false;
}
if no_optional && d.dep_type == aube_lockfile::DepType::Optional {
return false;
}
true
});
let dropped = before - filtered.packages.len();
if dropped > 0 {
let label = if opts.dev && opts.no_optional {
"--dev --no-optional"
} else if opts.dev {
"--dev"
} else if opts.prod && opts.no_optional {
"--prod --no-optional"
} else if opts.prod {
"--prod"
} else if opts.no_optional {
"--no-optional"
} else {
unreachable!()
};
tracing::debug!("{label}: skipping {dropped} packages");
}
filtered
} else {
graph.clone()
};
if !opts.workspace_filter.is_empty() {
graph_for_link = filter_graph_to_workspace_selection(
&cwd,
&workspace_packages,
&graph_for_link,
&opts.workspace_filter,
)?;
} else if has_workspace && !link_all_workspace_importers {
graph_for_link = filter_graph_to_importers(&graph_for_link, ["."]);
}
let engine_strict = aube_settings::resolved::engine_strict(&settings_ctx);
let child_concurrency = aube_settings::resolved::child_concurrency(&settings_ctx) as usize;
let node_version_override = aube_settings::resolved::node_version(&settings_ctx);
let node_version = crate::engines::resolve_node_version(node_version_override.as_deref());
crate::engines::run_checks(
&aube_dir,
&manifest,
&graph_for_link,
&package_indices,
node_version.as_deref(),
engine_strict,
virtual_store_dir_max_length,
)?;
let (build_policy, policy_warnings) = build_policy_from_sources(
&manifest,
&ws_config_shared,
opts.dangerously_allow_all_builds,
);
for w in &policy_warnings {
eprintln!("warn: {w}");
}
let phase_start = std::time::Instant::now();
let strategy = resolve_link_strategy(&cwd, &settings_ctx)?;
if let Some(p) = prog_ref {
p.set_phase("linking");
}
tracing::debug!("Link strategy: {strategy:?}");
let shamefully_hoist = aube_settings::resolved::shamefully_hoist(&settings_ctx);
let public_hoist_pattern = aube_settings::resolved::public_hoist_pattern(&settings_ctx);
let hoist = aube_settings::resolved::hoist(&settings_ctx);
let hoist_pattern = aube_settings::resolved::hoist_pattern(&settings_ctx);
let hoist_workspace_packages = aube_settings::resolved::hoist_workspace_packages(&settings_ctx);
let dedupe_direct_deps = aube_settings::resolved::dedupe_direct_deps(&settings_ctx);
let virtual_store_only = aube_settings::resolved::virtual_store_only(&settings_ctx);
let reject_pnp =
miette!("node-linker=pnp is not supported by aube; use `isolated` (default) or `hoisted`");
let node_linker_cli = aube_settings::values::string_from_cli("nodeLinker", settings_ctx.cli);
let node_linker = if let Some(cli) = node_linker_cli.as_deref() {
let trimmed = cli.trim().to_ascii_lowercase();
if trimmed == "pnp" {
return Err(reject_pnp);
}
aube_linker::NodeLinker::parse(&trimmed).ok_or_else(|| {
miette!("unknown --node-linker value `{cli}`; expected `isolated` or `hoisted`")
})?
} else {
match aube_settings::resolved::node_linker(&settings_ctx) {
aube_settings::resolved::NodeLinker::Pnp => return Err(reject_pnp),
aube_settings::resolved::NodeLinker::Hoisted => aube_linker::NodeLinker::Hoisted,
aube_settings::resolved::NodeLinker::Isolated => aube_linker::NodeLinker::Isolated,
}
};
tracing::debug!("node-linker: {:?}", node_linker);
let mut linker = aube_linker::Linker::new(store.as_ref(), strategy)
.with_shamefully_hoist(shamefully_hoist)
.with_public_hoist_pattern(&public_hoist_pattern)
.with_hoist(hoist)
.with_hoist_pattern(&hoist_pattern)
.with_hoist_workspace_packages(hoist_workspace_packages)
.with_dedupe_direct_deps(dedupe_direct_deps)
.with_virtual_store_dir_max_length(virtual_store_dir_max_length)
.with_node_linker(node_linker)
.with_link_concurrency(link_concurrency_setting)
.with_virtual_store_only(virtual_store_only)
.with_modules_dir_name(modules_dir_name.clone())
.with_aube_dir_override(aube_dir.clone());
if let Some(enabled) = use_global_virtual_store_override {
linker = linker.with_use_global_virtual_store(enabled);
}
let resolved_patches = crate::patches::load_patches(&cwd)?;
let patch_hashes: std::collections::BTreeMap<String, String> = resolved_patches
.values()
.map(|p| (p.key.clone(), p.content_hash()))
.collect();
let patches_for_linker: aube_linker::Patches = resolved_patches
.values()
.map(|p| (p.key.clone(), p.content.clone()))
.collect();
let patch_hash_fn = |name: &str, version: &str| -> Option<String> {
let key = format!("{name}@{version}");
patch_hashes.get(&key).cloned()
};
if linker.uses_global_virtual_store() {
let engine = node_version
.as_deref()
.map(aube_lockfile::graph_hash::engine_name_default);
let allow = |name: &str, version: &str| {
matches!(
build_policy.decide(name, version),
aube_scripts::AllowDecision::Allow
)
};
let graph_hashes = aube_lockfile::graph_hash::compute_graph_hashes_with_patches(
&graph_for_link,
&allow,
engine.as_ref(),
&patch_hash_fn,
);
linker = linker.with_graph_hashes(graph_hashes);
}
if !patches_for_linker.is_empty() {
linker = linker.with_patches(patches_for_linker);
}
let stats = if has_workspace {
linker
.link_workspace(&cwd, &graph_for_link, &package_indices, &ws_dirs)
.into_diagnostic()
.wrap_err("failed to link workspace node_modules")?
} else {
linker
.link_all(&cwd, &graph_for_link, &package_indices)
.into_diagnostic()
.wrap_err("failed to link node_modules")?
};
tracing::debug!(
"phase:link {:.1?} ({} files)",
phase_start.elapsed(),
stats.files_linked
);
if has_workspace
&& matches!(node_linker, aube_linker::NodeLinker::Isolated)
&& !virtual_store_only
{
let inject_start = std::time::Instant::now();
let injected_count = super::inject::apply_injected(
&cwd,
&modules_dir_name,
&aube_dir,
virtual_store_dir_max_length,
&graph_for_link,
&manifests,
&ws_dirs,
)?;
if injected_count > 0 {
tracing::debug!(
"phase:inject {:.1?} ({injected_count} workspace deps injected)",
inject_start.elapsed()
);
}
}
let placements_ref = stats.hoisted_placements.as_ref();
let phase_start = std::time::Instant::now();
let extend_node_path = aube_settings::resolved::extend_node_path(&settings_ctx);
let prefer_symlinked_executables =
aube_settings::resolved::prefer_symlinked_executables(&settings_ctx);
let shim_opts = aube_linker::BinShimOptions {
extend_node_path,
prefer_symlinked_executables,
};
if !virtual_store_only {
link_bins(
&cwd,
&modules_dir_name,
&aube_dir,
&graph_for_link,
virtual_store_dir_max_length,
placements_ref,
shim_opts,
)?;
if has_workspace {
for (importer_path, deps) in &graph_for_link.importers {
if importer_path == "." {
continue;
}
let pkg_dir = cwd.join(importer_path);
let bin_dir = pkg_dir.join(&modules_dir_name).join(".bin");
std::fs::create_dir_all(&bin_dir).into_diagnostic()?;
for dep in deps {
link_bins_for_dep(
&aube_dir,
&bin_dir,
&graph_for_link,
&dep.dep_path,
&dep.name,
virtual_store_dir_max_length,
placements_ref,
shim_opts,
)?;
}
}
}
tracing::debug!("phase:link_bins {:.1?}", phase_start.elapsed());
}
let install_is_noop = stats.packages_linked == 0 && stats.top_level_linked == 0;
if let Some(p) = prog_ref {
p.finish(!install_is_noop);
}
if !opts.ignore_scripts && strict_dep_builds_setting && !virtual_store_only {
let unreviewed = unreviewed_dep_builds(
&aube_dir,
&graph_for_link,
&build_policy,
virtual_store_dir_max_length,
placements_ref,
)?;
if !unreviewed.is_empty() {
return Err(miette!(
"dependencies with build scripts must be reviewed before install:\n{}\nhelp: add them to `allowBuilds` / `onlyBuiltDependencies`, set `neverBuiltDependencies`, or set `strictDepBuilds=false`",
unreviewed
.into_iter()
.map(|pkg| format!(" - {pkg}"))
.collect::<Vec<_>>()
.join("\n")
));
}
}
if !opts.ignore_scripts && build_policy.has_any_allow_rule() && !virtual_store_only {
let side_effects_cache_root =
side_effects_cache_setting.then(|| side_effects_cache_root(store.as_ref()));
let side_effects_cache = side_effects_cache_root
.as_deref()
.map(|root| {
if side_effects_cache_readonly_setting {
SideEffectsCacheConfig::RestoreOnly(root)
} else {
SideEffectsCacheConfig::RestoreAndSave(root)
}
})
.unwrap_or(SideEffectsCacheConfig::Disabled);
let ran = run_dep_lifecycle_scripts(
&cwd,
&modules_dir_name,
&aube_dir,
&graph_for_link,
&build_policy,
virtual_store_dir_max_length,
child_concurrency,
placements_ref,
side_effects_cache,
)
.await?;
if ran > 0 {
tracing::debug!("allowBuilds: ran {ran} dep lifecycle script(s)");
}
}
if !opts.ignore_scripts && !virtual_store_only {
for hook in [
aube_scripts::LifecycleHook::Install,
aube_scripts::LifecycleHook::PostInstall,
aube_scripts::LifecycleHook::Prepare,
] {
run_root_lifecycle(&cwd, &modules_dir_name, &manifest, hook).await?;
}
}
if !virtual_store_only {
state::write_state(&cwd, opts.prod || opts.dev)
.into_diagnostic()
.wrap_err("failed to write install state")?;
}
let modules_cache_max_age_minutes =
aube_settings::resolved::modules_cache_max_age(&settings_ctx);
if modules_cache_max_age_minutes > 0 && !virtual_store_only {
let removed = sweep_orphaned_aube_entries(
&aube_dir,
&graph,
virtual_store_dir_max_length,
std::time::Duration::from_secs(modules_cache_max_age_minutes.saturating_mul(60)),
);
if removed > 0 {
tracing::debug!("modulesCacheMaxAge: swept {removed} orphaned .aube entry/entries");
}
}
let elapsed = start.elapsed();
tracing::debug!(
"Done in {:.0?}: {} packages ({} cached), {} files linked, {} top-level",
elapsed,
stats.packages_linked + stats.packages_cached,
stats.packages_cached,
stats.files_linked,
stats.top_level_linked
);
if stats.packages_linked == 0
&& stats.packages_cached == 0
&& graph_for_link
.packages
.values()
.any(|p| p.local_source.is_none())
{
return Err(miette!("no packages were linked — something went wrong"));
}
if let Some(p) = prog_ref {
p.print_install_summary(
stats.packages_linked,
stats.top_level_linked,
graph_for_link.packages.len(),
elapsed,
);
}
Ok(())
}
fn sweep_orphaned_aube_entries(
aube_dir: &std::path::Path,
graph: &aube_lockfile::LockfileGraph,
virtual_store_dir_max_length: usize,
max_age: std::time::Duration,
) -> usize {
use aube_lockfile::dep_path_filename::dep_path_to_filename;
let entries = match std::fs::read_dir(aube_dir) {
Ok(e) => e,
Err(e) if e.kind() == std::io::ErrorKind::NotFound => return 0,
Err(e) => {
tracing::debug!(
"modulesCacheMaxAge: cannot read {}: {e}; skipping sweep",
aube_dir.display()
);
return 0;
}
};
let in_use: std::collections::HashSet<String> = graph
.packages
.keys()
.map(|dep_path| dep_path_to_filename(dep_path, virtual_store_dir_max_length))
.collect();
let now = std::time::SystemTime::now();
let mut removed = 0usize;
for entry in entries.flatten() {
let name = entry.file_name();
let name_str = name.to_string_lossy();
if name_str.starts_with('.') {
continue;
}
if name_str == "node_modules" {
continue;
}
if in_use.contains(name_str.as_ref()) {
continue;
}
let metadata = match entry.path().symlink_metadata() {
Ok(m) => m,
Err(e) => {
tracing::debug!(
"modulesCacheMaxAge: cannot stat {}: {e}",
entry.path().display()
);
continue;
}
};
let modified = match metadata.modified() {
Ok(t) => t,
Err(_) => continue, };
let age = now.duration_since(modified).unwrap_or_default();
if age < max_age {
continue;
}
let path = entry.path();
let file_type = metadata.file_type();
let result = if file_type.is_symlink() {
std::fs::remove_file(&path)
} else {
std::fs::remove_dir_all(&path).or_else(|_| std::fs::remove_file(&path))
};
match result {
Ok(()) => removed += 1,
Err(e) => tracing::debug!(
"modulesCacheMaxAge: failed to remove {}: {e}",
path.display()
),
}
}
removed
}
/// Narrows `graph` to the importers matched by `filters`, then prunes the
/// packages that become unreachable from the remaining importers.
///
/// Errors if the selector is syntactically invalid or matches no workspace
/// package.
fn filter_graph_to_workspace_selection(
    workspace_root: &std::path::Path,
    workspace_packages: &[std::path::PathBuf],
    graph: &aube_lockfile::LockfileGraph,
    filters: &aube_workspace::selector::EffectiveFilter,
) -> miette::Result<aube_lockfile::LockfileGraph> {
    let matched = aube_workspace::selector::select_workspace_packages(
        workspace_root,
        workspace_packages,
        filters,
    )
    .map_err(|e| miette!("invalid --filter selector: {e}"))?;
    if matched.is_empty() {
        return Err(miette!(
            "aube install: filter {filters:?} did not match any workspace package"
        ));
    }
    // Importer paths (relative to the workspace root) that survive the filter.
    let mut keep = std::collections::BTreeSet::new();
    for pkg in matched {
        keep.insert(super::workspace_importer_path(workspace_root, &pkg.dir)?);
    }
    let mut importers: std::collections::BTreeMap<String, Vec<aube_lockfile::DirectDep>> =
        std::collections::BTreeMap::new();
    for (importer, deps) in &graph.importers {
        if keep.contains(importer) {
            importers.insert(importer.clone(), deps.clone());
        }
    }
    let narrowed = aube_lockfile::LockfileGraph {
        importers,
        ..graph.clone()
    };
    // Re-run dependency filtering so packages only reachable from dropped
    // importers are removed as well.
    Ok(narrowed.filter_deps(|_| true))
}
/// Keeps only the named importers in `graph`, pruning packages that become
/// unreachable as a result.
fn filter_graph_to_importers<const N: usize>(
    graph: &aube_lockfile::LockfileGraph,
    keep_importers: [&str; N],
) -> aube_lockfile::LockfileGraph {
    let wanted = std::collections::BTreeSet::from(keep_importers);
    let mut importers: std::collections::BTreeMap<String, Vec<aube_lockfile::DirectDep>> =
        std::collections::BTreeMap::new();
    for (importer, deps) in &graph.importers {
        if wanted.contains(importer.as_str()) {
            importers.insert(importer.clone(), deps.clone());
        }
    }
    let narrowed = aube_lockfile::LockfileGraph {
        importers,
        ..graph.clone()
    };
    // Prune packages no longer reachable from the kept importers.
    narrowed.filter_deps(|_| true)
}
/// Directory at which a package is materialized on disk: the hoisted
/// placement when one exists, otherwise its virtual-store location
/// `<aube_dir>/<encoded dep path>/node_modules/<name>`.
fn materialized_pkg_dir(
    aube_dir: &std::path::Path,
    dep_path: &str,
    name: &str,
    virtual_store_dir_max_length: usize,
    placements: Option<&aube_linker::HoistedPlacements>,
) -> std::path::PathBuf {
    match placements.and_then(|p| p.package_dir(dep_path)) {
        Some(hoisted) => hoisted.to_path_buf(),
        None => aube_dir
            .join(dep_path_to_filename(dep_path, virtual_store_dir_max_length))
            .join("node_modules")
            .join(name),
    }
}
fn read_materialized_pkg_json(
aube_dir: &std::path::Path,
dep_path: &str,
name: &str,
virtual_store_dir_max_length: usize,
placements: Option<&aube_linker::HoistedPlacements>,
) -> miette::Result<Option<serde_json::Value>> {
let pkg_dir = materialized_pkg_dir(
aube_dir,
dep_path,
name,
virtual_store_dir_max_length,
placements,
);
let pkg_json_path = pkg_dir.join("package.json");
let content = match std::fs::read_to_string(&pkg_json_path) {
Ok(s) => s,
Err(e) if e.kind() == std::io::ErrorKind::NotFound => return Ok(None),
Err(e) => {
return Err(miette!(
"failed to read package.json for {name} at {}: {e}",
pkg_json_path.display()
));
}
};
let value = serde_json::from_str(&content)
.into_diagnostic()
.wrap_err_with(|| format!("failed to parse package.json for {name}"))?;
Ok(Some(value))
}
/// Creates `.bin` shims for every executable declared by the package at
/// `dep_path`, then for the executables of its bundled dependencies.
///
/// A `"bin"` string maps to a single shim named after the package's final
/// path segment; a `"bin"` object maps each key to one shim. Any other JSON
/// shape for `"bin"` is silently ignored.
#[allow(clippy::too_many_arguments)]
fn link_bins_for_dep(
    aube_dir: &std::path::Path,
    bin_dir: &std::path::Path,
    graph: &aube_lockfile::LockfileGraph,
    dep_path: &str,
    name: &str,
    virtual_store_dir_max_length: usize,
    placements: Option<&aube_linker::HoistedPlacements>,
    shim_opts: aube_linker::BinShimOptions,
) -> miette::Result<()> {
    let pkg_dir = materialized_pkg_dir(
        aube_dir,
        dep_path,
        name,
        virtual_store_dir_max_length,
        placements,
    );
    // `None` means no materialized package.json was found; in that case only
    // bundled-dependency bins (below) are considered.
    if let Some(pkg_json) = read_materialized_pkg_json(
        aube_dir,
        dep_path,
        name,
        virtual_store_dir_max_length,
        placements,
    )? && let Some(bin) = pkg_json.get("bin")
    {
        match bin {
            // "bin": "./cli.js" — shim named after the unscoped package name.
            serde_json::Value::String(bin_path) => {
                let bin_name = name.split('/').next_back().unwrap_or(name);
                create_bin_link(bin_dir, bin_name, &pkg_dir.join(bin_path), shim_opts)?;
            }
            // "bin": { "tool": "./cli.js", ... } — one shim per entry;
            // non-string values are skipped.
            serde_json::Value::Object(bins) => {
                for (bin_name, path) in bins {
                    if let Some(path_str) = path.as_str() {
                        create_bin_link(bin_dir, bin_name, &pkg_dir.join(path_str), shim_opts)?;
                    }
                }
            }
            _ => {}
        }
    }
    link_bundled_bins(bin_dir, &pkg_dir, graph, dep_path, shim_opts)?;
    Ok(())
}
/// Creates the project's `<modules_dir>/.bin` directory and populates it with
/// shims for every direct (root) dependency in the graph.
fn link_bins(
    project_dir: &std::path::Path,
    modules_dir_name: &str,
    aube_dir: &std::path::Path,
    graph: &aube_lockfile::LockfileGraph,
    virtual_store_dir_max_length: usize,
    placements: Option<&aube_linker::HoistedPlacements>,
    shim_opts: aube_linker::BinShimOptions,
) -> miette::Result<()> {
    let shim_dir = project_dir.join(modules_dir_name).join(".bin");
    std::fs::create_dir_all(&shim_dir).into_diagnostic()?;
    for dep in graph.root_deps() {
        let (dep_path, dep_name) = (&dep.dep_path, &dep.name);
        link_bins_for_dep(
            aube_dir,
            &shim_dir,
            graph,
            dep_path,
            dep_name,
            virtual_store_dir_max_length,
            placements,
            shim_opts,
        )?;
    }
    Ok(())
}
/// Links bins exposed by the bundled dependencies of `dep_path` (packages
/// shipped inside its tarball under `node_modules/`) into `bin_dir`.
fn link_bundled_bins(
    bin_dir: &std::path::Path,
    pkg_dir: &std::path::Path,
    graph: &aube_lockfile::LockfileGraph,
    dep_path: &str,
    shim_opts: aube_linker::BinShimOptions,
) -> miette::Result<()> {
    // A dep path unknown to the graph has no bundled-dependency metadata.
    let Some(locked) = graph.get_package(dep_path) else {
        return Ok(());
    };
    for bundled in &locked.bundled_dependencies {
        let bundled_dir = pkg_dir.join("node_modules").join(bundled);
        // Best-effort: a missing or unparsable package.json, or one without a
        // "bin" field, just skips this bundled package.
        let bin = std::fs::read_to_string(bundled_dir.join("package.json"))
            .ok()
            .and_then(|content| serde_json::from_str::<serde_json::Value>(&content).ok())
            .and_then(|pkg_json| pkg_json.get("bin").cloned());
        let Some(bin) = bin else {
            continue;
        };
        if let Some(bin_path) = bin.as_str() {
            // String form: shim named after the bundled package's last segment.
            let bin_name = bundled.split('/').next_back().unwrap_or(bundled);
            create_bin_link(bin_dir, bin_name, &bundled_dir.join(bin_path), shim_opts)?;
        } else if let Some(bins) = bin.as_object() {
            // Object form: one shim per (name, relative path) entry;
            // non-string values are skipped.
            for (name, path) in bins {
                if let Some(path_str) = path.as_str() {
                    create_bin_link(bin_dir, name, &bundled_dir.join(path_str), shim_opts)?;
                }
            }
        }
        // Any other JSON shape for "bin" is ignored.
    }
    Ok(())
}
/// Thin wrapper around the linker's shim writer that adapts its error into a
/// miette diagnostic.
fn create_bin_link(
    bin_dir: &std::path::Path,
    name: &str,
    target: &std::path::Path,
    shim_opts: aube_linker::BinShimOptions,
) -> miette::Result<()> {
    aube_linker::create_bin_shim(bin_dir, name, target, shim_opts)
        .into_diagnostic()
        .map(|_| ())
}
#[cfg(test)]
mod frozen_mode_tests {
    use super::*;

    /// An explicit CLI `--frozen-lockfile` must win over any YAML preference.
    #[test]
    fn cli_frozen_beats_yaml() {
        assert_eq!(
            FrozenMode::from_flags(true, false, false, Some(false)),
            FrozenMode::Frozen
        );
    }

    #[test]
    fn yaml_prefer_true_maps_to_prefer() {
        assert_eq!(
            FrozenMode::from_flags(false, false, false, Some(true)),
            FrozenMode::Prefer
        );
    }

    #[test]
    fn yaml_prefer_false_maps_to_no() {
        assert_eq!(
            FrozenMode::from_flags(false, false, false, Some(false)),
            FrozenMode::No
        );
    }

    /// The side-effects cache marker must be exactly 128 hex characters
    /// (a sha512 digest); anything else — including path-traversal content —
    /// is rejected, and accepted content is normalized to lowercase.
    #[test]
    fn side_effects_marker_accepts_only_sha512_hex() {
        let tmp = tempfile::tempdir().unwrap();
        let marker = tmp.path().join(SIDE_EFFECTS_CACHE_MARKER);
        std::fs::write(&marker, "../../evil").unwrap();
        assert_eq!(read_valid_side_effects_marker(tmp.path()), None);
        std::fs::write(&marker, format!("{}\n", "A".repeat(128))).unwrap();
        assert_eq!(
            read_valid_side_effects_marker(tmp.path()),
            Some("a".repeat(128))
        );
    }

    /// Dotted keys resolve through nested YAML mappings; a missing segment
    /// yields `None` rather than panicking.
    #[test]
    fn workspace_yaml_value_resolves_dotted_paths() {
        let raw: BTreeMap<String, serde_yaml::Value> =
            serde_yaml::from_str("outer:\n inner:\n key: value\n").unwrap();
        let hit = aube_settings::workspace_yaml_value(&raw, "outer.inner.key");
        assert_eq!(hit.and_then(|v| v.as_str()), Some("value"));
        assert!(aube_settings::workspace_yaml_value(&raw, "outer.missing.key").is_none());
    }
}
#[cfg(test)]
mod peer_dependency_rules_tests {
    use super::*;

    /// Builds an `UnmetPeer` record: `parent` requires `peer` at `declared`,
    /// and `found` is the version actually resolved (`None` = peer missing).
    fn unmet(
        parent: &str,
        peer: &str,
        declared: &str,
        found: Option<&str>,
    ) -> aube_resolver::UnmetPeer {
        aube_resolver::UnmetPeer {
            // Synthetic dep path; tests only care about name/range matching.
            from_dep_path: format!("{parent}@0.0.0"),
            from_name: parent.to_string(),
            peer_name: peer.to_string(),
            declared: declared.to_string(),
            found: found.map(String::from),
        }
    }

    /// Builds `PeerDependencyRules` from glob patterns (`ignore_missing`,
    /// `allow_any`) and `(key, range)` pairs for `allowed_versions`.
    fn rules(
        ignore_missing: &[&str],
        allow_any: &[&str],
        allowed_versions: &[(&str, &str)],
    ) -> PeerDependencyRules {
        PeerDependencyRules {
            ignore_missing: ignore_missing
                .iter()
                .map(|p| glob::Pattern::new(p).unwrap())
                .collect(),
            allow_any: allow_any
                .iter()
                .map(|p| glob::Pattern::new(p).unwrap())
                .collect(),
            allowed_versions: allowed_versions
                .iter()
                .map(|(k, v)| ((*k).to_string(), (*v).to_string()))
                .collect(),
        }
    }

    /// `ignore_missing` silences only peers that are absent (`found == None`)
    /// and whose name matches the glob — not wrong-version cases.
    #[test]
    fn ignore_missing_silences_only_missing_matches() {
        let r = rules(&["react*"], &[], &[]);
        assert!(r.silences(&unmet("parent", "react", "^18.0.0", None)));
        assert!(r.silences(&unmet("parent", "react-dom", "^18.0.0", None)));
        // Present-but-mismatched version is NOT silenced by ignore_missing.
        assert!(!r.silences(&unmet("parent", "react", "^18.0.0", Some("19.0.0"))));
        // Non-matching name is never silenced.
        assert!(!r.silences(&unmet("parent", "vue", "^3.0.0", None)));
    }

    /// `allow_any` silences a matching peer whether it is missing or resolved
    /// to a version outside the declared range.
    #[test]
    fn allow_any_silences_both_missing_and_wrong_version() {
        let r = rules(&[], &["react"], &[]);
        assert!(r.silences(&unmet("parent", "react", "^18.0.0", None)));
        assert!(r.silences(&unmet("parent", "react", "^18.0.0", Some("19.0.0"))));
        assert!(!r.silences(&unmet("parent", "vue", "^3.0.0", Some("2.0.0"))));
    }

    /// A bare `allowed_versions` key ("react") widens the acceptable range for
    /// every parent — but only when the peer is actually present and the found
    /// version satisfies the override range.
    #[test]
    fn allowed_versions_bare_key_widens_range_regardless_of_parent() {
        let r = rules(&[], &[], &[("react", "^19.0.0")]);
        assert!(r.silences(&unmet(
            "styled-components",
            "react",
            "^18.0.0",
            Some("19.0.0")
        )));
        assert!(r.silences(&unmet("other-lib", "react", "^18.0.0", Some("19.5.0"))));
        // Found version outside the override range: still reported.
        assert!(!r.silences(&unmet("lib", "react", "^18.0.0", Some("20.0.0"))));
        // Missing peer: allowed_versions does not apply.
        assert!(!r.silences(&unmet("lib", "react", "^18.0.0", None)));
    }

    /// A scoped key ("parent>peer") applies the override only when the unmet
    /// peer was declared by that specific parent.
    #[test]
    fn allowed_versions_scoped_key_only_matches_named_parent() {
        let r = rules(&[], &[], &[("styled-components>react", "^19.0.0")]);
        assert!(r.silences(&unmet(
            "styled-components",
            "react",
            "^18.0.0",
            Some("19.0.0")
        )));
        assert!(!r.silences(&unmet("other-lib", "react", "^18.0.0", Some("19.0.0"))));
    }

    /// An override range that fails to parse as a semver range must not
    /// silently allow anything.
    #[test]
    fn invalid_override_range_does_not_silence() {
        let r = rules(&[], &[], &[("react", "not-a-range")]);
        assert!(!r.silences(&unmet("parent", "react", "^18.0.0", Some("19.0.0"))));
    }
}