use crate::commands::install::{self, FrozenMode, InstallOptions};
use crate::commands::pack::build_archive;
use aube_manifest::PackageJson;
use clap::Args;
use miette::{Context, IntoDiagnostic, miette};
use std::collections::BTreeMap;
use std::path::{Path, PathBuf};
// CLI arguments for `aube deploy`. Field comments are deliberately `//` (not
// `///`): clap-derive turns doc comments into --help text, which would change
// the program's user-visible output.
#[derive(Debug, Args)]
pub struct DeployArgs {
    // Directory to deploy into; resolved against the workspace root when relative.
    pub target: PathBuf,
    // Deploy only devDependencies; mutually exclusive with --prod.
    #[arg(short = 'D', long, conflicts_with = "prod")]
    pub dev: bool,
    // Skip optionalDependencies entirely.
    #[arg(long)]
    pub no_optional: bool,
    // Production deploy (accepted for symmetry; prod is already the default
    // because `dev: false` implies prod below).
    #[arg(short = 'P', long, visible_alias = "production")]
    pub prod: bool,
}
/// Entry point for `aube deploy`: stages the selected workspace package(s)
/// into an isolated target directory (with `workspace:` specifiers pinned to
/// concrete versions), then runs an install inside each deployed directory.
pub async fn run(
    args: DeployArgs,
    filter: aube_workspace::selector::EffectiveFilter,
) -> miette::Result<()> {
    // Deploy never guesses a package: an explicit --filter selection is mandatory.
    if filter.is_empty() {
        return Err(miette!(
            "aube deploy: --filter/-F is required to pick a workspace package"
        ));
    }
    let source_root = crate::dirs::cwd().wrap_err("failed to read current directory")?;
    // Resolve settings from .npmrc, pnpm-workspace.yaml, and the environment;
    // no CLI-level setting overrides are forwarded here (`cli: &[]`).
    let npmrc_entries = aube_registry::config::load_npmrc_entries(&source_root);
    let raw_workspace = aube_manifest::workspace::load_raw(&source_root).unwrap_or_default();
    let env = aube_settings::values::capture_env();
    let settings_ctx = aube_settings::ResolveCtx {
        npmrc: &npmrc_entries,
        workspace_yaml: &raw_workspace,
        env: &env,
        cli: &[],
    };
    // When true, stage_one copies the whole package tree instead of the pack file list.
    let deploy_all_files = aube_settings::resolved::deploy_all_files(&settings_ctx);
    let workspace_pkgs = aube_workspace::find_workspace_packages(&source_root)
        .map_err(|e| miette!("failed to discover workspace packages: {e}"))?;
    if workspace_pkgs.is_empty() {
        return Err(miette!(
            "aube deploy: no workspace packages found. \
            `deploy` requires a pnpm-workspace.yaml at {}",
            source_root.display()
        ));
    }
    // name -> (package dir, version) for every readable manifest; used to
    // validate filter matches and later to pin `workspace:` specs. Manifests
    // that fail to parse or lack name/version are silently skipped.
    let mut ws_index: BTreeMap<String, (PathBuf, String)> = BTreeMap::new();
    for dir in &workspace_pkgs {
        let Ok(m) = PackageJson::from_path(&dir.join("package.json")) else {
            continue;
        };
        if let (Some(n), Some(v)) = (m.name, m.version) {
            ws_index.insert(n, (dir.clone(), v));
        }
    }
    let selected =
        aube_workspace::selector::select_workspace_packages(&source_root, &workspace_pkgs, &filter)
            .map_err(|e| miette!("invalid --filter selector: {e}"))?;
    // Keep only named packages present in the index; sort by name for a
    // deterministic deploy order.
    let mut matches: Vec<(String, PathBuf)> = selected
        .into_iter()
        .filter_map(|pkg| pkg.name.map(|name| (name, pkg.dir)))
        .filter(|(name, _)| ws_index.contains_key(name))
        .collect();
    matches.sort_by(|a, b| a.0.cmp(&b.0));
    if matches.is_empty() {
        let names: Vec<&str> = ws_index.keys().map(String::as_str).collect();
        return Err(miette!(
            "aube deploy: --filter {:?} did not match any workspace package. Known: {}",
            filter,
            names.join(", ")
        ));
    }
    // Relative targets resolve against the workspace root (not the package dir).
    let target_root = if args.target.is_absolute() {
        args.target.clone()
    } else {
        source_root.join(&args.target)
    };
    // Single match: deploy directly into target_root. Multiple matches: each
    // package goes into <target_root>/<source dir basename>; basenames must be
    // unique or the deploys would overwrite each other.
    let plan: Vec<(String, PathBuf, PathBuf)> = if matches.len() == 1 {
        let (name, src) = matches.into_iter().next().unwrap();
        vec![(name, src, target_root.clone())]
    } else {
        ensure_target_writable(&target_root)?;
        let mut used: BTreeMap<String, String> = BTreeMap::new();
        let mut v = Vec::with_capacity(matches.len());
        for (name, src) in matches {
            let base = src
                .file_name()
                .and_then(|s| s.to_str())
                .map(str::to_string)
                .ok_or_else(|| {
                    miette!(
                        "aube deploy: workspace package {} has no directory name",
                        src.display()
                    )
                })?;
            // Collision check: two packages in equally-named directories.
            if let Some(prev) = used.insert(base.clone(), name.clone()) {
                return Err(miette!(
                    "aube deploy: workspace packages {prev:?} and {name:?} both live in a directory named {base:?}; \
                    multi-package deploy uses the source basename as the target subdir, so these would collide"
                ));
            }
            v.push((name, src, target_root.join(&base)));
        }
        v
    };
    // Phase 1: stage (copy + manifest rewrite) every package before any install
    // runs, so a staging failure aborts the whole deploy early.
    let mut staged: Vec<StagedDeploy> = Vec::with_capacity(plan.len());
    for (_name, source_pkg_dir, target) in &plan {
        staged.push(stage_one(
            source_pkg_dir,
            target,
            &ws_index,
            &args,
            deploy_all_files,
        )?);
    }
    // Phase 2: per target, optionally seed a subset lockfile, then install.
    for (s, source_pkg_dir) in staged.iter().zip(plan.iter().map(|(_, src, _)| src)) {
        let seeded = seed_target_lockfile(&source_root, source_pkg_dir, &s.target, &args)?;
        super::retarget_cwd(&s.target)?;
        // A seeded lockfile lets us prefer a frozen install; otherwise resolve fresh.
        let mode = if seeded {
            FrozenMode::Prefer
        } else {
            FrozenMode::No
        };
        let opts = InstallOptions {
            project_dir: Some(s.target.clone()),
            mode,
            prod: !args.dev,
            dev: args.dev,
            no_optional: args.no_optional,
            ignore_pnpmfile: false,
            ignore_scripts: false,
            lockfile_only: false,
            merge_git_branch_lockfiles: false,
            dangerously_allow_all_builds: false,
            network_mode: aube_registry::NetworkMode::Online,
            minimum_release_age_override: None,
            strict_no_lockfile: false,
            force: false,
            cli_flags: Vec::new(),
            env_snapshot: aube_settings::values::capture_env(),
            git_prepare_depth: 0,
            workspace_filter: aube_workspace::selector::EffectiveFilter::default(),
        };
        install::run(opts).await?;
        println!(
            "deployed {}@{} to {}",
            s.name,
            s.version,
            s.target.display()
        );
    }
    Ok(())
}
/// One staged deploy: the package identity copied into place by `stage_one`,
/// plus the target directory the install phase operates on.
struct StagedDeploy {
    // Package name taken from the staged manifest/archive.
    name: String,
    // Concrete version being deployed.
    version: String,
    // Fully prepared target directory for this package.
    target: PathBuf,
}
/// Writes a subset of the workspace's source lockfile into `target` so the
/// subsequent install can run in (preferred-)frozen mode.
///
/// Returns `Ok(true)` when a subset lockfile was written, `Ok(false)` when the
/// caller should fall back to a fresh (non-frozen) install. All fallback paths
/// are deliberately non-fatal and only logged at debug level: unreadable root
/// manifest, missing/unparsable lockfile, importer absent from the lockfile,
/// or root dependencies with local sources that would dangle from the target.
fn seed_target_lockfile(
    source_root: &Path,
    source_pkg_dir: &Path,
    target: &Path,
    args: &DeployArgs,
) -> miette::Result<bool> {
    let Ok(source_manifest) = PackageJson::from_path(&source_root.join("package.json")) else {
        tracing::debug!("deploy: workspace root package.json unreadable, skipping lockfile subset");
        return Ok(false);
    };
    let (graph, kind) = match aube_lockfile::parse_lockfile_with_kind(source_root, &source_manifest)
    {
        Ok(pair) => pair,
        Err(e) => {
            tracing::debug!("deploy: no usable source lockfile ({e}); fresh install instead");
            return Ok(false);
        }
    };
    let importer_path = super::workspace_importer_path(source_root, source_pkg_dir)?;
    // Mirror the dep-type filtering the install itself will apply (see
    // StripFields in stage_one): prod XOR dev, optional unless disabled/dev.
    let prod = !args.dev;
    let dev = args.dev;
    let keep_optional = !(args.no_optional || args.dev);
    let keep = move |d: &aube_lockfile::DirectDep| match d.dep_type {
        aube_lockfile::DepType::Production => prod,
        aube_lockfile::DepType::Dev => dev,
        aube_lockfile::DepType::Optional => keep_optional,
    };
    let Some(mut subset) = graph.subset_to_importer(&importer_path, keep) else {
        tracing::debug!(
            "deploy: importer {importer_path:?} not in source lockfile; fresh install instead"
        );
        return Ok(false);
    };
    // Root deps backed by link:/directory/tarball local sources reference
    // paths in the source tree that won't exist relative to the deploy target.
    let has_local_root = subset.root_deps().iter().any(|d| {
        subset
            .get_package(&d.dep_path)
            .and_then(|p| p.local_source.as_ref())
            .is_some_and(|src| {
                matches!(
                    src,
                    aube_lockfile::LocalSource::Link(_)
                        | aube_lockfile::LocalSource::Directory(_)
                        | aube_lockfile::LocalSource::Tarball(_)
                )
            })
    });
    if has_local_root {
        tracing::debug!("deploy: source importer has link:/file: roots; fresh install instead");
        return Ok(false);
    }
    // Workspace-level sections don't apply to a standalone deploy target.
    subset.overrides.clear();
    subset.ignored_optional_dependencies.clear();
    subset.catalogs.clear();
    // Keep `times` entries only for packages that survived the subset
    // (keys appear to be "name@version" — matches the format built here).
    let canonical_keys: std::collections::HashSet<String> = subset
        .packages
        .values()
        .map(|pkg| format!("{}@{}", pkg.name, pkg.version))
        .collect();
    subset.times.retain(|key, _| canonical_keys.contains(key));
    // The target manifest was rewritten during staging; the written lockfile
    // must correspond to that rewritten manifest, not the source one.
    let target_manifest = PackageJson::from_path(&target.join("package.json"))
        .into_diagnostic()
        .wrap_err("deploy: failed to re-read rewritten target package.json")?;
    aube_lockfile::write_lockfile_as(target, &subset, &target_manifest, kind)
        .into_diagnostic()
        .wrap_err("deploy: failed to write subset lockfile into target")?;
    Ok(true)
}
/// Copies one workspace package into `target` and rewrites its manifest so it
/// can be installed standalone.
///
/// File selection depends on `deploy_all_files`: either the entire package
/// tree (minus `node_modules`/`.git`/the target itself, via
/// `collect_all_files`) or exactly the files `aube pack` would publish.
/// Returns the staged package identity for the install phase.
fn stage_one(
    source_pkg_dir: &Path,
    target: &Path,
    ws_index: &BTreeMap<String, (PathBuf, String)>,
    args: &DeployArgs,
    deploy_all_files: bool,
) -> miette::Result<StagedDeploy> {
    // Refuse to clobber a non-empty directory, then make sure it exists.
    ensure_target_writable(target)?;
    std::fs::create_dir_all(target)
        .into_diagnostic()
        .wrap_err_with(|| format!("failed to create {}", target.display()))?;

    let (pkg_name, pkg_version, file_list): (String, String, Vec<(PathBuf, String)>) =
        if deploy_all_files {
            // Full-tree mode: identity comes straight from package.json.
            let manifest = PackageJson::from_path(&source_pkg_dir.join("package.json"))
                .into_diagnostic()
                .wrap_err("failed to read package.json")?;
            let name = match manifest.name {
                Some(n) => n,
                None => return Err(miette!("deploy: package.json has no `name` field")),
            };
            let version = match manifest.version {
                Some(v) => v,
                None => return Err(miette!("deploy: package.json has no `version` field")),
            };
            (name, version, collect_all_files(source_pkg_dir, target)?)
        } else {
            // Pack mode: reuse the publish file list computed by `aube pack`.
            let archive = build_archive(source_pkg_dir)?;
            let mut files = Vec::with_capacity(archive.files.len());
            for rel in archive.files {
                files.push((source_pkg_dir.join(&rel), rel));
            }
            (archive.name, archive.version, files)
        };

    // Mirror every selected file into the target, creating parents on demand.
    for (from, rel) in &file_list {
        let to = target.join(rel);
        if let Some(dir) = to.parent() {
            std::fs::create_dir_all(dir)
                .into_diagnostic()
                .wrap_err_with(|| format!("failed to create {}", dir.display()))?;
        }
        std::fs::copy(from, &to)
            .into_diagnostic()
            .wrap_err_with(|| format!("failed to copy {} -> {}", from.display(), to.display()))?;
    }

    // Dev deploys keep only devDependencies; prod deploys drop them. Optional
    // deps are removed when disabled explicitly or when deploying dev-only.
    let strip = StripFields {
        dependencies: args.dev,
        dev_dependencies: !args.dev,
        optional_dependencies: args.no_optional || args.dev,
    };
    rewrite_workspace_deps(&target.join("package.json"), ws_index, strip)?;

    Ok(StagedDeploy {
        name: pkg_name,
        version: pkg_version,
        target: target.to_path_buf(),
    })
}
/// Walks `source` and returns every regular file as `(absolute path, relative
/// path using forward slashes)`.
///
/// Skips `node_modules` and `.git` entries, skips the deploy `target` itself
/// (compared via canonical paths, so a target nested inside the source tree is
/// never copied into itself), copies through file symlinks, and never descends
/// into directory symlinks.
fn collect_all_files(source: &Path, target: &Path) -> miette::Result<Vec<(PathBuf, String)>> {
    // Canonicalize once up front; fall back to the raw path if that fails.
    let skip_target = std::fs::canonicalize(target).unwrap_or_else(|_| target.to_path_buf());
    let mut files = Vec::new();
    let mut pending = vec![source.to_path_buf()];
    while let Some(current) = pending.pop() {
        let entries = std::fs::read_dir(&current)
            .into_diagnostic()
            .wrap_err_with(|| format!("deploy: read_dir({}) failed", current.display()))?;
        for item in entries {
            let item = item
                .into_diagnostic()
                .wrap_err_with(|| format!("deploy: failed to read entry in {}", current.display()))?;
            let file_name = item.file_name();
            if matches!(file_name.to_string_lossy().as_ref(), "node_modules" | ".git") {
                continue;
            }
            let full = item.path();
            if std::fs::canonicalize(&full).unwrap_or_else(|_| full.clone()) == skip_target {
                continue;
            }
            let kind = item
                .file_type()
                .into_diagnostic()
                .wrap_err_with(|| format!("deploy: failed to stat {}", full.display()))?;
            // For symlinks, classify by the link target; broken links count as neither.
            let (dir_like, file_like) = if kind.is_symlink() {
                std::fs::metadata(&full)
                    .map(|md| (md.is_dir(), md.is_file()))
                    .unwrap_or((false, false))
            } else {
                (kind.is_dir(), kind.is_file())
            };
            if dir_like && !kind.is_symlink() {
                pending.push(full);
            } else if file_like && let Ok(rel) = full.strip_prefix(source) {
                files.push((full.clone(), rel.to_string_lossy().replace('\\', "/")));
            }
        }
    }
    Ok(files)
}
/// Succeeds when `target` does not exist yet or is an empty directory; errors
/// when it contains anything or cannot be inspected.
fn ensure_target_writable(target: &Path) -> miette::Result<()> {
    let mut listing = match std::fs::read_dir(target) {
        Ok(listing) => listing,
        // A missing target is fine — staging creates it later.
        Err(e) if e.kind() == std::io::ErrorKind::NotFound => return Ok(()),
        Err(e) => {
            return Err(miette!(
                "aube deploy: failed to inspect {}: {e}",
                target.display()
            ));
        }
    };
    // One entry is enough to prove the directory is not empty.
    if listing.next().is_some() {
        return Err(miette!(
            "aube deploy: target directory {} is not empty",
            target.display()
        ));
    }
    Ok(())
}
/// Which dependency sections `rewrite_workspace_deps` deletes from the staged
/// manifest before pinning `workspace:` specifiers.
#[derive(Debug, Clone, Copy, Default)]
struct StripFields {
    // Remove `dependencies` (set for dev-only deploys).
    dependencies: bool,
    // Remove `devDependencies` (set for prod deploys).
    dev_dependencies: bool,
    // Remove `optionalDependencies` (set for --no-optional or dev-only deploys).
    optional_dependencies: bool,
}
/// Pins every `workspace:` specifier in `manifest_path` to a concrete version
/// from `ws_index`, after deleting whole dependency sections per `strip`, and
/// writes the manifest back as pretty-printed JSON.
///
/// Errors when the file is unreadable, does not parse to a JSON object, or
/// references a workspace package missing from the index.
fn rewrite_workspace_deps(
    manifest_path: &Path,
    ws_index: &BTreeMap<String, (PathBuf, String)>,
    strip: StripFields,
) -> miette::Result<()> {
    let contents = std::fs::read_to_string(manifest_path)
        .into_diagnostic()
        .wrap_err_with(|| format!("failed to read {}", manifest_path.display()))?;
    let mut doc: serde_json::Value = serde_json::from_str(&contents)
        .into_diagnostic()
        .wrap_err_with(|| format!("failed to parse {}", manifest_path.display()))?;
    let serde_json::Value::Object(obj) = &mut doc else {
        return Err(miette!(
            "{} did not parse to a JSON object",
            manifest_path.display()
        ));
    };
    // Drop whole sections first so a workspace: ref inside a stripped section
    // can never trigger an unknown-package error below.
    for (flagged, field) in [
        (strip.dependencies, "dependencies"),
        (strip.dev_dependencies, "devDependencies"),
        (strip.optional_dependencies, "optionalDependencies"),
    ] {
        if flagged {
            obj.remove(field);
        }
    }
    // peerDependencies are never stripped, but their workspace: specs are
    // still pinned like the others.
    for field in [
        "dependencies",
        "devDependencies",
        "optionalDependencies",
        "peerDependencies",
    ] {
        let Some(section) = obj.get_mut(field).and_then(serde_json::Value::as_object_mut) else {
            continue;
        };
        for (name, value) in section.iter_mut() {
            let Some(spec) = value.as_str() else {
                continue;
            };
            if !spec.starts_with("workspace:") {
                continue;
            }
            let (_, concrete_version) = ws_index.get(name).ok_or_else(|| {
                miette!(
                    "aube deploy: {} declares `{name}: {spec}` but no workspace package named {name:?} was found",
                    manifest_path.display()
                )
            })?;
            *value = serde_json::Value::String(resolve_workspace_spec(spec, concrete_version));
        }
    }
    let pretty = serde_json::to_string_pretty(&doc)
        .into_diagnostic()
        .wrap_err("failed to serialize rewritten package.json")?;
    std::fs::write(manifest_path, pretty)
        .into_diagnostic()
        .wrap_err_with(|| format!("failed to write {}", manifest_path.display()))?;
    Ok(())
}
/// Translates a `workspace:` specifier into a registry-style version range
/// using the package's concrete workspace version.
///
/// `workspace:` and `workspace:*` pin the exact version, `workspace:^` and
/// `workspace:~` keep the range operator in front of the version, and any
/// other suffix (e.g. `workspace:^2.0.0`) is used verbatim. A spec without the
/// `workspace:` prefix is treated as its own suffix.
fn resolve_workspace_spec(spec: &str, concrete_version: &str) -> String {
    let range = match spec.strip_prefix("workspace:") {
        Some(rest) => rest,
        None => spec,
    };
    if range.is_empty() || range == "*" {
        concrete_version.to_string()
    } else if range == "^" || range == "~" {
        format!("{range}{concrete_version}")
    } else {
        range.to_string()
    }
}
// Unit tests for the pure helpers: workspace-spec pinning, manifest rewriting,
// and target-directory validation. (Fixed: two statements were jammed onto one
// line in rewrite_prod_mode_drops_dev_dependencies, violating rustfmt.)
#[cfg(test)]
mod tests {
    use super::*;

    /// Builds a workspace index from (name, version) pairs. The directory
    /// component is a placeholder — these tests only exercise version pinning.
    fn ws_index(entries: &[(&str, &str)]) -> BTreeMap<String, (PathBuf, String)> {
        entries
            .iter()
            .map(|(n, v)| ((*n).to_string(), (PathBuf::from("/tmp"), (*v).to_string())))
            .collect()
    }

    #[test]
    fn resolve_workspace_spec_star_pins_exact() {
        assert_eq!(resolve_workspace_spec("workspace:*", "1.2.3"), "1.2.3");
        assert_eq!(resolve_workspace_spec("workspace:", "1.2.3"), "1.2.3");
    }

    #[test]
    fn resolve_workspace_spec_caret_and_tilde_preserve_operator() {
        assert_eq!(resolve_workspace_spec("workspace:^", "1.2.3"), "^1.2.3");
        assert_eq!(resolve_workspace_spec("workspace:~", "1.2.3"), "~1.2.3");
    }

    #[test]
    fn resolve_workspace_spec_literal_suffix_wins() {
        assert_eq!(
            resolve_workspace_spec("workspace:^2.0.0", "1.2.3"),
            "^2.0.0"
        );
        assert_eq!(resolve_workspace_spec("workspace:1.2.3", "9.9.9"), "1.2.3");
        assert_eq!(resolve_workspace_spec("workspace:>=2", "1.2.3"), ">=2");
    }

    #[test]
    fn rewrite_replaces_workspace_star_with_version() {
        let tmp = tempfile::tempdir().unwrap();
        let path = tmp.path().join("package.json");
        std::fs::write(
            &path,
            r#"{"name":"x","version":"1.0.0","dependencies":{"@test/lib":"workspace:*","lodash":"^4"}}"#,
        )
        .unwrap();
        let idx = ws_index(&[("@test/lib", "1.2.3")]);
        rewrite_workspace_deps(&path, &idx, StripFields::default()).unwrap();
        let out: serde_json::Value =
            serde_json::from_str(&std::fs::read_to_string(&path).unwrap()).unwrap();
        assert_eq!(out["dependencies"]["@test/lib"], "1.2.3");
        // Non-workspace specifiers must pass through untouched.
        assert_eq!(out["dependencies"]["lodash"], "^4");
    }

    #[test]
    fn rewrite_preserves_caret_and_tilde_range_operators() {
        let tmp = tempfile::tempdir().unwrap();
        let path = tmp.path().join("package.json");
        std::fs::write(
            &path,
            r#"{"name":"x","version":"1.0.0","dependencies":{"@a/lib":"workspace:^","@b/lib":"workspace:~"}}"#,
        )
        .unwrap();
        let idx = ws_index(&[("@a/lib", "1.2.3"), ("@b/lib", "4.5.6")]);
        rewrite_workspace_deps(&path, &idx, StripFields::default()).unwrap();
        let out: serde_json::Value =
            serde_json::from_str(&std::fs::read_to_string(&path).unwrap()).unwrap();
        assert_eq!(out["dependencies"]["@a/lib"], "^1.2.3");
        assert_eq!(out["dependencies"]["@b/lib"], "~4.5.6");
    }

    #[test]
    fn rewrite_dev_only_drops_non_dev_dep_fields() {
        let tmp = tempfile::tempdir().unwrap();
        let path = tmp.path().join("package.json");
        std::fs::write(
            &path,
            r#"{"name":"x","version":"1.0.0","dependencies":{"lodash":"^4"},"optionalDependencies":{"fsevents":"^2"},"devDependencies":{"jest":"^29"}}"#,
        )
        .unwrap();
        let idx = ws_index(&[]);
        rewrite_workspace_deps(
            &path,
            &idx,
            StripFields {
                dependencies: true,
                dev_dependencies: false,
                optional_dependencies: true,
            },
        )
        .unwrap();
        let out: serde_json::Value =
            serde_json::from_str(&std::fs::read_to_string(&path).unwrap()).unwrap();
        assert!(out.get("dependencies").is_none());
        assert!(out.get("optionalDependencies").is_none());
        assert_eq!(out["devDependencies"]["jest"], "^29");
    }

    #[test]
    fn rewrite_prod_mode_drops_dev_dependencies() {
        let tmp = tempfile::tempdir().unwrap();
        let path = tmp.path().join("package.json");
        std::fs::write(
            &path,
            r#"{"name":"x","version":"1.0.0","dependencies":{"lodash":"^4"},"devDependencies":{"@test/internal":"workspace:*"}}"#,
        )
        .unwrap();
        // The unknown workspace ref lives in devDependencies, which is stripped
        // before pinning, so no "unknown workspace package" error is raised.
        let idx = ws_index(&[]);
        rewrite_workspace_deps(
            &path,
            &idx,
            StripFields {
                dependencies: false,
                dev_dependencies: true,
                optional_dependencies: false,
            },
        )
        .unwrap();
        let out: serde_json::Value =
            serde_json::from_str(&std::fs::read_to_string(&path).unwrap()).unwrap();
        assert!(out.get("devDependencies").is_none());
        assert_eq!(out["dependencies"]["lodash"], "^4");
    }

    #[test]
    fn rewrite_errors_on_unknown_workspace_ref() {
        let tmp = tempfile::tempdir().unwrap();
        let path = tmp.path().join("package.json");
        std::fs::write(
            &path,
            r#"{"name":"x","version":"1.0.0","dependencies":{"@test/missing":"workspace:*"}}"#,
        )
        .unwrap();
        let idx = ws_index(&[]);
        let err = rewrite_workspace_deps(&path, &idx, StripFields::default()).unwrap_err();
        assert!(err.to_string().contains("@test/missing"));
    }

    #[test]
    fn ensure_target_writable_empty_dir_is_ok() {
        let tmp = tempfile::tempdir().unwrap();
        ensure_target_writable(tmp.path()).unwrap();
    }

    #[test]
    fn ensure_target_writable_missing_is_ok() {
        let tmp = tempfile::tempdir().unwrap();
        ensure_target_writable(&tmp.path().join("nope")).unwrap();
    }

    #[test]
    fn ensure_target_writable_nonempty_errors() {
        let tmp = tempfile::tempdir().unwrap();
        std::fs::write(tmp.path().join("stuff"), "hi").unwrap();
        assert!(ensure_target_writable(tmp.path()).is_err());
    }
}