use anyhow::{Context, Result};
use chrono::Utc;
use futures::future::join_all;
use semver::{Version, VersionReq};
use std::collections::HashSet;
use std::fs;
use crate::cli::ResolveArgs;
use crate::types::{
NpmAlias, NpmPackageMetadata, NpmVersionInfo, ResolutionContext, ResolvedPackage,
WorkspacePackageRef,
};
use crate::utils::escape_nix_string;
// Maximum number of registry metadata requests kept in flight per batch of
// the resolution loop (also caps the batch size in `run_resolve`).
const MAX_CONCURRENT_FETCHES: usize = 32;
/// Entry point for the `resolve` subcommand.
///
/// Reads `package.json` from `args.project`, resolves the full transitive
/// dependency tree against the configured registry (fetching metadata in
/// parallel batches), then writes the result as a Nix lockfile (default
/// `deps.nix` in the project directory).
///
/// # Errors
/// Returns `Err` only for local problems: unreadable/unparseable
/// `package.json` or a failed write of the output file. Per-package
/// resolution failures are collected in `ctx.failed` and reported as
/// warnings instead of aborting.
pub async fn run_resolve(args: ResolveArgs) -> Result<()> {
    println!("pleme-linker resolve: Resolving dependencies");
    println!(" Project: {}", args.project.display());
    println!(" Registry: {}", args.registry);
    println!(" Platform: {}", args.platform);
    println!();
    // --- Load the project manifest --------------------------------------
    let package_json_path = args.project.join("package.json");
    let package_json_content = fs::read_to_string(&package_json_path)
        .with_context(|| format!("Failed to read {}", package_json_path.display()))?;
    let package_json: serde_json::Value = serde_json::from_str(&package_json_content)
        .with_context(|| "Failed to parse package.json")?;
    let mut ctx = ResolutionContext::new(args.registry.clone(), args.platform.clone());
    let mut dep_count = 0;
    // --- Seed the work queue with direct dependencies --------------------
    if let Some(deps) = package_json.get("dependencies").and_then(|d| d.as_object()) {
        for (name, version) in deps {
            let version_str = version.as_str().unwrap_or("*");
            // workspace:/file:/link: constraints denote local packages:
            // record them for the workspacePackages section of deps.nix
            // instead of queueing a registry lookup.
            if version_str.starts_with("workspace:")
                || version_str.starts_with("file:")
                || version_str.starts_with("link:")
            {
                // file:/link: carry a relative path; plain workspace: does not.
                let relative_path = if version_str.starts_with("file:") {
                    version_str.strip_prefix("file:").unwrap_or(version_str).to_string()
                } else if version_str.starts_with("link:") {
                    version_str.strip_prefix("link:").unwrap_or(version_str).to_string()
                } else {
                    "*".to_string()
                };
                ctx.workspace_packages.push(WorkspacePackageRef {
                    name: name.clone(),
                    relative_path,
                });
                println!(" Found workspace dependency: {} -> {}", name, version_str);
                continue;
            }
            ctx.queue.push_back((name.clone(), version_str.to_string(), false));
            ctx.queued.insert(format!("{}@{}", name, version_str));
            dep_count += 1;
        }
    }
    // devDependencies are resolved only when explicitly requested.
    if args.include_dev {
        if let Some(deps) = package_json
            .get("devDependencies")
            .and_then(|d| d.as_object())
        {
            for (name, version) in deps {
                let version_str = version.as_str().unwrap_or("*");
                // Local dev deps are skipped outright (not even recorded as
                // workspace packages, unlike regular dependencies above).
                if version_str.starts_with("workspace:")
                    || version_str.starts_with("file:")
                    || version_str.starts_with("link:")
                {
                    continue;
                }
                let key = format!("{}@{}", name, version_str);
                if !ctx.queued.contains(&key) {
                    ctx.queue.push_back((name.clone(), version_str.to_string(), false));
                    ctx.queued.insert(key);
                    dep_count += 1;
                }
            }
        }
    }
    println!(" Found {} direct dependencies", dep_count);
    println!();
    println!("Resolving dependency tree (parallel fetching, {} concurrent)...", MAX_CONCURRENT_FETCHES);
    let mut resolved_count = 0;
    // --- Main resolution loop: drain the queue in bounded batches --------
    while !ctx.queue.is_empty() {
        // `batch` holds the entries processed this round; `packages_to_fetch`
        // the distinct package names whose metadata is not yet cached.
        let mut batch: Vec<(String, String, bool, String)> = Vec::new();
        let mut packages_to_fetch: HashSet<String> = HashSet::new();
        while let Some((name, constraint, is_optional)) = ctx.queue.pop_front() {
            // "npm:pkg@ver" aliases resolve against a different target package.
            let (actual_name, actual_constraint, target_package) =
                if let Some(alias) = parse_npm_alias(&constraint) {
                    (name.clone(), alias.target_constraint.clone(), alias.target_package.clone())
                } else {
                    (name.clone(), constraint.clone(), name.clone())
                };
            // Skip when an already-resolved version satisfies the constraint.
            if is_satisfied(&ctx, &actual_name, &actual_constraint) {
                continue;
            }
            if !ctx.metadata_cache.contains_key(&target_package) {
                packages_to_fetch.insert(target_package.clone());
            }
            batch.push((name, constraint, is_optional, target_package));
            // Cap the number of *new* network fetches per round.
            if packages_to_fetch.len() >= MAX_CONCURRENT_FETCHES {
                break;
            }
        }
        if batch.is_empty() {
            continue;
        }
        // Fetch all missing metadata for this batch concurrently.
        if !packages_to_fetch.is_empty() {
            let fetch_results = fetch_metadata_batch(&ctx.client, &ctx.registry, &packages_to_fetch).await;
            for (name, result) in fetch_results {
                match result {
                    Ok(metadata) => {
                        ctx.metadata_cache.insert(name, metadata);
                    }
                    Err(e) => {
                        // Record the failure for every non-optional batch
                        // entry that targeted this package.
                        for (batch_name, batch_constraint, is_optional, target) in &batch {
                            if target == &name && !*is_optional {
                                ctx.failed.push((batch_name.clone(), batch_constraint.clone(), e.clone()));
                            }
                        }
                    }
                }
            }
        }
        // Resolve each batch entry against the (now populated) cache.
        for (name, constraint, is_optional, target_package) in batch {
            let (actual_name, actual_constraint, _) =
                if let Some(alias) = parse_npm_alias(&constraint) {
                    (name.clone(), alias.target_constraint.clone(), alias.target_package.clone())
                } else {
                    (name.clone(), constraint.clone(), name.clone())
                };
            let metadata = match ctx.metadata_cache.get(&target_package) {
                Some(m) => m.clone(),
                None => {
                    // Metadata missing means the fetch failed; the error was
                    // already recorded in the fetch loop above, so this
                    // branch is intentionally empty.
                    if !is_optional {
                    }
                    continue;
                }
            };
            let version_info = match resolve_version(&metadata, &actual_constraint) {
                Some(v) => v.clone(),
                None => {
                    // Optional dependencies may silently fail to resolve.
                    if !is_optional {
                        ctx.failed.push((
                            name.clone(),
                            constraint.clone(),
                            format!("No version satisfies constraint {}", actual_constraint),
                        ));
                    }
                    continue;
                }
            };
            // Skip versions whose `os` field excludes the target platform.
            if !is_platform_compatible(&version_info, &ctx.platform) {
                continue;
            }
            let key = format!("{}@{}", actual_name, version_info.version);
            if ctx.resolved.contains_key(&key) {
                continue;
            }
            // Prefer the SRI integrity string; fall back to a "sha1-" prefix
            // around the legacy shasum, or empty if neither is present.
            let integrity = version_info
                .dist
                .integrity
                .clone()
                .or_else(|| version_info.dist.shasum.as_ref().map(|s| format!("sha1-{}", s)))
                .unwrap_or_default();
            // Queue transitive deps; the returned "name@constraint" keys are
            // kept so the second pass below can pin them to exact versions.
            let mut dep_keys = Vec::new();
            for (dep_name, dep_constraint) in &version_info.dependencies {
                let dep_key = queue_dependency(&mut ctx, dep_name, dep_constraint, false);
                if let Some(k) = dep_key {
                    dep_keys.push(k);
                }
            }
            for (dep_name, dep_constraint) in &version_info.optional_dependencies {
                let dep_key = queue_dependency(&mut ctx, dep_name, dep_constraint, true);
                if let Some(k) = dep_key {
                    dep_keys.push(k);
                }
            }
            // Peer deps are queued best-effort but not recorded as edges.
            for (dep_name, dep_constraint) in &version_info.peer_dependencies {
                queue_dependency(&mut ctx, dep_name, dep_constraint, true);
            }
            let root_key = format!("{}@{}", actual_name, version_info.version);
            // NOTE(review): heuristic — treats early resolutions as root
            // dependencies, capped at 2x the direct-dep count; this can
            // over- or under-count actual roots. Confirm intent.
            if dep_count > 0 && ctx.root_deps.len() < dep_count * 2 {
                ctx.root_deps.push(root_key.clone());
            }
            ctx.resolved.insert(
                key,
                ResolvedPackage {
                    pname: actual_name.clone(),
                    version: version_info.version.clone(),
                    url: version_info.dist.tarball.clone(),
                    integrity,
                    dependencies: dep_keys,
                    has_bin: version_info.bin.as_ref().map(|_| true),
                },
            );
            resolved_count += 1;
        }
        // Same-line progress indicator (carriage return, explicit flush).
        print!("\r Resolved {} packages...", resolved_count);
        use std::io::Write;
        std::io::stdout().flush().ok();
    }
    println!("\r Resolved {} packages total ", ctx.resolved.len());
    // --- Second pass: rewrite "name@constraint" edges to exact versions --
    let resolved_clone: Vec<(String, ResolvedPackage)> = ctx.resolved.iter()
        .map(|(k, v)| (k.clone(), v.clone()))
        .collect();
    for (_key, pkg) in &mut ctx.resolved {
        let mut updated_deps = Vec::new();
        for dep_constraint_key in &pkg.dependencies {
            // Split "name@constraint", honouring scoped names ("@scope/pkg")
            // whose leading '@' is not a separator.
            let (dep_name, dep_constraint) = if dep_constraint_key.starts_with('@') {
                if let Some(at_pos) = dep_constraint_key[1..].find('@') {
                    let split_pos = at_pos + 1;
                    (
                        dep_constraint_key[..split_pos].to_string(),
                        dep_constraint_key[split_pos + 1..].to_string(),
                    )
                } else {
                    (dep_constraint_key.clone(), "*".to_string())
                }
            } else {
                if let Some(at_pos) = dep_constraint_key.find('@') {
                    (
                        dep_constraint_key[..at_pos].to_string(),
                        dep_constraint_key[at_pos + 1..].to_string(),
                    )
                } else {
                    (dep_constraint_key.clone(), "*".to_string())
                }
            };
            // Find any resolved version of this name satisfying the recorded
            // constraint (first match wins).
            let mut found_version = None;
            for (_, resolved_pkg) in &resolved_clone {
                if resolved_pkg.pname == dep_name {
                    if let Some(req) = parse_version_req(&dep_constraint) {
                        if let Ok(ver) = Version::parse(&resolved_pkg.version) {
                            if req.matches(&ver) {
                                found_version = Some(format!("{}@{}", dep_name, resolved_pkg.version));
                                break;
                            }
                        }
                    }
                }
            }
            if let Some(resolved_key) = found_version {
                updated_deps.push(resolved_key);
            } else {
                // Nothing matched: leave the original constraint key as-is.
                updated_deps.push(dep_constraint_key.clone());
            }
        }
        pkg.dependencies = updated_deps;
    }
    // --- Report failures and write the lockfile --------------------------
    if !ctx.failed.is_empty() {
        println!();
        println!("Warning: {} packages failed to resolve:", ctx.failed.len());
        for (name, constraint, error) in &ctx.failed {
            println!(" {} @ {}: {}", name, constraint, error);
        }
    }
    let output_path = args.output.unwrap_or_else(|| args.project.join("deps.nix"));
    println!();
    println!("Generating {}...", output_path.display());
    let nix_content = generate_deps_nix(&ctx);
    fs::write(&output_path, nix_content)?;
    println!();
    println!("Done!");
    println!(" Output: {}", output_path.display());
    println!(" Packages: {}", ctx.resolved.len());
    println!(" Root dependencies: {}", ctx.root_deps.len());
    Ok(())
}
/// Fetches registry metadata for every package in `packages` concurrently.
///
/// Returns one `(package_name, result)` pair per requested package. Errors
/// are rendered to `String` per package so a single failure does not abort
/// the whole batch.
///
/// Fix: the call to `fetch_single_metadata` contained the mojibake `®istry`
/// (a mis-decoded `&registry` — `&reg` read as the HTML entity `&reg;`),
/// which does not compile; restored to `&registry`.
async fn fetch_metadata_batch(
    client: &reqwest::Client,
    registry: &str,
    packages: &HashSet<String>,
) -> Vec<(String, Result<NpmPackageMetadata, String>)> {
    // Clone the client (cheap: reqwest::Client is reference-counted) and the
    // registry URL into each future so they can be moved into `async move`.
    let futures: Vec<_> = packages
        .iter()
        .map(|name| {
            let client = client.clone();
            let registry = registry.to_string();
            let name = name.clone();
            async move {
                let result = fetch_single_metadata(&client, &registry, &name).await;
                (name, result)
            }
        })
        .collect();
    // Await all fetches at once; join_all yields one entry per future, in order.
    join_all(futures).await
}
/// Downloads the full registry metadata document for a single package.
///
/// Scoped names (`@scope/pkg`) keep their leading `@` but have the `/`
/// percent-encoded as `%2f`, as the npm registry expects. All failures are
/// rendered to `String` so callers can record them without a concrete
/// error type.
async fn fetch_single_metadata(
    client: &reqwest::Client,
    registry: &str,
    name: &str,
) -> Result<NpmPackageMetadata, String> {
    // Only scoped package names need the slash encoded.
    let path = if name.starts_with('@') {
        name.replace('/', "%2f")
    } else {
        name.to_string()
    };
    let url = format!("{}/{}", registry, path);
    let response = client
        .get(&url)
        .header("Accept", "application/json")
        .send()
        .await
        .map_err(|e| format!("Failed to fetch metadata for {}: {}", name, e))?;
    if !response.status().is_success() {
        return Err(format!(
            "Failed to fetch {}: HTTP {}",
            name,
            response.status()
        ));
    }
    response
        .json()
        .await
        .map_err(|e| format!("Failed to parse metadata for {}: {}", name, e))
}
/// Returns true when some already-resolved package named `name` has a
/// version matching `constraint`. Constraints or versions that fail to
/// parse count as unsatisfied.
fn is_satisfied(ctx: &ResolutionContext, name: &str, constraint: &str) -> bool {
    let req = match parse_version_req(constraint) {
        Some(r) => r,
        None => return false,
    };
    ctx.resolved.values().any(|pkg| {
        pkg.pname == name
            && Version::parse(&pkg.version)
                .map(|version| req.matches(&version))
                .unwrap_or(false)
    })
}
fn resolve_version<'a>(metadata: &'a NpmPackageMetadata, constraint: &str) -> Option<&'a NpmVersionInfo> {
if constraint == "latest" || constraint == "*" {
if let Some(latest_version) = metadata.dist_tags.get("latest") {
return metadata.versions.get(latest_version);
}
}
if let Some(version) = metadata.dist_tags.get(constraint) {
return metadata.versions.get(version);
}
let req = parse_version_req(constraint)?;
let mut best: Option<(&String, &NpmVersionInfo)> = None;
for (version_str, info) in &metadata.versions {
if let Ok(version) = Version::parse(version_str) {
if req.matches(&version) {
match &best {
None => best = Some((version_str, info)),
Some((best_str, _)) => {
if let Ok(best_ver) = Version::parse(best_str) {
if version > best_ver {
best = Some((version_str, info));
}
}
}
}
}
}
}
best.map(|(_, info)| info)
}
/// Converts an npm-style version constraint into a semver `VersionReq`.
///
/// Normalisation (best-effort):
/// - trim whitespace and strip a leading `v` or `=`;
/// - drop `.x` / `.X` / `.*` wildcard suffixes anywhere in the string;
/// - keep only the first alternative of an `a || b` union (known
///   limitation: the other alternatives are discarded);
/// - handle npm hyphen ranges ("1.2 - 2.3") and space-separated ranges
///   (">=1 <2") that `VersionReq::parse` does not accept natively.
fn parse_version_req(constraint: &str) -> Option<VersionReq> {
    let mut cleaned = constraint
        .trim()
        .trim_start_matches('v')
        .trim_start_matches('=')
        .replace(".x", "")
        .replace(".X", "")
        .replace(".*", "");
    // Only the first alternative of an "a || b" union is honoured.
    if let Some(pos) = cleaned.find("||") {
        cleaned = cleaned[..pos].trim().to_string();
    }
    parse_hyphen_range(&cleaned)
        .or_else(|| parse_space_separated_range(&cleaned))
        .or_else(|| VersionReq::parse(&cleaned).ok())
}
/// Parses an npm hyphen range ("1.2.3 - 2.3.4") into an inclusive semver
/// requirement (">=1.2.3, <=2.3.4").
///
/// Requires exactly one " - " separator and both endpoints to be non-empty
/// and digit-leading; anything else yields `None`.
fn parse_hyphen_range(constraint: &str) -> Option<VersionReq> {
    let mut pieces = constraint.split(" - ");
    let lower = pieces.next()?.trim();
    let upper = pieces.next()?.trim();
    // A third piece means more than one separator: not a simple hyphen range.
    if pieces.next().is_some() {
        return None;
    }
    let digit_led = |s: &str| s.chars().next().map(|c| c.is_ascii_digit()).unwrap_or(false);
    if lower.is_empty() || upper.is_empty() || !digit_led(lower) || !digit_led(upper) {
        return None;
    }
    VersionReq::parse(&format!(">={}, <={}", lower, upper)).ok()
}
/// Parses npm space-separated range syntax such as ">=1.0.0 <2.0.0" into a
/// comma-joined semver requirement (">=1.0.0, <2.0.0").
///
/// Accepted tokens: comparator-prefixed versions (">=1", "<2", "^1.2",
/// "~0.3"), a bare comparator followed by its version in the next token
/// (">= 1.0.0"), and bare versions (treated as exact "=" matches). Any
/// other token aborts parsing. Returns `None` unless at least two
/// constraints were collected (single constraints are left for
/// `VersionReq::parse` to handle directly).
///
/// Fix: the bare-comparator branch was unreachable because
/// `part.starts_with(">=")` is also true when `part == ">="`, so the prefix
/// branch consumed the lone operator and emitted an unparseable constraint
/// (e.g. ">= 1.0.0 <2" produced ">=, =1.0.0, <2" and resolved to `None`).
/// The exact-operator case is now checked first.
fn parse_space_separated_range(constraint: &str) -> Option<VersionReq> {
    let parts: Vec<&str> = constraint.split_whitespace().collect();
    if parts.len() < 2 {
        return None;
    }
    let mut constraints: Vec<String> = Vec::new();
    let mut i = 0;
    while i < parts.len() {
        let part = parts[i];
        // Bare comparator paired with the following token: ">= 1.0.0".
        // Must be tested BEFORE the prefix branch below (see doc comment).
        if (part == ">=" || part == "<=" || part == ">" || part == "<") && i + 1 < parts.len() {
            constraints.push(format!("{}{}", part, parts[i + 1]));
            i += 2;
        }
        // Comparator already attached to its version: ">=1.0.0", "^1.2".
        else if part.starts_with(">=") || part.starts_with("<=") ||
            part.starts_with('>') || part.starts_with('<') ||
            part.starts_with('^') || part.starts_with('~') {
            constraints.push(part.to_string());
            i += 1;
        }
        // Bare version: treat as an exact match.
        else if part.chars().next().map(|c| c.is_ascii_digit()).unwrap_or(false) {
            constraints.push(format!("={}", part));
            i += 1;
        }
        else {
            return None;
        }
    }
    if constraints.len() >= 2 {
        let semver_range = constraints.join(", ");
        return VersionReq::parse(&semver_range).ok();
    }
    None
}
/// Parses an npm alias constraint of the form "npm:package@version" or
/// "npm:@scope/package@version".
///
/// Returns `None` when `constraint` does not start with "npm:". When no
/// version follows the package name, the target constraint defaults to "*".
/// `rfind('@')` is used so a scoped name's leading '@' is never mistaken
/// for the version separator.
///
/// Fix: removed an unreachable branch — when `rfind('@') == Some(0)` the
/// only '@' in the string is at index 0, so the old inner
/// `rest[1..].find('@')` lookup could never succeed; the `(rest, "*")`
/// fallback is the only possible outcome in that case.
fn parse_npm_alias(constraint: &str) -> Option<NpmAlias> {
    let rest = constraint.strip_prefix("npm:")?;
    let (package, version) = match rest.rfind('@') {
        // An '@' past index 0 separates the package name from its version.
        Some(at_pos) if at_pos > 0 => (&rest[..at_pos], &rest[at_pos + 1..]),
        // No '@' at all, or only the leading '@' of a scoped name:
        // no version part, default to "*".
        _ => (rest, "*"),
    };
    Some(NpmAlias {
        target_package: package.to_string(),
        target_constraint: version.to_string(),
    })
}
/// Checks whether a package version may be installed on `platform`, based
/// on its npm `os` field.
///
/// Rules (mirroring npm): an entry "!<platform>" explicitly blocks the
/// platform; otherwise a non-empty list acts as an allow-list that must
/// mention the platform, unless it consists of negation entries only. A
/// missing or empty `os` field allows everything.
fn is_platform_compatible(info: &NpmVersionInfo, platform: &str) -> bool {
    let os_list = match &info.os {
        Some(list) => list,
        None => return true,
    };
    // Explicit denial ("!<platform>") always wins.
    if os_list.iter().any(|os| os.strip_prefix('!') == Some(platform)) {
        return false;
    }
    if os_list.is_empty() {
        return true;
    }
    // Allow-list semantics: the platform must appear, unless the remaining
    // entries are all negations (in which case non-listed platforms pass).
    os_list.iter().any(|os| os == platform || os.starts_with('!'))
}
/// Queues `name@constraint` for resolution and returns the queue key so the
/// caller can record the dependency edge.
///
/// Local packages (workspace:/file:/link:) are never queued and yield
/// `None`. A key that was already queued is not re-enqueued, but the key is
/// still returned so the edge is recorded regardless.
fn queue_dependency(
    ctx: &mut ResolutionContext,
    name: &str,
    constraint: &str,
    is_optional: bool,
) -> Option<String> {
    // Local packages are handled by the workspace machinery, not the queue.
    let is_local = ["workspace:", "file:", "link:"]
        .iter()
        .any(|prefix| constraint.starts_with(prefix));
    if is_local {
        return None;
    }
    let key = format!("{}@{}", name, constraint);
    if !ctx.queued.contains(&key) {
        ctx.queued.insert(key.clone());
        ctx.queue.push_back((name.to_string(), constraint.to_string(), is_optional));
    }
    Some(key)
}
/// Renders the resolution context as a Nix attrset lockfile.
///
/// Sections emitted: a comment header, metadata (timestamp, resolver
/// version, package count), `rootDependencies`, the full `packages` set,
/// and — only when workspace packages exist — a `workspacePackages` list.
/// All collections are sorted first so the output is deterministic and
/// diff-friendly.
fn generate_deps_nix(ctx: &ResolutionContext) -> String {
    let timestamp = Utc::now().format("%Y-%m-%dT%H:%M:%SZ");
    // Sort packages, root deps, and workspace packages for stable output.
    let mut packages: Vec<_> = ctx.resolved.iter().collect();
    packages.sort_by(|a, b| a.0.cmp(b.0));
    let mut root_deps = ctx.root_deps.clone();
    root_deps.sort();
    let mut workspace_packages = ctx.workspace_packages.clone();
    workspace_packages.sort_by(|a, b| a.name.cmp(&b.name));
    // One attrset entry per resolved package.
    let package_entries: Vec<String> = packages
        .iter()
        .map(|(key, pkg)| {
            // `dependencies` / `hasBin` lines are emitted only when present.
            let deps_str = if pkg.dependencies.is_empty() {
                String::new()
            } else {
                let deps: Vec<String> = pkg.dependencies.iter().map(|d| format!("\"{}\"", d)).collect();
                format!("\n dependencies = [ {} ];", deps.join(" "))
            };
            let bin_str = if pkg.has_bin == Some(true) {
                "\n hasBin = true;"
            } else {
                ""
            };
            // NOTE(review): `key` and `pkg.version` are interpolated without
            // escape_nix_string, unlike pname/url/integrity — presumably safe
            // for registry-derived names/versions, but confirm.
            format!(
r#" "{}" = {{
pname = "{}";
version = "{}";
url = "{}";
integrity = "{}";{}{}
}};"#,
                key,
                escape_nix_string(&pkg.pname),
                pkg.version,
                escape_nix_string(&pkg.url),
                escape_nix_string(&pkg.integrity),
                deps_str,
                bin_str
            )
        })
        .collect();
    let root_deps_str: Vec<String> = root_deps.iter().map(|d| format!(" \"{}\"", d)).collect();
    // workspacePackages section is omitted entirely when empty.
    let workspace_section = if workspace_packages.is_empty() {
        String::new()
    } else {
        let workspace_entries: Vec<String> = workspace_packages
            .iter()
            .map(|wp| {
                format!(
r#" {{ name = "{}"; path = "{}"; }}"#,
                    escape_nix_string(&wp.name),
                    escape_nix_string(&wp.relative_path)
                )
            })
            .collect();
        format!(
r#"
# Workspace packages (local file: dependencies)
# These are built from source by pleme-linker build-project
workspacePackages = [
{}
];"#,
            workspace_entries.join("\n")
        )
    };
    // Assemble the final document. `{{` / `}}` are escaped literal braces.
    format!(
r#"# Generated by pleme-linker resolve
# DO NOT EDIT - regenerate with: pleme-linker resolve --project .
#
# This file IS the lockfile. It contains:
# - All resolved packages with exact versions
# - Tarball URLs and integrity hashes (from npm registry)
# - Dependency relationships
# - Workspace packages (local file: dependencies)
#
# Nix uses fetchurl to download each package (cached in Attic),
# then pleme-linker build assembles node_modules.
{{
# Metadata
generatedAt = "{timestamp}";
resolverVersion = "0.3.0";
packageCount = {package_count};
# Root dependencies (direct deps from package.json)
rootDependencies = [
{root_deps}
];
# All resolved packages
packages = {{
{packages}
}};{workspace_section}
}}
"#,
        timestamp = timestamp,
        package_count = packages.len(),
        root_deps = root_deps_str.join("\n"),
        packages = package_entries.join("\n\n"),
        workspace_section = workspace_section
    )
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Hyphen ranges parse; non-hyphen input is rejected.
    #[test]
    fn test_parse_hyphen_range() {
        for range in ["1 - 2", "1.2.3 - 2.3.4"] {
            assert!(
                parse_hyphen_range(range).is_some(),
                "Should parse hyphen range '{}'",
                range
            );
        }
        assert!(
            parse_hyphen_range(">=1.0.0").is_none(),
            "Should not parse non-hyphen range"
        );
    }

    /// Space-separated ranges need at least two constraints.
    #[test]
    fn test_parse_space_separated_range() {
        for range in [">=1.0.0 <2.0.0", ">=3.1.1 <6", ">=0.5 <1"] {
            assert!(
                parse_space_separated_range(range).is_some(),
                "Should parse space-separated range '{}'",
                range
            );
        }
        assert!(
            parse_space_separated_range(">=1.0.0").is_none(),
            "Should not parse single constraint"
        );
    }

    /// parse_version_req accepts every npm constraint flavour we rely on.
    #[test]
    fn test_parse_version_req_npm_formats() {
        for constraint in ["1 - 2", ">=3.1.1 <6", "^1.0.0", ">=1.0.0", "~1.0.0"] {
            assert!(
                parse_version_req(constraint).is_some(),
                "parse_version_req should handle '{}'",
                constraint
            );
        }
    }
}