use std::path::PathBuf;
use std::collections::HashMap;
use std::io::Cursor;
use serde::{Deserialize, Serialize};
use tokio::fs as async_fs;
use tokio::process::Command;
use tokio::io::AsyncWriteExt;
use flate2::read::GzDecoder;
use tar::Archive;
use crate::error::{Result, DumplingError};
const NPM_REGISTRY: &str = "https://registry.npmjs.org";
/// Root directory for dumpling's tarball cache: `<platform cache dir>/dumpling`,
/// falling back to the current directory when no cache dir is known.
fn cache_dir() -> PathBuf {
    let base = dirs::cache_dir().unwrap_or_else(|| PathBuf::from("."));
    base.join("dumpling")
}
/// Minimal view of the npm registry's package metadata document
/// (GET {registry}/{name}); fields not listed here are ignored by serde.
#[derive(Debug, Deserialize)]
struct NpmPackageInfo {
// Tag name -> version, e.g. "latest" -> "1.2.3"; JSON key is "dist-tags".
#[serde(rename = "dist-tags")]
dist_tags: Option<HashMap<String, String>>,
// Version string -> per-version metadata.
versions: Option<HashMap<String, NpmVersionInfo>>,
}
/// Per-version slice of the registry document; only dist info is consumed.
#[derive(Debug, Deserialize)]
struct NpmVersionInfo {
dist: Option<NpmDistInfo>,
}
/// Distribution details for one published version.
#[derive(Debug, Deserialize)]
struct NpmDistInfo {
// Download URL of the version's .tgz tarball.
tarball: Option<String>,
}
/// On-disk model of package-lock.json (npm lockfileVersion 3 shape).
#[derive(Debug, Serialize, Deserialize)]
pub struct PackageLock {
pub name: String,
pub version: String,
#[serde(rename = "lockfileVersion")]
pub lockfile_version: u32,
// Keyed npm-style: "" for the project root, "node_modules/<pkg>" for
// installed packages. NOTE(review): HashMap makes the serialized key order
// nondeterministic; a BTreeMap would give stable lock files, but changing
// this public field's type would break callers.
pub packages: HashMap<String, LockPackage>,
}
/// One entry in `PackageLock::packages`.
#[derive(Debug, Serialize, Deserialize)]
pub struct LockPackage {
pub version: String,
// Resolved tarball URL; omitted from the JSON when None.
#[serde(skip_serializing_if = "Option::is_none")]
pub resolved: Option<String>,
}
/// CLI entry point: install the given packages (or everything declared in
/// package.json when the list is empty) into the current directory.
pub async fn install(packages: Vec<String>, dev: bool) -> Result<()> {
    let manager = PackageManager::new(std::env::current_dir()?);
    manager.install(packages, dev).await
}
/// CLI entry point: scaffold a new dumpling project in the current directory.
pub async fn init(name: Option<String>) -> Result<()> {
PackageManager::init(name).await
}
/// CLI entry point: run a package.json script from the current directory.
pub async fn run_script(script_name: String) -> Result<()> {
    PackageManager::new(std::env::current_dir()?)
        .run_script(&script_name)
        .await
}
/// CLI entry point: uninstall the given packages from the current directory.
pub async fn uninstall(packages: Vec<String>) -> Result<()> {
    PackageManager::new(std::env::current_dir()?)
        .uninstall(packages)
        .await
}
/// CLI entry point: prune undeclared packages from node_modules.
pub async fn prune() -> Result<()> {
    PackageManager::new(std::env::current_dir()?).prune().await
}
/// In-memory model of a project's package.json manifest.
///
/// Both the snake_case and camelCase spellings of devDependencies /
/// peerDependencies are accepted on load; `dev_dependencies()` merges the
/// two dev maps.
#[derive(Debug, Serialize, Deserialize)]
pub struct PackageJson {
    // BUGFIX: every optional field now carries skip_serializing_if. Previously
    // `save()` wrote absent fields as `null` — including the non-standard
    // snake_case keys "dev_dependencies" and "peer_dependencies" — producing
    // invalid-looking package.json output. Omitting None fields keeps the
    // emitted manifest standard; deserialization is unaffected.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub version: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub main: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub module: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub scripts: Option<HashMap<String, String>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub dependencies: Option<HashMap<String, String>>,
    // Legacy snake_case spelling; "devDependencies" below is the standard key.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub dev_dependencies: Option<HashMap<String, String>>,
    #[serde(rename = "devDependencies", skip_serializing_if = "Option::is_none")]
    pub dev_dependencies_alt: Option<HashMap<String, String>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub peer_dependencies: Option<HashMap<String, String>>,
    #[serde(rename = "peerDependencies", skip_serializing_if = "Option::is_none")]
    pub peer_dependencies_alt: Option<HashMap<String, String>>,
}
impl PackageJson {
    /// Parse a package.json manifest from disk.
    pub async fn load(path: &PathBuf) -> Result<Self> {
        let raw = async_fs::read_to_string(path).await?;
        Ok(serde_json::from_str(&raw)?)
    }
    /// Pretty-print this manifest and write it to `path`.
    pub async fn save(&self, path: &PathBuf) -> Result<()> {
        async_fs::write(path, serde_json::to_string_pretty(self)?).await?;
        Ok(())
    }
    /// Runtime dependencies; an absent field yields an empty map.
    pub fn dependencies(&self) -> HashMap<String, String> {
        self.dependencies.clone().unwrap_or_default()
    }
    /// Dev dependencies, merging the snake_case and camelCase fields; the
    /// camelCase ("devDependencies") entries win on key collisions.
    pub fn dev_dependencies(&self) -> HashMap<String, String> {
        let mut merged: HashMap<String, String> =
            self.dev_dependencies.clone().unwrap_or_default();
        if let Some(alt) = &self.dev_dependencies_alt {
            merged.extend(alt.iter().map(|(k, v)| (k.clone(), v.clone())));
        }
        merged
    }
}
pub struct PackageManager {
root: PathBuf,
}
impl PackageManager {
pub fn new(root: PathBuf) -> Self {
Self { root }
}
/// Scaffold a fresh project in the current directory: package.json with
/// default scripts, src/index.js hello-world, and an empty dist/.
/// Fails if a package.json already exists.
pub async fn init(name: Option<String>) -> Result<()> {
    let current_dir = std::env::current_dir()?;
    let package_json_path = current_dir.join("package.json");
    if package_json_path.exists() {
        return Err(DumplingError::Build("package.json already exists".to_string()));
    }
    // Fall back to the directory name when no explicit project name was given.
    let project_name = match name {
        Some(n) => n,
        None => current_dir
            .file_name()
            .unwrap_or_default()
            .to_string_lossy()
            .to_string(),
    };
    let mut scripts = HashMap::new();
    scripts.insert("start".to_string(), "dumpling run index.js".to_string());
    scripts.insert("dev".to_string(), "dumpling dev".to_string());
    scripts.insert(
        "build".to_string(),
        "dumpling bundle src/index.js --output dist/bundle.js".to_string(),
    );
    let package = PackageJson {
        name: Some(project_name),
        version: Some("0.1.0".to_string()),
        description: Some("A dumpling project".to_string()),
        main: Some("index.js".to_string()),
        module: None,
        scripts: Some(scripts),
        dependencies: Some(HashMap::new()),
        dev_dependencies: None,
        dev_dependencies_alt: Some(HashMap::new()),
        peer_dependencies: None,
        peer_dependencies_alt: None,
    };
    package.save(&package_json_path).await?;
    // Scaffold the conventional src/ and dist/ layout with an entry point.
    let src_dir = current_dir.join("src");
    async_fs::create_dir_all(&src_dir).await?;
    async_fs::create_dir_all(current_dir.join("dist")).await?;
    async_fs::write(src_dir.join("index.js"), "console.log('Hello, Dumpling!');\n").await?;
    println!("Initialized a new dumpling project!");
    println!("Created:");
    println!(" package.json");
    println!(" src/index.js");
    println!(" dist/");
    Ok(())
}
/// Install the named packages, or everything from package.json when the
/// list is empty. Explicit packages are installed concurrently and the
/// whole call fails fast on the first error.
pub async fn install(&self, packages: Vec<String>, dev: bool) -> Result<()> {
    if packages.is_empty() {
        return self.install_all().await;
    }
    futures::future::try_join_all(
        packages.iter().map(|spec| self.install_package(spec, dev)),
    )
    .await?;
    Ok(())
}
/// Remove packages: delete each one's node_modules directory, strip it from
/// every dependency section of package.json, then persist the manifest and
/// regenerate the lock file.
pub async fn uninstall(&self, packages: Vec<String>) -> Result<()> {
let package_json_path = self.root.join("package.json");
if !package_json_path.exists() {
return Err(DumplingError::FileNotFound("package.json not found".to_string()));
}
let mut package = PackageJson::load(&package_json_path).await?;
for name in &packages {
// Scoped packages ("@scope/pkg") live at node_modules/@scope/pkg.
let package_dir = if name.starts_with('@') {
if let Some((scope, pkg)) = name.split_once('/') {
self.root.join("node_modules").join(scope).join(pkg)
} else {
self.root.join("node_modules").join(name)
}
} else {
self.root.join("node_modules").join(name)
};
if package_dir.exists() {
tokio::fs::remove_dir_all(&package_dir).await?;
println!("✓ Removed {}", name);
} else {
println!("Package {} not found in node_modules", name);
}
// Drop the entry from all three dependency maps the manifest may carry
// (regular, snake_case dev, and camelCase "devDependencies").
if let Some(ref mut deps) = package.dependencies {
deps.remove(name);
}
if let Some(ref mut deps) = package.dev_dependencies {
deps.remove(name);
}
if let Some(ref mut deps) = package.dev_dependencies_alt {
deps.remove(name);
}
// Best-effort cleanup of a now-empty "@scope" directory; errors ignored.
if name.starts_with('@') {
if let Some((scope, _)) = name.split_once('/') {
let scope_dir = self.root.join("node_modules").join(scope);
if scope_dir.exists() {
if std::fs::read_dir(&scope_dir).map(|mut e| e.next().is_none()).unwrap_or(false) {
let _ = tokio::fs::remove_dir(scope_dir).await;
}
}
}
}
}
package.save(&package_json_path).await?;
self.save_lock_file(&package)?;
Ok(())
}
/// Delete node_modules entries that are neither declared in package.json
/// (deps or dev deps) nor recorded in package-lock.json, then rewrite the
/// lock file if anything was removed.
pub async fn prune(&self) -> Result<()> {
let node_modules = self.root.join("node_modules");
if !node_modules.exists() {
println!("No node_modules to prune");
return Ok(());
}
let package_json_path = self.root.join("package.json");
if !package_json_path.exists() {
return Err(DumplingError::FileNotFound("package.json not found".to_string()));
}
let package = PackageJson::load(&package_json_path).await?;
// Keep-set: declared dependencies plus everything listed in the lock file.
let mut allowed: std::collections::HashSet<String> = package.dependencies().keys()
.chain(package.dev_dependencies().keys())
.cloned()
.collect();
if let Ok(lock) = std::fs::read_to_string(self.root.join("package-lock.json")) {
if let Ok(lock_data) = serde_json::from_str::<PackageLock>(&lock) {
for key in lock_data.packages.keys() {
// "" is the project-root entry, not an installed package.
if key.is_empty() {
continue;
}
if let Some(name) = key.strip_prefix("node_modules/") {
allowed.insert(name.to_string());
}
}
}
}
let mut removed = 0;
if let Ok(entries) = std::fs::read_dir(&node_modules) {
for entry in entries.flatten() {
let name = entry.file_name().to_string_lossy().to_string();
// Hidden entries (dot-prefixed) are never pruned.
if name.starts_with('.') {
continue;
}
let path = entry.path();
if path.is_dir() {
if name.starts_with('@') {
// Scoped packages are one level down; compare on "@scope/name".
if let Ok(sub_entries) = std::fs::read_dir(&path) {
for sub in sub_entries.flatten() {
let sub_name = format!("{}/{}", name, sub.file_name().to_string_lossy());
if !allowed.contains(&sub_name) {
let sub_path = sub.path();
if sub_path.is_dir() {
tokio::fs::remove_dir_all(&sub_path).await?;
println!("✓ Pruned {}", sub_name);
removed += 1;
}
}
}
}
// Best effort: drop the scope directory itself once emptied.
if std::fs::read_dir(&path).map(|mut e| e.next().is_none()).unwrap_or(false) {
let _ = tokio::fs::remove_dir(path).await;
}
} else if !allowed.contains(&name) {
tokio::fs::remove_dir_all(&path).await?;
println!("✓ Pruned {}", name);
removed += 1;
}
}
}
}
if removed == 0 {
println!("No packages to prune");
} else {
// Reload the manifest so the regenerated lock file reflects the
// post-prune contents of node_modules.
let package = PackageJson::load(&package_json_path).await?;
self.save_lock_file(&package)?;
}
Ok(())
}
/// Install every dependency declared in package.json: runtime deps first,
/// then dev deps, sequentially; finally regenerate the lock file.
async fn install_all(&self) -> Result<()> {
    let manifest_path = self.root.join("package.json");
    if !manifest_path.exists() {
        return Err(DumplingError::FileNotFound("package.json not found".to_string()));
    }
    let manifest = PackageJson::load(&manifest_path).await?;
    for (pkg, ver) in manifest.dependencies() {
        self.install_package_version(&pkg, &ver, false).await?;
    }
    for (pkg, ver) in manifest.dev_dependencies() {
        self.install_package_version(&pkg, &ver, true).await?;
    }
    self.save_lock_file(&manifest)?;
    Ok(())
}
/// Parse a "name[@version]" spec and install the resolved package.
async fn install_package(&self, package_spec: &str, dev: bool) -> Result<()> {
    let (pkg_name, pkg_version) = self.parse_package_spec(package_spec)?;
    self.install_package_version(&pkg_name, &pkg_version, dev).await
}
/// Install a single package into node_modules.
///
/// Resolution order: an existing directory short-circuits (no reinstall);
/// a version pinned in package-lock.json overrides the requested one; the
/// npm registry is tried; and if the registry fetch fails for any reason a
/// mock stub package is written instead so work can continue offline.
async fn install_package_version(&self, name: &str, version: &str, dev: bool) -> Result<()> {
println!("Installing {}@{} {}", name, version, if dev { "(dev)" } else { "" });
let node_modules = self.root.join("node_modules");
async_fs::create_dir_all(&node_modules).await?;
let package_dir = node_modules.join(name);
// NOTE(review): the presence check ignores the installed version, so an
// outdated install is never refreshed by this path.
if package_dir.exists() {
println!("Package {} already exists, skipping", name);
return Ok(());
}
let version_to_install = self.resolve_version_from_lock(name).unwrap_or_else(|| version.to_string());
if let Ok(resolved_version) = self.fetch_and_extract_from_registry(name, &version_to_install, &package_dir).await {
println!("✓ {}@{} installed", name, resolved_version);
return Ok(());
}
// Registry fetch failed: fall back to a stub package with an empty export.
async_fs::create_dir_all(&package_dir).await?;
let mock_package = PackageJson {
name: Some(name.to_string()),
version: Some(version.to_string()),
description: Some(format!("Mock package {}", name)),
main: Some("index.js".to_string()),
module: None,
scripts: None,
dependencies: Some(HashMap::new()),
dev_dependencies: None,
dev_dependencies_alt: None,
peer_dependencies: None,
peer_dependencies_alt: None,
};
mock_package.save(&package_dir.join("package.json")).await?;
async_fs::write(
&package_dir.join("index.js"),
format!("// Mock implementation of {}\nmodule.exports = {{}};\n", name),
)
.await?;
println!("✓ {}@{} installed (mock)", name, version);
Ok(())
}
/// Fetch a package's metadata from the npm registry, resolve `version`,
/// download (or reuse the cached) tarball, and extract it into `package_dir`.
/// Returns the concrete version that was installed.
///
/// # Errors
/// Fails on HTTP errors, an unknown version, a missing tarball URL, or any
/// archive/filesystem error during extraction.
async fn fetch_and_extract_from_registry(
    &self,
    name: &str,
    version: &str,
    package_dir: &PathBuf,
) -> Result<String> {
    // Scoped names ("@scope/pkg") must URL-encode the slash for the registry path.
    let registry_name = name.replace('/', "%2F");
    let url = format!("{}/{}", NPM_REGISTRY, registry_name);
    let client = reqwest::Client::new();
    let response = client
        .get(&url)
        .header("Accept", "application/json")
        .send()
        .await
        .map_err(DumplingError::Http)?;
    if !response.status().is_success() {
        return Err(DumplingError::Build(format!(
            "Registry returned {} for {}",
            response.status(),
            name
        )));
    }
    let info: NpmPackageInfo = response.json().await.map_err(DumplingError::Http)?;
    let resolved_version = self.resolve_version(version, &info)?;
    let version_info = info
        .versions
        .as_ref()
        .and_then(|v| v.get(&resolved_version))
        .ok_or_else(|| {
            DumplingError::Build(format!("Version {} not found for {}", resolved_version, name))
        })?;
    let tarball_url = version_info
        .dist
        .as_ref()
        .and_then(|d| d.tarball.clone())
        .ok_or_else(|| DumplingError::Build(format!("No tarball for {}@{}", name, resolved_version)))?;
    let tarball_bytes = self
        .fetch_tarball_with_cache(&client, name, &resolved_version, &tarball_url)
        .await?;
    // npm tarballs place all files under a top-level "package/" directory.
    let decoder = GzDecoder::new(Cursor::new(tarball_bytes));
    let mut archive = Archive::new(decoder);
    for entry in archive.entries().map_err(|e| DumplingError::Build(e.to_string()))? {
        let mut entry = entry.map_err(|e| DumplingError::Build(e.to_string()))?;
        let out_path = {
            let path = entry.path().map_err(|e| DumplingError::Build(e.to_string()))?;
            let path_str = path.to_string_lossy();
            let stripped = match path_str.strip_prefix("package/") {
                Some(s) => s,
                None => continue,
            };
            // SECURITY FIX: the stripped path was previously joined into
            // package_dir unchecked, so a malicious tarball entry like
            // "package/../../x" could write outside node_modules ("zip-slip").
            // Skip any entry whose components are not plain names.
            let rel = std::path::Path::new(stripped);
            let escapes = rel.components().any(|c| {
                !matches!(
                    c,
                    std::path::Component::Normal(_) | std::path::Component::CurDir
                )
            });
            if escapes {
                continue;
            }
            package_dir.join(rel)
        };
        if entry.header().entry_type().is_dir() {
            async_fs::create_dir_all(&out_path).await?;
        } else {
            if let Some(parent) = out_path.parent() {
                async_fs::create_dir_all(parent).await?;
            }
            // Read via the blocking tar reader into memory, then write asynchronously.
            let mut buf = Vec::new();
            std::io::copy(&mut entry, &mut buf).map_err(DumplingError::Io)?;
            let mut out_file = async_fs::File::create(&out_path).await?;
            out_file.write_all(&buf).await?;
        }
    }
    Ok(resolved_version)
}
/// Return the package tarball bytes, serving from the on-disk cache when
/// present and otherwise downloading and populating the cache.
async fn fetch_tarball_with_cache(
    &self,
    client: &reqwest::Client,
    name: &str,
    version: &str,
    tarball_url: &str,
) -> Result<Vec<u8>> {
    let cache_path = self.cache_path_for_package(name, version);
    if cache_path.exists() {
        // Cache hit: no network round-trip needed.
        return Ok(async_fs::read(&cache_path).await?);
    }
    let response = client.get(tarball_url).send().await.map_err(DumplingError::Http)?;
    let tarball_bytes = response.bytes().await.map_err(DumplingError::Http)?.to_vec();
    if let Some(cache_dir) = cache_path.parent() {
        async_fs::create_dir_all(cache_dir).await?;
    }
    async_fs::write(&cache_path, &tarball_bytes).await?;
    Ok(tarball_bytes)
}
/// Cache location for one package version; slashes in scoped names are
/// flattened to '-' so the name maps to a single directory component.
fn cache_path_for_package(&self, name: &str, version: &str) -> PathBuf {
    let mut path = cache_dir();
    path.push(name.replace('/', "-"));
    path.push(format!("{}.tgz", version));
    path
}
/// Look up a pinned version for `name` in package-lock.json; None when the
/// lock file is missing, unparsable, or has no entry for the package.
fn resolve_version_from_lock(&self, name: &str) -> Option<String> {
    let content = std::fs::read_to_string(self.root.join("package-lock.json")).ok()?;
    let lock: PackageLock = serde_json::from_str(&content).ok()?;
    lock.packages
        .get(&format!("node_modules/{}", name))
        .map(|entry| entry.version.clone())
}
fn save_lock_file(&self, package_json: &PackageJson) -> Result<()> {
let mut lock_packages = HashMap::new();
lock_packages.insert("".to_string(), LockPackage {
version: package_json.version.as_deref().unwrap_or("0.0.0").to_string(),
resolved: None,
});
let node_modules = self.root.join("node_modules");
if node_modules.exists() {
for entry in std::fs::read_dir(&node_modules).unwrap_or_else(|_| std::fs::read_dir(".").unwrap()) {
if let Ok(entry) = entry {
let name = entry.file_name().to_string_lossy().to_string();
if name.starts_with('.') {
continue;
}
let pkg_path = entry.path();
if pkg_path.is_dir() {
let pkg_json = pkg_path.join("package.json");
if pkg_json.is_file() {
if let Ok(content) = std::fs::read_to_string(&pkg_json) {
if let Ok(pkg) = serde_json::from_str::<serde_json::Value>(&content) {
if let Some(v) = pkg.get("version").and_then(|x| x.as_str()) {
lock_packages.insert(format!("node_modules/{}", name), LockPackage {
version: v.to_string(),
resolved: None,
});
}
}
}
}
if name.starts_with('@') {
if let Ok(entries) = std::fs::read_dir(&pkg_path) {
for sub in entries {
if let Ok(sub) = sub {
let sub_name = sub.file_name().to_string_lossy().to_string();
let sub_path = sub.path();
if sub_path.is_dir() {
let pkg_json = sub_path.join("package.json");
if pkg_json.is_file() {
if let Ok(content) = std::fs::read_to_string(&pkg_json) {
if let Ok(pkg) = serde_json::from_str::<serde_json::Value>(&content) {
if let Some(v) = pkg.get("version").and_then(|x| x.as_str()) {
let full_name = format!("{}/{}", name, sub_name);
lock_packages.insert(format!("node_modules/{}", full_name), LockPackage {
version: v.to_string(),
resolved: None,
});
}
}
}
}
}
}
}
}
}
}
}
}
}
let lock = PackageLock {
name: package_json.name.as_deref().unwrap_or("dumpling-project").to_string(),
version: package_json.version.as_deref().unwrap_or("0.0.0").to_string(),
lockfile_version: 3,
packages: lock_packages,
};
let content = serde_json::to_string_pretty(&lock)?;
std::fs::write(self.root.join("package-lock.json"), content)?;
Ok(())
}
/// Resolve a requested version string against the registry metadata.
///
/// "latest" uses the dist-tag; an exact published version is returned as-is;
/// `^`/`~` ranges pick the highest published release that satisfies the
/// range (falling back to the "latest" tag when nothing matches, preserving
/// the previous behavior). Anything else is an error.
fn resolve_version(&self, version: &str, info: &NpmPackageInfo) -> Result<String> {
    // Parse a plain "major.minor.patch" release; pre-release/build suffixes
    // and other shapes yield None and are skipped during range matching.
    fn parse_semver(s: &str) -> Option<(u64, u64, u64)> {
        let mut parts = s.split('.');
        let major = parts.next()?.parse().ok()?;
        let minor = parts.next()?.parse().ok()?;
        let patch = parts.next()?.parse().ok()?;
        if parts.next().is_some() {
            return None;
        }
        Some((major, minor, patch))
    }
    // npm range semantics: `^` keeps the leftmost non-zero component fixed;
    // `~` allows patch-level changes only.
    fn satisfies(caret: bool, req: (u64, u64, u64), v: (u64, u64, u64)) -> bool {
        if v < req {
            return false;
        }
        if caret {
            if req.0 > 0 {
                v.0 == req.0
            } else if req.1 > 0 {
                v.0 == 0 && v.1 == req.1
            } else {
                v == req
            }
        } else {
            v.0 == req.0 && v.1 == req.1
        }
    }
    if version == "latest" {
        return info
            .dist_tags
            .as_ref()
            .and_then(|t| t.get("latest").cloned())
            .ok_or_else(|| DumplingError::Build("No latest tag found".to_string()));
    }
    if let Some(versions) = &info.versions {
        // Exact pins win outright.
        if versions.contains_key(version) {
            return Ok(version.to_string());
        }
        if version.starts_with('^') || version.starts_with('~') {
            let caret = version.starts_with('^');
            // BUGFIX: previously any ^/~ range resolved straight to the
            // "latest" dist-tag, which can silently select an incompatible
            // major version. Choose the best matching published release.
            if let Some(req) = parse_semver(&version[1..]) {
                let best = versions
                    .keys()
                    .filter_map(|k| parse_semver(k).map(|v| (v, k)))
                    .filter(|(v, _)| satisfies(caret, req, *v))
                    .max_by_key(|(v, _)| *v)
                    .map(|(_, k)| k.clone());
                if let Some(found) = best {
                    return Ok(found);
                }
            }
            // Fallback: keep the old behavior of using the "latest" tag.
            if let Some(latest) = info.dist_tags.as_ref().and_then(|t| t.get("latest")) {
                return Ok(latest.clone());
            }
        }
    }
    Err(DumplingError::Build(format!("Version {} not found", version)))
}
/// Split an install spec into (name, version).
///
/// Handles plain names ("foo", "foo@1.2.3") and scoped names
/// ("@scope/pkg", "@scope/pkg@1.2.3"); a missing version means "latest".
///
/// BUGFIX: a scoped name WITHOUT a version previously split on its leading
/// '@' and returned name "" with version "scope/pkg"; the leading '@' is
/// now excluded from separator search so it resolves to ("@scope/pkg",
/// "latest").
fn parse_package_spec(&self, spec: &str) -> Result<(String, String)> {
    if let Some(rest) = spec.strip_prefix('@') {
        // Scoped package: only an '@' AFTER the scope/name separates the version.
        return Ok(match rest.split_once('@') {
            Some((name, version)) => (format!("@{}", name), version.to_string()),
            None => (spec.to_string(), "latest".to_string()),
        });
    }
    match spec.split_once('@') {
        Some((name, version)) => Ok((name.to_string(), version.to_string())),
        None => Ok((spec.to_string(), "latest".to_string())),
    }
}
/// Run a script declared in package.json's "scripts" map through the
/// platform shell, with the project root as working directory.
///
/// # Errors
/// Fails when package.json or the script is missing, when the shell cannot
/// be spawned, or when the script exits with a non-zero status.
pub async fn run_script(&self, script_name: &str) -> Result<()> {
    let package_json_path = self.root.join("package.json");
    if !package_json_path.exists() {
        return Err(DumplingError::FileNotFound("package.json not found".to_string()));
    }
    let package = PackageJson::load(&package_json_path).await?;
    let scripts = package.scripts.ok_or_else(|| {
        DumplingError::Build("No scripts found in package.json".to_string())
    })?;
    let script = scripts.get(script_name).ok_or_else(|| {
        DumplingError::Build(format!("Script '{}' not found", script_name))
    })?;
    // PORTABILITY FIX: this was hard-coded to `sh -c`, which does not exist
    // on Windows; use `cmd /C` there instead.
    #[cfg(windows)]
    let (shell, shell_flag) = ("cmd", "/C");
    #[cfg(not(windows))]
    let (shell, shell_flag) = ("sh", "-c");
    let mut child = Command::new(shell)
        .arg(shell_flag)
        .arg(script)
        .current_dir(&self.root)
        .spawn()
        .map_err(|e| DumplingError::Build(format!("Failed to run script: {}", e)))?;
    let status = child
        .wait()
        .await
        .map_err(|e| DumplingError::Build(format!("Failed to wait for script: {}", e)))?;
    if !status.success() {
        return Err(DumplingError::Build(format!("Script '{}' failed", script_name)));
    }
    Ok(())
}
}