1use crate::cache::ContentCache;
2use crate::errors::{DnxError, Result};
3use crate::resolver::{DependencyGraph, ResolvedPackage};
4use crate::workspace::Workspace;
5use rayon::prelude::*;
6use serde::{Deserialize, Serialize};
7use std::collections::{HashMap, HashSet};
8use std::path::{Path, PathBuf};
9use std::sync::atomic::{AtomicBool, AtomicUsize, Ordering};
10use std::sync::Arc;
11use walkdir::WalkDir;
12
// Process-wide flag for whether reflink/clone-style copies still appear to
// work on this filesystem; starts optimistic (true).
// NOTE(review): consumed outside this chunk (presumably by hardlink_dir and
// flipped to false after the first failure) — confirm at the use site.
static REFLINK_SUPPORTED: AtomicBool = AtomicBool::new(true);

// Windows-only: set once a warning about an NTFS limit has been printed, so
// the message is emitted at most once per process.
// NOTE(review): the emitting code is outside this chunk — confirm.
#[cfg(windows)]
static NTFS_LIMIT_WARNED: AtomicBool = AtomicBool::new(false);

// Windows-only counter of files that fell back to plain copies.
// NOTE(review): incremented outside this chunk — confirm semantics there.
#[cfg(windows)]
static COPY_FALLBACK_COUNT: AtomicUsize = AtomicUsize::new(0);
24
/// Counters describing the filesystem work done by one link pass.
#[derive(Debug, Default, Serialize, Deserialize)]
pub struct LinkStats {
    // Number of packages materialised (or confirmed up to date) in the store.
    pub linked: usize,
    // Number of individual files hardlinked out of the content cache.
    pub hardlinks: usize,
    // Number of symlinks/junctions created.
    pub symlinks: usize,
}
31
/// Shared progress state for a link pass; safe to read from another thread
/// while rayon workers update it.
pub struct LinkProgress {
    // Total number of packages this pass will process.
    pub total: AtomicUsize,
    // Number of packages processed so far (including cached skips).
    pub completed: AtomicUsize,
}
38
39impl LinkProgress {
40 pub fn new() -> Arc<Self> {
41 Arc::new(Self {
42 total: AtomicUsize::new(0),
43 completed: AtomicUsize::new(0),
44 })
45 }
46}
47
/// Outcome counts from running package lifecycle (install) scripts.
#[derive(Debug, Default)]
pub struct ScriptStats {
    // Packages whose scripts all succeeded.
    pub ran: usize,
    // Packages where at least one script failed.
    pub failed: usize,
    // Packages skipped (no scripts, unreadable/invalid package.json, …).
    pub skipped: usize,
}
54
/// Materialises resolved packages from the content-addressed cache into a
/// project's `node_modules` (pnpm-style `.dnx` virtual store + symlinks).
pub struct Linker {
    // Content-addressed cache the packages are hardlinked from.
    cache: Arc<ContentCache>,
    // Root of the project whose node_modules is being populated.
    project_root: PathBuf,
    // Hoist every package to the top of node_modules (npm-compatible mode).
    shamefully_hoist: bool,
    // Hoist matching packages into the hidden .dnx/node_modules instead.
    hoist: bool,
    // Glob-ish patterns selecting packages for hidden hoisting.
    hoist_pattern: Vec<String>,
    // Patterns selecting packages hoisted publicly (e.g. "@types/*").
    public_hoist_pattern: Vec<String>,
}
63
64fn validate_package_name(name: &str) -> Result<()> {
66 if name.contains("..") || name.contains("\\") || (name.contains('/') && !name.starts_with('@'))
67 {
68 return Err(DnxError::Linker(format!(
69 "Invalid package name (potential path traversal): {}",
70 name
71 )));
72 }
73 if name.starts_with('@') {
75 let parts: Vec<&str> = name.splitn(2, '/').collect();
76 if parts.len() != 2 || parts[0].len() <= 1 || parts[1].is_empty() {
77 return Err(DnxError::Linker(format!(
78 "Invalid scoped package name: {}",
79 name
80 )));
81 }
82 if parts[0].contains("..") || parts[1].contains("..") || parts[1].contains('/') {
83 return Err(DnxError::Linker(format!(
84 "Invalid scoped package name (potential path traversal): {}",
85 name
86 )));
87 }
88 }
89 Ok(())
90}
91
92fn validate_bin_entry(name: &str, path: &str) -> Result<()> {
94 if name.contains("..") || name.contains('/') || name.contains('\\') {
95 return Err(DnxError::Linker(format!(
96 "Invalid bin name (potential path traversal): {}",
97 name
98 )));
99 }
100 if path.contains("..") {
101 return Err(DnxError::Linker(format!(
102 "Invalid bin path (potential path traversal): {}",
103 path
104 )));
105 }
106 Ok(())
107}
108
109impl Linker {
110 pub fn new(cache: Arc<ContentCache>, project_root: PathBuf) -> Self {
111 Self {
112 cache,
113 project_root,
114 shamefully_hoist: false,
115 hoist: true,
116 hoist_pattern: vec!["*".to_string()],
117 public_hoist_pattern: vec!["@types/*".to_string()],
118 }
119 }
120
    /// Build a linker with explicit hoisting configuration (see the struct
    /// fields for the meaning of each flag/pattern list).
    pub fn with_config(
        cache: Arc<ContentCache>,
        project_root: PathBuf,
        shamefully_hoist: bool,
        hoist: bool,
        hoist_pattern: Vec<String>,
        public_hoist_pattern: Vec<String>,
    ) -> Self {
        Self {
            cache,
            project_root,
            shamefully_hoist,
            hoist,
            hoist_pattern,
            public_hoist_pattern,
        }
    }
138
    /// Link the resolved graph into `node_modules` without progress
    /// reporting; see [`Self::link_with_progress`] for details.
    pub async fn link(&self, graph: &DependencyGraph, direct_deps: &[String]) -> Result<LinkStats> {
        self.link_with_progress(graph, direct_deps, None).await
    }
143
    /// Link every package in `graph` into `node_modules`, optionally
    /// reporting progress.
    ///
    /// Layout produced (pnpm-style virtual store):
    /// - `node_modules/.dnx/<name>@<version>/node_modules/<name>` —
    ///   hardlinked out of the content-addressed cache;
    /// - dependency symlinks inside each virtual package's `node_modules`;
    /// - top-level `node_modules/<name>` symlinks for `direct_deps`;
    /// - launcher scripts in `node_modules/.bin`.
    ///
    /// When `progress` is given, `total` is set up front and `completed` is
    /// bumped once per package, including cached/skipped ones.
    ///
    /// # Errors
    /// Fails if a package is missing from the cache, a package name fails
    /// validation, or a required filesystem operation fails (stale-entry
    /// cleanup is best effort and only warns).
    pub async fn link_with_progress(
        &self,
        graph: &DependencyGraph,
        direct_deps: &[String],
        progress: Option<Arc<LinkProgress>>,
    ) -> Result<LinkStats> {
        let mut stats = LinkStats::default();
        let node_modules = self.project_root.join("node_modules");
        let dnx_dir = node_modules.join(".dnx");

        // Up-front environment warnings: hardlinks can't cross volumes, and
        // cloud-synced folders interact badly with many small files.
        let cache_root = self.cache.content_dir();
        warn_cross_volume(&cache_root, &self.project_root);

        warn_cloud_sync(&self.project_root);

        // Previous install manifest: used to skip unchanged packages and to
        // find stale virtual-store entries to delete.
        let old_manifest = PackagesManifest::read(&dnx_dir);

        // Build the new manifest from registry packages only; link:/file:/git
        // /workspace specifiers are never materialised in the virtual store.
        let mut new_manifest = PackagesManifest::new();
        for pkg in &graph.packages {
            if pkg.version.starts_with("link:")
                || pkg.version.starts_with("file:")
                || pkg.version.starts_with("git+")
                || pkg.version.starts_with("github:")
                || pkg.version.starts_with("workspace:")
            {
                continue;
            }
            let pkg_key = format!("{}@{}", pkg.name, pkg.version);
            new_manifest.packages.insert(pkg_key, pkg.integrity.clone());
        }

        // Best-effort removal of store entries that dropped out of the graph.
        let stale_keys: Vec<String> = old_manifest
            .packages
            .keys()
            .filter(|k| !new_manifest.packages.contains_key(*k))
            .cloned()
            .collect();

        for key in &stale_keys {
            let stale_dir = dnx_dir.join(key);
            match std::fs::remove_dir_all(&stale_dir) {
                Ok(()) => {}
                Err(e) if e.kind() == std::io::ErrorKind::NotFound => {}
                Err(e) => {
                    tracing::warn!("Failed to remove stale package {}: {}", key, e);
                }
            }
        }

        std::fs::create_dir_all(&dnx_dir)
            .map_err(|e| DnxError::Io(format!("Failed to create .dnx directory: {}", e)))?;

        // Name -> package lookup used when wiring up direct dependencies.
        let pkg_map: HashMap<String, &ResolvedPackage> = graph
            .packages
            .iter()
            .map(|pkg| (pkg.name.clone(), pkg))
            .collect();

        // Validate every name before any of them is joined onto a path.
        for pkg in &graph.packages {
            validate_package_name(&pkg.name)?;
        }

        let total_hardlinks = AtomicUsize::new(0);
        let total_linked = AtomicUsize::new(0);

        if let Some(ref p) = progress {
            p.total.store(graph.packages.len(), Ordering::Relaxed);
        }

        // Stage 1 (parallel): materialise each package into the virtual
        // store by hardlinking files out of the content cache.
        let link_results: std::result::Result<Vec<()>, DnxError> = graph
            .packages
            .par_iter()
            .map(|pkg| {
                // Non-registry specifiers are handled elsewhere; still count
                // them so the linked/progress totals cover the whole graph.
                if pkg.version.starts_with("link:")
                    || pkg.version.starts_with("file:")
                    || pkg.version.starts_with("git+")
                    || pkg.version.starts_with("github:")
                {
                    total_linked.fetch_add(1, Ordering::Relaxed);
                    if let Some(ref p) = progress {
                        p.completed.fetch_add(1, Ordering::Relaxed);
                    }
                    return Ok(());
                }

                let pkg_key = format!("{}@{}", pkg.name, pkg.version);

                // Fast path: same integrity as last install and the directory
                // still exists — nothing to redo.
                if let Some(old_integrity) = old_manifest.packages.get(&pkg_key) {
                    if old_integrity == &pkg.integrity {
                        let virtual_pkg_dir =
                            dnx_dir.join(&pkg_key).join("node_modules").join(&pkg.name);
                        if virtual_pkg_dir.exists() {
                            total_linked.fetch_add(1, Ordering::Relaxed);
                            if let Some(ref p) = progress {
                                p.completed.fetch_add(1, Ordering::Relaxed);
                            }
                            return Ok(());
                        }
                    }
                }

                let virtual_pkg_dir = dnx_dir.join(&pkg_key).join("node_modules").join(&pkg.name);

                std::fs::create_dir_all(&virtual_pkg_dir).map_err(|e| {
                    DnxError::Io(format!(
                        "Failed to create virtual directory for {}: {}",
                        pkg_key, e
                    ))
                })?;

                let cached_path = self.cache.get_path(&pkg.integrity);

                if !cached_path.exists() {
                    return Err(DnxError::Linker(format!(
                        "Package {} not found in cache at {}",
                        pkg_key,
                        cached_path.display()
                    )));
                }

                // `copies` reports files that could not be hardlinked and
                // were copied instead (judging by the warning below).
                let (hardlinks, copies) = hardlink_dir(&cached_path, &virtual_pkg_dir)?;
                total_hardlinks.fetch_add(hardlinks, Ordering::Relaxed);
                total_linked.fetch_add(1, Ordering::Relaxed);
                if let Some(ref p) = progress {
                    p.completed.fetch_add(1, Ordering::Relaxed);
                }

                if copies > 0 {
                    eprintln!(
                        "Warning: {} files copied instead of hardlinked for {}",
                        copies, pkg_key
                    );
                }

                Ok(())
            })
            .collect();

        link_results?;
        stats.hardlinks = total_hardlinks.load(Ordering::Relaxed);
        stats.linked = total_linked.load(Ordering::Relaxed);

        // Stage 2: compute the dependency symlinks each virtual package needs
        // (its deps' store directories linked into its own node_modules).
        let mut symlink_pairs: Vec<(PathBuf, PathBuf)> = Vec::new();
        let mut scope_dirs: HashSet<PathBuf> = HashSet::new();

        for pkg in &graph.packages {
            let pkg_key = format!("{}@{}", pkg.name, pkg.version);
            let virtual_node_modules = dnx_dir.join(&pkg_key).join("node_modules");

            let all_deps = pkg.dependencies.iter().chain(pkg.peer_dependencies.iter());
            for dep in all_deps {
                // Split "name@version" / "@scope/name@version" at the version
                // separator. NOTE(review): this duplicates parse_dep_ref used
                // by the workspace path — presumably equivalent; unify once
                // confirmed.
                let (dep_name, dep_version) = if let Some(stripped) = dep.strip_prefix('@') {
                    if let Some(idx) = stripped.find('@') {
                        (&dep[..idx + 1], &stripped[idx + 1..])
                    } else {
                        continue;
                    }
                } else if let Some(idx) = dep.find('@') {
                    (&dep[..idx], &dep[idx + 1..])
                } else {
                    continue;
                };

                let dep_key = format!("{}@{}", dep_name, dep_version);
                let target = dnx_dir.join(&dep_key).join("node_modules").join(dep_name);
                let link = virtual_node_modules.join(dep_name);

                // Scoped deps need their "@scope" parent directory created
                // before the symlink.
                if dep_name.contains('/') {
                    if let Some(parent) = link.parent() {
                        scope_dirs.insert(parent.to_path_buf());
                    }
                }

                symlink_pairs.push((target, link));
            }
        }

        for dir in &scope_dirs {
            std::fs::create_dir_all(dir)
                .map_err(|e| DnxError::Io(format!("Failed to create scope directory: {}", e)))?;
        }

        // Create the dependency symlinks in parallel. An existing entry is
        // left as-is even when it points elsewhere.
        // NOTE(review): stale/mismatched targets are not repaired here —
        // confirm that is intentional.
        let symlink_count = AtomicUsize::new(0);
        let symlink_results: std::result::Result<Vec<()>, DnxError> = symlink_pairs
            .par_iter()
            .map(|(target, link)| {
                if link.symlink_metadata().is_ok() {
                    if let Ok(existing_target) = std::fs::read_link(link) {
                        if existing_target == *target {
                            return Ok(());
                        }
                    }
                    return Ok(());
                }
                create_symlink_or_junction(target, link)?;
                symlink_count.fetch_add(1, Ordering::Relaxed);
                Ok(())
            })
            .collect();
        symlink_results?;
        stats.symlinks += symlink_count.load(Ordering::Relaxed);

        // Stage 3: expose direct dependencies at the top level of
        // node_modules, replacing stale links.
        for dep_name in direct_deps {
            let pkg = pkg_map.get(dep_name).ok_or_else(|| {
                DnxError::Linker(format!("Direct dependency {} not found in graph", dep_name))
            })?;

            // link: specifiers point straight at a local directory instead of
            // the virtual store.
            let target = if pkg.version.starts_with("link:") {
                let link_path = &pkg.version[5..];
                let abs_path = if std::path::Path::new(link_path).is_absolute() {
                    PathBuf::from(link_path)
                } else {
                    self.project_root.join(link_path)
                };
                abs_path
            } else {
                let pkg_key = format!("{}@{}", pkg.name, pkg.version);
                dnx_dir.join(&pkg_key).join("node_modules").join(&pkg.name)
            };

            let link = node_modules.join(&pkg.name);

            if pkg.name.contains('/') {
                if let Some(parent) = link.parent() {
                    std::fs::create_dir_all(parent).map_err(|e| {
                        DnxError::Io(format!(
                            "Failed to create scope directory for {}: {}",
                            pkg.name, e
                        ))
                    })?;
                }
            }

            if link.symlink_metadata().is_ok() {
                if let Ok(existing_target) = std::fs::read_link(&link) {
                    if existing_target == target {
                        continue;
                    }
                }
                // Wrong target (or not a symlink): remove before recreating.
                match std::fs::remove_dir_all(&link) {
                    Ok(()) => {}
                    Err(e) if e.kind() == std::io::ErrorKind::NotFound => {}
                    Err(e) => {
                        return Err(DnxError::Linker(format!(
                            "Failed to remove stale symlink {}: {}",
                            link.display(),
                            e
                        )));
                    }
                }
            }

            // Unix: create at a temp name and rename over the destination so
            // the swap is atomic. Other platforms create in place, retrying.
            #[cfg(unix)]
            {
                let temp_link = link.with_extension(".dnx-tmp");
                let _ = std::fs::remove_file(&temp_link);
                create_symlink_or_junction(&target, &temp_link)?;
                if let Err(e) = std::fs::rename(&temp_link, &link) {
                    let _ = std::fs::remove_file(&temp_link);
                    return Err(DnxError::Linker(format!(
                        "Failed to atomically place symlink {}: {}",
                        link.display(),
                        e
                    )));
                }
            }
            #[cfg(not(unix))]
            {
                create_symlink_or_junction_with_retry(&target, &link)?;
            }
            stats.symlinks += 1;
        }

        // Stage 4: generate .bin launchers for every package declaring
        // binaries (explicitly, or discovered from its package.json).
        let bin_dir = node_modules.join(".bin");
        std::fs::create_dir_all(&bin_dir)
            .map_err(|e| DnxError::Io(format!("Failed to create .bin directory: {}", e)))?;

        for pkg in &graph.packages {
            let pkg_key = format!("{}@{}", pkg.name, pkg.version);
            let pkg_path = dnx_dir.join(&pkg_key).join("node_modules").join(&pkg.name);

            let bin = if pkg.bin.is_empty() {
                extract_bin_from_package_json(&pkg_path)
            } else {
                pkg.bin.clone()
            };

            if !bin.is_empty() {
                // create_bin_scripts only needs name/version/bin; hand it a
                // minimal synthetic record.
                let synthetic = ResolvedPackage {
                    name: pkg.name.clone(),
                    version: pkg.version.clone(),
                    tarball_url: String::new(),
                    integrity: String::new(),
                    dependencies: Vec::new(),
                    peer_dependencies: Vec::new(),
                    bin,
                    has_install_script: false,
                };
                create_bin_scripts(&bin_dir, &synthetic, &pkg_path)?;
            }
        }

        new_manifest.write(&dnx_dir);

        Ok(stats)
    }
482
    /// Workspace-aware link without progress reporting; see
    /// [`Self::link_workspace_with_progress`] for details.
    pub async fn link_workspace(
        &self,
        graph: &DependencyGraph,
        workspace: &Workspace,
    ) -> Result<LinkStats> {
        self.link_workspace_with_progress(graph, workspace, None)
            .await
    }
497
    /// Workspace-aware variant of [`Self::link_with_progress`].
    ///
    /// Differences from the single-project path:
    /// - `workspace:` packages are never materialised in the store; members
    ///   are symlinked from their on-disk locations instead;
    /// - each workspace member gets its own `node_modules` populated with
    ///   links to sibling members and external store packages, plus a `.bin`
    ///   directory;
    /// - the progress `total` counts only external (registry) packages.
    ///
    /// # Errors
    /// Fails if a package is missing from the cache, a name fails
    /// validation, or a required filesystem operation fails.
    pub async fn link_workspace_with_progress(
        &self,
        graph: &DependencyGraph,
        workspace: &Workspace,
        progress: Option<Arc<LinkProgress>>,
    ) -> Result<LinkStats> {
        let mut stats = LinkStats::default();
        let node_modules = self.project_root.join("node_modules");
        let dnx_dir = node_modules.join(".dnx");

        // Same environment warnings as the single-project path.
        let cache_root = self.cache.content_dir();
        warn_cross_volume(&cache_root, &self.project_root);

        warn_cloud_sync(&self.project_root);

        let old_manifest = PackagesManifest::read(&dnx_dir);

        // New manifest covers external registry packages only.
        let mut new_manifest = PackagesManifest::new();
        for pkg in &graph.packages {
            if pkg.version.starts_with("workspace:")
                || pkg.version.starts_with("link:")
                || pkg.version.starts_with("file:")
                || pkg.version.starts_with("git+")
                || pkg.version.starts_with("github:")
            {
                continue;
            }
            let pkg_key = format!("{}@{}", pkg.name, pkg.version);
            new_manifest.packages.insert(pkg_key, pkg.integrity.clone());
        }

        // Best-effort removal of store entries no longer in the graph.
        let stale_keys: Vec<String> = old_manifest
            .packages
            .keys()
            .filter(|k| !new_manifest.packages.contains_key(*k))
            .cloned()
            .collect();

        for key in &stale_keys {
            let stale_dir = dnx_dir.join(key);
            match std::fs::remove_dir_all(&stale_dir) {
                Ok(()) => {}
                Err(e) if e.kind() == std::io::ErrorKind::NotFound => {}
                Err(e) => {
                    tracing::warn!("Failed to remove stale package {}: {}", key, e);
                }
            }
        }

        std::fs::create_dir_all(&dnx_dir)
            .map_err(|e| DnxError::Io(format!("Failed to create .dnx directory: {}", e)))?;

        // Name -> package lookup for wiring member dependencies below.
        let pkg_map: HashMap<String, &ResolvedPackage> = graph
            .packages
            .iter()
            .map(|pkg| (pkg.name.clone(), pkg))
            .collect();

        for pkg in &graph.packages {
            validate_package_name(&pkg.name)?;
        }

        let total_hardlinks = AtomicUsize::new(0);
        let total_linked = AtomicUsize::new(0);

        // Progress total counts external packages only — workspace/local
        // specifiers are filtered out of the parallel stage below.
        if let Some(ref p) = progress {
            let external_count = graph
                .packages
                .iter()
                .filter(|pkg| {
                    !pkg.version.starts_with("workspace:")
                        && !pkg.version.starts_with("link:")
                        && !pkg.version.starts_with("file:")
                        && !pkg.version.starts_with("git+")
                        && !pkg.version.starts_with("github:")
                })
                .count();
            p.total.store(external_count, Ordering::Relaxed);
        }

        // Stage 1 (parallel): hardlink external packages into the store.
        let link_results: std::result::Result<Vec<()>, DnxError> = graph
            .packages
            .par_iter()
            .filter(|pkg| {
                !pkg.version.starts_with("workspace:")
                    && !pkg.version.starts_with("link:")
                    && !pkg.version.starts_with("file:")
                    && !pkg.version.starts_with("git+")
                    && !pkg.version.starts_with("github:")
            })
            .map(|pkg| {
                let pkg_key = format!("{}@{}", pkg.name, pkg.version);

                // Fast path: unchanged integrity and directory still present.
                if let Some(old_integrity) = old_manifest.packages.get(&pkg_key) {
                    if old_integrity == &pkg.integrity {
                        let virtual_pkg_dir =
                            dnx_dir.join(&pkg_key).join("node_modules").join(&pkg.name);
                        if virtual_pkg_dir.exists() {
                            total_linked.fetch_add(1, Ordering::Relaxed);
                            if let Some(ref p) = progress {
                                p.completed.fetch_add(1, Ordering::Relaxed);
                            }
                            return Ok(());
                        }
                    }
                }

                let virtual_pkg_dir = dnx_dir.join(&pkg_key).join("node_modules").join(&pkg.name);

                std::fs::create_dir_all(&virtual_pkg_dir).map_err(|e| {
                    DnxError::Io(format!(
                        "Failed to create virtual directory for {}: {}",
                        pkg_key, e
                    ))
                })?;

                let cached_path = self.cache.get_path(&pkg.integrity);
                if !cached_path.exists() {
                    return Err(DnxError::Linker(format!(
                        "Package {} not found in cache at {}",
                        pkg_key,
                        cached_path.display()
                    )));
                }

                let (hardlinks, _copies) = hardlink_dir(&cached_path, &virtual_pkg_dir)?;
                total_hardlinks.fetch_add(hardlinks, Ordering::Relaxed);
                total_linked.fetch_add(1, Ordering::Relaxed);
                if let Some(ref p) = progress {
                    p.completed.fetch_add(1, Ordering::Relaxed);
                }
                Ok(())
            })
            .collect();

        link_results?;
        stats.hardlinks = total_hardlinks.load(Ordering::Relaxed);
        stats.linked = total_linked.load(Ordering::Relaxed);

        // Stage 2: dependency symlinks between store packages.
        let mut ws_symlink_pairs: Vec<(PathBuf, PathBuf)> = Vec::new();
        let mut ws_scope_dirs: HashSet<PathBuf> = HashSet::new();

        for pkg in &graph.packages {
            if pkg.version.starts_with("workspace:") {
                continue;
            }
            let pkg_key = format!("{}@{}", pkg.name, pkg.version);
            let virtual_node_modules = dnx_dir.join(&pkg_key).join("node_modules");

            let all_deps = pkg.dependencies.iter().chain(pkg.peer_dependencies.iter());
            for dep in all_deps {
                let (dep_name, dep_version) = parse_dep_ref(dep);
                if dep_name.is_empty() {
                    continue;
                }

                let dep_key = format!("{}@{}", dep_name, dep_version);
                let target = dnx_dir.join(&dep_key).join("node_modules").join(dep_name);
                let link = virtual_node_modules.join(dep_name);

                // Scoped deps need their "@scope" parent created first.
                if dep_name.contains('/') {
                    if let Some(parent) = link.parent() {
                        ws_scope_dirs.insert(parent.to_path_buf());
                    }
                }

                ws_symlink_pairs.push((target, link));
            }
        }

        for dir in &ws_scope_dirs {
            std::fs::create_dir_all(dir)
                .map_err(|e| DnxError::Io(format!("Failed to create scope directory: {}", e)))?;
        }

        // Existing entries are left untouched even if they point elsewhere.
        // NOTE(review): mismatched targets are not repaired — confirm
        // intentional (same policy as the single-project path).
        let ws_symlink_count = AtomicUsize::new(0);
        let ws_symlink_results: std::result::Result<Vec<()>, DnxError> = ws_symlink_pairs
            .par_iter()
            .map(|(target, link)| {
                if link.symlink_metadata().is_ok() {
                    if let Ok(existing_target) = std::fs::read_link(link) {
                        if existing_target == *target {
                            return Ok(());
                        }
                    }
                    return Ok(());
                }
                create_symlink_or_junction(target, link)?;
                ws_symlink_count.fetch_add(1, Ordering::Relaxed);
                Ok(())
            })
            .collect();
        ws_symlink_results?;
        stats.symlinks += ws_symlink_count.load(Ordering::Relaxed);

        // Stage 3: link every workspace member into the root node_modules so
        // members can import each other by name.
        for (name, member) in &workspace.members {
            let link = node_modules.join(name);

            if name.contains('/') {
                if let Some(parent) = link.parent() {
                    std::fs::create_dir_all(parent).map_err(|e| {
                        DnxError::Io(format!(
                            "Failed to create scope directory for {}: {}",
                            name, e
                        ))
                    })?;
                }
            }

            if link.symlink_metadata().is_ok() {
                if let Ok(existing_target) = std::fs::read_link(&link) {
                    if existing_target == member.path {
                        continue;
                    }
                }
                // Best effort: a failed removal surfaces as a creation error
                // just below.
                let _ = std::fs::remove_dir_all(&link);
            }

            create_symlink_or_junction(&member.path, &link)?;
            stats.symlinks += 1;
        }

        // Stage 4: populate each member's own node_modules with links to
        // sibling members and external store packages.
        for (name, member) in &workspace.members {
            let member_nm = member.path.join("node_modules");
            std::fs::create_dir_all(&member_nm).map_err(|e| {
                DnxError::Io(format!("Failed to create node_modules for {}: {}", name, e))
            })?;

            let all_deps = member.package_json.all_dependencies();
            for (dep_name, dep_spec) in &all_deps {
                let link = member_nm.join(dep_name);

                if dep_name.contains('/') {
                    if let Some(parent) = link.parent() {
                        std::fs::create_dir_all(parent).map_err(|e| {
                            DnxError::Io(format!(
                                "Failed to create scope dir for {}: {}",
                                dep_name, e
                            ))
                        })?;
                    }
                }

                // Anything already present (even a broken symlink) is kept.
                if link.exists() || link.symlink_metadata().is_ok() {
                    continue;
                }

                // Prefer a sibling workspace member over the store.
                if dep_spec.starts_with("workspace:") || workspace.members.contains_key(dep_name) {
                    if let Some(dep_member) = workspace.members.get(dep_name) {
                        create_symlink_or_junction(&dep_member.path, &link)?;
                        stats.symlinks += 1;
                    }
                } else {
                    if let Some(pkg) = pkg_map.get(dep_name) {
                        if !pkg.version.starts_with("workspace:") {
                            let pkg_key = format!("{}@{}", pkg.name, pkg.version);
                            let target =
                                dnx_dir.join(&pkg_key).join("node_modules").join(&pkg.name);
                            create_symlink_or_junction(&target, &link)?;
                            stats.symlinks += 1;
                        }
                    }
                }
            }

            let member_bin_dir = member_nm.join(".bin");
            std::fs::create_dir_all(&member_bin_dir).map_err(|e| {
                DnxError::Io(format!("Failed to create .bin dir for {}: {}", name, e))
            })?;
        }

        // Stage 5: root .bin launchers for external packages with binaries.
        let bin_dir = node_modules.join(".bin");
        std::fs::create_dir_all(&bin_dir)
            .map_err(|e| DnxError::Io(format!("Failed to create .bin directory: {}", e)))?;

        for pkg in &graph.packages {
            if pkg.version.starts_with("workspace:") {
                continue;
            }
            let pkg_key = format!("{}@{}", pkg.name, pkg.version);
            let pkg_path = dnx_dir.join(&pkg_key).join("node_modules").join(&pkg.name);

            let bin = if pkg.bin.is_empty() {
                extract_bin_from_package_json(&pkg_path)
            } else {
                pkg.bin.clone()
            };

            if !bin.is_empty() {
                // create_bin_scripts only needs name/version/bin.
                let synthetic = ResolvedPackage {
                    name: pkg.name.clone(),
                    version: pkg.version.clone(),
                    tarball_url: String::new(),
                    integrity: String::new(),
                    dependencies: Vec::new(),
                    peer_dependencies: Vec::new(),
                    bin,
                    has_install_script: false,
                };
                create_bin_scripts(&bin_dir, &synthetic, &pkg_path)?;
            }
        }

        // Members count as linked packages in the reported stats.
        stats.linked += workspace.members.len();

        new_manifest.write(&dnx_dir);

        Ok(stats)
    }
825
826 pub fn write_state(&self, lockfile_hash: &str) -> Result<()> {
829 let state_dir = self.project_root.join("node_modules").join(".dnx");
830 std::fs::create_dir_all(&state_dir)
831 .map_err(|e| DnxError::Io(format!("Failed to create .dnx directory: {}", e)))?;
832
833 let state = serde_json::json!({ "lockfileHash": lockfile_hash });
834 let state_path = state_dir.join("state.json");
835 std::fs::write(&state_path, state.to_string())
836 .map_err(|e| DnxError::Io(format!("Failed to write state file: {}", e)))?;
837 Ok(())
838 }
839
840 pub fn check_state(&self, lockfile_hash: &str) -> bool {
842 let state_path = self
843 .project_root
844 .join("node_modules")
845 .join(".dnx")
846 .join("state.json");
847 let content = match std::fs::read_to_string(&state_path) {
848 Ok(c) => c,
849 Err(_) => return false,
850 };
851 let json: serde_json::Value = match serde_json::from_str(&content) {
852 Ok(v) => v,
853 Err(_) => return false,
854 };
855 json.get("lockfileHash")
856 .and_then(|v| v.as_str())
857 .map(|h| h == lockfile_hash)
858 .unwrap_or(false)
859 }
860
861 pub fn run_install_scripts(&self, graph: &DependencyGraph) -> ScriptStats {
864 let node_modules = self.project_root.join("node_modules");
865 let dnx_dir = node_modules.join(".dnx");
866 let bin_dir = node_modules.join(".bin");
867
868 let packages_with_scripts: Vec<&ResolvedPackage> = graph
869 .packages
870 .iter()
871 .filter(|pkg| {
872 pkg.has_install_script
873 && !pkg.version.starts_with("link:")
874 && !pkg.version.starts_with("file:")
875 && !pkg.version.starts_with("git+")
876 && !pkg.version.starts_with("github:")
877 && !pkg.version.starts_with("workspace:")
878 })
879 .collect();
880
881 if packages_with_scripts.is_empty() {
882 return ScriptStats {
883 ran: 0,
884 failed: 0,
885 skipped: 0,
886 };
887 }
888
889 let ran = AtomicUsize::new(0);
890 let failed = AtomicUsize::new(0);
891 let skipped = AtomicUsize::new(0);
892
893 packages_with_scripts.par_iter().for_each(|pkg| {
894 let pkg_key = format!("{}@{}", pkg.name, pkg.version);
895 let pkg_path = dnx_dir
896 .join(&pkg_key)
897 .join("node_modules")
898 .join(&pkg.name);
899
900 let pkg_json_path = pkg_path.join("package.json");
901 if !pkg_json_path.exists() {
902 skipped.fetch_add(1, Ordering::Relaxed);
903 return;
904 }
905
906 let content = match std::fs::read_to_string(&pkg_json_path) {
908 Ok(c) => c,
909 Err(_) => {
910 skipped.fetch_add(1, Ordering::Relaxed);
911 return;
912 }
913 };
914 let json: serde_json::Value = match serde_json::from_str(&content) {
915 Ok(v) => v,
916 Err(_) => {
917 skipped.fetch_add(1, Ordering::Relaxed);
918 return;
919 }
920 };
921
922 let scripts = match json.get("scripts").and_then(|s| s.as_object()) {
923 Some(s) => s,
924 None => {
925 skipped.fetch_add(1, Ordering::Relaxed);
926 return;
927 }
928 };
929
930 let has_any = scripts.contains_key("preinstall")
931 || scripts.contains_key("install")
932 || scripts.contains_key("postinstall");
933
934 if !has_any {
935 skipped.fetch_add(1, Ordering::Relaxed);
936 return;
937 }
938
939 let current_path = std::env::var("PATH").unwrap_or_default();
941 let path_sep = if cfg!(windows) { ";" } else { ":" };
942 let new_path = format!("{}{}{}", bin_dir.to_string_lossy(), path_sep, current_path);
943
944 let pkg_name = json.get("name").and_then(|v| v.as_str()).unwrap_or(&pkg.name);
945 let pkg_version = json.get("version").and_then(|v| v.as_str()).unwrap_or(&pkg.version);
946
947 let is_optional = pkg.dependencies.iter().any(|d| d.contains("optional"));
949
950 for script_name in &["preinstall", "install", "postinstall"] {
952 if let Some(script_content) = scripts.get(*script_name).and_then(|v| v.as_str()) {
953 if script_content.is_empty() {
954 continue;
955 }
956
957 let mut cmd = if cfg!(windows) {
958 let mut c = std::process::Command::new("cmd.exe");
959 c.args(["/C", script_content]);
960 c
961 } else {
962 let mut c = std::process::Command::new("/bin/sh");
963 c.args(["-c", script_content]);
964 c
965 };
966
967 cmd.current_dir(&pkg_path)
968 .env("PATH", &new_path)
969 .env("npm_lifecycle_event", *script_name)
970 .env("npm_package_name", pkg_name)
971 .env("npm_package_version", pkg_version)
972 .stdout(std::process::Stdio::piped())
973 .stderr(std::process::Stdio::piped());
974
975 match cmd.status() {
976 Ok(status) if status.success() => {
977 }
979 Ok(_) | Err(_) => {
980 if is_optional {
981 eprintln!(
982 "\x1b[33m⚠ Optional script {} for {} failed, continuing\x1b[0m",
983 script_name, pkg_key
984 );
985 } else {
986 eprintln!(
987 "\x1b[33m⚠ Script {} for {} failed\x1b[0m",
988 script_name, pkg_key
989 );
990 }
991 failed.fetch_add(1, Ordering::Relaxed);
992 return; }
994 }
995 }
996 }
997
998 ran.fetch_add(1, Ordering::Relaxed);
999 });
1000
1001 ScriptStats {
1002 ran: ran.load(Ordering::Relaxed),
1003 failed: failed.load(Ordering::Relaxed),
1004 skipped: skipped.load(Ordering::Relaxed),
1005 }
1006 }
1007
    /// Hoist packages out of the virtual store according to the configured
    /// mode, returning the number of links created.
    ///
    /// - `shamefully_hoist`: link every store package (except direct deps,
    ///   already linked) straight into the root `node_modules`.
    /// - `hoist`: link packages matching `hoist_pattern` into the hidden
    ///   `node_modules/.dnx/node_modules`, and packages matching
    ///   `public_hoist_pattern` into the root `node_modules`.
    ///
    /// Individual link failures are ignored (the package is simply not
    /// counted), so this never aborts an install.
    pub fn hoist_packages(&self, graph: &DependencyGraph, direct_deps: &[String]) -> Result<usize> {
        let node_modules = self.project_root.join("node_modules");
        let mut hoisted = 0usize;

        if self.shamefully_hoist {
            for pkg in &graph.packages {
                let pkg_key = format!("{}@{}", pkg.name, pkg.version);
                let source = node_modules
                    .join(".dnx")
                    .join(&pkg_key)
                    .join("node_modules")
                    .join(&pkg.name);

                // Skip packages never materialised (workspace/link specs).
                if !source.exists() {
                    continue;
                }

                let target = node_modules.join(&pkg.name);

                // Direct deps already have a top-level link.
                if direct_deps.contains(&pkg.name) {
                    continue;
                }

                if pkg.name.contains('/') {
                    if let Some(parent) = target.parent() {
                        let _ = std::fs::create_dir_all(parent);
                    }
                }

                // symlink_metadata also catches broken symlinks, which
                // `exists()` (target-following) would miss.
                if target.exists() || target.symlink_metadata().is_ok() {
                    continue;
                }

                if create_symlink_or_junction(&source, &target).is_ok() {
                    hoisted += 1;
                }
            }
        } else if self.hoist {
            // Hidden hoist target, visible to packages via node resolution
            // but not to the project itself.
            let hoist_dir = node_modules.join(".dnx").join("node_modules");
            let _ = std::fs::create_dir_all(&hoist_dir);

            for pkg in &graph.packages {
                if !self.matches_pattern(&pkg.name, &self.hoist_pattern) {
                    continue;
                }

                let pkg_key = format!("{}@{}", pkg.name, pkg.version);
                let source = node_modules
                    .join(".dnx")
                    .join(&pkg_key)
                    .join("node_modules")
                    .join(&pkg.name);

                if !source.exists() {
                    continue;
                }

                let target = hoist_dir.join(&pkg.name);
                if pkg.name.contains('/') {
                    if let Some(parent) = target.parent() {
                        let _ = std::fs::create_dir_all(parent);
                    }
                }

                // NOTE(review): unlike the other branches this only checks
                // exists(), so a broken symlink at `target` makes creation
                // fail silently — confirm whether that is acceptable.
                if !target.exists() && create_symlink_or_junction(&source, &target).is_ok() {
                    hoisted += 1;
                }
            }

            // Publicly hoisted packages (e.g. @types/*) go to the root so
            // tooling like tsc can see them.
            for pkg in &graph.packages {
                if !self.matches_pattern(&pkg.name, &self.public_hoist_pattern) {
                    continue;
                }
                if direct_deps.contains(&pkg.name) {
                    continue;
                }

                let pkg_key = format!("{}@{}", pkg.name, pkg.version);
                let source = node_modules
                    .join(".dnx")
                    .join(&pkg_key)
                    .join("node_modules")
                    .join(&pkg.name);

                if !source.exists() {
                    continue;
                }

                let target = node_modules.join(&pkg.name);
                if pkg.name.contains('/') {
                    if let Some(parent) = target.parent() {
                        let _ = std::fs::create_dir_all(parent);
                    }
                }

                if !target.exists()
                    && target.symlink_metadata().is_err()
                    && create_symlink_or_junction(&source, &target).is_ok()
                {
                    hoisted += 1;
                }
            }
        }

        Ok(hoisted)
    }
1125
1126 fn matches_pattern(&self, name: &str, patterns: &[String]) -> bool {
1128 for pattern in patterns {
1129 if pattern == "*" {
1130 return true;
1131 }
1132 if let Some(prefix) = pattern.strip_suffix("/*") {
1134 if name.starts_with(&format!("{}/", prefix)) {
1135 return true;
1136 }
1137 } else if pattern == name {
1138 return true;
1139 }
1140 }
1141 false
1142 }
1143
1144 pub async fn unlink(&self, packages: &[String]) -> Result<()> {
1146 let node_modules = self.project_root.join("node_modules");
1147
1148 for pkg_name in packages {
1149 let link = node_modules.join(pkg_name);
1150
1151 if link.exists() || link.symlink_metadata().is_ok() {
1152 std::fs::remove_dir_all(&link)
1153 .map_err(|e| DnxError::Io(format!("Failed to unlink {}: {}", pkg_name, e)))?;
1154 }
1155 }
1156
1157 Ok(())
1158 }
1159}
1160
/// Create a directory link from `link` to `target`: a symlink on Unix, an
/// NTFS junction on Windows (junctions need no elevated privileges).
///
/// On Windows the target is made absolute first — junctions do not support
/// relative targets — falling back to the uncanonicalised path if
/// canonicalisation fails (e.g. the target does not exist yet).
///
/// # Errors
/// Returns `DnxError::Linker` (Windows) or `DnxError::Io` (Unix) when the
/// underlying creation call fails; `link` already existing is a failure.
fn create_symlink_or_junction(target: &Path, link: &Path) -> Result<()> {
    #[cfg(windows)]
    {
        let target_abs = if target.is_absolute() {
            target
                .canonicalize()
                .unwrap_or_else(|_| target.to_path_buf())
        } else {
            // Resolve relative targets against the link's parent directory.
            let joined = link.parent().unwrap_or(Path::new(".")).join(target);
            joined.canonicalize().unwrap_or(joined)
        };

        junction::create(&target_abs, link).map_err(|e| {
            DnxError::Linker(format!(
                "Failed to create junction from {} to {}: {}",
                link.display(),
                target_abs.display(),
                e
            ))
        })?;
    }

    #[cfg(unix)]
    {
        std::os::unix::fs::symlink(target, link).map_err(|e| {
            DnxError::Io(format!(
                "Failed to create symlink from {} to {}: {}",
                link.display(),
                target.display(),
                e
            ))
        })?;
    }

    Ok(())
}
1207
/// Non-Unix variant of [`create_symlink_or_junction`] that retries a few
/// times, removing any half-created entry between attempts (Windows AV /
/// indexer races can make junction creation fail transiently).
///
/// # Errors
/// Returns the last creation error once all retries are exhausted.
#[cfg(not(unix))]
fn create_symlink_or_junction_with_retry(target: &Path, link: &Path) -> Result<()> {
    const MAX_RETRIES: usize = 3;
    for attempt in 0..MAX_RETRIES {
        match create_symlink_or_junction(target, link) {
            Ok(()) => return Ok(()),
            Err(e) if attempt + 1 < MAX_RETRIES => {
                tracing::debug!(
                    "Junction creation attempt {} failed for {}: {}, retrying…",
                    attempt + 1,
                    link.display(),
                    e
                );
                std::thread::sleep(std::time::Duration::from_millis(10));
                // Clear any partially created entry before the next attempt;
                // failures here are ignored (next create will surface them).
                match std::fs::remove_dir_all(link) {
                    Ok(()) => {}
                    Err(e) if e.kind() == std::io::ErrorKind::NotFound => {}
                    Err(_) => {}
                }
            }
            Err(e) => return Err(e),
        }
    }
    // The final iteration always returns from one of the match arms above
    // (Ok, or the Err arm whose guard is false on the last attempt).
    unreachable!()
}
1237
/// Best-effort check that two paths live on the same filesystem volume
/// (hardlinks only work within one volume).
///
/// Errs on the side of `true`: when volume identity cannot be determined
/// the caller proceeds and relies on runtime hardlink fallback instead of
/// warning spuriously.
fn same_volume(path_a: &Path, path_b: &Path) -> bool {
    #[cfg(unix)]
    {
        use std::os::unix::fs::MetadataExt;
        // Same st_dev means same mounted filesystem.
        let meta_a = std::fs::metadata(path_a);
        let meta_b = std::fs::metadata(path_b);
        match (meta_a, meta_b) {
            (Ok(a), Ok(b)) => a.dev() == b.dev(),
            // Unstat-able path (e.g. not created yet): assume same volume.
            _ => true,
        }
    }
    #[cfg(windows)]
    {
        // Resolve a path to its volume mount point via the Win32 API.
        fn get_volume_path(path: &Path) -> Option<String> {
            use std::os::windows::ffi::OsStrExt;
            // NUL-terminated UTF-16 for the Win32 call.
            let wide: Vec<u16> = path
                .as_os_str()
                .encode_wide()
                .chain(std::iter::once(0))
                .collect();
            let mut volume_path: Vec<u16> = vec![0u16; 260];

            extern "system" {
                fn GetVolumePathNameW(
                    lpszFileName: *const u16,
                    lpszVolumePathName: *mut u16,
                    cchBufferLength: u32,
                ) -> i32;
            }

            // SAFETY: both pointers reference live, properly sized buffers;
            // the input is NUL-terminated and the length matches the output
            // buffer's capacity.
            let result = unsafe {
                GetVolumePathNameW(
                    wide.as_ptr(),
                    volume_path.as_mut_ptr(),
                    volume_path.len() as u32,
                )
            };

            if result != 0 {
                // Truncate at the NUL terminator.
                let len = volume_path
                    .iter()
                    .position(|&c| c == 0)
                    .unwrap_or(volume_path.len());
                Some(String::from_utf16_lossy(&volume_path[..len]))
            } else {
                None
            }
        }

        if let (Some(vol_a), Some(vol_b)) = (get_volume_path(path_a), get_volume_path(path_b)) {
            return vol_a.eq_ignore_ascii_case(&vol_b);
        }

        // Fallback when the API fails: compare drive letters, assuming the
        // first character of each path is the drive.
        let a_str = path_a.to_string_lossy();
        let b_str = path_b.to_string_lossy();
        let a_drive = a_str.chars().next();
        let b_drive = b_str.chars().next();
        match (a_drive, b_drive) {
            (Some(a), Some(b)) => a.eq_ignore_ascii_case(&b),
            _ => true,
        }
    }
    #[cfg(not(any(unix, windows)))]
    {
        let _ = (path_a, path_b);
        true
    }
}
1311
1312fn warn_cross_volume(cache_path: &Path, project_path: &Path) {
1314 if !same_volume(cache_path, project_path) {
1315 let project_drive = project_path.to_string_lossy().chars().next().unwrap_or('C');
1316
1317 eprintln!(
1318 "\x1b[33m⚠ Cache ({}) and project ({}) are on different volumes.\n \
1319 Hardlinks will not work — falling back to file copies.\n \
1320 Fix: set DNX_CACHE_DIR to a path on the same drive as your project:\n \
1321 \x1b[1m DNX_CACHE_DIR={}:/.dnx/cache dnx install\x1b[0m\x1b[33m\n \
1322 Or add to dnx.toml: [settings] cache-dir = \"{}:/.dnx/cache\"\x1b[0m",
1323 cache_path.display(),
1324 project_path.display(),
1325 project_drive,
1326 project_drive,
1327 );
1328 }
1329}
1330
/// Warns — at most once per process — when the project sits inside a
/// cloud-sync folder (OneDrive, Dropbox, …), where syncing node_modules
/// can slow installs down considerably.
fn warn_cloud_sync(project_path: &Path) {
    use std::sync::atomic::AtomicBool;
    static WARNED: AtomicBool = AtomicBool::new(false);

    if WARNED.load(Ordering::Relaxed) {
        return;
    }

    // Folder names that the major sync clients put in their default paths.
    const CLOUD_INDICATORS: [&str; 5] = [
        "OneDrive",
        "Dropbox",
        "iCloud Drive",
        "Google Drive",
        "MEGA",
    ];

    let path_str = project_path.to_string_lossy();
    if let Some(indicator) = CLOUD_INDICATORS
        .iter()
        .find(|name| path_str.contains(*name))
    {
        // Latch only on a hit, matching the original: clean paths keep
        // being checked on later calls.
        WARNED.store(true, Ordering::Relaxed);
        eprintln!(
            "\x1b[33m\u{26a0} Project is inside {} — file sync may slow down installs.\n \
             Consider moving your project outside the synced folder, or excluding\n \
             node_modules from sync.\x1b[0m",
            indicator
        );
    }
}
1364
/// Materializes one cached file at `dest`, preferring the cheapest
/// mechanism: filesystem clone (reflink), then hardlink, then full copy.
///
/// Returns a code describing what was done:
/// * `0` — reflink succeeded
/// * `1` — hardlink succeeded
/// * `2` — fell back to a byte copy
fn import_file(src: &Path, dest: &Path) -> Result<u8> {
    // Process-wide latch: after the first reflink failure, stop trying for
    // the rest of the run.
    // NOTE(review): a single path-specific failure disables reflink
    // globally, even for files where it would have worked — confirm this
    // trade-off is intended.
    if REFLINK_SUPPORTED.load(Ordering::Relaxed) {
        if reflink_copy::reflink(src, dest).is_ok() {
            return Ok(0);
        }
        REFLINK_SUPPORTED.store(false, Ordering::Relaxed);
    }

    match std::fs::hard_link(src, dest) {
        Ok(()) => return Ok(1),
        Err(e) => {
            #[cfg(windows)]
            {
                // 1142 = ERROR_TOO_MANY_LINKS: NTFS caps a file at 1023
                // hardlinks. Count the fallback and warn exactly once.
                if let Some(code) = e.raw_os_error() {
                    if code == 1142 {
                        COPY_FALLBACK_COUNT.fetch_add(1, Ordering::Relaxed);
                        if !NTFS_LIMIT_WARNED.swap(true, Ordering::Relaxed) {
                            eprintln!(
                                "\x1b[33m⚠ NTFS hardlink limit (1023) reached — falling back to copies for remaining files.\n \
                                Consider enabling Developer Mode for symlink support.\x1b[0m"
                            );
                        }
                    }
                }
            }
            // Non-Windows builds never inspect the error; silence the
            // unused-variable warning.
            let _ = e;
        }
    }

    // Last resort: plain copy (any hardlink failure lands here).
    std::fs::copy(src, dest).map_err(|e| {
        DnxError::Io(format!(
            "Failed to copy {} to {}: {}",
            src.display(),
            dest.display(),
            e
        ))
    })?;
    Ok(2)
}
1419
1420fn hardlink_dir(src: &Path, dest: &Path) -> Result<(usize, usize)> {
1429 let mut files: Vec<(PathBuf, PathBuf)> = Vec::new();
1431
1432 for entry in WalkDir::new(src).into_iter() {
1433 let entry = entry.map_err(|e| {
1434 DnxError::Io(format!("Failed to walk directory {}: {}", src.display(), e))
1435 })?;
1436
1437 let path = entry.path();
1438 let relative = path.strip_prefix(src).map_err(|e| {
1439 DnxError::Io(format!(
1440 "Failed to get relative path for {}: {}",
1441 path.display(),
1442 e
1443 ))
1444 })?;
1445
1446 let dest_path = dest.join(relative);
1447
1448 if entry.file_type().is_dir() {
1449 match std::fs::create_dir_all(&dest_path) {
1450 Ok(()) => {}
1451 Err(e) if e.kind() == std::io::ErrorKind::AlreadyExists => {}
1452 Err(e) => {
1453 return Err(DnxError::Io(format!(
1454 "Failed to create directory {}: {}",
1455 dest_path.display(),
1456 e
1457 )));
1458 }
1459 }
1460 } else if entry.file_type().is_file() {
1461 files.push((path.to_path_buf(), dest_path));
1462 }
1463 }
1464
1465 let hardlinks = AtomicUsize::new(0);
1468 let copies = AtomicUsize::new(0);
1469
1470 let result: std::result::Result<Vec<()>, DnxError> = files
1471 .par_iter()
1472 .map(|(src_path, dest_path)| {
1473 if let Ok(dest_meta) = std::fs::metadata(dest_path) {
1475 if let Ok(src_meta) = std::fs::metadata(src_path) {
1476 if dest_meta.len() == src_meta.len() {
1477 hardlinks.fetch_add(1, Ordering::Relaxed);
1478 return Ok(());
1479 }
1480 }
1481 }
1482 let _ = std::fs::remove_file(dest_path);
1484 match import_file(src_path, dest_path)? {
1485 0 | 1 => {
1486 hardlinks.fetch_add(1, Ordering::Relaxed);
1487 }
1488 _ => {
1489 copies.fetch_add(1, Ordering::Relaxed);
1490 }
1491 }
1492 Ok(())
1493 })
1494 .collect();
1495
1496 result?;
1497
1498 Ok((
1499 hardlinks.load(Ordering::Relaxed),
1500 copies.load(Ordering::Relaxed),
1501 ))
1502}
1503
1504fn create_bin_scripts(bin_dir: &Path, pkg: &ResolvedPackage, pkg_path: &Path) -> Result<()> {
1506 for (bin_name, bin_path) in &pkg.bin {
1507 validate_bin_entry(bin_name, bin_path)?;
1508 let actual_bin = pkg_path.join(bin_path);
1510
1511 let bin_dir_abs = bin_dir
1513 .canonicalize()
1514 .unwrap_or_else(|_| bin_dir.to_path_buf());
1515 let actual_bin_abs = actual_bin
1516 .canonicalize()
1517 .unwrap_or_else(|_| actual_bin.clone());
1518
1519 let relative_path = pathdiff::diff_paths(&actual_bin_abs, &bin_dir_abs)
1520 .unwrap_or_else(|| PathBuf::from("..").join(pkg.name.clone()).join(bin_path));
1521
1522 #[cfg(windows)]
1523 {
1524 let cmd_file = bin_dir.join(format!("{}.cmd", bin_name));
1526 let cmd_content = format!(
1527 "@IF EXIST \"%~dp0\\node.exe\" (\n \"%~dp0\\node.exe\" \"%~dp0\\{}\" %*\n) ELSE (\n node \"%~dp0\\{}\" %*\n)",
1528 relative_path.to_string_lossy().replace("/", "\\"),
1529 relative_path.to_string_lossy().replace("/", "\\")
1530 );
1531 std::fs::write(&cmd_file, cmd_content).map_err(|e| {
1532 DnxError::Io(format!(
1533 "Failed to write bin script {}: {}",
1534 cmd_file.display(),
1535 e
1536 ))
1537 })?;
1538 }
1539
1540 let sh_file = bin_dir.join(bin_name);
1542 let sh_content = format!(
1543 "#!/bin/sh\nbasedir=$(dirname \"$(echo \"$0\" | sed -e 's,\\\\,/,g')\")\nnode \"$basedir/{}\" \"$@\"",
1544 relative_path.to_string_lossy().replace("\\", "/")
1545 );
1546 std::fs::write(&sh_file, sh_content).map_err(|e| {
1547 DnxError::Io(format!(
1548 "Failed to write bin script {}: {}",
1549 sh_file.display(),
1550 e
1551 ))
1552 })?;
1553
1554 #[cfg(unix)]
1555 {
1556 use std::os::unix::fs::PermissionsExt;
1557 let mut perms = std::fs::metadata(&sh_file)
1559 .map_err(|e| DnxError::Io(format!("Failed to get metadata: {}", e)))?
1560 .permissions();
1561 perms.set_mode(0o755);
1562 std::fs::set_permissions(&sh_file, perms)
1563 .map_err(|e| DnxError::Io(format!("Failed to set permissions: {}", e)))?;
1564 }
1565 }
1566
1567 Ok(())
1568}
1569
1570fn extract_bin_from_package_json(pkg_path: &Path) -> HashMap<String, String> {
1573 let pj_path = pkg_path.join("package.json");
1574 let content = match std::fs::read_to_string(&pj_path) {
1575 Ok(c) => c,
1576 Err(_) => return HashMap::new(),
1577 };
1578 let json: serde_json::Value = match serde_json::from_str(&content) {
1579 Ok(v) => v,
1580 Err(_) => return HashMap::new(),
1581 };
1582 let pkg_name = json.get("name").and_then(|v| v.as_str()).unwrap_or("");
1583 let bin_val = json.get("bin");
1584 match bin_val {
1585 None => HashMap::new(),
1586 Some(serde_json::Value::String(s)) => {
1587 let cmd = if let Some(idx) = pkg_name.rfind('/') {
1588 &pkg_name[idx + 1..]
1589 } else {
1590 pkg_name
1591 };
1592 let mut map = HashMap::new();
1593 if !cmd.is_empty() {
1594 map.insert(cmd.to_string(), s.clone());
1595 }
1596 map
1597 }
1598 Some(serde_json::Value::Object(obj)) => {
1599 let mut map = HashMap::new();
1600 for (key, val) in obj {
1601 if let serde_json::Value::String(s) = val {
1602 map.insert(key.clone(), s.clone());
1603 }
1604 }
1605 map
1606 }
1607 _ => HashMap::new(),
1608 }
1609}
1610
/// Splits a dependency reference like `name@range` into `(name, range)`.
///
/// Scoped packages keep their leading `@` in the name — `"@types/node@18"`
/// becomes `("@types/node", "18")` — and a reference with no version part
/// yields an empty range: `"react"` -> `("react", "")`.
fn parse_dep_ref(dep: &str) -> (&str, &str) {
    // Skip a leading scope '@' so only the version separator is searched.
    let body = dep.strip_prefix('@').unwrap_or(dep);
    let offset = dep.len() - body.len();
    match body.find('@') {
        Some(idx) => (&dep[..offset + idx], &body[idx + 1..]),
        None => (dep, ""),
    }
}
1625
1626#[derive(Debug, Default, Serialize, Deserialize)]
1629struct PackagesManifest {
1630 packages: HashMap<String, String>,
1632}
1633
1634impl PackagesManifest {
1635 fn new() -> Self {
1636 Self {
1637 packages: HashMap::new(),
1638 }
1639 }
1640
1641 fn read(dnx_dir: &Path) -> Self {
1642 let path = dnx_dir.join("packages.json");
1643 match std::fs::read_to_string(&path) {
1644 Ok(content) => serde_json::from_str(&content).unwrap_or_default(),
1645 Err(_) => Self::default(),
1646 }
1647 }
1648
1649 fn write(&self, dnx_dir: &Path) {
1650 let path = dnx_dir.join("packages.json");
1651 if let Ok(content) = serde_json::to_string(self) {
1652 let _ = std::fs::write(&path, content);
1653 }
1654 }
1655}
1656
1657pub fn reconstruct_graph_from_node_modules(project_root: &Path) -> Option<DependencyGraph> {
1664 let dnx_dir = project_root.join("node_modules").join(".dnx");
1665 let manifest = PackagesManifest::read(&dnx_dir);
1666 if manifest.packages.is_empty() {
1667 return None;
1668 }
1669
1670 let mut packages = Vec::new();
1671
1672 for (pkg_key, integrity) in &manifest.packages {
1673 let (name, version) = if let Some(stripped) = pkg_key.strip_prefix('@') {
1675 if let Some(idx) = stripped.find('@') {
1676 (&pkg_key[..idx + 1], &stripped[idx + 1..])
1677 } else {
1678 continue;
1679 }
1680 } else if let Some(idx) = pkg_key.find('@') {
1681 (&pkg_key[..idx], &pkg_key[idx + 1..])
1682 } else {
1683 continue;
1684 };
1685
1686 let pkg_path = dnx_dir.join(pkg_key).join("node_modules").join(name);
1687 let pkg_json_path = pkg_path.join("package.json");
1688
1689 let mut dep_refs = Vec::new();
1691 let mut peer_dep_refs = Vec::new();
1692 let mut bin = HashMap::new();
1693 let mut has_install_script = false;
1694 let mut tarball_url = String::new();
1695
1696 if let Ok(content) = std::fs::read_to_string(&pkg_json_path) {
1697 if let Ok(json) = serde_json::from_str::<serde_json::Value>(&content) {
1698 if let Some(deps) = json.get("dependencies").and_then(|d| d.as_object()) {
1700 for (dep_name, dep_range) in deps {
1701 if let Some(range) = dep_range.as_str() {
1702 dep_refs.push(format!("{}@{}", dep_name, range));
1703 }
1704 }
1705 }
1706 if let Some(peers) = json.get("peerDependencies").and_then(|d| d.as_object()) {
1708 for (dep_name, dep_range) in peers {
1709 if let Some(range) = dep_range.as_str() {
1710 peer_dep_refs.push(format!("{}@{}", dep_name, range));
1711 }
1712 }
1713 }
1714 if let Some(bin_val) = json.get("bin") {
1716 match bin_val {
1717 serde_json::Value::String(s) => {
1718 let cmd = if let Some(idx) = name.rfind('/') {
1719 &name[idx + 1..]
1720 } else {
1721 name
1722 };
1723 if !cmd.is_empty() {
1724 bin.insert(cmd.to_string(), s.clone());
1725 }
1726 }
1727 serde_json::Value::Object(obj) => {
1728 for (k, v) in obj {
1729 if let serde_json::Value::String(s) = v {
1730 bin.insert(k.clone(), s.clone());
1731 }
1732 }
1733 }
1734 _ => {}
1735 }
1736 }
1737 if let Some(scripts) = json.get("scripts").and_then(|s| s.as_object()) {
1739 has_install_script = scripts.contains_key("preinstall")
1740 || scripts.contains_key("install")
1741 || scripts.contains_key("postinstall");
1742 }
1743 if let Some(resolved) = json.get("_resolved").and_then(|v| v.as_str()) {
1745 tarball_url = resolved.to_string();
1746 }
1747 }
1748 }
1749
1750 packages.push(crate::resolver::ResolvedPackage {
1751 name: name.to_string(),
1752 version: version.to_string(),
1753 tarball_url,
1754 integrity: integrity.clone(),
1755 dependencies: dep_refs,
1756 peer_dependencies: peer_dep_refs,
1757 bin,
1758 has_install_script,
1759 });
1760 }
1761
1762 if packages.is_empty() {
1763 return None;
1764 }
1765
1766 Some(DependencyGraph { packages })
1767}
1768
mod pathdiff {
    //! Vendored copy of the `pathdiff` crate's `diff_paths` (derived from
    //! rustc's old `path_relative_from`), avoiding an extra dependency.

    use std::path::{Component, Path, PathBuf};

    /// Computes a relative path from `base` to `path`.
    ///
    /// Returns `None` when `path` is relative but `base` is absolute, or
    /// when `base` contains a `..` component that cannot be resolved
    /// lexically. (The previous copy was missing the upstream `CurDir` and
    /// `ParentDir` arms: a base of `"."` produced a spurious `../` prefix,
    /// and a `..` in `base` silently produced a wrong path.)
    pub fn diff_paths<P, B>(path: P, base: B) -> Option<PathBuf>
    where
        P: AsRef<Path>,
        B: AsRef<Path>,
    {
        let path = path.as_ref();
        let base = base.as_ref();

        if path.is_absolute() != base.is_absolute() {
            if path.is_absolute() {
                Some(PathBuf::from(path))
            } else {
                None
            }
        } else {
            let mut ita = path.components();
            let mut itb = base.components();
            let mut comps: Vec<Component> = vec![];

            loop {
                match (ita.next(), itb.next()) {
                    (None, None) => break,
                    (Some(a), None) => {
                        comps.push(a);
                        comps.extend(ita.by_ref());
                        break;
                    }
                    // Base is longer: climb one level per extra component.
                    (None, _) => comps.push(Component::ParentDir),
                    // Shared prefix: drop matching leading components.
                    (Some(a), Some(b)) if comps.is_empty() && a == b => (),
                    // `.` in base is a no-op: keep the path component.
                    (Some(a), Some(Component::CurDir)) => comps.push(a),
                    // `..` in base cannot be resolved lexically.
                    (Some(_), Some(Component::ParentDir)) => return None,
                    (Some(a), Some(_)) => {
                        comps.push(Component::ParentDir);
                        for _ in itb {
                            comps.push(Component::ParentDir);
                        }
                        comps.push(a);
                        comps.extend(ita.by_ref());
                        break;
                    }
                }
            }
            Some(comps.iter().map(|c| c.as_os_str()).collect())
        }
    }
}