1pub mod analyze;
2pub mod cache;
3pub mod changed_files;
4pub mod churn;
5pub mod cross_reference;
6pub mod discover;
7pub mod duplicates;
8pub(crate) mod errors;
9mod external_style_usage;
10pub mod extract;
11pub mod plugins;
12pub(crate) mod progress;
13pub mod results;
14pub(crate) mod scripts;
15pub mod suppress;
16pub mod trace;
17
18pub use fallow_graph::graph;
20pub use fallow_graph::project;
21pub use fallow_graph::resolve;
22
23use std::path::Path;
24use std::time::Instant;
25
26use errors::FallowError;
27use fallow_config::{
28 EntryPointRole, PackageJson, ResolvedConfig, discover_workspaces,
29 find_undeclared_workspaces_with_ignores,
30};
31use rayon::prelude::*;
32use results::AnalysisResults;
33use trace::PipelineTimings;
34
/// Maximum number of undeclared-workspace paths listed inline in the warning
/// before the rest is summarized as "(and N more)".
const UNDECLARED_WORKSPACE_WARNING_PREVIEW: usize = 5;
/// A workspace paired with its successfully parsed `package.json`.
type LoadedWorkspacePackage<'a> = (&'a fallow_config::WorkspaceInfo, PackageJson);
37
/// Bundle returned by the pipeline entry points.
///
/// The optional fields are populated only when the producing pipeline was
/// asked to retain them (see the flags on `analyze_full`), so callers that
/// only need `results` do not pay for keeping the graph/modules/files alive.
pub struct AnalysisOutput {
    /// Dead-code findings plus summaries (entry-point summary, etc.).
    pub results: AnalysisResults,
    /// Per-stage timing profile; `Some` when tracing/retention was requested.
    pub timings: Option<PipelineTimings>,
    /// Module graph; `Some` only when the caller asked to retain it.
    pub graph: Option<graph::ModuleGraph>,
    /// Parsed module info; `Some` only when `retain_modules` was set.
    pub modules: Option<Vec<extract::ModuleInfo>>,
    /// Discovered files; `Some` only when `retain_modules` was set.
    pub files: Option<Vec<discover::DiscoveredFile>>,
    /// Package names referenced by package.json scripts and CI files.
    pub script_used_packages: rustc_hash::FxHashSet<String>,
}
54
55fn update_cache(
57 store: &mut cache::CacheStore,
58 modules: &[extract::ModuleInfo],
59 files: &[discover::DiscoveredFile],
60) {
61 for module in modules {
62 if let Some(file) = files.get(module.file_id.0 as usize) {
63 let (mt, sz) = file_mtime_and_size(&file.path);
64 if let Some(cached) = store.get_by_path_only(&file.path)
66 && cached.content_hash == module.content_hash
67 {
68 if cached.mtime_secs != mt || cached.file_size != sz {
69 store.insert(&file.path, cache::module_to_cached(module, mt, sz));
70 }
71 continue;
72 }
73 store.insert(&file.path, cache::module_to_cached(module, mt, sz));
74 }
75 }
76 store.retain_paths(files);
77}
78
/// Best-effort `(mtime seconds since Unix epoch, byte size)` for `path`.
///
/// Any metadata failure — missing file, unsupported mtime, pre-epoch clock —
/// collapses to `0` so callers can treat the pair as "unknown" without
/// error handling.
fn file_mtime_and_size(path: &std::path::Path) -> (u64, u64) {
    let Ok(meta) = std::fs::metadata(path) else {
        return (0, 0);
    };
    let mtime = meta
        .modified()
        .ok()
        .and_then(|t| t.duration_since(std::time::SystemTime::UNIX_EPOCH).ok())
        .map_or(0, |d| d.as_secs());
    (mtime, meta.len())
}
90
91fn format_undeclared_workspace_warning(
92 root: &Path,
93 undeclared: &[fallow_config::WorkspaceDiagnostic],
94) -> Option<String> {
95 if undeclared.is_empty() {
96 return None;
97 }
98
99 let preview = undeclared
100 .iter()
101 .take(UNDECLARED_WORKSPACE_WARNING_PREVIEW)
102 .map(|diag| {
103 diag.path
104 .strip_prefix(root)
105 .unwrap_or(&diag.path)
106 .display()
107 .to_string()
108 .replace('\\', "/")
109 })
110 .collect::<Vec<_>>();
111 let remaining = undeclared
112 .len()
113 .saturating_sub(UNDECLARED_WORKSPACE_WARNING_PREVIEW);
114 let tail = if remaining > 0 {
115 format!(" (and {remaining} more)")
116 } else {
117 String::new()
118 };
119 let noun = if undeclared.len() == 1 {
120 "directory with package.json is"
121 } else {
122 "directories with package.json are"
123 };
124 let guidance = if undeclared.len() == 1 {
125 "Add that path to package.json workspaces or pnpm-workspace.yaml if it should be analyzed as a workspace."
126 } else {
127 "Add those paths to package.json workspaces or pnpm-workspace.yaml if they should be analyzed as workspaces."
128 };
129
130 Some(format!(
131 "{} {} not declared as {}: {}{}. {}",
132 undeclared.len(),
133 noun,
134 if undeclared.len() == 1 {
135 "a workspace"
136 } else {
137 "workspaces"
138 },
139 preview.join(", "),
140 tail,
141 guidance
142 ))
143}
144
145fn warn_undeclared_workspaces(
146 root: &Path,
147 workspaces_vec: &[fallow_config::WorkspaceInfo],
148 ignore_patterns: &globset::GlobSet,
149 quiet: bool,
150) {
151 if quiet {
152 return;
153 }
154
155 let undeclared = find_undeclared_workspaces_with_ignores(root, workspaces_vec, ignore_patterns);
156 if let Some(message) = format_undeclared_workspace_warning(root, &undeclared) {
157 tracing::warn!("{message}");
158 }
159}
160
161pub fn analyze(config: &ResolvedConfig) -> Result<AnalysisResults, FallowError> {
167 let output = analyze_full(config, false, false, false, false)?;
168 Ok(output.results)
169}
170
171pub fn analyze_with_usages(config: &ResolvedConfig) -> Result<AnalysisResults, FallowError> {
177 let output = analyze_full(config, false, true, false, false)?;
178 Ok(output.results)
179}
180
/// Run the full analysis pipeline retaining timings and the module graph
/// (`retain = true`); usage collection, complexity, and module retention
/// stay disabled.
pub fn analyze_with_trace(config: &ResolvedConfig) -> Result<AnalysisOutput, FallowError> {
    analyze_full(config, true, false, false, false)
}
189
/// Run the full analysis pipeline while retaining parsed modules and
/// discovered files in the output (`retain_modules = true`), so callers can
/// re-run analysis without re-parsing (see `analyze_with_parse_result`).
///
/// * `need_complexity` — also compute complexity metrics during extraction.
/// * `retain_graph` — additionally keep timings and the module graph.
pub fn analyze_retaining_modules(
    config: &ResolvedConfig,
    need_complexity: bool,
    retain_graph: bool,
) -> Result<AnalysisOutput, FallowError> {
    analyze_full(config, retain_graph, false, need_complexity, true)
}
206
/// Re-run the analysis pipeline against already-parsed `modules`, skipping
/// the parse/extract and cache stages entirely.
///
/// Mirrors `analyze_full` stage-for-stage (workspaces → discover → plugins →
/// scripts → entry points → resolve → graph → analyze) but reuses the
/// caller-supplied modules, so `parse_extract_ms` is reported as 0 and cache
/// counters stay at 0. Always returns timings and the graph; never returns
/// modules/files.
#[allow(
    clippy::too_many_lines,
    reason = "pipeline orchestration stays easier to audit in one place"
)]
pub fn analyze_with_parse_result(
    config: &ResolvedConfig,
    modules: &[extract::ModuleInfo],
) -> Result<AnalysisOutput, FallowError> {
    let _span = tracing::info_span!("fallow_analyze_with_parse_result").entered();
    let pipeline_start = Instant::now();

    // Spinners only for interactive stderr and human-oriented output formats.
    let show_progress = !config.quiet
        && std::io::IsTerminal::is_terminal(&std::io::stderr())
        && matches!(
            config.output,
            fallow_config::OutputFormat::Human
                | fallow_config::OutputFormat::Compact
                | fallow_config::OutputFormat::Markdown
        );
    let progress = progress::AnalysisProgress::new(show_progress);

    // Missing node_modules degrades resolution accuracy; warn, don't fail.
    if !config.root.join("node_modules").is_dir() {
        tracing::warn!(
            "node_modules directory not found. Run `npm install` / `pnpm install` first for accurate results."
        );
    }

    // Stage: workspace discovery.
    let t = Instant::now();
    let workspaces_vec = discover_workspaces(&config.root);
    let workspaces_ms = t.elapsed().as_secs_f64() * 1000.0;
    if !workspaces_vec.is_empty() {
        tracing::info!(count = workspaces_vec.len(), "workspaces discovered");
    }

    warn_undeclared_workspaces(
        &config.root,
        &workspaces_vec,
        &config.ignore_patterns,
        config.quiet,
    );

    // Stage: file discovery.
    let t = Instant::now();
    let pb = progress.stage_spinner("Discovering files...");
    let discovered_files = discover::discover_files(config);
    let discover_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    let project = project::ProjectState::new(discovered_files, workspaces_vec);
    let files = project.files();
    let workspaces = project.workspaces();
    let root_pkg = load_root_package_json(config);
    let workspace_pkgs = load_workspace_packages(workspaces);

    // Stage: plugin detection (root + workspaces).
    let t = Instant::now();
    let pb = progress.stage_spinner("Detecting plugins...");
    let mut plugin_result = run_plugins(
        config,
        files,
        workspaces,
        root_pkg.as_ref(),
        &workspace_pkgs,
    );
    let plugins_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Stage: package.json / CI script analysis (mutates plugin_result).
    let t = Instant::now();
    analyze_all_scripts(
        config,
        workspaces,
        root_pkg.as_ref(),
        &workspace_pkgs,
        &mut plugin_result,
    );
    let scripts_ms = t.elapsed().as_secs_f64() * 1000.0;

    // Stage: entry-point discovery (package.json, plugins, infra, dynamic).
    let t = Instant::now();
    let entry_points = discover_all_entry_points(
        config,
        files,
        workspaces,
        root_pkg.as_ref(),
        &workspace_pkgs,
        &plugin_result,
    );
    let entry_points_ms = t.elapsed().as_secs_f64() * 1000.0;

    let ep_summary = summarize_entry_points(&entry_points.all);

    // Stage: import resolution over the reused modules.
    let t = Instant::now();
    let pb = progress.stage_spinner("Resolving imports...");
    let mut resolved = resolve::resolve_all_imports(
        modules,
        files,
        workspaces,
        &plugin_result.active_plugins,
        &plugin_result.path_aliases,
        &plugin_result.scss_include_paths,
        &config.root,
        &config.resolve.conditions,
    );
    external_style_usage::augment_external_style_package_usage(
        &mut resolved,
        config,
        workspaces,
        &plugin_result,
    );
    let resolve_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Stage: module-graph construction with reachability roots.
    let t = Instant::now();
    let pb = progress.stage_spinner("Building module graph...");
    let graph = graph::ModuleGraph::build_with_reachability_roots(
        &resolved,
        &entry_points.all,
        &entry_points.runtime,
        &entry_points.test,
        files,
    );
    let graph_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Stage: dead-code analysis (usage collection disabled on this path).
    let t = Instant::now();
    let pb = progress.stage_spinner("Analyzing...");
    let mut result = analyze::find_dead_code_full(
        &graph,
        config,
        &resolved,
        Some(&plugin_result),
        workspaces,
        modules,
        false,
    );
    let analyze_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();
    progress.finish();

    result.entry_point_summary = Some(ep_summary);

    let total_ms = pipeline_start.elapsed().as_secs_f64() * 1000.0;

    tracing::debug!(
        "\n┌─ Pipeline Profile (reuse) ─────────────────────\n\
         │ discover files: {:>8.1}ms ({} files)\n\
         │ workspaces: {:>8.1}ms\n\
         │ plugins: {:>8.1}ms\n\
         │ script analysis: {:>8.1}ms\n\
         │ parse/extract: SKIPPED (reused {} modules)\n\
         │ entry points: {:>8.1}ms ({} entries)\n\
         │ resolve imports: {:>8.1}ms\n\
         │ build graph: {:>8.1}ms\n\
         │ analyze: {:>8.1}ms\n\
         │ ────────────────────────────────────────────\n\
         │ TOTAL: {:>8.1}ms\n\
         └─────────────────────────────────────────────────",
        discover_ms,
        files.len(),
        workspaces_ms,
        plugins_ms,
        scripts_ms,
        modules.len(),
        entry_points_ms,
        entry_points.all.len(),
        resolve_ms,
        graph_ms,
        analyze_ms,
        total_ms,
    );

    // Parsing was skipped, so parse time and cache counters are reported as 0.
    let timings = Some(PipelineTimings {
        discover_files_ms: discover_ms,
        file_count: files.len(),
        workspaces_ms,
        workspace_count: workspaces.len(),
        plugins_ms,
        script_analysis_ms: scripts_ms,
        parse_extract_ms: 0.0, module_count: modules.len(),
        cache_hits: 0,
        cache_misses: 0,
        cache_update_ms: 0.0,
        entry_points_ms,
        entry_point_count: entry_points.all.len(),
        resolve_imports_ms: resolve_ms,
        build_graph_ms: graph_ms,
        analyze_ms,
        total_ms,
    });

    Ok(AnalysisOutput {
        results: result,
        timings,
        graph: Some(graph),
        modules: None,
        files: None,
        script_used_packages: plugin_result.script_used_packages.clone(),
    })
}
426
/// Main analysis pipeline: workspaces → discover → plugins → scripts →
/// parse (with cache) → entry points → resolve → graph → analyze.
///
/// Flags:
/// * `retain` — keep timings and the module graph in the output.
/// * `collect_usages` — forward usage collection to `find_dead_code_full`.
/// * `need_complexity` — compute complexity metrics during extraction.
/// * `retain_modules` — keep parsed modules and discovered files in the
///   output (enables later `analyze_with_parse_result` reruns).
#[expect(
    clippy::unnecessary_wraps,
    reason = "Result kept for future error handling"
)]
#[expect(
    clippy::too_many_lines,
    reason = "main pipeline function; sequential phases are held together for clarity"
)]
fn analyze_full(
    config: &ResolvedConfig,
    retain: bool,
    collect_usages: bool,
    need_complexity: bool,
    retain_modules: bool,
) -> Result<AnalysisOutput, FallowError> {
    let _span = tracing::info_span!("fallow_analyze").entered();
    let pipeline_start = Instant::now();

    // Spinners only for interactive stderr and human-oriented output formats.
    let show_progress = !config.quiet
        && std::io::IsTerminal::is_terminal(&std::io::stderr())
        && matches!(
            config.output,
            fallow_config::OutputFormat::Human
                | fallow_config::OutputFormat::Compact
                | fallow_config::OutputFormat::Markdown
        );
    let progress = progress::AnalysisProgress::new(show_progress);

    // Missing node_modules degrades resolution accuracy; warn, don't fail.
    if !config.root.join("node_modules").is_dir() {
        tracing::warn!(
            "node_modules directory not found. Run `npm install` / `pnpm install` first for accurate results."
        );
    }

    // Stage: workspace discovery.
    let t = Instant::now();
    let workspaces_vec = discover_workspaces(&config.root);
    let workspaces_ms = t.elapsed().as_secs_f64() * 1000.0;
    if !workspaces_vec.is_empty() {
        tracing::info!(count = workspaces_vec.len(), "workspaces discovered");
    }

    warn_undeclared_workspaces(
        &config.root,
        &workspaces_vec,
        &config.ignore_patterns,
        config.quiet,
    );

    // Stage: file discovery.
    let t = Instant::now();
    let pb = progress.stage_spinner("Discovering files...");
    let discovered_files = discover::discover_files(config);
    let discover_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    let project = project::ProjectState::new(discovered_files, workspaces_vec);
    let files = project.files();
    let workspaces = project.workspaces();
    let root_pkg = load_root_package_json(config);
    let workspace_pkgs = load_workspace_packages(workspaces);

    // Stage: plugin detection (root + workspaces).
    let t = Instant::now();
    let pb = progress.stage_spinner("Detecting plugins...");
    let mut plugin_result = run_plugins(
        config,
        files,
        workspaces,
        root_pkg.as_ref(),
        &workspace_pkgs,
    );
    let plugins_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Stage: package.json / CI script analysis (mutates plugin_result).
    let t = Instant::now();
    analyze_all_scripts(
        config,
        workspaces,
        root_pkg.as_ref(),
        &workspace_pkgs,
        &mut plugin_result,
    );
    let scripts_ms = t.elapsed().as_secs_f64() * 1000.0;

    // Stage: parse/extract, consulting the on-disk cache unless disabled.
    let t = Instant::now();
    let pb = progress.stage_spinner(&format!("Parsing {} files...", files.len()));
    let mut cache_store = if config.no_cache {
        None
    } else {
        cache::CacheStore::load(&config.cache_dir)
    };

    let parse_result = extract::parse_all_files(files, cache_store.as_ref(), need_complexity);
    let modules = parse_result.modules;
    let cache_hits = parse_result.cache_hits;
    let cache_misses = parse_result.cache_misses;
    let parse_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Stage: write freshly parsed modules back to the cache (best-effort).
    let t = Instant::now();
    if !config.no_cache {
        let store = cache_store.get_or_insert_with(cache::CacheStore::new);
        update_cache(store, &modules, files);
        if let Err(e) = store.save(&config.cache_dir) {
            tracing::warn!("Failed to save cache: {e}");
        }
    }
    let cache_ms = t.elapsed().as_secs_f64() * 1000.0;

    // Stage: entry-point discovery (package.json, plugins, infra, dynamic).
    let t = Instant::now();
    let entry_points = discover_all_entry_points(
        config,
        files,
        workspaces,
        root_pkg.as_ref(),
        &workspace_pkgs,
        &plugin_result,
    );
    let entry_points_ms = t.elapsed().as_secs_f64() * 1000.0;

    // Stage: import resolution.
    let t = Instant::now();
    let pb = progress.stage_spinner("Resolving imports...");
    let mut resolved = resolve::resolve_all_imports(
        &modules,
        files,
        workspaces,
        &plugin_result.active_plugins,
        &plugin_result.path_aliases,
        &plugin_result.scss_include_paths,
        &config.root,
        &config.resolve.conditions,
    );
    external_style_usage::augment_external_style_package_usage(
        &mut resolved,
        config,
        workspaces,
        &plugin_result,
    );
    let resolve_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Stage: module-graph construction with reachability roots.
    let t = Instant::now();
    let pb = progress.stage_spinner("Building module graph...");
    let graph = graph::ModuleGraph::build_with_reachability_roots(
        &resolved,
        &entry_points.all,
        &entry_points.runtime,
        &entry_points.test,
        files,
    );
    let graph_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    let ep_summary = summarize_entry_points(&entry_points.all);

    // Stage: dead-code analysis.
    let t = Instant::now();
    let pb = progress.stage_spinner("Analyzing...");
    let mut result = analyze::find_dead_code_full(
        &graph,
        config,
        &resolved,
        Some(&plugin_result),
        workspaces,
        &modules,
        collect_usages,
    );
    let analyze_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();
    progress.finish();

    result.entry_point_summary = Some(ep_summary);

    let total_ms = pipeline_start.elapsed().as_secs_f64() * 1000.0;

    let cache_summary = if cache_hits > 0 {
        format!(" ({cache_hits} cached, {cache_misses} parsed)")
    } else {
        String::new()
    };

    tracing::debug!(
        "\n┌─ Pipeline Profile ─────────────────────────────\n\
         │ discover files: {:>8.1}ms ({} files)\n\
         │ workspaces: {:>8.1}ms\n\
         │ plugins: {:>8.1}ms\n\
         │ script analysis: {:>8.1}ms\n\
         │ parse/extract: {:>8.1}ms ({} modules{})\n\
         │ cache update: {:>8.1}ms\n\
         │ entry points: {:>8.1}ms ({} entries)\n\
         │ resolve imports: {:>8.1}ms\n\
         │ build graph: {:>8.1}ms\n\
         │ analyze: {:>8.1}ms\n\
         │ ────────────────────────────────────────────\n\
         │ TOTAL: {:>8.1}ms\n\
         └─────────────────────────────────────────────────",
        discover_ms,
        files.len(),
        workspaces_ms,
        plugins_ms,
        scripts_ms,
        parse_ms,
        modules.len(),
        cache_summary,
        cache_ms,
        entry_points_ms,
        entry_points.all.len(),
        resolve_ms,
        graph_ms,
        analyze_ms,
        total_ms,
    );

    let timings = if retain {
        Some(PipelineTimings {
            discover_files_ms: discover_ms,
            file_count: files.len(),
            workspaces_ms,
            workspace_count: workspaces.len(),
            plugins_ms,
            script_analysis_ms: scripts_ms,
            parse_extract_ms: parse_ms,
            module_count: modules.len(),
            cache_hits,
            cache_misses,
            cache_update_ms: cache_ms,
            entry_points_ms,
            entry_point_count: entry_points.all.len(),
            resolve_imports_ms: resolve_ms,
            build_graph_ms: graph_ms,
            analyze_ms,
            total_ms,
        })
    } else {
        None
    };

    Ok(AnalysisOutput {
        results: result,
        timings,
        graph: if retain { Some(graph) } else { None },
        modules: if retain_modules { Some(modules) } else { None },
        files: if retain_modules {
            Some(files.to_vec())
        } else {
            None
        },
        script_used_packages: plugin_result.script_used_packages,
    })
}
692
693fn load_root_package_json(config: &ResolvedConfig) -> Option<PackageJson> {
698 PackageJson::load(&config.root.join("package.json")).ok()
699}
700
701fn load_workspace_packages(
702 workspaces: &[fallow_config::WorkspaceInfo],
703) -> Vec<LoadedWorkspacePackage<'_>> {
704 workspaces
705 .iter()
706 .filter_map(|ws| {
707 PackageJson::load(&ws.root.join("package.json"))
708 .ok()
709 .map(|pkg| (ws, pkg))
710 })
711 .collect()
712}
713
/// Analyze package.json scripts (root + workspaces) and CI files, folding
/// the findings into `plugin_result`: used package names, config files that
/// count as always-used, and script-referenced entry-file patterns.
fn analyze_all_scripts(
    config: &ResolvedConfig,
    workspaces: &[fallow_config::WorkspaceInfo],
    root_pkg: Option<&PackageJson>,
    workspace_pkgs: &[LoadedWorkspacePackage<'_>],
    plugin_result: &mut plugins::AggregatedPluginResult,
) {
    // Union of all declared dependency names across root and workspaces,
    // deduplicated; used to map bin names back to packages.
    let mut all_dep_names: Vec<String> = Vec::new();
    if let Some(pkg) = root_pkg {
        all_dep_names.extend(pkg.all_dependency_names());
    }
    for (_, ws_pkg) in workspace_pkgs {
        all_dep_names.extend(ws_pkg.all_dependency_names());
    }
    all_dep_names.sort_unstable();
    all_dep_names.dedup();

    // Every directory that actually has a node_modules becomes a lookup root.
    let mut nm_roots: Vec<&std::path::Path> = Vec::new();
    if config.root.join("node_modules").is_dir() {
        nm_roots.push(&config.root);
    }
    for ws in workspaces {
        if ws.root.join("node_modules").is_dir() {
            nm_roots.push(&ws.root);
        }
    }
    let bin_map = scripts::build_bin_to_package_map(&nm_roots, &all_dep_names);

    // Root package.json scripts.
    if let Some(pkg) = root_pkg
        && let Some(ref pkg_scripts) = pkg.scripts
    {
        // In production mode only production-relevant scripts are considered.
        let scripts_to_analyze = if config.production {
            scripts::filter_production_scripts(pkg_scripts)
        } else {
            pkg_scripts.clone()
        };
        let script_analysis = scripts::analyze_scripts(&scripts_to_analyze, &config.root, &bin_map);
        plugin_result.script_used_packages = script_analysis.used_packages;

        for config_file in &script_analysis.config_files {
            plugin_result
                .discovered_always_used
                .push((config_file.clone(), "scripts".to_string()));
        }
        for entry in &script_analysis.entry_files {
            if let Some(pat) = scripts::normalize_script_entry_pattern("", entry) {
                plugin_result
                    .entry_patterns
                    .push((plugins::PathRule::new(pat), "scripts".to_string()));
            }
        }
    }
    // Workspace package.json scripts; paths are re-rooted under the
    // workspace's prefix relative to the project root.
    for (ws, ws_pkg) in workspace_pkgs {
        if let Some(ref ws_scripts) = ws_pkg.scripts {
            let scripts_to_analyze = if config.production {
                scripts::filter_production_scripts(ws_scripts)
            } else {
                ws_scripts.clone()
            };
            let ws_analysis = scripts::analyze_scripts(&scripts_to_analyze, &ws.root, &bin_map);
            plugin_result
                .script_used_packages
                .extend(ws_analysis.used_packages);

            let ws_prefix = ws
                .root
                .strip_prefix(&config.root)
                .unwrap_or(&ws.root)
                .to_string_lossy();
            for config_file in &ws_analysis.config_files {
                plugin_result
                    .discovered_always_used
                    .push((format!("{ws_prefix}/{config_file}"), "scripts".to_string()));
            }
            for entry in &ws_analysis.entry_files {
                if let Some(pat) = scripts::normalize_script_entry_pattern(&ws_prefix, entry) {
                    plugin_result
                        .entry_patterns
                        .push((plugins::PathRule::new(pat), "scripts".to_string()));
                }
            }
        }
    }

    // CI workflow files can also reference packages and entry files.
    let ci_analysis = scripts::ci::analyze_ci_files(&config.root, &bin_map);
    plugin_result
        .script_used_packages
        .extend(ci_analysis.used_packages);
    for entry in &ci_analysis.entry_files {
        if let Some(pat) = scripts::normalize_script_entry_pattern("", entry) {
            plugin_result
                .entry_patterns
                .push((plugins::PathRule::new(pat), "scripts".to_string()));
        }
    }
    // Script-derived entries default to the Support role unless already set.
    plugin_result
        .entry_point_roles
        .entry("scripts".to_string())
        .or_insert(EntryPointRole::Support);
}
825
/// Gather entry points from every source — root package.json, workspace
/// package.jsons (in parallel), plugins, infrastructure configs, and
/// explicitly configured dynamically-loaded paths — then deduplicate.
///
/// Skipped-entry counts from root and workspace discovery are merged and
/// reported once via `warn_skipped_entry_summary`.
fn discover_all_entry_points(
    config: &ResolvedConfig,
    files: &[discover::DiscoveredFile],
    workspaces: &[fallow_config::WorkspaceInfo],
    root_pkg: Option<&PackageJson>,
    workspace_pkgs: &[LoadedWorkspacePackage<'_>],
    plugin_result: &plugins::AggregatedPluginResult,
) -> discover::CategorizedEntryPoints {
    let mut entry_points = discover::CategorizedEntryPoints::default();
    let root_discovery = discover::discover_entry_points_with_warnings_from_pkg(
        config,
        files,
        root_pkg,
        workspaces.is_empty(),
    );

    // Index parsed workspace manifests by workspace root for O(1) lookup
    // inside the parallel map below.
    let workspace_pkg_by_root: rustc_hash::FxHashMap<std::path::PathBuf, &PackageJson> =
        workspace_pkgs
            .iter()
            .map(|(ws, pkg)| (ws.root.clone(), pkg))
            .collect();

    let workspace_discovery: Vec<discover::EntryPointDiscovery> = workspaces
        .par_iter()
        .map(|ws| {
            let pkg = workspace_pkg_by_root.get(&ws.root).copied();
            discover::discover_workspace_entry_points_with_warnings_from_pkg(&ws.root, files, pkg)
        })
        .collect();
    let mut skipped_entries = rustc_hash::FxHashMap::default();
    entry_points.extend_runtime(root_discovery.entries);
    for (path, count) in root_discovery.skipped_entries {
        *skipped_entries.entry(path).or_insert(0) += count;
    }
    let mut ws_entries = Vec::new();
    for workspace in workspace_discovery {
        ws_entries.extend(workspace.entries);
        for (path, count) in workspace.skipped_entries {
            *skipped_entries.entry(path).or_insert(0) += count;
        }
    }
    discover::warn_skipped_entry_summary(&skipped_entries);
    entry_points.extend_runtime(ws_entries);

    // Plugin-declared entry points keep their own categorization.
    let plugin_entries = discover::discover_plugin_entry_point_sets(plugin_result, config, files);
    entry_points.extend(plugin_entries);

    let infra_entries = discover::discover_infrastructure_entry_points(&config.root);
    entry_points.extend_runtime(infra_entries);

    // User-declared dynamically loaded paths become runtime entry points.
    if !config.dynamically_loaded.is_empty() {
        let dynamic_entries = discover::discover_dynamically_loaded_entry_points(config, files);
        entry_points.extend_runtime(dynamic_entries);
    }

    entry_points.dedup()
}
885
886fn summarize_entry_points(entry_points: &[discover::EntryPoint]) -> results::EntryPointSummary {
888 let mut counts: rustc_hash::FxHashMap<String, usize> = rustc_hash::FxHashMap::default();
889 for ep in entry_points {
890 let category = match &ep.source {
891 discover::EntryPointSource::PackageJsonMain
892 | discover::EntryPointSource::PackageJsonModule
893 | discover::EntryPointSource::PackageJsonExports
894 | discover::EntryPointSource::PackageJsonBin
895 | discover::EntryPointSource::PackageJsonScript => "package.json",
896 discover::EntryPointSource::Plugin { .. } => "plugin",
897 discover::EntryPointSource::TestFile => "test file",
898 discover::EntryPointSource::DefaultIndex => "default index",
899 discover::EntryPointSource::ManualEntry => "manual entry",
900 discover::EntryPointSource::InfrastructureConfig => "config",
901 discover::EntryPointSource::DynamicallyLoaded => "dynamically loaded",
902 };
903 *counts.entry(category.to_string()).or_insert(0) += 1;
904 }
905 let mut by_source: Vec<(String, usize)> = counts.into_iter().collect();
906 by_source.sort_by(|a, b| b.1.cmp(&a.1).then_with(|| a.0.cmp(&b.0)));
907 results::EntryPointSummary {
908 total: entry_points.len(),
909 by_source,
910 }
911}
912
/// Detect active plugins for the project root, then (when workspaces exist)
/// run a faster per-workspace detection in parallel and merge everything
/// into one aggregated result.
///
/// Workspace-relative patterns are re-rooted under the workspace prefix so
/// they match against project-root-relative paths; list-like fields are
/// deduplicated where the root result may already contain the same value.
fn run_plugins(
    config: &ResolvedConfig,
    files: &[discover::DiscoveredFile],
    workspaces: &[fallow_config::WorkspaceInfo],
    root_pkg: Option<&PackageJson>,
    workspace_pkgs: &[LoadedWorkspacePackage<'_>],
) -> plugins::AggregatedPluginResult {
    let registry = plugins::PluginRegistry::new(config.external_plugins.clone());
    let file_paths: Vec<std::path::PathBuf> = files.iter().map(|f| f.path.clone()).collect();
    let root_config_search_roots = collect_config_search_roots(&config.root, &file_paths);
    let root_config_search_root_refs: Vec<&Path> = root_config_search_roots
        .iter()
        .map(std::path::PathBuf::as_path)
        .collect();

    // Root-level plugin detection; without a root package.json we start
    // from an empty aggregate.
    let mut result = root_pkg.map_or_else(plugins::AggregatedPluginResult::default, |pkg| {
        registry.run_with_search_roots(
            pkg,
            &config.root,
            &file_paths,
            &root_config_search_root_refs,
        )
    });

    if workspaces.is_empty() {
        return result;
    }

    let root_active_plugins: rustc_hash::FxHashSet<&str> =
        result.active_plugins.iter().map(String::as_str).collect();

    // Precompute matchers and root-relative paths once; they are shared by
    // every workspace run below.
    let precompiled_matchers = registry.precompile_config_matchers();
    let relative_files: Vec<(&std::path::PathBuf, String)> = file_paths
        .iter()
        .map(|f| {
            let rel = f
                .strip_prefix(&config.root)
                .unwrap_or(f)
                .to_string_lossy()
                .into_owned();
            (f, rel)
        })
        .collect();

    // Parallel per-workspace detection; workspaces with no active plugins
    // contribute nothing.
    let ws_results: Vec<_> = workspace_pkgs
        .par_iter()
        .filter_map(|(ws, ws_pkg)| {
            let ws_result = registry.run_workspace_fast(
                ws_pkg,
                &ws.root,
                &config.root,
                &precompiled_matchers,
                &relative_files,
                &root_active_plugins,
            );
            if ws_result.active_plugins.is_empty() {
                return None;
            }
            let ws_prefix = ws
                .root
                .strip_prefix(&config.root)
                .unwrap_or(&ws.root)
                .to_string_lossy()
                .into_owned();
            Some((ws_result, ws_prefix))
        })
        .collect();

    // Dedup sets seeded from the root result so workspace merges don't
    // re-add values the root already produced.
    let mut seen_plugins: rustc_hash::FxHashSet<String> =
        result.active_plugins.iter().cloned().collect();
    let mut seen_prefixes: rustc_hash::FxHashSet<String> =
        result.virtual_module_prefixes.iter().cloned().collect();
    let mut seen_generated: rustc_hash::FxHashSet<String> =
        result.generated_import_patterns.iter().cloned().collect();
    for (ws_result, ws_prefix) in ws_results {
        // Re-root a workspace-relative pattern unless it already carries the
        // workspace prefix or is absolute.
        let prefix_if_needed = |pat: &str| -> String {
            if pat.starts_with(ws_prefix.as_str()) || pat.starts_with('/') {
                pat.to_string()
            } else {
                format!("{ws_prefix}/{pat}")
            }
        };

        for (rule, pname) in &ws_result.entry_patterns {
            result
                .entry_patterns
                .push((rule.prefixed(&ws_prefix), pname.clone()));
        }
        // First role declared for a plugin name wins.
        for (plugin_name, role) in ws_result.entry_point_roles {
            result.entry_point_roles.entry(plugin_name).or_insert(role);
        }
        for (pat, pname) in &ws_result.always_used {
            result
                .always_used
                .push((prefix_if_needed(pat), pname.clone()));
        }
        for (pat, pname) in &ws_result.discovered_always_used {
            result
                .discovered_always_used
                .push((prefix_if_needed(pat), pname.clone()));
        }
        for (pat, pname) in &ws_result.fixture_patterns {
            result
                .fixture_patterns
                .push((prefix_if_needed(pat), pname.clone()));
        }
        for rule in &ws_result.used_exports {
            result.used_exports.push(rule.prefixed(&ws_prefix));
        }
        for plugin_name in ws_result.active_plugins {
            if !seen_plugins.contains(&plugin_name) {
                seen_plugins.insert(plugin_name.clone());
                result.active_plugins.push(plugin_name);
            }
        }
        result
            .referenced_dependencies
            .extend(ws_result.referenced_dependencies);
        result.setup_files.extend(ws_result.setup_files);
        result
            .tooling_dependencies
            .extend(ws_result.tooling_dependencies);
        for prefix in ws_result.virtual_module_prefixes {
            if !seen_prefixes.contains(&prefix) {
                seen_prefixes.insert(prefix.clone());
                result.virtual_module_prefixes.push(prefix);
            }
        }
        for pattern in ws_result.generated_import_patterns {
            if !seen_generated.contains(&pattern) {
                seen_generated.insert(pattern.clone());
                result.generated_import_patterns.push(pattern);
            }
        }
        // Alias replacements point into the workspace, so only the
        // replacement side is re-rooted.
        for (prefix, replacement) in ws_result.path_aliases {
            result
                .path_aliases
                .push((prefix, format!("{ws_prefix}/{replacement}")));
        }
    }

    result
}
1076
/// Collect every directory that should be searched for tool config files:
/// the project root plus every ancestor directory (within `root`) of each
/// discovered file. The result is deduplicated and sorted.
fn collect_config_search_roots(
    root: &Path,
    file_paths: &[std::path::PathBuf],
) -> Vec<std::path::PathBuf> {
    // A BTreeSet gives deduplication and sorted iteration in one structure.
    let mut roots: std::collections::BTreeSet<std::path::PathBuf> =
        std::collections::BTreeSet::new();
    roots.insert(root.to_path_buf());

    for file_path in file_paths {
        // Walk upward from the file's directory, stopping at (and
        // including) `root`; directories outside `root` are ignored.
        let mut dir = file_path.parent();
        while let Some(d) = dir {
            if !d.starts_with(root) {
                break;
            }
            roots.insert(d.to_path_buf());
            if d == root {
                break;
            }
            dir = d.parent();
        }
    }

    roots.into_iter().collect()
}
1102
/// Convenience entry point: build a default config for `root` and run the
/// analysis with usage collection enabled.
pub fn analyze_project(root: &Path) -> Result<AnalysisResults, FallowError> {
    let config = default_config(root);
    analyze_with_usages(&config)
}
1112
1113pub(crate) fn default_config(root: &Path) -> ResolvedConfig {
1124 let user_config = fallow_config::FallowConfig::find_and_load(root)
1125 .ok()
1126 .flatten();
1127 match user_config {
1128 Some((mut config, _path)) => {
1129 let dead_code_production = config
1130 .production
1131 .for_analysis(fallow_config::ProductionAnalysis::DeadCode);
1132 config.production = dead_code_production.into();
1133 config.resolve(
1134 root.to_path_buf(),
1135 fallow_config::OutputFormat::Human,
1136 num_cpus(),
1137 false,
1138 true, )
1140 }
1141 None => fallow_config::FallowConfig::default().resolve(
1142 root.to_path_buf(),
1143 fallow_config::OutputFormat::Human,
1144 num_cpus(),
1145 false,
1146 true,
1147 ),
1148 }
1149}
1150
/// Degree of parallelism to use; falls back to 4 when the platform cannot
/// report its available parallelism.
fn num_cpus() -> usize {
    match std::thread::available_parallelism() {
        Ok(n) => n.get(),
        Err(_) => 4,
    }
}
1154
#[cfg(test)]
mod tests {
    use super::{collect_config_search_roots, format_undeclared_workspace_warning};
    use std::path::{Path, PathBuf};

    use fallow_config::WorkspaceDiagnostic;

    /// Build a diagnostic for `root`/`relative` with an empty message.
    fn diag(root: &Path, relative: &str) -> WorkspaceDiagnostic {
        WorkspaceDiagnostic {
            path: root.join(relative),
            message: String::new(),
        }
    }

    // Exactly one path: singular wording, no "(and N more)" tail.
    #[test]
    fn undeclared_workspace_warning_is_singular_for_one_path() {
        let root = Path::new("/repo");
        let warning = format_undeclared_workspace_warning(root, &[diag(root, "packages/api")])
            .expect("warning should be rendered");

        assert_eq!(
            warning,
            "1 directory with package.json is not declared as a workspace: packages/api. Add that path to package.json workspaces or pnpm-workspace.yaml if it should be analyzed as a workspace."
        );
    }

    // Six paths: plural wording, preview capped at five, remainder summarized.
    #[test]
    fn undeclared_workspace_warning_summarizes_many_paths() {
        let root = PathBuf::from("/repo");
        let diagnostics = [
            "examples/a",
            "examples/b",
            "examples/c",
            "examples/d",
            "examples/e",
            "examples/f",
        ]
        .into_iter()
        .map(|path| diag(&root, path))
        .collect::<Vec<_>>();

        let warning = format_undeclared_workspace_warning(&root, &diagnostics)
            .expect("warning should be rendered");

        assert_eq!(
            warning,
            "6 directories with package.json are not declared as workspaces: examples/a, examples/b, examples/c, examples/d, examples/e (and 1 more). Add those paths to package.json workspaces or pnpm-workspace.yaml if they should be analyzed as workspaces."
        );
    }

    // Shared ancestors appear once and the result is sorted.
    #[test]
    fn collect_config_search_roots_includes_file_ancestors_once() {
        let root = PathBuf::from("/repo");
        let search_roots = collect_config_search_roots(
            &root,
            &[
                root.join("apps/query/src/main.ts"),
                root.join("packages/shared/lib/index.ts"),
            ],
        );

        assert_eq!(
            search_roots,
            vec![
                root.clone(),
                root.join("apps"),
                root.join("apps/query"),
                root.join("apps/query/src"),
                root.join("packages"),
                root.join("packages/shared"),
                root.join("packages/shared/lib"),
            ]
        );
    }
}