1pub mod analyze;
2pub mod cache;
3pub mod changed_files;
4pub mod churn;
5pub mod cross_reference;
6pub mod discover;
7pub mod duplicates;
8pub(crate) mod errors;
9mod external_style_usage;
10pub mod extract;
11pub mod plugins;
12pub(crate) mod progress;
13pub mod results;
14pub(crate) mod scripts;
15pub mod suppress;
16pub mod trace;
17
18pub use fallow_graph::graph;
20pub use fallow_graph::project;
21pub use fallow_graph::resolve;
22
23use std::path::Path;
24use std::time::Instant;
25
26use errors::FallowError;
27use fallow_config::{
28 EntryPointRole, PackageJson, ResolvedConfig, discover_workspaces,
29 find_undeclared_workspaces_with_ignores,
30};
31use rayon::prelude::*;
32use results::AnalysisResults;
33use rustc_hash::FxHashSet;
34use trace::PipelineTimings;
35
/// Maximum number of undeclared-workspace paths listed verbatim in the
/// warning message; any further paths are summarized as "(and N more)".
const UNDECLARED_WORKSPACE_WARNING_PREVIEW: usize = 5;
/// A workspace paired with its successfully loaded `package.json`.
type LoadedWorkspacePackage<'a> = (&'a fallow_config::WorkspaceInfo, PackageJson);
38
39fn record_graph_package_usage(
40 graph: &mut graph::ModuleGraph,
41 package_name: &str,
42 file_id: discover::FileId,
43 is_type_only: bool,
44) {
45 graph
46 .package_usage
47 .entry(package_name.to_owned())
48 .or_default()
49 .push(file_id);
50 if is_type_only {
51 graph
52 .type_only_package_usage
53 .entry(package_name.to_owned())
54 .or_default()
55 .push(file_id);
56 }
57}
58
59fn workspace_package_name<'a>(
60 source: &str,
61 workspace_names: &'a FxHashSet<&str>,
62) -> Option<&'a str> {
63 if !resolve::is_bare_specifier(source) {
64 return None;
65 }
66 let package_name = resolve::extract_package_name(source);
67 workspace_names.get(package_name.as_str()).copied()
68}
69
70fn credit_workspace_package_usage(
71 graph: &mut graph::ModuleGraph,
72 resolved: &[resolve::ResolvedModule],
73 workspaces: &[fallow_config::WorkspaceInfo],
74) {
75 if workspaces.is_empty() {
76 return;
77 }
78
79 let workspace_names: FxHashSet<&str> = workspaces.iter().map(|ws| ws.name.as_str()).collect();
80 for module in resolved {
81 for import in module
82 .resolved_imports
83 .iter()
84 .chain(module.resolved_dynamic_imports.iter())
85 {
86 if matches!(import.target, resolve::ResolveResult::InternalModule(_))
87 && let Some(package_name) =
88 workspace_package_name(&import.info.source, &workspace_names)
89 {
90 record_graph_package_usage(
91 graph,
92 package_name,
93 module.file_id,
94 import.info.is_type_only,
95 );
96 }
97 }
98
99 for re_export in &module.re_exports {
100 if matches!(re_export.target, resolve::ResolveResult::InternalModule(_))
101 && let Some(package_name) =
102 workspace_package_name(&re_export.info.source, &workspace_names)
103 {
104 record_graph_package_usage(
105 graph,
106 package_name,
107 module.file_id,
108 re_export.info.is_type_only,
109 );
110 }
111 }
112 }
113}
114
/// Bundle returned by the analysis pipeline entry points.
///
/// Optional fields are populated only when the caller asked for them to be
/// retained (see the `retain` / `retain_modules` flags of `analyze_full`).
pub struct AnalysisOutput {
    /// Dead-code findings and related analysis results.
    pub results: AnalysisResults,
    /// Per-phase timing breakdown; `Some` only when retention was requested.
    pub timings: Option<PipelineTimings>,
    /// The built module graph; `Some` only when graph retention was requested.
    pub graph: Option<graph::ModuleGraph>,
    /// Parsed module info; `Some` only when module retention was requested.
    pub modules: Option<Vec<extract::ModuleInfo>>,
    /// Discovered files; `Some` only when module retention was requested.
    pub files: Option<Vec<discover::DiscoveredFile>>,
    /// Package names referenced by package.json / CI scripts.
    pub script_used_packages: rustc_hash::FxHashSet<String>,
}
131
132fn update_cache(
134 store: &mut cache::CacheStore,
135 modules: &[extract::ModuleInfo],
136 files: &[discover::DiscoveredFile],
137) {
138 for module in modules {
139 if let Some(file) = files.get(module.file_id.0 as usize) {
140 let (mt, sz) = file_mtime_and_size(&file.path);
141 if let Some(cached) = store.get_by_path_only(&file.path)
143 && cached.content_hash == module.content_hash
144 {
145 if cached.mtime_secs != mt || cached.file_size != sz {
146 store.insert(&file.path, cache::module_to_cached(module, mt, sz));
147 }
148 continue;
149 }
150 store.insert(&file.path, cache::module_to_cached(module, mt, sz));
151 }
152 }
153 store.retain_paths(files);
154}
155
/// Best-effort `(mtime seconds since UNIX epoch, byte size)` for `path`.
/// Returns `(0, 0)` when the metadata (or a sane mtime) is unavailable.
fn file_mtime_and_size(path: &std::path::Path) -> (u64, u64) {
    let Ok(meta) = std::fs::metadata(path) else {
        return (0, 0);
    };
    let secs = match meta.modified() {
        Ok(t) => t
            .duration_since(std::time::SystemTime::UNIX_EPOCH)
            .map(|d| d.as_secs())
            .unwrap_or(0),
        Err(_) => 0,
    };
    (secs, meta.len())
}
167
168fn format_undeclared_workspace_warning(
169 root: &Path,
170 undeclared: &[fallow_config::WorkspaceDiagnostic],
171) -> Option<String> {
172 if undeclared.is_empty() {
173 return None;
174 }
175
176 let preview = undeclared
177 .iter()
178 .take(UNDECLARED_WORKSPACE_WARNING_PREVIEW)
179 .map(|diag| {
180 diag.path
181 .strip_prefix(root)
182 .unwrap_or(&diag.path)
183 .display()
184 .to_string()
185 .replace('\\', "/")
186 })
187 .collect::<Vec<_>>();
188 let remaining = undeclared
189 .len()
190 .saturating_sub(UNDECLARED_WORKSPACE_WARNING_PREVIEW);
191 let tail = if remaining > 0 {
192 format!(" (and {remaining} more)")
193 } else {
194 String::new()
195 };
196 let noun = if undeclared.len() == 1 {
197 "directory with package.json is"
198 } else {
199 "directories with package.json are"
200 };
201 let guidance = if undeclared.len() == 1 {
202 "Add that path to package.json workspaces or pnpm-workspace.yaml if it should be analyzed as a workspace."
203 } else {
204 "Add those paths to package.json workspaces or pnpm-workspace.yaml if they should be analyzed as workspaces."
205 };
206
207 Some(format!(
208 "{} {} not declared as {}: {}{}. {}",
209 undeclared.len(),
210 noun,
211 if undeclared.len() == 1 {
212 "a workspace"
213 } else {
214 "workspaces"
215 },
216 preview.join(", "),
217 tail,
218 guidance
219 ))
220}
221
222fn warn_undeclared_workspaces(
223 root: &Path,
224 workspaces_vec: &[fallow_config::WorkspaceInfo],
225 ignore_patterns: &globset::GlobSet,
226 quiet: bool,
227) {
228 if quiet {
229 return;
230 }
231
232 let undeclared = find_undeclared_workspaces_with_ignores(root, workspaces_vec, ignore_patterns);
233 if let Some(message) = format_undeclared_workspace_warning(root, &undeclared) {
234 tracing::warn!("{message}");
235 }
236}
237
/// Run the full analysis pipeline with default options (no usage collection,
/// no retained graph/modules/timings) and return just the findings.
///
/// # Errors
/// Returns a [`FallowError`] if the pipeline fails.
pub fn analyze(config: &ResolvedConfig) -> Result<AnalysisResults, FallowError> {
    let output = analyze_full(config, false, false, false, false)?;
    Ok(output.results)
}
247
/// Like [`analyze`], but additionally collects per-export usage information
/// during the analysis phase (`collect_usages = true`).
///
/// # Errors
/// Returns a [`FallowError`] if the pipeline fails.
pub fn analyze_with_usages(config: &ResolvedConfig) -> Result<AnalysisResults, FallowError> {
    let output = analyze_full(config, false, true, false, false)?;
    Ok(output.results)
}
257
/// Run the pipeline retaining timings and the module graph (`retain = true`)
/// so callers can inspect how long each phase took.
///
/// # Errors
/// Returns a [`FallowError`] if the pipeline fails.
pub fn analyze_with_trace(config: &ResolvedConfig) -> Result<AnalysisOutput, FallowError> {
    analyze_full(config, true, false, false, false)
}
266
/// Run the pipeline retaining parsed modules and discovered files
/// (`retain_modules = true`) for callers that post-process them.
///
/// `need_complexity` requests complexity metrics during parsing;
/// `retain_graph` additionally keeps the module graph and timings.
///
/// # Errors
/// Returns a [`FallowError`] if the pipeline fails.
pub fn analyze_retaining_modules(
    config: &ResolvedConfig,
    need_complexity: bool,
    retain_graph: bool,
) -> Result<AnalysisOutput, FallowError> {
    analyze_full(config, retain_graph, false, need_complexity, true)
}
283
/// Re-run the analysis pipeline against already-parsed `modules`, skipping the
/// parse/extract and cache phases entirely (used by callers that reuse a
/// previous parse, e.g. incremental re-analysis).
///
/// The returned output always carries timings and the module graph, but never
/// retains modules or files.
///
/// # Errors
/// Returns a [`FallowError`] if a pipeline phase fails.
#[allow(
    clippy::too_many_lines,
    reason = "pipeline orchestration stays easier to audit in one place"
)]
pub fn analyze_with_parse_result(
    config: &ResolvedConfig,
    modules: &[extract::ModuleInfo],
) -> Result<AnalysisOutput, FallowError> {
    let _span = tracing::info_span!("fallow_analyze_with_parse_result").entered();
    let pipeline_start = Instant::now();

    // Progress spinners only make sense on a live TTY with a human format.
    let show_progress = !config.quiet
        && std::io::IsTerminal::is_terminal(&std::io::stderr())
        && matches!(
            config.output,
            fallow_config::OutputFormat::Human
                | fallow_config::OutputFormat::Compact
                | fallow_config::OutputFormat::Markdown
        );
    let progress = progress::AnalysisProgress::new(show_progress);

    // Missing node_modules degrades resolution quality; warn but continue.
    if !config.root.join("node_modules").is_dir() {
        tracing::warn!(
            "node_modules directory not found. Run `npm install` / `pnpm install` first for accurate results."
        );
    }

    // Phase: workspace discovery.
    let t = Instant::now();
    let workspaces_vec = discover_workspaces(&config.root);
    let workspaces_ms = t.elapsed().as_secs_f64() * 1000.0;
    if !workspaces_vec.is_empty() {
        tracing::info!(count = workspaces_vec.len(), "workspaces discovered");
    }

    warn_undeclared_workspaces(
        &config.root,
        &workspaces_vec,
        &config.ignore_patterns,
        config.quiet,
    );

    // Phase: file discovery.
    let t = Instant::now();
    let pb = progress.stage_spinner("Discovering files...");
    let discovered_files = discover::discover_files(config);
    let discover_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    let project = project::ProjectState::new(discovered_files, workspaces_vec);
    let files = project.files();
    let workspaces = project.workspaces();
    let root_pkg = load_root_package_json(config);
    let workspace_pkgs = load_workspace_packages(workspaces);

    // Phase: plugin detection (root + workspaces).
    let t = Instant::now();
    let pb = progress.stage_spinner("Detecting plugins...");
    let mut plugin_result = run_plugins(
        config,
        files,
        workspaces,
        root_pkg.as_ref(),
        &workspace_pkgs,
    );
    let plugins_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Phase: package.json / CI script analysis (mutates plugin_result).
    let t = Instant::now();
    analyze_all_scripts(
        config,
        workspaces,
        root_pkg.as_ref(),
        &workspace_pkgs,
        &mut plugin_result,
    );
    let scripts_ms = t.elapsed().as_secs_f64() * 1000.0;

    // Phase: entry point discovery.
    let t = Instant::now();
    let entry_points = discover_all_entry_points(
        config,
        files,
        workspaces,
        root_pkg.as_ref(),
        &workspace_pkgs,
        &plugin_result,
    );
    let entry_points_ms = t.elapsed().as_secs_f64() * 1000.0;

    let ep_summary = summarize_entry_points(&entry_points.all);

    // Phase: import resolution over the reused modules.
    let t = Instant::now();
    let pb = progress.stage_spinner("Resolving imports...");
    let mut resolved = resolve::resolve_all_imports(
        modules,
        files,
        workspaces,
        &plugin_result.active_plugins,
        &plugin_result.path_aliases,
        &plugin_result.scss_include_paths,
        &config.root,
        &config.resolve.conditions,
    );
    external_style_usage::augment_external_style_package_usage(
        &mut resolved,
        config,
        workspaces,
        &plugin_result,
    );
    let resolve_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Phase: module graph construction + workspace usage crediting.
    let t = Instant::now();
    let pb = progress.stage_spinner("Building module graph...");
    let mut graph = graph::ModuleGraph::build_with_reachability_roots(
        &resolved,
        &entry_points.all,
        &entry_points.runtime,
        &entry_points.test,
        files,
    );
    credit_workspace_package_usage(&mut graph, &resolved, workspaces);
    let graph_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Phase: dead-code analysis (collect_usages is off for this entry point).
    let t = Instant::now();
    let pb = progress.stage_spinner("Analyzing...");
    let mut result = analyze::find_dead_code_full(
        &graph,
        config,
        &resolved,
        Some(&plugin_result),
        workspaces,
        modules,
        false,
    );
    let analyze_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();
    progress.finish();

    result.entry_point_summary = Some(ep_summary);

    let total_ms = pipeline_start.elapsed().as_secs_f64() * 1000.0;

    tracing::debug!(
        "\n┌─ Pipeline Profile (reuse) ─────────────────────\n\
         │ discover files: {:>8.1}ms ({} files)\n\
         │ workspaces: {:>8.1}ms\n\
         │ plugins: {:>8.1}ms\n\
         │ script analysis: {:>8.1}ms\n\
         │ parse/extract: SKIPPED (reused {} modules)\n\
         │ entry points: {:>8.1}ms ({} entries)\n\
         │ resolve imports: {:>8.1}ms\n\
         │ build graph: {:>8.1}ms\n\
         │ analyze: {:>8.1}ms\n\
         │ ────────────────────────────────────────────\n\
         │ TOTAL: {:>8.1}ms\n\
         └─────────────────────────────────────────────────",
        discover_ms,
        files.len(),
        workspaces_ms,
        plugins_ms,
        scripts_ms,
        modules.len(),
        entry_points_ms,
        entry_points.all.len(),
        resolve_ms,
        graph_ms,
        analyze_ms,
        total_ms,
    );

    // Timings are always retained here; parse/cache phases were skipped,
    // so their fields are reported as zero.
    let timings = Some(PipelineTimings {
        discover_files_ms: discover_ms,
        file_count: files.len(),
        workspaces_ms,
        workspace_count: workspaces.len(),
        plugins_ms,
        script_analysis_ms: scripts_ms,
        parse_extract_ms: 0.0,
        module_count: modules.len(),
        cache_hits: 0,
        cache_misses: 0,
        cache_update_ms: 0.0,
        entry_points_ms,
        entry_point_count: entry_points.all.len(),
        resolve_imports_ms: resolve_ms,
        build_graph_ms: graph_ms,
        analyze_ms,
        total_ms,
    });

    Ok(AnalysisOutput {
        results: result,
        timings,
        graph: Some(graph),
        modules: None,
        files: None,
        script_used_packages: plugin_result.script_used_packages.clone(),
    })
}
504
/// Core analysis pipeline: discover, parse (with cache), resolve imports,
/// build the module graph, and run dead-code analysis.
///
/// Retention flags control what the returned [`AnalysisOutput`] carries:
/// `retain` keeps timings and the graph, `retain_modules` keeps parsed
/// modules plus discovered files, `collect_usages` gathers per-export usage
/// sites, and `need_complexity` computes complexity metrics during parsing.
#[expect(
    clippy::unnecessary_wraps,
    reason = "Result kept for future error handling"
)]
#[expect(
    clippy::too_many_lines,
    reason = "main pipeline function; sequential phases are held together for clarity"
)]
fn analyze_full(
    config: &ResolvedConfig,
    retain: bool,
    collect_usages: bool,
    need_complexity: bool,
    retain_modules: bool,
) -> Result<AnalysisOutput, FallowError> {
    let _span = tracing::info_span!("fallow_analyze").entered();
    let pipeline_start = Instant::now();

    // Progress spinners only make sense on a live TTY with a human format.
    let show_progress = !config.quiet
        && std::io::IsTerminal::is_terminal(&std::io::stderr())
        && matches!(
            config.output,
            fallow_config::OutputFormat::Human
                | fallow_config::OutputFormat::Compact
                | fallow_config::OutputFormat::Markdown
        );
    let progress = progress::AnalysisProgress::new(show_progress);

    // Missing node_modules degrades resolution quality; warn but continue.
    if !config.root.join("node_modules").is_dir() {
        tracing::warn!(
            "node_modules directory not found. Run `npm install` / `pnpm install` first for accurate results."
        );
    }

    // Phase: workspace discovery.
    let t = Instant::now();
    let workspaces_vec = discover_workspaces(&config.root);
    let workspaces_ms = t.elapsed().as_secs_f64() * 1000.0;
    if !workspaces_vec.is_empty() {
        tracing::info!(count = workspaces_vec.len(), "workspaces discovered");
    }

    warn_undeclared_workspaces(
        &config.root,
        &workspaces_vec,
        &config.ignore_patterns,
        config.quiet,
    );

    // Phase: file discovery.
    let t = Instant::now();
    let pb = progress.stage_spinner("Discovering files...");
    let discovered_files = discover::discover_files(config);
    let discover_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    let project = project::ProjectState::new(discovered_files, workspaces_vec);
    let files = project.files();
    let workspaces = project.workspaces();
    let root_pkg = load_root_package_json(config);
    let workspace_pkgs = load_workspace_packages(workspaces);

    // Phase: plugin detection (root + workspaces).
    let t = Instant::now();
    let pb = progress.stage_spinner("Detecting plugins...");
    let mut plugin_result = run_plugins(
        config,
        files,
        workspaces,
        root_pkg.as_ref(),
        &workspace_pkgs,
    );
    let plugins_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Phase: package.json / CI script analysis (mutates plugin_result).
    let t = Instant::now();
    analyze_all_scripts(
        config,
        workspaces,
        root_pkg.as_ref(),
        &workspace_pkgs,
        &mut plugin_result,
    );
    let scripts_ms = t.elapsed().as_secs_f64() * 1000.0;

    // Phase: parse/extract, consulting the on-disk cache unless disabled.
    let t = Instant::now();
    let pb = progress.stage_spinner(&format!("Parsing {} files...", files.len()));
    let mut cache_store = if config.no_cache {
        None
    } else {
        cache::CacheStore::load(&config.cache_dir)
    };

    let parse_result = extract::parse_all_files(files, cache_store.as_ref(), need_complexity);
    let modules = parse_result.modules;
    let cache_hits = parse_result.cache_hits;
    let cache_misses = parse_result.cache_misses;
    let parse_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Phase: write the refreshed cache back to disk (failure is non-fatal).
    let t = Instant::now();
    if !config.no_cache {
        let store = cache_store.get_or_insert_with(cache::CacheStore::new);
        update_cache(store, &modules, files);
        if let Err(e) = store.save(&config.cache_dir) {
            tracing::warn!("Failed to save cache: {e}");
        }
    }
    let cache_ms = t.elapsed().as_secs_f64() * 1000.0;

    // Phase: entry point discovery.
    let t = Instant::now();
    let entry_points = discover_all_entry_points(
        config,
        files,
        workspaces,
        root_pkg.as_ref(),
        &workspace_pkgs,
        &plugin_result,
    );
    let entry_points_ms = t.elapsed().as_secs_f64() * 1000.0;

    // Phase: import resolution.
    let t = Instant::now();
    let pb = progress.stage_spinner("Resolving imports...");
    let mut resolved = resolve::resolve_all_imports(
        &modules,
        files,
        workspaces,
        &plugin_result.active_plugins,
        &plugin_result.path_aliases,
        &plugin_result.scss_include_paths,
        &config.root,
        &config.resolve.conditions,
    );
    external_style_usage::augment_external_style_package_usage(
        &mut resolved,
        config,
        workspaces,
        &plugin_result,
    );
    let resolve_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Phase: module graph construction + workspace usage crediting.
    let t = Instant::now();
    let pb = progress.stage_spinner("Building module graph...");
    let mut graph = graph::ModuleGraph::build_with_reachability_roots(
        &resolved,
        &entry_points.all,
        &entry_points.runtime,
        &entry_points.test,
        files,
    );
    credit_workspace_package_usage(&mut graph, &resolved, workspaces);
    let graph_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    let ep_summary = summarize_entry_points(&entry_points.all);

    // Phase: dead-code analysis.
    let t = Instant::now();
    let pb = progress.stage_spinner("Analyzing...");
    let mut result = analyze::find_dead_code_full(
        &graph,
        config,
        &resolved,
        Some(&plugin_result),
        workspaces,
        &modules,
        collect_usages,
    );
    let analyze_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();
    progress.finish();

    result.entry_point_summary = Some(ep_summary);

    let total_ms = pipeline_start.elapsed().as_secs_f64() * 1000.0;

    let cache_summary = if cache_hits > 0 {
        format!(" ({cache_hits} cached, {cache_misses} parsed)")
    } else {
        String::new()
    };

    tracing::debug!(
        "\n┌─ Pipeline Profile ─────────────────────────────\n\
         │ discover files: {:>8.1}ms ({} files)\n\
         │ workspaces: {:>8.1}ms\n\
         │ plugins: {:>8.1}ms\n\
         │ script analysis: {:>8.1}ms\n\
         │ parse/extract: {:>8.1}ms ({} modules{})\n\
         │ cache update: {:>8.1}ms\n\
         │ entry points: {:>8.1}ms ({} entries)\n\
         │ resolve imports: {:>8.1}ms\n\
         │ build graph: {:>8.1}ms\n\
         │ analyze: {:>8.1}ms\n\
         │ ────────────────────────────────────────────\n\
         │ TOTAL: {:>8.1}ms\n\
         └─────────────────────────────────────────────────",
        discover_ms,
        files.len(),
        workspaces_ms,
        plugins_ms,
        scripts_ms,
        parse_ms,
        modules.len(),
        cache_summary,
        cache_ms,
        entry_points_ms,
        entry_points.all.len(),
        resolve_ms,
        graph_ms,
        analyze_ms,
        total_ms,
    );

    let timings = if retain {
        Some(PipelineTimings {
            discover_files_ms: discover_ms,
            file_count: files.len(),
            workspaces_ms,
            workspace_count: workspaces.len(),
            plugins_ms,
            script_analysis_ms: scripts_ms,
            parse_extract_ms: parse_ms,
            module_count: modules.len(),
            cache_hits,
            cache_misses,
            cache_update_ms: cache_ms,
            entry_points_ms,
            entry_point_count: entry_points.all.len(),
            resolve_imports_ms: resolve_ms,
            build_graph_ms: graph_ms,
            analyze_ms,
            total_ms,
        })
    } else {
        None
    };

    Ok(AnalysisOutput {
        results: result,
        timings,
        graph: if retain { Some(graph) } else { None },
        modules: if retain_modules { Some(modules) } else { None },
        files: if retain_modules {
            Some(files.to_vec())
        } else {
            None
        },
        script_used_packages: plugin_result.script_used_packages,
    })
}
771
772fn load_root_package_json(config: &ResolvedConfig) -> Option<PackageJson> {
777 PackageJson::load(&config.root.join("package.json")).ok()
778}
779
780fn load_workspace_packages(
781 workspaces: &[fallow_config::WorkspaceInfo],
782) -> Vec<LoadedWorkspacePackage<'_>> {
783 workspaces
784 .iter()
785 .filter_map(|ws| {
786 PackageJson::load(&ws.root.join("package.json"))
787 .ok()
788 .map(|pkg| (ws, pkg))
789 })
790 .collect()
791}
792
/// Analyze package.json scripts (root + workspaces) and CI files, folding the
/// packages, config files, and entry patterns they reference into
/// `plugin_result`.
fn analyze_all_scripts(
    config: &ResolvedConfig,
    workspaces: &[fallow_config::WorkspaceInfo],
    root_pkg: Option<&PackageJson>,
    workspace_pkgs: &[LoadedWorkspacePackage<'_>],
    plugin_result: &mut plugins::AggregatedPluginResult,
) {
    // Gather every declared dependency name so bin names can be mapped back
    // to the package that provides them.
    let mut all_dep_names: Vec<String> = Vec::new();
    if let Some(pkg) = root_pkg {
        all_dep_names.extend(pkg.all_dependency_names());
    }
    for (_, ws_pkg) in workspace_pkgs {
        all_dep_names.extend(ws_pkg.all_dependency_names());
    }
    all_dep_names.sort_unstable();
    all_dep_names.dedup();

    // Only roots that actually have a node_modules directory can host bins.
    let mut nm_roots: Vec<&std::path::Path> = Vec::new();
    if config.root.join("node_modules").is_dir() {
        nm_roots.push(&config.root);
    }
    for ws in workspaces {
        if ws.root.join("node_modules").is_dir() {
            nm_roots.push(&ws.root);
        }
    }
    let bin_map = scripts::build_bin_to_package_map(&nm_roots, &all_dep_names);

    if let Some(pkg) = root_pkg
        && let Some(ref pkg_scripts) = pkg.scripts
    {
        // In production mode, dev-only scripts are filtered out first.
        let scripts_to_analyze = if config.production {
            scripts::filter_production_scripts(pkg_scripts)
        } else {
            pkg_scripts.clone()
        };
        let script_analysis = scripts::analyze_scripts(&scripts_to_analyze, &config.root, &bin_map);
        // NOTE(review): assignment (not extend) — assumes nothing populated
        // script_used_packages before this point; verify against run_plugins.
        plugin_result.script_used_packages = script_analysis.used_packages;

        for config_file in &script_analysis.config_files {
            plugin_result
                .discovered_always_used
                .push((config_file.clone(), "scripts".to_string()));
        }
        for entry in &script_analysis.entry_files {
            if let Some(pat) = scripts::normalize_script_entry_pattern("", entry) {
                plugin_result
                    .entry_patterns
                    .push((plugins::PathRule::new(pat), "scripts".to_string()));
            }
        }
    }
    for (ws, ws_pkg) in workspace_pkgs {
        if let Some(ref ws_scripts) = ws_pkg.scripts {
            let scripts_to_analyze = if config.production {
                scripts::filter_production_scripts(ws_scripts)
            } else {
                ws_scripts.clone()
            };
            let ws_analysis = scripts::analyze_scripts(&scripts_to_analyze, &ws.root, &bin_map);
            plugin_result
                .script_used_packages
                .extend(ws_analysis.used_packages);

            // Workspace-relative paths are re-rooted under the repo root.
            let ws_prefix = ws
                .root
                .strip_prefix(&config.root)
                .unwrap_or(&ws.root)
                .to_string_lossy();
            for config_file in &ws_analysis.config_files {
                plugin_result
                    .discovered_always_used
                    .push((format!("{ws_prefix}/{config_file}"), "scripts".to_string()));
            }
            for entry in &ws_analysis.entry_files {
                if let Some(pat) = scripts::normalize_script_entry_pattern(&ws_prefix, entry) {
                    plugin_result
                        .entry_patterns
                        .push((plugins::PathRule::new(pat), "scripts".to_string()));
                }
            }
        }
    }

    // CI workflow files can also invoke bins and reference entry files.
    let ci_analysis = scripts::ci::analyze_ci_files(&config.root, &bin_map);
    plugin_result
        .script_used_packages
        .extend(ci_analysis.used_packages);
    for entry in &ci_analysis.entry_files {
        if let Some(pat) = scripts::normalize_script_entry_pattern("", entry) {
            plugin_result
                .entry_patterns
                .push((plugins::PathRule::new(pat), "scripts".to_string()));
        }
    }
    // Script-derived entry points default to the Support role unless a plugin
    // already claimed the "scripts" key.
    plugin_result
        .entry_point_roles
        .entry("scripts".to_string())
        .or_insert(EntryPointRole::Support);
}
904
/// Aggregate entry points from every source: root package.json, workspace
/// package.jsons (in parallel), plugins, infrastructure configs, and any
/// user-declared dynamically-loaded files; deduplicated at the end.
fn discover_all_entry_points(
    config: &ResolvedConfig,
    files: &[discover::DiscoveredFile],
    workspaces: &[fallow_config::WorkspaceInfo],
    root_pkg: Option<&PackageJson>,
    workspace_pkgs: &[LoadedWorkspacePackage<'_>],
    plugin_result: &plugins::AggregatedPluginResult,
) -> discover::CategorizedEntryPoints {
    let mut entry_points = discover::CategorizedEntryPoints::default();
    let root_discovery = discover::discover_entry_points_with_warnings_from_pkg(
        config,
        files,
        root_pkg,
        workspaces.is_empty(),
    );

    // Index the already-loaded manifests so the parallel pass below can look
    // each workspace's package.json up without re-reading it from disk.
    let workspace_pkg_by_root: rustc_hash::FxHashMap<std::path::PathBuf, &PackageJson> =
        workspace_pkgs
            .iter()
            .map(|(ws, pkg)| (ws.root.clone(), pkg))
            .collect();

    let workspace_discovery: Vec<discover::EntryPointDiscovery> = workspaces
        .par_iter()
        .map(|ws| {
            let pkg = workspace_pkg_by_root.get(&ws.root).copied();
            discover::discover_workspace_entry_points_with_warnings_from_pkg(&ws.root, files, pkg)
        })
        .collect();
    // Skipped-entry counts are merged across root + all workspaces so the
    // warning below is emitted once with totals.
    let mut skipped_entries = rustc_hash::FxHashMap::default();
    entry_points.extend_runtime(root_discovery.entries);
    for (path, count) in root_discovery.skipped_entries {
        *skipped_entries.entry(path).or_insert(0) += count;
    }
    let mut ws_entries = Vec::new();
    for workspace in workspace_discovery {
        ws_entries.extend(workspace.entries);
        for (path, count) in workspace.skipped_entries {
            *skipped_entries.entry(path).or_insert(0) += count;
        }
    }
    discover::warn_skipped_entry_summary(&skipped_entries);
    entry_points.extend_runtime(ws_entries);

    let plugin_entries = discover::discover_plugin_entry_point_sets(plugin_result, config, files);
    entry_points.extend(plugin_entries);

    let infra_entries = discover::discover_infrastructure_entry_points(&config.root);
    entry_points.extend_runtime(infra_entries);

    // User-declared dynamically-loaded files act as extra runtime roots.
    if !config.dynamically_loaded.is_empty() {
        let dynamic_entries = discover::discover_dynamically_loaded_entry_points(config, files);
        entry_points.extend_runtime(dynamic_entries);
    }

    entry_points.dedup()
}
964
965fn summarize_entry_points(entry_points: &[discover::EntryPoint]) -> results::EntryPointSummary {
967 let mut counts: rustc_hash::FxHashMap<String, usize> = rustc_hash::FxHashMap::default();
968 for ep in entry_points {
969 let category = match &ep.source {
970 discover::EntryPointSource::PackageJsonMain
971 | discover::EntryPointSource::PackageJsonModule
972 | discover::EntryPointSource::PackageJsonExports
973 | discover::EntryPointSource::PackageJsonBin
974 | discover::EntryPointSource::PackageJsonScript => "package.json",
975 discover::EntryPointSource::Plugin { .. } => "plugin",
976 discover::EntryPointSource::TestFile => "test file",
977 discover::EntryPointSource::DefaultIndex => "default index",
978 discover::EntryPointSource::ManualEntry => "manual entry",
979 discover::EntryPointSource::InfrastructureConfig => "config",
980 discover::EntryPointSource::DynamicallyLoaded => "dynamically loaded",
981 };
982 *counts.entry(category.to_string()).or_insert(0) += 1;
983 }
984 let mut by_source: Vec<(String, usize)> = counts.into_iter().collect();
985 by_source.sort_by(|a, b| b.1.cmp(&a.1).then_with(|| a.0.cmp(&b.0)));
986 results::EntryPointSummary {
987 total: entry_points.len(),
988 by_source,
989 }
990}
991
/// Run plugin detection at the repo root, then (in parallel) per workspace,
/// merging every workspace's results into the root result with paths
/// re-rooted under the workspace prefix.
fn run_plugins(
    config: &ResolvedConfig,
    files: &[discover::DiscoveredFile],
    workspaces: &[fallow_config::WorkspaceInfo],
    root_pkg: Option<&PackageJson>,
    workspace_pkgs: &[LoadedWorkspacePackage<'_>],
) -> plugins::AggregatedPluginResult {
    let registry = plugins::PluginRegistry::new(config.external_plugins.clone());
    let file_paths: Vec<std::path::PathBuf> = files.iter().map(|f| f.path.clone()).collect();
    // Every directory that contains a discovered file is a candidate location
    // for plugin config files.
    let root_config_search_roots = collect_config_search_roots(&config.root, &file_paths);
    let root_config_search_root_refs: Vec<&Path> = root_config_search_roots
        .iter()
        .map(std::path::PathBuf::as_path)
        .collect();

    // No root package.json means no root plugin pass — start from defaults.
    let mut result = root_pkg.map_or_else(plugins::AggregatedPluginResult::default, |pkg| {
        registry.run_with_search_roots(
            pkg,
            &config.root,
            &file_paths,
            &root_config_search_root_refs,
        )
    });

    if workspaces.is_empty() {
        return result;
    }

    let root_active_plugins: rustc_hash::FxHashSet<&str> =
        result.active_plugins.iter().map(String::as_str).collect();

    // Matchers are compiled once and shared by the parallel workspace pass.
    let precompiled_matchers = registry.precompile_config_matchers();
    let workspace_relative_files = bucket_files_by_workspace(workspace_pkgs, &file_paths);

    let ws_results: Vec<_> = workspace_pkgs
        .par_iter()
        .zip(workspace_relative_files.par_iter())
        .filter_map(|((ws, ws_pkg), relative_files)| {
            let ws_result = registry.run_workspace_fast(
                ws_pkg,
                &ws.root,
                &config.root,
                &precompiled_matchers,
                relative_files,
                &root_active_plugins,
            );
            // Workspaces with no active plugins contribute nothing.
            if ws_result.active_plugins.is_empty() {
                return None;
            }
            let ws_prefix = ws
                .root
                .strip_prefix(&config.root)
                .unwrap_or(&ws.root)
                .to_string_lossy()
                .into_owned();
            Some((ws_result, ws_prefix))
        })
        .collect();

    // Seed dedup sets from what the root pass already collected.
    let mut seen_plugins: rustc_hash::FxHashSet<String> =
        result.active_plugins.iter().cloned().collect();
    let mut seen_prefixes: rustc_hash::FxHashSet<String> =
        result.virtual_module_prefixes.iter().cloned().collect();
    let mut seen_generated: rustc_hash::FxHashSet<String> =
        result.generated_import_patterns.iter().cloned().collect();
    for (ws_result, ws_prefix) in ws_results {
        // Re-root a workspace-relative pattern unless it is already prefixed
        // or absolute.
        let prefix_if_needed = |pat: &str| -> String {
            if pat.starts_with(ws_prefix.as_str()) || pat.starts_with('/') {
                pat.to_string()
            } else {
                format!("{ws_prefix}/{pat}")
            }
        };

        for (rule, pname) in &ws_result.entry_patterns {
            result
                .entry_patterns
                .push((rule.prefixed(&ws_prefix), pname.clone()));
        }
        // First writer wins for a plugin's entry-point role.
        for (plugin_name, role) in ws_result.entry_point_roles {
            result.entry_point_roles.entry(plugin_name).or_insert(role);
        }
        for (pat, pname) in &ws_result.always_used {
            result
                .always_used
                .push((prefix_if_needed(pat), pname.clone()));
        }
        for (pat, pname) in &ws_result.discovered_always_used {
            result
                .discovered_always_used
                .push((prefix_if_needed(pat), pname.clone()));
        }
        for (pat, pname) in &ws_result.fixture_patterns {
            result
                .fixture_patterns
                .push((prefix_if_needed(pat), pname.clone()));
        }
        for rule in &ws_result.used_exports {
            result.used_exports.push(rule.prefixed(&ws_prefix));
        }
        for plugin_name in ws_result.active_plugins {
            if !seen_plugins.contains(&plugin_name) {
                seen_plugins.insert(plugin_name.clone());
                result.active_plugins.push(plugin_name);
            }
        }
        result
            .referenced_dependencies
            .extend(ws_result.referenced_dependencies);
        result.setup_files.extend(ws_result.setup_files);
        result
            .tooling_dependencies
            .extend(ws_result.tooling_dependencies);
        for prefix in ws_result.virtual_module_prefixes {
            if !seen_prefixes.contains(&prefix) {
                seen_prefixes.insert(prefix.clone());
                result.virtual_module_prefixes.push(prefix);
            }
        }
        for pattern in ws_result.generated_import_patterns {
            if !seen_generated.contains(&pattern) {
                seen_generated.insert(pattern.clone());
                result.generated_import_patterns.push(pattern);
            }
        }
        // Alias targets are workspace-relative and must be re-rooted; the
        // alias prefix itself is left untouched.
        for (prefix, replacement) in ws_result.path_aliases {
            result
                .path_aliases
                .push((prefix, format!("{ws_prefix}/{replacement}")));
        }
    }

    result
}
1146
1147fn bucket_files_by_workspace(
1148 workspace_pkgs: &[LoadedWorkspacePackage<'_>],
1149 file_paths: &[std::path::PathBuf],
1150) -> Vec<Vec<(std::path::PathBuf, String)>> {
1151 let mut buckets = vec![Vec::new(); workspace_pkgs.len()];
1152
1153 for file_path in file_paths {
1154 for (idx, (ws, _)) in workspace_pkgs.iter().enumerate() {
1155 if let Ok(relative) = file_path.strip_prefix(&ws.root) {
1156 buckets[idx].push((file_path.clone(), relative.to_string_lossy().into_owned()));
1157 break;
1158 }
1159 }
1160 }
1161
1162 buckets
1163}
1164
/// Collect the project root plus every ancestor directory (within `root`) of
/// each discovered file. The result is deduplicated and sorted so plugin
/// config lookups are deterministic.
fn collect_config_search_roots(
    root: &Path,
    file_paths: &[std::path::PathBuf],
) -> Vec<std::path::PathBuf> {
    // BTreeSet both deduplicates and keeps the paths in sorted order.
    let mut roots: std::collections::BTreeSet<std::path::PathBuf> =
        std::collections::BTreeSet::new();
    roots.insert(root.to_path_buf());

    for file_path in file_paths {
        // `ancestors().skip(1)` walks the parent chain of the file.
        for dir in file_path.ancestors().skip(1) {
            // Directories outside the project root are never searched.
            if !dir.starts_with(root) {
                break;
            }
            roots.insert(dir.to_path_buf());
            if dir == root {
                break;
            }
        }
    }

    roots.into_iter().collect()
}
1190
1191pub fn analyze_project(root: &Path) -> Result<AnalysisResults, FallowError> {
1197 let config = default_config(root);
1198 analyze_with_usages(&config)
1199}
1200
1201pub(crate) fn default_config(root: &Path) -> ResolvedConfig {
1212 let user_config = fallow_config::FallowConfig::find_and_load(root)
1213 .ok()
1214 .flatten();
1215 match user_config {
1216 Some((mut config, _path)) => {
1217 let dead_code_production = config
1218 .production
1219 .for_analysis(fallow_config::ProductionAnalysis::DeadCode);
1220 config.production = dead_code_production.into();
1221 config.resolve(
1222 root.to_path_buf(),
1223 fallow_config::OutputFormat::Human,
1224 num_cpus(),
1225 false,
1226 true, )
1228 }
1229 None => fallow_config::FallowConfig::default().resolve(
1230 root.to_path_buf(),
1231 fallow_config::OutputFormat::Human,
1232 num_cpus(),
1233 false,
1234 true,
1235 ),
1236 }
1237}
1238
/// Degree of parallelism to use: the machine's available parallelism, or 4
/// when it cannot be determined.
fn num_cpus() -> usize {
    match std::thread::available_parallelism() {
        Ok(parallelism) => parallelism.get(),
        Err(_) => 4,
    }
}
1242
#[cfg(test)]
mod tests {
    use super::{
        bucket_files_by_workspace, collect_config_search_roots, format_undeclared_workspace_warning,
    };
    use std::path::{Path, PathBuf};

    use fallow_config::WorkspaceDiagnostic;

    /// Shorthand for a diagnostic at `root`/`relative` with an empty message.
    fn diag(root: &Path, relative: &str) -> WorkspaceDiagnostic {
        WorkspaceDiagnostic {
            message: String::new(),
            path: root.join(relative),
        }
    }

    #[test]
    fn undeclared_workspace_warning_is_singular_for_one_path() {
        let root = Path::new("/repo");
        let diagnostics = [diag(root, "packages/api")];

        let warning = format_undeclared_workspace_warning(root, &diagnostics)
            .expect("warning should be rendered");

        assert_eq!(
            warning,
            "1 directory with package.json is not declared as a workspace: packages/api. Add that path to package.json workspaces or pnpm-workspace.yaml if it should be analyzed as a workspace."
        );
    }

    #[test]
    fn undeclared_workspace_warning_summarizes_many_paths() {
        let root = PathBuf::from("/repo");
        // Six entries: one past the preview limit, forcing "(and 1 more)".
        let paths = [
            "examples/a",
            "examples/b",
            "examples/c",
            "examples/d",
            "examples/e",
            "examples/f",
        ];
        let diagnostics: Vec<_> = paths.iter().copied().map(|path| diag(&root, path)).collect();

        let warning = format_undeclared_workspace_warning(&root, &diagnostics)
            .expect("warning should be rendered");

        assert_eq!(
            warning,
            "6 directories with package.json are not declared as workspaces: examples/a, examples/b, examples/c, examples/d, examples/e (and 1 more). Add those paths to package.json workspaces or pnpm-workspace.yaml if they should be analyzed as workspaces."
        );
    }

    #[test]
    fn collect_config_search_roots_includes_file_ancestors_once() {
        let root = PathBuf::from("/repo");
        let files = [
            root.join("apps/query/src/main.ts"),
            root.join("packages/shared/lib/index.ts"),
        ];

        let search_roots = collect_config_search_roots(&root, &files);

        let expected = vec![
            root.clone(),
            root.join("apps"),
            root.join("apps/query"),
            root.join("apps/query/src"),
            root.join("packages"),
            root.join("packages/shared"),
            root.join("packages/shared/lib"),
        ];
        assert_eq!(search_roots, expected);
    }

    #[test]
    fn bucket_files_by_workspace_uses_workspace_relative_paths() {
        let root = PathBuf::from("/repo");
        let ui = fallow_config::WorkspaceInfo {
            root: root.join("apps/ui"),
            name: "ui".to_string(),
            is_internal_dependency: false,
        };
        let api = fallow_config::WorkspaceInfo {
            root: root.join("apps/api"),
            name: "api".to_string(),
            is_internal_dependency: false,
        };
        let pkg = |name: &str| fallow_config::PackageJson {
            name: Some(name.to_string()),
            ..Default::default()
        };
        let workspace_pkgs = vec![(&ui, pkg("ui")), (&api, pkg("api"))];
        let files = vec![
            root.join("apps/ui/vite.config.ts"),
            root.join("apps/ui/src/main.ts"),
            root.join("apps/api/src/server.ts"),
            root.join("tools/build.ts"),
        ];

        let buckets = bucket_files_by_workspace(&workspace_pkgs, &files);

        // Files under a workspace root are reported relative to that root;
        // `tools/build.ts` sits outside every workspace and is dropped.
        assert_eq!(
            buckets[0],
            vec![
                (
                    root.join("apps/ui/vite.config.ts"),
                    "vite.config.ts".to_string()
                ),
                (root.join("apps/ui/src/main.ts"), "src/main.ts".to_string()),
            ]
        );
        assert_eq!(
            buckets[1],
            vec![(
                root.join("apps/api/src/server.ts"),
                "src/server.ts".to_string()
            )]
        );
    }
}