1pub mod analyze;
2pub mod cache;
3pub mod churn;
4pub mod cross_reference;
5pub mod discover;
6pub mod duplicates;
7pub mod errors;
8pub mod extract;
9pub mod plugins;
10pub mod progress;
11pub mod results;
12pub mod scripts;
13pub mod suppress;
14pub mod trace;
15
16pub use fallow_graph::graph;
18pub use fallow_graph::project;
19pub use fallow_graph::resolve;
20
21use std::path::Path;
22use std::time::Instant;
23
24use errors::FallowError;
25use fallow_config::{
26 EntryPointRole, PackageJson, ResolvedConfig, discover_workspaces, find_undeclared_workspaces,
27};
28use rayon::prelude::*;
29use results::AnalysisResults;
30use trace::PipelineTimings;
31
/// The outcome of one analysis pipeline run.
pub struct AnalysisOutput {
    /// Dead-code analysis results.
    pub results: AnalysisResults,
    /// Per-stage pipeline timings; `Some` only when the caller asked to
    /// retain them (e.g. via `analyze_with_trace`).
    pub timings: Option<PipelineTimings>,
    /// The module graph built during analysis; retained only on request.
    pub graph: Option<graph::ModuleGraph>,
}
38
39fn update_cache(
41 store: &mut cache::CacheStore,
42 modules: &[extract::ModuleInfo],
43 files: &[discover::DiscoveredFile],
44) {
45 for module in modules {
46 if let Some(file) = files.get(module.file_id.0 as usize) {
47 let (mt, sz) = file_mtime_and_size(&file.path);
48 if let Some(cached) = store.get_by_path_only(&file.path)
50 && cached.content_hash == module.content_hash
51 {
52 if cached.mtime_secs != mt || cached.file_size != sz {
53 store.insert(&file.path, cache::module_to_cached(module, mt, sz));
54 }
55 continue;
56 }
57 store.insert(&file.path, cache::module_to_cached(module, mt, sz));
58 }
59 }
60 store.retain_paths(files);
61}
62
/// Returns `(mtime_as_unix_seconds, file_size_bytes)` for `path`.
///
/// Any failure — missing file, unreadable metadata, or a modification
/// time before the Unix epoch — degrades to `0` for the affected value
/// rather than erroring, since these feed cache-staleness checks only.
fn file_mtime_and_size(path: &std::path::Path) -> (u64, u64) {
    let Ok(meta) = std::fs::metadata(path) else {
        return (0, 0);
    };
    let mtime_secs = meta
        .modified()
        .ok()
        .and_then(|t| t.duration_since(std::time::SystemTime::UNIX_EPOCH).ok())
        .map(|d| d.as_secs())
        .unwrap_or(0);
    (mtime_secs, meta.len())
}
76
77pub fn analyze(config: &ResolvedConfig) -> Result<AnalysisResults, FallowError> {
83 let output = analyze_full(config, false, false)?;
84 Ok(output.results)
85}
86
87pub fn analyze_with_usages(config: &ResolvedConfig) -> Result<AnalysisResults, FallowError> {
93 let output = analyze_full(config, false, true)?;
94 Ok(output.results)
95}
96
/// Runs the full analysis pipeline while retaining pipeline timings and
/// the module graph in the returned [`AnalysisOutput`].
pub fn analyze_with_trace(config: &ResolvedConfig) -> Result<AnalysisOutput, FallowError> {
    analyze_full(config, true, false)
}
105
/// Runs the analysis pipeline reusing already-parsed `modules`, skipping
/// the parse/extract and cache-update stages entirely.
///
/// Unlike [`analyze_full`], this always returns `Some` timings and the
/// built module graph in the output.
pub fn analyze_with_parse_result(
    config: &ResolvedConfig,
    modules: &[extract::ModuleInfo],
) -> Result<AnalysisOutput, FallowError> {
    let _span = tracing::info_span!("fallow_analyze_with_parse_result").entered();
    let pipeline_start = Instant::now();

    // Only show progress spinners on an interactive stderr and for
    // human-oriented output formats.
    let show_progress = !config.quiet
        && std::io::IsTerminal::is_terminal(&std::io::stderr())
        && matches!(
            config.output,
            fallow_config::OutputFormat::Human
                | fallow_config::OutputFormat::Compact
                | fallow_config::OutputFormat::Markdown
        );
    let progress = progress::AnalysisProgress::new(show_progress);

    if !config.root.join("node_modules").is_dir() {
        tracing::warn!(
            "node_modules directory not found. Run `npm install` / `pnpm install` first for accurate results."
        );
    }

    // Stage: workspace discovery.
    let t = Instant::now();
    let workspaces_vec = discover_workspaces(&config.root);
    let workspaces_ms = t.elapsed().as_secs_f64() * 1000.0;
    if !workspaces_vec.is_empty() {
        tracing::info!(count = workspaces_vec.len(), "workspaces discovered");
    }

    // Warn about workspace-like directories not declared in the root manifest.
    if !config.quiet {
        let undeclared = find_undeclared_workspaces(&config.root, &workspaces_vec);
        for diag in &undeclared {
            tracing::warn!("{}", diag.message);
        }
    }

    // Stage: file discovery.
    let t = Instant::now();
    let pb = progress.stage_spinner("Discovering files...");
    let discovered_files = discover::discover_files(config);
    let discover_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    let project = project::ProjectState::new(discovered_files, workspaces_vec);
    let files = project.files();
    let workspaces = project.workspaces();

    // Stage: plugin detection (root + workspaces).
    let t = Instant::now();
    let pb = progress.stage_spinner("Detecting plugins...");
    let mut plugin_result = run_plugins(config, files, workspaces);
    let plugins_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Stage: package.json / CI script analysis.
    let t = Instant::now();
    analyze_all_scripts(config, workspaces, &mut plugin_result);
    let scripts_ms = t.elapsed().as_secs_f64() * 1000.0;

    // Stage: entry point discovery.
    let t = Instant::now();
    let entry_points = discover_all_entry_points(config, files, workspaces, &plugin_result);
    let entry_points_ms = t.elapsed().as_secs_f64() * 1000.0;

    let ep_summary = summarize_entry_points(&entry_points.all);

    // Stage: import resolution over the reused modules.
    let t = Instant::now();
    let pb = progress.stage_spinner("Resolving imports...");
    let resolved = resolve::resolve_all_imports(
        modules,
        files,
        workspaces,
        &plugin_result.active_plugins,
        &plugin_result.path_aliases,
        &config.root,
    );
    let resolve_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Stage: module graph construction with reachability roots.
    let t = Instant::now();
    let pb = progress.stage_spinner("Building module graph...");
    let graph = graph::ModuleGraph::build_with_reachability_roots(
        &resolved,
        &entry_points.all,
        &entry_points.runtime,
        &entry_points.test,
        files,
    );
    let graph_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Stage: dead-code analysis (usage collection disabled on this path).
    let t = Instant::now();
    let pb = progress.stage_spinner("Analyzing...");
    let mut result = analyze::find_dead_code_full(
        &graph,
        config,
        &resolved,
        Some(&plugin_result),
        workspaces,
        modules,
        false,
    );
    let analyze_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();
    progress.finish();

    result.entry_point_summary = Some(ep_summary);

    let total_ms = pipeline_start.elapsed().as_secs_f64() * 1000.0;

    // Debug-level per-stage profile table.
    tracing::debug!(
        "\n┌─ Pipeline Profile (reuse) ─────────────────────\n\
         │ discover files: {:>8.1}ms ({} files)\n\
         │ workspaces: {:>8.1}ms\n\
         │ plugins: {:>8.1}ms\n\
         │ script analysis: {:>8.1}ms\n\
         │ parse/extract: SKIPPED (reused {} modules)\n\
         │ entry points: {:>8.1}ms ({} entries)\n\
         │ resolve imports: {:>8.1}ms\n\
         │ build graph: {:>8.1}ms\n\
         │ analyze: {:>8.1}ms\n\
         │ ────────────────────────────────────────────\n\
         │ TOTAL: {:>8.1}ms\n\
         └─────────────────────────────────────────────────",
        discover_ms,
        files.len(),
        workspaces_ms,
        plugins_ms,
        scripts_ms,
        modules.len(),
        entry_points_ms,
        entry_points.all.len(),
        resolve_ms,
        graph_ms,
        analyze_ms,
        total_ms,
    );

    // Parse/extract and cache stages were skipped, so their numbers are zero.
    let timings = Some(PipelineTimings {
        discover_files_ms: discover_ms,
        file_count: files.len(),
        workspaces_ms,
        workspace_count: workspaces.len(),
        plugins_ms,
        script_analysis_ms: scripts_ms,
        parse_extract_ms: 0.0, module_count: modules.len(),
        cache_hits: 0,
        cache_misses: 0,
        cache_update_ms: 0.0,
        entry_points_ms,
        entry_point_count: entry_points.all.len(),
        resolve_imports_ms: resolve_ms,
        build_graph_ms: graph_ms,
        analyze_ms,
        total_ms,
    });

    Ok(AnalysisOutput {
        results: result,
        timings,
        graph: Some(graph),
    })
}
289
/// Runs the complete analysis pipeline: discover files and workspaces,
/// run plugins, analyze scripts, parse/extract modules (with an optional
/// on-disk cache), discover entry points, resolve imports, build the
/// module graph, and find dead code.
///
/// * `retain` — keep pipeline timings and the module graph in the output.
/// * `collect_usages` — also collect usage information during analysis.
#[expect(
    clippy::unnecessary_wraps,
    reason = "Result kept for future error handling"
)]
fn analyze_full(
    config: &ResolvedConfig,
    retain: bool,
    collect_usages: bool,
) -> Result<AnalysisOutput, FallowError> {
    let _span = tracing::info_span!("fallow_analyze").entered();
    let pipeline_start = Instant::now();

    // Only show progress spinners on an interactive stderr and for
    // human-oriented output formats.
    let show_progress = !config.quiet
        && std::io::IsTerminal::is_terminal(&std::io::stderr())
        && matches!(
            config.output,
            fallow_config::OutputFormat::Human
                | fallow_config::OutputFormat::Compact
                | fallow_config::OutputFormat::Markdown
        );
    let progress = progress::AnalysisProgress::new(show_progress);

    if !config.root.join("node_modules").is_dir() {
        tracing::warn!(
            "node_modules directory not found. Run `npm install` / `pnpm install` first for accurate results."
        );
    }

    // Stage: workspace discovery.
    let t = Instant::now();
    let workspaces_vec = discover_workspaces(&config.root);
    let workspaces_ms = t.elapsed().as_secs_f64() * 1000.0;
    if !workspaces_vec.is_empty() {
        tracing::info!(count = workspaces_vec.len(), "workspaces discovered");
    }

    // Warn about workspace-like directories not declared in the root manifest.
    if !config.quiet {
        let undeclared = find_undeclared_workspaces(&config.root, &workspaces_vec);
        for diag in &undeclared {
            tracing::warn!("{}", diag.message);
        }
    }

    // Stage: file discovery.
    let t = Instant::now();
    let pb = progress.stage_spinner("Discovering files...");
    let discovered_files = discover::discover_files(config);
    let discover_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    let project = project::ProjectState::new(discovered_files, workspaces_vec);
    let files = project.files();
    let workspaces = project.workspaces();

    // Stage: plugin detection (root + workspaces).
    let t = Instant::now();
    let pb = progress.stage_spinner("Detecting plugins...");
    let mut plugin_result = run_plugins(config, files, workspaces);
    let plugins_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Stage: package.json / CI script analysis.
    let t = Instant::now();
    analyze_all_scripts(config, workspaces, &mut plugin_result);
    let scripts_ms = t.elapsed().as_secs_f64() * 1000.0;

    // Stage: parse/extract, consulting the on-disk cache unless disabled.
    let t = Instant::now();
    let pb = progress.stage_spinner(&format!("Parsing {} files...", files.len()));
    let mut cache_store = if config.no_cache {
        None
    } else {
        cache::CacheStore::load(&config.cache_dir)
    };

    let parse_result = extract::parse_all_files(files, cache_store.as_ref());
    let modules = parse_result.modules;
    let cache_hits = parse_result.cache_hits;
    let cache_misses = parse_result.cache_misses;
    let parse_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Stage: write the refreshed cache back to disk (best-effort —
    // a save failure only produces a warning).
    let t = Instant::now();
    if !config.no_cache {
        let store = cache_store.get_or_insert_with(cache::CacheStore::new);
        update_cache(store, &modules, files);
        if let Err(e) = store.save(&config.cache_dir) {
            tracing::warn!("Failed to save cache: {e}");
        }
    }
    let cache_ms = t.elapsed().as_secs_f64() * 1000.0;

    // Stage: entry point discovery.
    let t = Instant::now();
    let entry_points = discover_all_entry_points(config, files, workspaces, &plugin_result);
    let entry_points_ms = t.elapsed().as_secs_f64() * 1000.0;

    // Stage: import resolution.
    let t = Instant::now();
    let pb = progress.stage_spinner("Resolving imports...");
    let resolved = resolve::resolve_all_imports(
        &modules,
        files,
        workspaces,
        &plugin_result.active_plugins,
        &plugin_result.path_aliases,
        &config.root,
    );
    let resolve_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Stage: module graph construction with reachability roots.
    let t = Instant::now();
    let pb = progress.stage_spinner("Building module graph...");
    let graph = graph::ModuleGraph::build_with_reachability_roots(
        &resolved,
        &entry_points.all,
        &entry_points.runtime,
        &entry_points.test,
        files,
    );
    let graph_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    let ep_summary = summarize_entry_points(&entry_points.all);

    // Stage: dead-code analysis.
    let t = Instant::now();
    let pb = progress.stage_spinner("Analyzing...");
    let mut result = analyze::find_dead_code_full(
        &graph,
        config,
        &resolved,
        Some(&plugin_result),
        workspaces,
        &modules,
        collect_usages,
    );
    let analyze_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();
    progress.finish();

    result.entry_point_summary = Some(ep_summary);

    let total_ms = pipeline_start.elapsed().as_secs_f64() * 1000.0;

    let cache_summary = if cache_hits > 0 {
        format!(" ({cache_hits} cached, {cache_misses} parsed)")
    } else {
        String::new()
    };

    // Debug-level per-stage profile table.
    tracing::debug!(
        "\n┌─ Pipeline Profile ─────────────────────────────\n\
         │ discover files: {:>8.1}ms ({} files)\n\
         │ workspaces: {:>8.1}ms\n\
         │ plugins: {:>8.1}ms\n\
         │ script analysis: {:>8.1}ms\n\
         │ parse/extract: {:>8.1}ms ({} modules{})\n\
         │ cache update: {:>8.1}ms\n\
         │ entry points: {:>8.1}ms ({} entries)\n\
         │ resolve imports: {:>8.1}ms\n\
         │ build graph: {:>8.1}ms\n\
         │ analyze: {:>8.1}ms\n\
         │ ────────────────────────────────────────────\n\
         │ TOTAL: {:>8.1}ms\n\
         └─────────────────────────────────────────────────",
        discover_ms,
        files.len(),
        workspaces_ms,
        plugins_ms,
        scripts_ms,
        parse_ms,
        modules.len(),
        cache_summary,
        cache_ms,
        entry_points_ms,
        entry_points.all.len(),
        resolve_ms,
        graph_ms,
        analyze_ms,
        total_ms,
    );

    // Timings are only materialized when the caller asked to retain them.
    let timings = if retain {
        Some(PipelineTimings {
            discover_files_ms: discover_ms,
            file_count: files.len(),
            workspaces_ms,
            workspace_count: workspaces.len(),
            plugins_ms,
            script_analysis_ms: scripts_ms,
            parse_extract_ms: parse_ms,
            module_count: modules.len(),
            cache_hits,
            cache_misses,
            cache_update_ms: cache_ms,
            entry_points_ms,
            entry_point_count: entry_points.all.len(),
            resolve_imports_ms: resolve_ms,
            build_graph_ms: graph_ms,
            analyze_ms,
            total_ms,
        })
    } else {
        None
    };

    Ok(AnalysisOutput {
        results: result,
        timings,
        graph: if retain { Some(graph) } else { None },
    })
}
513
/// Analyzes `package.json` scripts for the root package and every
/// workspace, plus CI files under the root, recording the packages they
/// use and the config files they reference into `plugin_result`.
fn analyze_all_scripts(
    config: &ResolvedConfig,
    workspaces: &[fallow_config::WorkspaceInfo],
    plugin_result: &mut plugins::AggregatedPluginResult,
) {
    // Root package.json scripts. Note: this *assigns* script_used_packages,
    // while workspaces below *extend* it.
    let pkg_path = config.root.join("package.json");
    if let Ok(pkg) = PackageJson::load(&pkg_path)
        && let Some(ref pkg_scripts) = pkg.scripts
    {
        // In production mode, only production-relevant scripts are considered.
        let scripts_to_analyze = if config.production {
            scripts::filter_production_scripts(pkg_scripts)
        } else {
            pkg_scripts.clone()
        };
        let script_analysis = scripts::analyze_scripts(&scripts_to_analyze, &config.root);
        plugin_result.script_used_packages = script_analysis.used_packages;

        // Config files referenced by scripts are marked always-used,
        // attributed to the "scripts" source.
        for config_file in &script_analysis.config_files {
            plugin_result
                .discovered_always_used
                .push((config_file.clone(), "scripts".to_string()));
        }
    }
    // Per-workspace package.json scripts.
    for ws in workspaces {
        let ws_pkg_path = ws.root.join("package.json");
        if let Ok(ws_pkg) = PackageJson::load(&ws_pkg_path)
            && let Some(ref ws_scripts) = ws_pkg.scripts
        {
            let scripts_to_analyze = if config.production {
                scripts::filter_production_scripts(ws_scripts)
            } else {
                ws_scripts.clone()
            };
            let ws_analysis = scripts::analyze_scripts(&scripts_to_analyze, &ws.root);
            plugin_result
                .script_used_packages
                .extend(ws_analysis.used_packages);

            // Workspace config-file paths are made root-relative by
            // prefixing the workspace's path relative to config.root.
            let ws_prefix = ws
                .root
                .strip_prefix(&config.root)
                .unwrap_or(&ws.root)
                .to_string_lossy();
            for config_file in &ws_analysis.config_files {
                plugin_result
                    .discovered_always_used
                    .push((format!("{ws_prefix}/{config_file}"), "scripts".to_string()));
            }
        }
    }

    // CI files under the root can also reference packages.
    let ci_packages = scripts::ci::analyze_ci_files(&config.root);
    plugin_result.script_used_packages.extend(ci_packages);
    plugin_result
        .entry_point_roles
        .entry("scripts".to_string())
        .or_insert(EntryPointRole::Support);
}
577
578fn discover_all_entry_points(
580 config: &ResolvedConfig,
581 files: &[discover::DiscoveredFile],
582 workspaces: &[fallow_config::WorkspaceInfo],
583 plugin_result: &plugins::AggregatedPluginResult,
584) -> discover::CategorizedEntryPoints {
585 let mut entry_points = discover::CategorizedEntryPoints::default();
586 entry_points.extend_runtime(discover::discover_entry_points(config, files));
587
588 let ws_entries: Vec<_> = workspaces
589 .par_iter()
590 .flat_map(|ws| discover::discover_workspace_entry_points(&ws.root, config, files))
591 .collect();
592 entry_points.extend_runtime(ws_entries);
593
594 let plugin_entries = discover::discover_plugin_entry_point_sets(plugin_result, config, files);
595 entry_points.extend(plugin_entries);
596
597 let infra_entries = discover::discover_infrastructure_entry_points(&config.root);
598 entry_points.extend_runtime(infra_entries);
599
600 if !config.dynamically_loaded.is_empty() {
602 let dynamic_entries = discover::discover_dynamically_loaded_entry_points(config, files);
603 entry_points.extend_runtime(dynamic_entries);
604 }
605
606 entry_points.dedup()
607}
608
609fn summarize_entry_points(entry_points: &[discover::EntryPoint]) -> results::EntryPointSummary {
611 let mut counts: rustc_hash::FxHashMap<String, usize> = rustc_hash::FxHashMap::default();
612 for ep in entry_points {
613 let category = match &ep.source {
614 discover::EntryPointSource::PackageJsonMain
615 | discover::EntryPointSource::PackageJsonModule
616 | discover::EntryPointSource::PackageJsonExports
617 | discover::EntryPointSource::PackageJsonBin
618 | discover::EntryPointSource::PackageJsonScript => "package.json",
619 discover::EntryPointSource::Plugin { .. } => "plugin",
620 discover::EntryPointSource::TestFile => "test file",
621 discover::EntryPointSource::DefaultIndex => "default index",
622 discover::EntryPointSource::ManualEntry => "manual entry",
623 discover::EntryPointSource::InfrastructureConfig => "config",
624 discover::EntryPointSource::DynamicallyLoaded => "dynamically loaded",
625 };
626 *counts.entry(category.to_string()).or_insert(0) += 1;
627 }
628 let mut by_source: Vec<(String, usize)> = counts.into_iter().collect();
629 by_source.sort_by(|a, b| b.1.cmp(&a.1).then_with(|| a.0.cmp(&b.0)));
630 results::EntryPointSummary {
631 total: entry_points.len(),
632 by_source,
633 }
634}
635
/// Runs the plugin registry against the root `package.json`, then against
/// every workspace, merging workspace results into one aggregated result.
///
/// Workspace-relative patterns are prefixed with the workspace's path so
/// they remain valid relative to the project root; list-valued fields are
/// deduplicated against what the root run already produced.
fn run_plugins(
    config: &ResolvedConfig,
    files: &[discover::DiscoveredFile],
    workspaces: &[fallow_config::WorkspaceInfo],
) -> plugins::AggregatedPluginResult {
    let registry = plugins::PluginRegistry::new(config.external_plugins.clone());
    let file_paths: Vec<std::path::PathBuf> = files.iter().map(|f| f.path.clone()).collect();

    // Root run: a missing/unreadable root package.json yields an empty result.
    let pkg_path = config.root.join("package.json");
    let mut result = PackageJson::load(&pkg_path).map_or_else(
        |_| plugins::AggregatedPluginResult::default(),
        |pkg| registry.run(&pkg, &config.root, &file_paths),
    );

    if workspaces.is_empty() {
        return result;
    }

    // Precompute matchers and root-relative paths once, shared across the
    // parallel per-workspace runs below.
    let precompiled_matchers = registry.precompile_config_matchers();
    let relative_files: Vec<(&std::path::PathBuf, String)> = file_paths
        .iter()
        .map(|f| {
            let rel = f
                .strip_prefix(&config.root)
                .unwrap_or(f)
                .to_string_lossy()
                .into_owned();
            (f, rel)
        })
        .collect();

    // Run plugins for each workspace in parallel; keep only workspaces
    // where at least one plugin activated.
    let ws_results: Vec<_> = workspaces
        .par_iter()
        .filter_map(|ws| {
            let ws_pkg_path = ws.root.join("package.json");
            let ws_pkg = PackageJson::load(&ws_pkg_path).ok()?;
            let ws_result = registry.run_workspace_fast(
                &ws_pkg,
                &ws.root,
                &config.root,
                &precompiled_matchers,
                &relative_files,
            );
            if ws_result.active_plugins.is_empty() {
                return None;
            }
            let ws_prefix = ws
                .root
                .strip_prefix(&config.root)
                .unwrap_or(&ws.root)
                .to_string_lossy()
                .into_owned();
            Some((ws_result, ws_prefix))
        })
        .collect();

    // Dedup sets seeded from the root result, so workspace merging never
    // duplicates entries the root run already contributed.
    let mut seen_plugins: rustc_hash::FxHashSet<String> =
        result.active_plugins.iter().cloned().collect();
    let mut seen_prefixes: rustc_hash::FxHashSet<String> =
        result.virtual_module_prefixes.iter().cloned().collect();
    let mut seen_generated: rustc_hash::FxHashSet<String> =
        result.generated_import_patterns.iter().cloned().collect();
    for (ws_result, ws_prefix) in ws_results {
        // Prefix a workspace-relative pattern with the workspace path,
        // unless it already starts with that prefix or with '/'.
        let prefix_if_needed = |pat: &str| -> String {
            if pat.starts_with(ws_prefix.as_str()) || pat.starts_with('/') {
                pat.to_string()
            } else {
                format!("{ws_prefix}/{pat}")
            }
        };

        for (pat, pname) in &ws_result.entry_patterns {
            result
                .entry_patterns
                .push((prefix_if_needed(pat), pname.clone()));
        }
        // First-writer-wins for entry point roles.
        for (plugin_name, role) in ws_result.entry_point_roles {
            result.entry_point_roles.entry(plugin_name).or_insert(role);
        }
        for (pat, pname) in &ws_result.always_used {
            result
                .always_used
                .push((prefix_if_needed(pat), pname.clone()));
        }
        for (pat, pname) in &ws_result.discovered_always_used {
            result
                .discovered_always_used
                .push((prefix_if_needed(pat), pname.clone()));
        }
        for (pat, pname) in &ws_result.fixture_patterns {
            result
                .fixture_patterns
                .push((prefix_if_needed(pat), pname.clone()));
        }
        for (file_pat, exports) in &ws_result.used_exports {
            result
                .used_exports
                .push((prefix_if_needed(file_pat), exports.clone()));
        }
        // Deduplicated merge of active plugin names.
        for plugin_name in ws_result.active_plugins {
            if !seen_plugins.contains(&plugin_name) {
                seen_plugins.insert(plugin_name.clone());
                result.active_plugins.push(plugin_name);
            }
        }
        result
            .referenced_dependencies
            .extend(ws_result.referenced_dependencies);
        result.setup_files.extend(ws_result.setup_files);
        result
            .tooling_dependencies
            .extend(ws_result.tooling_dependencies);
        // Deduplicated merge of virtual module prefixes.
        for prefix in ws_result.virtual_module_prefixes {
            if !seen_prefixes.contains(&prefix) {
                seen_prefixes.insert(prefix.clone());
                result.virtual_module_prefixes.push(prefix);
            }
        }
        // Deduplicated merge of generated-import patterns.
        for pattern in ws_result.generated_import_patterns {
            if !seen_generated.contains(&pattern) {
                seen_generated.insert(pattern.clone());
                result.generated_import_patterns.push(pattern);
            }
        }
        // Alias replacements become root-relative via the workspace prefix.
        for (prefix, replacement) in ws_result.path_aliases {
            result
                .path_aliases
                .push((prefix, format!("{ws_prefix}/{replacement}")));
        }
    }

    result
}
789
790pub fn analyze_project(root: &Path) -> Result<AnalysisResults, FallowError> {
796 let config = default_config(root);
797 analyze_with_usages(&config)
798}
799
800pub(crate) fn default_config(root: &Path) -> ResolvedConfig {
802 let user_config = fallow_config::FallowConfig::find_and_load(root)
803 .ok()
804 .flatten();
805 match user_config {
806 Some((config, _path)) => config.resolve(
807 root.to_path_buf(),
808 fallow_config::OutputFormat::Human,
809 num_cpus(),
810 false,
811 true, ),
813 None => fallow_config::FallowConfig::default().resolve(
814 root.to_path_buf(),
815 fallow_config::OutputFormat::Human,
816 num_cpus(),
817 false,
818 true,
819 ),
820 }
821}
822
/// Number of worker threads to use: the system's available parallelism,
/// falling back to 4 when it cannot be determined.
fn num_cpus() -> usize {
    match std::thread::available_parallelism() {
        Ok(n) => n.get(),
        Err(_) => 4,
    }
}