//! fallow_core — crate root: the full analysis pipeline (discover → plugins →
//! scripts → parse → entry points → resolve → graph → dead-code analysis).
1pub mod analyze;
2pub mod cache;
3pub mod churn;
4pub mod cross_reference;
5pub mod discover;
6pub mod duplicates;
7pub mod errors;
8pub mod extract;
9pub mod plugins;
10pub mod progress;
11pub mod results;
12pub mod scripts;
13pub mod suppress;
14pub mod trace;
15
16// Re-export from fallow-graph for backwards compatibility
17pub use fallow_graph::graph;
18pub use fallow_graph::project;
19pub use fallow_graph::resolve;
20
21use std::path::Path;
22use std::time::Instant;
23
24use errors::FallowError;
25use fallow_config::{PackageJson, ResolvedConfig, discover_workspaces, find_undeclared_workspaces};
26use rayon::prelude::*;
27use results::AnalysisResults;
28use trace::PipelineTimings;
29
/// Result of the full analysis pipeline, including optional performance timings.
pub struct AnalysisOutput {
    /// Dead-code findings and summaries produced by the pipeline.
    pub results: AnalysisResults,
    /// Per-stage wall-clock timings; `Some` only when the caller requested retention
    /// (see `analyze_with_trace`) or when modules were reused (`analyze_with_parse_result`).
    pub timings: Option<PipelineTimings>,
    /// The module graph; `Some` only when retention was requested or modules were reused.
    pub graph: Option<graph::ModuleGraph>,
}
36
37/// Update cache: write freshly parsed modules and refresh stale mtime/size entries.
38fn update_cache(
39    store: &mut cache::CacheStore,
40    modules: &[extract::ModuleInfo],
41    files: &[discover::DiscoveredFile],
42) {
43    for module in modules {
44        if let Some(file) = files.get(module.file_id.0 as usize) {
45            let (mt, sz) = file_mtime_and_size(&file.path);
46            // If content hash matches, just refresh mtime/size if stale (e.g. `touch`ed file)
47            if let Some(cached) = store.get_by_path_only(&file.path)
48                && cached.content_hash == module.content_hash
49            {
50                if cached.mtime_secs != mt || cached.file_size != sz {
51                    store.insert(&file.path, cache::module_to_cached(module, mt, sz));
52                }
53                continue;
54            }
55            store.insert(&file.path, cache::module_to_cached(module, mt, sz));
56        }
57    }
58    store.retain_paths(files);
59}
60
/// Extract mtime (seconds since epoch) and file size from a path.
///
/// Returns `(0, 0)` when metadata cannot be read, and an mtime of `0` when
/// the modification time is unavailable or predates the Unix epoch.
fn file_mtime_and_size(path: &std::path::Path) -> (u64, u64) {
    let Ok(meta) = std::fs::metadata(path) else {
        return (0, 0);
    };
    let mtime = meta
        .modified()
        .ok()
        .and_then(|t| t.duration_since(std::time::SystemTime::UNIX_EPOCH).ok())
        .map_or(0, |d| d.as_secs());
    (mtime, meta.len())
}
74
75/// Run the full analysis pipeline.
76///
77/// # Errors
78///
79/// Returns an error if file discovery, parsing, or analysis fails.
80pub fn analyze(config: &ResolvedConfig) -> Result<AnalysisResults, FallowError> {
81    let output = analyze_full(config, false, false)?;
82    Ok(output.results)
83}
84
85/// Run the full analysis pipeline with export usage collection (for LSP Code Lens).
86///
87/// # Errors
88///
89/// Returns an error if file discovery, parsing, or analysis fails.
90pub fn analyze_with_usages(config: &ResolvedConfig) -> Result<AnalysisResults, FallowError> {
91    let output = analyze_full(config, false, true)?;
92    Ok(output.results)
93}
94
/// Run the full analysis pipeline with optional performance timings and graph retention.
///
/// Unlike [`analyze`], the returned [`AnalysisOutput`] has both `timings` and
/// `graph` populated (`retain = true`); export usages are not collected.
///
/// # Errors
///
/// Returns an error if file discovery, parsing, or analysis fails.
pub fn analyze_with_trace(config: &ResolvedConfig) -> Result<AnalysisOutput, FallowError> {
    analyze_full(config, true, false)
}
103
/// Run the analysis pipeline using pre-parsed modules, skipping the parsing stage.
///
/// This avoids re-parsing files when the caller already has a `ParseResult` (e.g., from
/// `fallow_core::extract::parse_all_files`). Discovery, plugins, scripts, entry points,
/// import resolution, graph construction, and dead code detection still run normally.
/// The graph is always retained (needed for file scores).
///
/// NOTE(review): this function mirrors `analyze_full` minus Stage 2 — keep the two
/// pipelines in sync when changing stage order, progress handling, or timing fields.
///
/// # Errors
///
/// Returns an error if discovery, graph construction, or analysis fails.
pub fn analyze_with_parse_result(
    config: &ResolvedConfig,
    modules: &[extract::ModuleInfo],
) -> Result<AnalysisOutput, FallowError> {
    let _span = tracing::info_span!("fallow_analyze_with_parse_result").entered();
    let pipeline_start = Instant::now();

    // Progress spinners only for human-readable output on a real terminal;
    // structured formats stay quiet on stderr.
    let show_progress = !config.quiet
        && std::io::IsTerminal::is_terminal(&std::io::stderr())
        && matches!(
            config.output,
            fallow_config::OutputFormat::Human
                | fallow_config::OutputFormat::Compact
                | fallow_config::OutputFormat::Markdown
        );
    let progress = progress::AnalysisProgress::new(show_progress);

    // Missing node_modules degrades import resolution — warn up front.
    if !config.root.join("node_modules").is_dir() {
        tracing::warn!(
            "node_modules directory not found. Run `npm install` / `pnpm install` first for accurate results."
        );
    }

    // Discover workspaces
    let t = Instant::now();
    let workspaces_vec = discover_workspaces(&config.root);
    let workspaces_ms = t.elapsed().as_secs_f64() * 1000.0;
    if !workspaces_vec.is_empty() {
        tracing::info!(count = workspaces_vec.len(), "workspaces discovered");
    }

    // Warn about directories with package.json not declared as workspaces
    if !config.quiet {
        let undeclared = find_undeclared_workspaces(&config.root, &workspaces_vec);
        for diag in &undeclared {
            tracing::warn!("{}", diag.message);
        }
    }

    // Stage 1: Discover files (cheap — needed for file registry and resolution)
    let t = Instant::now();
    let pb = progress.stage_spinner("Discovering files...");
    let discovered_files = discover::discover_files(config);
    let discover_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // ProjectState owns the file registry (stable FileIds) and workspace metadata.
    let project = project::ProjectState::new(discovered_files, workspaces_vec);
    let files = project.files();
    let workspaces = project.workspaces();

    // Stage 1.5: Run plugin system
    let t = Instant::now();
    let pb = progress.stage_spinner("Detecting plugins...");
    let mut plugin_result = run_plugins(config, files, workspaces);
    let plugins_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Stage 1.6: Analyze package.json scripts
    let t = Instant::now();
    analyze_all_scripts(config, workspaces, &mut plugin_result);
    let scripts_ms = t.elapsed().as_secs_f64() * 1000.0;

    // Stage 2: SKIPPED — using pre-parsed modules from caller

    // Stage 3: Discover entry points
    let t = Instant::now();
    let entry_points = discover_all_entry_points(config, files, workspaces, &plugin_result);
    let entry_points_ms = t.elapsed().as_secs_f64() * 1000.0;

    // Compute entry-point summary before the graph consumes the entry_points vec
    let ep_summary = summarize_entry_points(&entry_points);

    // Stage 4: Resolve imports to file IDs
    let t = Instant::now();
    let pb = progress.stage_spinner("Resolving imports...");
    let resolved = resolve::resolve_all_imports(
        modules,
        files,
        workspaces,
        &plugin_result.active_plugins,
        &plugin_result.path_aliases,
        &config.root,
    );
    let resolve_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Stage 5: Build module graph
    let t = Instant::now();
    let pb = progress.stage_spinner("Building module graph...");
    let graph = graph::ModuleGraph::build(&resolved, &entry_points, files);
    let graph_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Stage 6: Analyze for dead code
    let t = Instant::now();
    let pb = progress.stage_spinner("Analyzing...");
    let mut result = analyze::find_dead_code_full(
        &graph,
        config,
        &resolved,
        Some(&plugin_result),
        workspaces,
        modules,
        false,
    );
    let analyze_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();
    progress.finish();

    result.entry_point_summary = Some(ep_summary);

    let total_ms = pipeline_start.elapsed().as_secs_f64() * 1000.0;

    tracing::debug!(
        "\n┌─ Pipeline Profile (reuse) ─────────────────────\n\
         │  discover files:   {:>8.1}ms  ({} files)\n\
         │  workspaces:       {:>8.1}ms\n\
         │  plugins:          {:>8.1}ms\n\
         │  script analysis:  {:>8.1}ms\n\
         │  parse/extract:    SKIPPED (reused {} modules)\n\
         │  entry points:     {:>8.1}ms  ({} entries)\n\
         │  resolve imports:  {:>8.1}ms\n\
         │  build graph:      {:>8.1}ms\n\
         │  analyze:          {:>8.1}ms\n\
         │  ────────────────────────────────────────────\n\
         │  TOTAL:            {:>8.1}ms\n\
         └─────────────────────────────────────────────────",
        discover_ms,
        files.len(),
        workspaces_ms,
        plugins_ms,
        scripts_ms,
        modules.len(),
        entry_points_ms,
        entry_points.len(),
        resolve_ms,
        graph_ms,
        analyze_ms,
        total_ms,
    );

    // Timings are always reported here (unlike analyze_full, which gates on `retain`).
    let timings = Some(PipelineTimings {
        discover_files_ms: discover_ms,
        file_count: files.len(),
        workspaces_ms,
        workspace_count: workspaces.len(),
        plugins_ms,
        script_analysis_ms: scripts_ms,
        parse_extract_ms: 0.0, // Skipped — modules were reused
        module_count: modules.len(),
        cache_hits: 0,
        cache_misses: 0,
        cache_update_ms: 0.0,
        entry_points_ms,
        entry_point_count: entry_points.len(),
        resolve_imports_ms: resolve_ms,
        build_graph_ms: graph_ms,
        analyze_ms,
        total_ms,
    });

    Ok(AnalysisOutput {
        results: result,
        timings,
        graph: Some(graph),
    })
}
281
/// Shared pipeline implementation behind [`analyze`], [`analyze_with_usages`],
/// and [`analyze_with_trace`].
///
/// `retain` keeps per-stage timings and the module graph in the output;
/// `collect_usages` is forwarded to the dead-code analysis stage.
#[expect(
    clippy::unnecessary_wraps,
    reason = "Result kept for future error handling"
)]
fn analyze_full(
    config: &ResolvedConfig,
    retain: bool,
    collect_usages: bool,
) -> Result<AnalysisOutput, FallowError> {
    let _span = tracing::info_span!("fallow_analyze").entered();
    let pipeline_start = Instant::now();

    // Progress bars: enabled when not quiet, stderr is a terminal, and output is human-readable.
    // Structured formats (JSON, SARIF) suppress spinners even on TTY — users piping structured
    // output don't expect progress noise on stderr.
    let show_progress = !config.quiet
        && std::io::IsTerminal::is_terminal(&std::io::stderr())
        && matches!(
            config.output,
            fallow_config::OutputFormat::Human
                | fallow_config::OutputFormat::Compact
                | fallow_config::OutputFormat::Markdown
        );
    let progress = progress::AnalysisProgress::new(show_progress);

    // Warn if node_modules is missing — resolution will be severely degraded
    if !config.root.join("node_modules").is_dir() {
        tracing::warn!(
            "node_modules directory not found. Run `npm install` / `pnpm install` first for accurate results."
        );
    }

    // Discover workspaces if in a monorepo
    let t = Instant::now();
    let workspaces_vec = discover_workspaces(&config.root);
    let workspaces_ms = t.elapsed().as_secs_f64() * 1000.0;
    if !workspaces_vec.is_empty() {
        tracing::info!(count = workspaces_vec.len(), "workspaces discovered");
    }

    // Warn about directories with package.json not declared as workspaces
    if !config.quiet {
        let undeclared = find_undeclared_workspaces(&config.root, &workspaces_vec);
        for diag in &undeclared {
            tracing::warn!("{}", diag.message);
        }
    }

    // Stage 1: Discover all source files
    let t = Instant::now();
    let pb = progress.stage_spinner("Discovering files...");
    let discovered_files = discover::discover_files(config);
    let discover_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Build ProjectState: owns the file registry with stable FileIds and workspace metadata.
    // This is the foundation for cross-workspace resolution and future incremental analysis.
    let project = project::ProjectState::new(discovered_files, workspaces_vec);
    let files = project.files();
    let workspaces = project.workspaces();

    // Stage 1.5: Run plugin system — parse config files, discover dynamic entries
    let t = Instant::now();
    let pb = progress.stage_spinner("Detecting plugins...");
    let mut plugin_result = run_plugins(config, files, workspaces);
    let plugins_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Stage 1.6: Analyze package.json scripts for binary usage and config file refs
    let t = Instant::now();
    analyze_all_scripts(config, workspaces, &mut plugin_result);
    let scripts_ms = t.elapsed().as_secs_f64() * 1000.0;

    // Stage 2: Parse all files in parallel and extract imports/exports
    let t = Instant::now();
    let pb = progress.stage_spinner(&format!("Parsing {} files...", files.len()));
    // `--no-cache` skips loading entirely; otherwise the on-disk cache feeds parse reuse.
    let mut cache_store = if config.no_cache {
        None
    } else {
        cache::CacheStore::load(&config.cache_dir)
    };

    let parse_result = extract::parse_all_files(files, cache_store.as_ref());
    let modules = parse_result.modules;
    let cache_hits = parse_result.cache_hits;
    let cache_misses = parse_result.cache_misses;
    let parse_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Update cache with freshly parsed modules and refresh stale mtime/size entries.
    // A failed save is non-fatal: analysis results are unaffected, only reuse suffers.
    let t = Instant::now();
    if !config.no_cache {
        let store = cache_store.get_or_insert_with(cache::CacheStore::new);
        update_cache(store, &modules, files);
        if let Err(e) = store.save(&config.cache_dir) {
            tracing::warn!("Failed to save cache: {e}");
        }
    }
    let cache_ms = t.elapsed().as_secs_f64() * 1000.0;

    // Stage 3: Discover entry points (static patterns + plugin-discovered patterns)
    let t = Instant::now();
    let entry_points = discover_all_entry_points(config, files, workspaces, &plugin_result);
    let entry_points_ms = t.elapsed().as_secs_f64() * 1000.0;

    // Stage 4: Resolve imports to file IDs
    let t = Instant::now();
    let pb = progress.stage_spinner("Resolving imports...");
    let resolved = resolve::resolve_all_imports(
        &modules,
        files,
        workspaces,
        &plugin_result.active_plugins,
        &plugin_result.path_aliases,
        &config.root,
    );
    let resolve_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Stage 5: Build module graph
    let t = Instant::now();
    let pb = progress.stage_spinner("Building module graph...");
    let graph = graph::ModuleGraph::build(&resolved, &entry_points, files);
    let graph_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Compute entry-point summary before the graph consumes the entry_points vec
    let ep_summary = summarize_entry_points(&entry_points);

    // Stage 6: Analyze for dead code (with plugin context and workspace info)
    let t = Instant::now();
    let pb = progress.stage_spinner("Analyzing...");
    let mut result = analyze::find_dead_code_full(
        &graph,
        config,
        &resolved,
        Some(&plugin_result),
        workspaces,
        &modules,
        collect_usages,
    );
    let analyze_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();
    progress.finish();

    result.entry_point_summary = Some(ep_summary);

    let total_ms = pipeline_start.elapsed().as_secs_f64() * 1000.0;

    let cache_summary = if cache_hits > 0 {
        format!(" ({cache_hits} cached, {cache_misses} parsed)")
    } else {
        String::new()
    };

    tracing::debug!(
        "\n┌─ Pipeline Profile ─────────────────────────────\n\
         │  discover files:   {:>8.1}ms  ({} files)\n\
         │  workspaces:       {:>8.1}ms\n\
         │  plugins:          {:>8.1}ms\n\
         │  script analysis:  {:>8.1}ms\n\
         │  parse/extract:    {:>8.1}ms  ({} modules{})\n\
         │  cache update:     {:>8.1}ms\n\
         │  entry points:     {:>8.1}ms  ({} entries)\n\
         │  resolve imports:  {:>8.1}ms\n\
         │  build graph:      {:>8.1}ms\n\
         │  analyze:          {:>8.1}ms\n\
         │  ────────────────────────────────────────────\n\
         │  TOTAL:            {:>8.1}ms\n\
         └─────────────────────────────────────────────────",
        discover_ms,
        files.len(),
        workspaces_ms,
        plugins_ms,
        scripts_ms,
        parse_ms,
        modules.len(),
        cache_summary,
        cache_ms,
        entry_points_ms,
        entry_points.len(),
        resolve_ms,
        graph_ms,
        analyze_ms,
        total_ms,
    );

    // Timings and graph are only materialized when the caller asked for them.
    let timings = if retain {
        Some(PipelineTimings {
            discover_files_ms: discover_ms,
            file_count: files.len(),
            workspaces_ms,
            workspace_count: workspaces.len(),
            plugins_ms,
            script_analysis_ms: scripts_ms,
            parse_extract_ms: parse_ms,
            module_count: modules.len(),
            cache_hits,
            cache_misses,
            cache_update_ms: cache_ms,
            entry_points_ms,
            entry_point_count: entry_points.len(),
            resolve_imports_ms: resolve_ms,
            build_graph_ms: graph_ms,
            analyze_ms,
            total_ms,
        })
    } else {
        None
    };

    Ok(AnalysisOutput {
        results: result,
        timings,
        graph: if retain { Some(graph) } else { None },
    })
}
499
500/// Analyze package.json scripts from root and all workspace packages.
501///
502/// Populates the plugin result with script-used packages and config file
503/// entry patterns. Also scans CI config files for binary invocations.
504fn analyze_all_scripts(
505    config: &ResolvedConfig,
506    workspaces: &[fallow_config::WorkspaceInfo],
507    plugin_result: &mut plugins::AggregatedPluginResult,
508) {
509    let pkg_path = config.root.join("package.json");
510    if let Ok(pkg) = PackageJson::load(&pkg_path)
511        && let Some(ref pkg_scripts) = pkg.scripts
512    {
513        let scripts_to_analyze = if config.production {
514            scripts::filter_production_scripts(pkg_scripts)
515        } else {
516            pkg_scripts.clone()
517        };
518        let script_analysis = scripts::analyze_scripts(&scripts_to_analyze, &config.root);
519        plugin_result.script_used_packages = script_analysis.used_packages;
520
521        for config_file in &script_analysis.config_files {
522            plugin_result
523                .entry_patterns
524                .push((config_file.clone(), "scripts".to_string()));
525        }
526    }
527    for ws in workspaces {
528        let ws_pkg_path = ws.root.join("package.json");
529        if let Ok(ws_pkg) = PackageJson::load(&ws_pkg_path)
530            && let Some(ref ws_scripts) = ws_pkg.scripts
531        {
532            let scripts_to_analyze = if config.production {
533                scripts::filter_production_scripts(ws_scripts)
534            } else {
535                ws_scripts.clone()
536            };
537            let ws_analysis = scripts::analyze_scripts(&scripts_to_analyze, &ws.root);
538            plugin_result
539                .script_used_packages
540                .extend(ws_analysis.used_packages);
541
542            let ws_prefix = ws
543                .root
544                .strip_prefix(&config.root)
545                .unwrap_or(&ws.root)
546                .to_string_lossy();
547            for config_file in &ws_analysis.config_files {
548                plugin_result
549                    .entry_patterns
550                    .push((format!("{ws_prefix}/{config_file}"), "scripts".to_string()));
551            }
552        }
553    }
554
555    // Scan CI config files for binary invocations
556    let ci_packages = scripts::ci::analyze_ci_files(&config.root);
557    plugin_result.script_used_packages.extend(ci_packages);
558}
559
560/// Discover all entry points from static patterns, workspaces, plugins, and infrastructure.
561fn discover_all_entry_points(
562    config: &ResolvedConfig,
563    files: &[discover::DiscoveredFile],
564    workspaces: &[fallow_config::WorkspaceInfo],
565    plugin_result: &plugins::AggregatedPluginResult,
566) -> Vec<discover::EntryPoint> {
567    let mut entry_points = discover::discover_entry_points(config, files);
568    let ws_entries: Vec<_> = workspaces
569        .par_iter()
570        .flat_map(|ws| discover::discover_workspace_entry_points(&ws.root, config, files))
571        .collect();
572    entry_points.extend(ws_entries);
573    let plugin_entries = discover::discover_plugin_entry_points(plugin_result, config, files);
574    entry_points.extend(plugin_entries);
575    let infra_entries = discover::discover_infrastructure_entry_points(&config.root);
576    entry_points.extend(infra_entries);
577
578    // Add dynamically loaded files from config as entry points
579    if !config.dynamically_loaded.is_empty() {
580        let dynamic_entries = discover::discover_dynamically_loaded_entry_points(config, files);
581        entry_points.extend(dynamic_entries);
582    }
583
584    entry_points
585}
586
587/// Summarize entry points by source category for user-facing output.
588fn summarize_entry_points(entry_points: &[discover::EntryPoint]) -> results::EntryPointSummary {
589    let mut counts: rustc_hash::FxHashMap<String, usize> = rustc_hash::FxHashMap::default();
590    for ep in entry_points {
591        let category = match &ep.source {
592            discover::EntryPointSource::PackageJsonMain
593            | discover::EntryPointSource::PackageJsonModule
594            | discover::EntryPointSource::PackageJsonExports
595            | discover::EntryPointSource::PackageJsonBin
596            | discover::EntryPointSource::PackageJsonScript => "package.json",
597            discover::EntryPointSource::Plugin { .. } => "plugin",
598            discover::EntryPointSource::TestFile => "test file",
599            discover::EntryPointSource::DefaultIndex => "default index",
600            discover::EntryPointSource::ManualEntry => "manual entry",
601            discover::EntryPointSource::InfrastructureConfig => "config",
602            discover::EntryPointSource::DynamicallyLoaded => "dynamically loaded",
603        };
604        *counts.entry(category.to_string()).or_insert(0) += 1;
605    }
606    let mut by_source: Vec<(String, usize)> = counts.into_iter().collect();
607    by_source.sort_by(|a, b| b.1.cmp(&a.1).then_with(|| a.0.cmp(&b.0)));
608    results::EntryPointSummary {
609        total: entry_points.len(),
610        by_source,
611    }
612}
613
/// Run plugins for root project and all workspace packages.
///
/// The root run is a full plugin pass; workspace packages use the faster
/// precompiled-matcher path and their results are merged into the root result,
/// with workspace-relative patterns prefixed so they match from the monorepo root.
fn run_plugins(
    config: &ResolvedConfig,
    files: &[discover::DiscoveredFile],
    workspaces: &[fallow_config::WorkspaceInfo],
) -> plugins::AggregatedPluginResult {
    let registry = plugins::PluginRegistry::new(config.external_plugins.clone());
    let file_paths: Vec<std::path::PathBuf> = files.iter().map(|f| f.path.clone()).collect();

    // Run plugins for root project (full run with external plugins, inline config, etc.)
    // A missing/unreadable root package.json yields an empty (default) result.
    let pkg_path = config.root.join("package.json");
    let mut result = PackageJson::load(&pkg_path).map_or_else(
        |_| plugins::AggregatedPluginResult::default(),
        |pkg| registry.run(&pkg, &config.root, &file_paths),
    );

    if workspaces.is_empty() {
        return result;
    }

    // Pre-compile config matchers and relative files once for all workspace runs.
    // This avoids re-compiling glob patterns and re-computing relative paths per workspace
    // (previously O(workspaces × plugins × files) glob compilations).
    let precompiled_matchers = registry.precompile_config_matchers();
    let relative_files: Vec<(&std::path::PathBuf, String)> = file_paths
        .iter()
        .map(|f| {
            let rel = f
                .strip_prefix(&config.root)
                .unwrap_or(f)
                .to_string_lossy()
                .into_owned();
            (f, rel)
        })
        .collect();

    // Run plugins for each workspace package in parallel, then merge results.
    // Workspaces without a loadable package.json or with no active plugins are skipped.
    let ws_results: Vec<_> = workspaces
        .par_iter()
        .filter_map(|ws| {
            let ws_pkg_path = ws.root.join("package.json");
            let ws_pkg = PackageJson::load(&ws_pkg_path).ok()?;
            let ws_result = registry.run_workspace_fast(
                &ws_pkg,
                &ws.root,
                &config.root,
                &precompiled_matchers,
                &relative_files,
            );
            if ws_result.active_plugins.is_empty() {
                return None;
            }
            let ws_prefix = ws
                .root
                .strip_prefix(&config.root)
                .unwrap_or(&ws.root)
                .to_string_lossy()
                .into_owned();
            Some((ws_result, ws_prefix))
        })
        .collect();

    // Merge workspace results sequentially (deterministic order via par_iter index stability)
    // Track seen names for O(1) dedup instead of O(n) Vec::contains
    let mut seen_plugins: rustc_hash::FxHashSet<String> =
        result.active_plugins.iter().cloned().collect();
    let mut seen_prefixes: rustc_hash::FxHashSet<String> =
        result.virtual_module_prefixes.iter().cloned().collect();
    for (ws_result, ws_prefix) in ws_results {
        // Prefix helper: workspace-relative patterns need the workspace prefix
        // to be matchable from the monorepo root. But patterns that are already
        // project-root-relative (e.g., from angular.json which uses absolute paths
        // like "apps/client/src/styles.css") should not be double-prefixed.
        let prefix_if_needed = |pat: &str| -> String {
            if pat.starts_with(ws_prefix.as_str()) || pat.starts_with('/') {
                pat.to_string()
            } else {
                format!("{ws_prefix}/{pat}")
            }
        };

        // Pattern-valued fields all go through prefix_if_needed.
        for (pat, pname) in &ws_result.entry_patterns {
            result
                .entry_patterns
                .push((prefix_if_needed(pat), pname.clone()));
        }
        for (pat, pname) in &ws_result.always_used {
            result
                .always_used
                .push((prefix_if_needed(pat), pname.clone()));
        }
        for (pat, pname) in &ws_result.discovered_always_used {
            result
                .discovered_always_used
                .push((prefix_if_needed(pat), pname.clone()));
        }
        for (pat, pname) in &ws_result.fixture_patterns {
            result
                .fixture_patterns
                .push((prefix_if_needed(pat), pname.clone()));
        }
        for (file_pat, exports) in &ws_result.used_exports {
            result
                .used_exports
                .push((prefix_if_needed(file_pat), exports.clone()));
        }
        // Merge active plugin names (deduplicated via HashSet)
        for plugin_name in ws_result.active_plugins {
            if !seen_plugins.contains(&plugin_name) {
                seen_plugins.insert(plugin_name.clone());
                result.active_plugins.push(plugin_name);
            }
        }
        // These don't need prefixing (absolute paths / package names)
        result
            .referenced_dependencies
            .extend(ws_result.referenced_dependencies);
        result.setup_files.extend(ws_result.setup_files);
        result
            .tooling_dependencies
            .extend(ws_result.tooling_dependencies);
        // Virtual module prefixes (e.g., Docusaurus @theme/, @site/) are
        // package-name prefixes, not file paths — no workspace prefix needed.
        for prefix in ws_result.virtual_module_prefixes {
            if !seen_prefixes.contains(&prefix) {
                seen_prefixes.insert(prefix.clone());
                result.virtual_module_prefixes.push(prefix);
            }
        }
    }

    result
}
747
748/// Run analysis on a project directory (with export usages for LSP Code Lens).
749///
750/// # Errors
751///
752/// Returns an error if config loading, file discovery, parsing, or analysis fails.
753pub fn analyze_project(root: &Path) -> Result<AnalysisResults, FallowError> {
754    let config = default_config(root);
755    analyze_with_usages(&config)
756}
757
758/// Create a default config for a project root.
759pub(crate) fn default_config(root: &Path) -> ResolvedConfig {
760    let user_config = fallow_config::FallowConfig::find_and_load(root)
761        .ok()
762        .flatten();
763    match user_config {
764        Some((config, _path)) => config.resolve(
765            root.to_path_buf(),
766            fallow_config::OutputFormat::Human,
767            num_cpus(),
768            false,
769            true, // quiet: LSP/programmatic callers don't need progress bars
770        ),
771        None => fallow_config::FallowConfig::default().resolve(
772            root.to_path_buf(),
773            fallow_config::OutputFormat::Human,
774            num_cpus(),
775            false,
776            true,
777        ),
778    }
779}
780
/// Number of logical CPUs available to the process, falling back to 4 when
/// the query fails (e.g. unsupported platform).
fn num_cpus() -> usize {
    match std::thread::available_parallelism() {
        Ok(n) => n.get(),
        Err(_) => 4,
    }
}