//! fallow_core/lib.rs

1pub mod analyze;
2pub mod cache;
3pub mod churn;
4pub mod cross_reference;
5pub mod discover;
6pub mod duplicates;
7pub(crate) mod errors;
8pub mod extract;
9pub mod plugins;
10pub(crate) mod progress;
11pub mod results;
12pub(crate) mod scripts;
13pub mod suppress;
14pub mod trace;
15
16// Re-export from fallow-graph for backwards compatibility
17pub use fallow_graph::graph;
18pub use fallow_graph::project;
19pub use fallow_graph::resolve;
20
21use std::path::Path;
22use std::time::Instant;
23
24use errors::FallowError;
25use fallow_config::{
26    EntryPointRole, PackageJson, ResolvedConfig, discover_workspaces, find_undeclared_workspaces,
27};
28use rayon::prelude::*;
29use results::AnalysisResults;
30use trace::PipelineTimings;
31
/// Result of the full analysis pipeline, including optional performance timings.
pub struct AnalysisOutput {
    /// Findings produced by the analysis pipeline.
    pub results: AnalysisResults,
    /// Per-stage wall-clock timings; populated only when the caller asked the
    /// pipeline to retain them (always set by `analyze_with_parse_result`).
    pub timings: Option<PipelineTimings>,
    /// The constructed module graph; populated only when retained by the
    /// pipeline (always set by `analyze_with_parse_result`).
    pub graph: Option<graph::ModuleGraph>,
}
38
39/// Update cache: write freshly parsed modules and refresh stale mtime/size entries.
40fn update_cache(
41    store: &mut cache::CacheStore,
42    modules: &[extract::ModuleInfo],
43    files: &[discover::DiscoveredFile],
44) {
45    for module in modules {
46        if let Some(file) = files.get(module.file_id.0 as usize) {
47            let (mt, sz) = file_mtime_and_size(&file.path);
48            // If content hash matches, just refresh mtime/size if stale (e.g. `touch`ed file)
49            if let Some(cached) = store.get_by_path_only(&file.path)
50                && cached.content_hash == module.content_hash
51            {
52                if cached.mtime_secs != mt || cached.file_size != sz {
53                    store.insert(&file.path, cache::module_to_cached(module, mt, sz));
54                }
55                continue;
56            }
57            store.insert(&file.path, cache::module_to_cached(module, mt, sz));
58        }
59    }
60    store.retain_paths(files);
61}
62
/// Extract mtime (seconds since epoch) and file size from a path.
///
/// Returns `(0, 0)` when the file's metadata cannot be read; the mtime alone
/// is `0` when the modification time is unavailable or predates the Unix epoch.
fn file_mtime_and_size(path: &Path) -> (u64, u64) {
    std::fs::metadata(path)
        .map(|meta| {
            let mtime = meta
                .modified()
                .ok()
                .and_then(|t| t.duration_since(std::time::SystemTime::UNIX_EPOCH).ok())
                .map_or(0, |d| d.as_secs());
            (mtime, meta.len())
        })
        .unwrap_or((0, 0))
}
76
77/// Run the full analysis pipeline.
78///
79/// # Errors
80///
81/// Returns an error if file discovery, parsing, or analysis fails.
82pub fn analyze(config: &ResolvedConfig) -> Result<AnalysisResults, FallowError> {
83    let output = analyze_full(config, false, false)?;
84    Ok(output.results)
85}
86
87/// Run the full analysis pipeline with export usage collection (for LSP Code Lens).
88///
89/// # Errors
90///
91/// Returns an error if file discovery, parsing, or analysis fails.
92pub fn analyze_with_usages(config: &ResolvedConfig) -> Result<AnalysisResults, FallowError> {
93    let output = analyze_full(config, false, true)?;
94    Ok(output.results)
95}
96
97/// Run the full analysis pipeline with optional performance timings and graph retention.
98///
99/// # Errors
100///
101/// Returns an error if file discovery, parsing, or analysis fails.
102pub fn analyze_with_trace(config: &ResolvedConfig) -> Result<AnalysisOutput, FallowError> {
103    analyze_full(config, true, false)
104}
105
/// Run the analysis pipeline using pre-parsed modules, skipping the parsing stage.
///
/// This avoids re-parsing files when the caller already has a `ParseResult` (e.g., from
/// `fallow_core::extract::parse_all_files`). Discovery, plugins, scripts, entry points,
/// import resolution, graph construction, and dead code detection still run normally.
/// The graph is always retained (needed for file scores).
///
/// # Errors
///
/// Returns an error if discovery, graph construction, or analysis fails.
pub fn analyze_with_parse_result(
    config: &ResolvedConfig,
    modules: &[extract::ModuleInfo],
) -> Result<AnalysisOutput, FallowError> {
    let _span = tracing::info_span!("fallow_analyze_with_parse_result").entered();
    let pipeline_start = Instant::now();

    // Progress spinners only for interactive, human-readable output (same gate
    // as `analyze_full`): not quiet, stderr is a TTY, and the format is
    // Human/Compact/Markdown.
    let show_progress = !config.quiet
        && std::io::IsTerminal::is_terminal(&std::io::stderr())
        && matches!(
            config.output,
            fallow_config::OutputFormat::Human
                | fallow_config::OutputFormat::Compact
                | fallow_config::OutputFormat::Markdown
        );
    let progress = progress::AnalysisProgress::new(show_progress);

    // Missing node_modules degrades import resolution — warn up front.
    if !config.root.join("node_modules").is_dir() {
        tracing::warn!(
            "node_modules directory not found. Run `npm install` / `pnpm install` first for accurate results."
        );
    }

    // Discover workspaces
    let t = Instant::now();
    let workspaces_vec = discover_workspaces(&config.root);
    let workspaces_ms = t.elapsed().as_secs_f64() * 1000.0;
    if !workspaces_vec.is_empty() {
        tracing::info!(count = workspaces_vec.len(), "workspaces discovered");
    }

    // Warn about directories with package.json not declared as workspaces
    if !config.quiet {
        let undeclared = find_undeclared_workspaces(&config.root, &workspaces_vec);
        for diag in &undeclared {
            tracing::warn!("{}", diag.message);
        }
    }

    // Stage 1: Discover files (cheap — needed for file registry and resolution)
    let t = Instant::now();
    let pb = progress.stage_spinner("Discovering files...");
    let discovered_files = discover::discover_files(config);
    let discover_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // ProjectState owns the file registry (stable FileIds) and workspace metadata.
    let project = project::ProjectState::new(discovered_files, workspaces_vec);
    let files = project.files();
    let workspaces = project.workspaces();

    // Stage 1.5: Run plugin system
    let t = Instant::now();
    let pb = progress.stage_spinner("Detecting plugins...");
    let mut plugin_result = run_plugins(config, files, workspaces);
    let plugins_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Stage 1.6: Analyze package.json scripts
    let t = Instant::now();
    analyze_all_scripts(config, workspaces, &mut plugin_result);
    let scripts_ms = t.elapsed().as_secs_f64() * 1000.0;

    // Stage 2: SKIPPED — using pre-parsed modules from caller

    // Stage 3: Discover entry points
    let t = Instant::now();
    let entry_points = discover_all_entry_points(config, files, workspaces, &plugin_result);
    let entry_points_ms = t.elapsed().as_secs_f64() * 1000.0;

    // Compute entry-point summary before the graph consumes the entry_points vec
    let ep_summary = summarize_entry_points(&entry_points.all);

    // Stage 4: Resolve imports to file IDs
    let t = Instant::now();
    let pb = progress.stage_spinner("Resolving imports...");
    let resolved = resolve::resolve_all_imports(
        modules,
        files,
        workspaces,
        &plugin_result.active_plugins,
        &plugin_result.path_aliases,
        &plugin_result.scss_include_paths,
        &config.root,
    );
    let resolve_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Stage 5: Build module graph
    let t = Instant::now();
    let pb = progress.stage_spinner("Building module graph...");
    let graph = graph::ModuleGraph::build_with_reachability_roots(
        &resolved,
        &entry_points.all,
        &entry_points.runtime,
        &entry_points.test,
        files,
    );
    let graph_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Stage 6: Analyze for dead code
    let t = Instant::now();
    let pb = progress.stage_spinner("Analyzing...");
    let mut result = analyze::find_dead_code_full(
        &graph,
        config,
        &resolved,
        Some(&plugin_result),
        workspaces,
        modules,
        false, // no export-usage collection on this path
    );
    let analyze_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();
    progress.finish();

    result.entry_point_summary = Some(ep_summary);

    let total_ms = pipeline_start.elapsed().as_secs_f64() * 1000.0;

    tracing::debug!(
        "\n┌─ Pipeline Profile (reuse) ─────────────────────\n\
         │  discover files:   {:>8.1}ms  ({} files)\n\
         │  workspaces:       {:>8.1}ms\n\
         │  plugins:          {:>8.1}ms\n\
         │  script analysis:  {:>8.1}ms\n\
         │  parse/extract:    SKIPPED (reused {} modules)\n\
         │  entry points:     {:>8.1}ms  ({} entries)\n\
         │  resolve imports:  {:>8.1}ms\n\
         │  build graph:      {:>8.1}ms\n\
         │  analyze:          {:>8.1}ms\n\
         │  ────────────────────────────────────────────\n\
         │  TOTAL:            {:>8.1}ms\n\
         └─────────────────────────────────────────────────",
        discover_ms,
        files.len(),
        workspaces_ms,
        plugins_ms,
        scripts_ms,
        modules.len(),
        entry_points_ms,
        entry_points.all.len(),
        resolve_ms,
        graph_ms,
        analyze_ms,
        total_ms,
    );

    // Timings are always reported on this path; cache/parse fields are zeroed
    // because the parse stage was skipped entirely.
    let timings = Some(PipelineTimings {
        discover_files_ms: discover_ms,
        file_count: files.len(),
        workspaces_ms,
        workspace_count: workspaces.len(),
        plugins_ms,
        script_analysis_ms: scripts_ms,
        parse_extract_ms: 0.0, // Skipped — modules were reused
        module_count: modules.len(),
        cache_hits: 0,
        cache_misses: 0,
        cache_update_ms: 0.0,
        entry_points_ms,
        entry_point_count: entry_points.all.len(),
        resolve_imports_ms: resolve_ms,
        build_graph_ms: graph_ms,
        analyze_ms,
        total_ms,
    });

    Ok(AnalysisOutput {
        results: result,
        timings,
        graph: Some(graph), // always retained on this path (see doc comment)
    })
}
290
/// Core pipeline shared by the public `analyze*` wrappers.
///
/// * `retain` — keep per-stage timings and the built module graph in the output.
/// * `collect_usages` — forwarded to dead-code analysis (export usage collection).
#[expect(
    clippy::unnecessary_wraps,
    reason = "Result kept for future error handling"
)]
#[expect(
    clippy::too_many_lines,
    reason = "main pipeline function; split candidate for sig-audit-loop"
)]
fn analyze_full(
    config: &ResolvedConfig,
    retain: bool,
    collect_usages: bool,
) -> Result<AnalysisOutput, FallowError> {
    let _span = tracing::info_span!("fallow_analyze").entered();
    let pipeline_start = Instant::now();

    // Progress bars: enabled when not quiet, stderr is a terminal, and output is human-readable.
    // Structured formats (JSON, SARIF) suppress spinners even on TTY — users piping structured
    // output don't expect progress noise on stderr.
    let show_progress = !config.quiet
        && std::io::IsTerminal::is_terminal(&std::io::stderr())
        && matches!(
            config.output,
            fallow_config::OutputFormat::Human
                | fallow_config::OutputFormat::Compact
                | fallow_config::OutputFormat::Markdown
        );
    let progress = progress::AnalysisProgress::new(show_progress);

    // Warn if node_modules is missing — resolution will be severely degraded
    if !config.root.join("node_modules").is_dir() {
        tracing::warn!(
            "node_modules directory not found. Run `npm install` / `pnpm install` first for accurate results."
        );
    }

    // Discover workspaces if in a monorepo
    let t = Instant::now();
    let workspaces_vec = discover_workspaces(&config.root);
    let workspaces_ms = t.elapsed().as_secs_f64() * 1000.0;
    if !workspaces_vec.is_empty() {
        tracing::info!(count = workspaces_vec.len(), "workspaces discovered");
    }

    // Warn about directories with package.json not declared as workspaces
    if !config.quiet {
        let undeclared = find_undeclared_workspaces(&config.root, &workspaces_vec);
        for diag in &undeclared {
            tracing::warn!("{}", diag.message);
        }
    }

    // Stage 1: Discover all source files
    let t = Instant::now();
    let pb = progress.stage_spinner("Discovering files...");
    let discovered_files = discover::discover_files(config);
    let discover_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Build ProjectState: owns the file registry with stable FileIds and workspace metadata.
    // This is the foundation for cross-workspace resolution and future incremental analysis.
    let project = project::ProjectState::new(discovered_files, workspaces_vec);
    let files = project.files();
    let workspaces = project.workspaces();

    // Stage 1.5: Run plugin system — parse config files, discover dynamic entries
    let t = Instant::now();
    let pb = progress.stage_spinner("Detecting plugins...");
    let mut plugin_result = run_plugins(config, files, workspaces);
    let plugins_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Stage 1.6: Analyze package.json scripts for binary usage and config file refs
    let t = Instant::now();
    analyze_all_scripts(config, workspaces, &mut plugin_result);
    let scripts_ms = t.elapsed().as_secs_f64() * 1000.0;

    // Stage 2: Parse all files in parallel and extract imports/exports
    let t = Instant::now();
    let pb = progress.stage_spinner(&format!("Parsing {} files...", files.len()));
    // Cache is opt-out: `--no-cache` skips both loading and the later save.
    let mut cache_store = if config.no_cache {
        None
    } else {
        cache::CacheStore::load(&config.cache_dir)
    };

    let parse_result = extract::parse_all_files(files, cache_store.as_ref(), false);
    let modules = parse_result.modules;
    let cache_hits = parse_result.cache_hits;
    let cache_misses = parse_result.cache_misses;
    let parse_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Update cache with freshly parsed modules and refresh stale mtime/size entries.
    let t = Instant::now();
    if !config.no_cache {
        // A failed `CacheStore::load` still gets a fresh store so this run's
        // parse results are persisted for the next one.
        let store = cache_store.get_or_insert_with(cache::CacheStore::new);
        update_cache(store, &modules, files);
        if let Err(e) = store.save(&config.cache_dir) {
            // A failed save only costs the next run a re-parse — warn, don't fail.
            tracing::warn!("Failed to save cache: {e}");
        }
    }
    let cache_ms = t.elapsed().as_secs_f64() * 1000.0;

    // Stage 3: Discover entry points (static patterns + plugin-discovered patterns)
    let t = Instant::now();
    let entry_points = discover_all_entry_points(config, files, workspaces, &plugin_result);
    let entry_points_ms = t.elapsed().as_secs_f64() * 1000.0;

    // Stage 4: Resolve imports to file IDs
    let t = Instant::now();
    let pb = progress.stage_spinner("Resolving imports...");
    let resolved = resolve::resolve_all_imports(
        &modules,
        files,
        workspaces,
        &plugin_result.active_plugins,
        &plugin_result.path_aliases,
        &plugin_result.scss_include_paths,
        &config.root,
    );
    let resolve_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Stage 5: Build module graph
    let t = Instant::now();
    let pb = progress.stage_spinner("Building module graph...");
    let graph = graph::ModuleGraph::build_with_reachability_roots(
        &resolved,
        &entry_points.all,
        &entry_points.runtime,
        &entry_points.test,
        files,
    );
    let graph_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Compute entry-point summary before the graph consumes the entry_points vec
    let ep_summary = summarize_entry_points(&entry_points.all);

    // Stage 6: Analyze for dead code (with plugin context and workspace info)
    let t = Instant::now();
    let pb = progress.stage_spinner("Analyzing...");
    let mut result = analyze::find_dead_code_full(
        &graph,
        config,
        &resolved,
        Some(&plugin_result),
        workspaces,
        &modules,
        collect_usages,
    );
    let analyze_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();
    progress.finish();

    result.entry_point_summary = Some(ep_summary);

    let total_ms = pipeline_start.elapsed().as_secs_f64() * 1000.0;

    // Only mention cache numbers in the profile when the cache actually hit.
    let cache_summary = if cache_hits > 0 {
        format!(" ({cache_hits} cached, {cache_misses} parsed)")
    } else {
        String::new()
    };

    tracing::debug!(
        "\n┌─ Pipeline Profile ─────────────────────────────\n\
         │  discover files:   {:>8.1}ms  ({} files)\n\
         │  workspaces:       {:>8.1}ms\n\
         │  plugins:          {:>8.1}ms\n\
         │  script analysis:  {:>8.1}ms\n\
         │  parse/extract:    {:>8.1}ms  ({} modules{})\n\
         │  cache update:     {:>8.1}ms\n\
         │  entry points:     {:>8.1}ms  ({} entries)\n\
         │  resolve imports:  {:>8.1}ms\n\
         │  build graph:      {:>8.1}ms\n\
         │  analyze:          {:>8.1}ms\n\
         │  ────────────────────────────────────────────\n\
         │  TOTAL:            {:>8.1}ms\n\
         └─────────────────────────────────────────────────",
        discover_ms,
        files.len(),
        workspaces_ms,
        plugins_ms,
        scripts_ms,
        parse_ms,
        modules.len(),
        cache_summary,
        cache_ms,
        entry_points_ms,
        entry_points.all.len(),
        resolve_ms,
        graph_ms,
        analyze_ms,
        total_ms,
    );

    // Timings and graph are only kept when the caller asked for them (`retain`).
    let timings = if retain {
        Some(PipelineTimings {
            discover_files_ms: discover_ms,
            file_count: files.len(),
            workspaces_ms,
            workspace_count: workspaces.len(),
            plugins_ms,
            script_analysis_ms: scripts_ms,
            parse_extract_ms: parse_ms,
            module_count: modules.len(),
            cache_hits,
            cache_misses,
            cache_update_ms: cache_ms,
            entry_points_ms,
            entry_point_count: entry_points.all.len(),
            resolve_imports_ms: resolve_ms,
            build_graph_ms: graph_ms,
            analyze_ms,
            total_ms,
        })
    } else {
        None
    };

    Ok(AnalysisOutput {
        results: result,
        timings,
        graph: if retain { Some(graph) } else { None },
    })
}
519
520/// Analyze package.json scripts from root and all workspace packages.
521///
522/// Populates the plugin result with script-used packages and config file
523/// entry patterns. Also scans CI config files for binary invocations.
524fn analyze_all_scripts(
525    config: &ResolvedConfig,
526    workspaces: &[fallow_config::WorkspaceInfo],
527    plugin_result: &mut plugins::AggregatedPluginResult,
528) {
529    let pkg_path = config.root.join("package.json");
530    if let Ok(pkg) = PackageJson::load(&pkg_path)
531        && let Some(ref pkg_scripts) = pkg.scripts
532    {
533        let scripts_to_analyze = if config.production {
534            scripts::filter_production_scripts(pkg_scripts)
535        } else {
536            pkg_scripts.clone()
537        };
538        let script_analysis = scripts::analyze_scripts(&scripts_to_analyze, &config.root);
539        plugin_result.script_used_packages = script_analysis.used_packages;
540
541        for config_file in &script_analysis.config_files {
542            plugin_result
543                .discovered_always_used
544                .push((config_file.clone(), "scripts".to_string()));
545        }
546    }
547    for ws in workspaces {
548        let ws_pkg_path = ws.root.join("package.json");
549        if let Ok(ws_pkg) = PackageJson::load(&ws_pkg_path)
550            && let Some(ref ws_scripts) = ws_pkg.scripts
551        {
552            let scripts_to_analyze = if config.production {
553                scripts::filter_production_scripts(ws_scripts)
554            } else {
555                ws_scripts.clone()
556            };
557            let ws_analysis = scripts::analyze_scripts(&scripts_to_analyze, &ws.root);
558            plugin_result
559                .script_used_packages
560                .extend(ws_analysis.used_packages);
561
562            let ws_prefix = ws
563                .root
564                .strip_prefix(&config.root)
565                .unwrap_or(&ws.root)
566                .to_string_lossy();
567            for config_file in &ws_analysis.config_files {
568                plugin_result
569                    .discovered_always_used
570                    .push((format!("{ws_prefix}/{config_file}"), "scripts".to_string()));
571            }
572        }
573    }
574
575    // Scan CI config files for binary invocations
576    let ci_packages = scripts::ci::analyze_ci_files(&config.root);
577    plugin_result.script_used_packages.extend(ci_packages);
578    plugin_result
579        .entry_point_roles
580        .entry("scripts".to_string())
581        .or_insert(EntryPointRole::Support);
582}
583
584/// Discover all entry points from static patterns, workspaces, plugins, and infrastructure.
585fn discover_all_entry_points(
586    config: &ResolvedConfig,
587    files: &[discover::DiscoveredFile],
588    workspaces: &[fallow_config::WorkspaceInfo],
589    plugin_result: &plugins::AggregatedPluginResult,
590) -> discover::CategorizedEntryPoints {
591    let mut entry_points = discover::CategorizedEntryPoints::default();
592    entry_points.extend_runtime(discover::discover_entry_points(config, files));
593
594    let ws_entries: Vec<_> = workspaces
595        .par_iter()
596        .flat_map(|ws| discover::discover_workspace_entry_points(&ws.root, config, files))
597        .collect();
598    entry_points.extend_runtime(ws_entries);
599
600    let plugin_entries = discover::discover_plugin_entry_point_sets(plugin_result, config, files);
601    entry_points.extend(plugin_entries);
602
603    let infra_entries = discover::discover_infrastructure_entry_points(&config.root);
604    entry_points.extend_runtime(infra_entries);
605
606    // Add dynamically loaded files from config as entry points
607    if !config.dynamically_loaded.is_empty() {
608        let dynamic_entries = discover::discover_dynamically_loaded_entry_points(config, files);
609        entry_points.extend_runtime(dynamic_entries);
610    }
611
612    entry_points.dedup()
613}
614
615/// Summarize entry points by source category for user-facing output.
616fn summarize_entry_points(entry_points: &[discover::EntryPoint]) -> results::EntryPointSummary {
617    let mut counts: rustc_hash::FxHashMap<String, usize> = rustc_hash::FxHashMap::default();
618    for ep in entry_points {
619        let category = match &ep.source {
620            discover::EntryPointSource::PackageJsonMain
621            | discover::EntryPointSource::PackageJsonModule
622            | discover::EntryPointSource::PackageJsonExports
623            | discover::EntryPointSource::PackageJsonBin
624            | discover::EntryPointSource::PackageJsonScript => "package.json",
625            discover::EntryPointSource::Plugin { .. } => "plugin",
626            discover::EntryPointSource::TestFile => "test file",
627            discover::EntryPointSource::DefaultIndex => "default index",
628            discover::EntryPointSource::ManualEntry => "manual entry",
629            discover::EntryPointSource::InfrastructureConfig => "config",
630            discover::EntryPointSource::DynamicallyLoaded => "dynamically loaded",
631        };
632        *counts.entry(category.to_string()).or_insert(0) += 1;
633    }
634    let mut by_source: Vec<(String, usize)> = counts.into_iter().collect();
635    by_source.sort_by(|a, b| b.1.cmp(&a.1).then_with(|| a.0.cmp(&b.0)));
636    results::EntryPointSummary {
637        total: entry_points.len(),
638        by_source,
639    }
640}
641
/// Run plugins for root project and all workspace packages.
///
/// The root project gets a full plugin run; each workspace gets a fast run
/// using pre-compiled config matchers, executed in parallel. Workspace results
/// are then merged into the root result, with workspace prefixing applied to
/// path-like patterns (but not to package names, virtual-module prefixes, or
/// specifier-suffix patterns).
fn run_plugins(
    config: &ResolvedConfig,
    files: &[discover::DiscoveredFile],
    workspaces: &[fallow_config::WorkspaceInfo],
) -> plugins::AggregatedPluginResult {
    let registry = plugins::PluginRegistry::new(config.external_plugins.clone());
    let file_paths: Vec<std::path::PathBuf> = files.iter().map(|f| f.path.clone()).collect();

    // Run plugins for root project (full run with external plugins, inline config, etc.)
    // A missing/unreadable root package.json yields an empty (default) result.
    let pkg_path = config.root.join("package.json");
    let mut result = PackageJson::load(&pkg_path).map_or_else(
        |_| plugins::AggregatedPluginResult::default(),
        |pkg| registry.run(&pkg, &config.root, &file_paths),
    );

    if workspaces.is_empty() {
        return result;
    }

    // Pre-compile config matchers and relative files once for all workspace runs.
    // This avoids re-compiling glob patterns and re-computing relative paths per workspace
    // (previously O(workspaces × plugins × files) glob compilations).
    let precompiled_matchers = registry.precompile_config_matchers();
    let relative_files: Vec<(&std::path::PathBuf, String)> = file_paths
        .iter()
        .map(|f| {
            let rel = f
                .strip_prefix(&config.root)
                .unwrap_or(f)
                .to_string_lossy()
                .into_owned();
            (f, rel)
        })
        .collect();

    // Run plugins for each workspace package in parallel, then merge results.
    // Workspaces without a loadable package.json or with no active plugins are skipped.
    let ws_results: Vec<_> = workspaces
        .par_iter()
        .filter_map(|ws| {
            let ws_pkg_path = ws.root.join("package.json");
            let ws_pkg = PackageJson::load(&ws_pkg_path).ok()?;
            let ws_result = registry.run_workspace_fast(
                &ws_pkg,
                &ws.root,
                &config.root,
                &precompiled_matchers,
                &relative_files,
            );
            if ws_result.active_plugins.is_empty() {
                return None;
            }
            let ws_prefix = ws
                .root
                .strip_prefix(&config.root)
                .unwrap_or(&ws.root)
                .to_string_lossy()
                .into_owned();
            Some((ws_result, ws_prefix))
        })
        .collect();

    // Merge workspace results sequentially (deterministic order via par_iter index stability)
    // Track seen names for O(1) dedup instead of O(n) Vec::contains
    let mut seen_plugins: rustc_hash::FxHashSet<String> =
        result.active_plugins.iter().cloned().collect();
    let mut seen_prefixes: rustc_hash::FxHashSet<String> =
        result.virtual_module_prefixes.iter().cloned().collect();
    let mut seen_generated: rustc_hash::FxHashSet<String> =
        result.generated_import_patterns.iter().cloned().collect();
    for (ws_result, ws_prefix) in ws_results {
        // Prefix helper: workspace-relative patterns need the workspace prefix
        // to be matchable from the monorepo root. But patterns that are already
        // project-root-relative (e.g., from angular.json which uses absolute paths
        // like "apps/client/src/styles.css") should not be double-prefixed.
        let prefix_if_needed = |pat: &str| -> String {
            if pat.starts_with(ws_prefix.as_str()) || pat.starts_with('/') {
                pat.to_string()
            } else {
                format!("{ws_prefix}/{pat}")
            }
        };

        // Entry patterns carry their own prefixing logic (`rule.prefixed`).
        for (rule, pname) in &ws_result.entry_patterns {
            result
                .entry_patterns
                .push((rule.prefixed(&ws_prefix), pname.clone()));
        }
        // First role registered for a plugin name wins (`or_insert`).
        for (plugin_name, role) in ws_result.entry_point_roles {
            result.entry_point_roles.entry(plugin_name).or_insert(role);
        }
        for (pat, pname) in &ws_result.always_used {
            result
                .always_used
                .push((prefix_if_needed(pat), pname.clone()));
        }
        for (pat, pname) in &ws_result.discovered_always_used {
            result
                .discovered_always_used
                .push((prefix_if_needed(pat), pname.clone()));
        }
        for (pat, pname) in &ws_result.fixture_patterns {
            result
                .fixture_patterns
                .push((prefix_if_needed(pat), pname.clone()));
        }
        for rule in &ws_result.used_exports {
            result.used_exports.push(rule.prefixed(&ws_prefix));
        }
        // Merge active plugin names (deduplicated via HashSet)
        for plugin_name in ws_result.active_plugins {
            if !seen_plugins.contains(&plugin_name) {
                seen_plugins.insert(plugin_name.clone());
                result.active_plugins.push(plugin_name);
            }
        }
        // These don't need prefixing (absolute paths / package names)
        result
            .referenced_dependencies
            .extend(ws_result.referenced_dependencies);
        result.setup_files.extend(ws_result.setup_files);
        result
            .tooling_dependencies
            .extend(ws_result.tooling_dependencies);
        // Virtual module prefixes (e.g., Docusaurus @theme/, @site/) are
        // package-name prefixes, not file paths — no workspace prefix needed.
        for prefix in ws_result.virtual_module_prefixes {
            if !seen_prefixes.contains(&prefix) {
                seen_prefixes.insert(prefix.clone());
                result.virtual_module_prefixes.push(prefix);
            }
        }
        // Generated import patterns (e.g., SvelteKit /$types) are suffix
        // matches on specifiers, not file paths — no workspace prefix needed.
        for pattern in ws_result.generated_import_patterns {
            if !seen_generated.contains(&pattern) {
                seen_generated.insert(pattern.clone());
                result.generated_import_patterns.push(pattern);
            }
        }
        // Path aliases from workspace plugins (e.g., SvelteKit $lib/ → src/lib).
        // Prefix the replacement directory so it resolves from the monorepo root.
        for (prefix, replacement) in ws_result.path_aliases {
            result
                .path_aliases
                .push((prefix, format!("{ws_prefix}/{replacement}")));
        }
    }

    result
}
793
794/// Run analysis on a project directory (with export usages for LSP Code Lens).
795///
796/// # Errors
797///
798/// Returns an error if config loading, file discovery, parsing, or analysis fails.
799pub fn analyze_project(root: &Path) -> Result<AnalysisResults, FallowError> {
800    let config = default_config(root);
801    analyze_with_usages(&config)
802}
803
804/// Create a default config for a project root.
805pub(crate) fn default_config(root: &Path) -> ResolvedConfig {
806    let user_config = fallow_config::FallowConfig::find_and_load(root)
807        .ok()
808        .flatten();
809    match user_config {
810        Some((config, _path)) => config.resolve(
811            root.to_path_buf(),
812            fallow_config::OutputFormat::Human,
813            num_cpus(),
814            false,
815            true, // quiet: LSP/programmatic callers don't need progress bars
816        ),
817        None => fallow_config::FallowConfig::default().resolve(
818            root.to_path_buf(),
819            fallow_config::OutputFormat::Human,
820            num_cpus(),
821            false,
822            true,
823        ),
824    }
825}
826
/// Number of CPUs to use, falling back to 4 when parallelism can't be queried.
fn num_cpus() -> usize {
    match std::thread::available_parallelism() {
        Ok(n) => n.get(),
        Err(_) => 4,
    }
}