//! `fallow_core` — library root (`lib.rs`): orchestrates the analysis pipeline.

pub mod analyze;
pub mod cache;
pub mod churn;
pub mod cross_reference;
pub mod discover;
pub mod duplicates;
pub mod errors;
pub mod extract;
pub mod plugins;
pub mod progress;
pub mod results;
pub mod scripts;
pub mod suppress;
pub mod trace;

// Re-export from fallow-graph for backwards compatibility
pub use fallow_graph::graph;
pub use fallow_graph::project;
pub use fallow_graph::resolve;

use std::path::Path;
use std::time::Instant;

use errors::FallowError;
use fallow_config::{PackageJson, ResolvedConfig, discover_workspaces};
use rayon::prelude::*;
use results::AnalysisResults;
use trace::PipelineTimings;
29
/// Result of the full analysis pipeline, including optional performance timings.
pub struct AnalysisOutput {
    /// Dead-code findings and related analysis results.
    pub results: AnalysisResults,
    /// Per-stage pipeline timings; `Some` only when the caller requested them
    /// (`analyze_with_trace` / `analyze_with_parse_result`).
    pub timings: Option<PipelineTimings>,
    /// The module graph; `Some` only when the caller asked for it to be retained
    /// (always retained by `analyze_with_parse_result`).
    pub graph: Option<graph::ModuleGraph>,
}
36
37/// Update cache: write freshly parsed modules and refresh stale mtime/size entries.
38fn update_cache(
39    store: &mut cache::CacheStore,
40    modules: &[extract::ModuleInfo],
41    files: &[discover::DiscoveredFile],
42) {
43    for module in modules {
44        if let Some(file) = files.get(module.file_id.0 as usize) {
45            let (mt, sz) = file_mtime_and_size(&file.path);
46            // If content hash matches, just refresh mtime/size if stale (e.g. `touch`ed file)
47            if let Some(cached) = store.get_by_path_only(&file.path)
48                && cached.content_hash == module.content_hash
49            {
50                if cached.mtime_secs != mt || cached.file_size != sz {
51                    store.insert(&file.path, cache::module_to_cached(module, mt, sz));
52                }
53                continue;
54            }
55            store.insert(&file.path, cache::module_to_cached(module, mt, sz));
56        }
57    }
58    store.retain_paths(files);
59}
60
/// Extract mtime (seconds since epoch) and file size from a path.
///
/// Any I/O or clock error degrades to `0` instead of failing, so callers never
/// have to handle errors for this best-effort metadata lookup.
fn file_mtime_and_size(path: &std::path::Path) -> (u64, u64) {
    let Ok(meta) = std::fs::metadata(path) else {
        return (0, 0);
    };
    // Mtimes before the Unix epoch (duration_since fails) also collapse to 0.
    let mtime = meta
        .modified()
        .ok()
        .and_then(|t| t.duration_since(std::time::SystemTime::UNIX_EPOCH).ok())
        .map_or(0, |d| d.as_secs());
    (mtime, meta.len())
}
74
75/// Run the full analysis pipeline.
76pub fn analyze(config: &ResolvedConfig) -> Result<AnalysisResults, FallowError> {
77    let output = analyze_full(config, false, false)?;
78    Ok(output.results)
79}
80
81/// Run the full analysis pipeline with export usage collection (for LSP Code Lens).
82pub fn analyze_with_usages(config: &ResolvedConfig) -> Result<AnalysisResults, FallowError> {
83    let output = analyze_full(config, false, true)?;
84    Ok(output.results)
85}
86
/// Run the full analysis pipeline with optional performance timings and graph retention.
///
/// Like [`analyze`], but asks the pipeline to retain per-stage timings and the
/// module graph in the returned [`AnalysisOutput`]; export usages are not collected.
pub fn analyze_with_trace(config: &ResolvedConfig) -> Result<AnalysisOutput, FallowError> {
    analyze_full(config, true, false)
}
91
/// Run the analysis pipeline using pre-parsed modules, skipping the parsing stage.
///
/// This avoids re-parsing files when the caller already has a `ParseResult` (e.g., from
/// `fallow_core::extract::parse_all_files`). Discovery, plugins, scripts, entry points,
/// import resolution, graph construction, and dead code detection still run normally.
/// The graph is always retained (needed for file scores).
///
/// Timings are always returned; the cache-related fields are zeroed because the
/// parse stage (and its cache) is skipped entirely.
pub fn analyze_with_parse_result(
    config: &ResolvedConfig,
    modules: &[extract::ModuleInfo],
) -> Result<AnalysisOutput, FallowError> {
    let _span = tracing::info_span!("fallow_analyze_with_parse_result").entered();
    let pipeline_start = Instant::now();

    // Progress spinners only when not quiet, stderr is a TTY, and the output
    // format is human-readable (mirrors the gating in `analyze_full`).
    let show_progress = !config.quiet
        && std::io::IsTerminal::is_terminal(&std::io::stderr())
        && matches!(
            config.output,
            fallow_config::OutputFormat::Human
                | fallow_config::OutputFormat::Compact
                | fallow_config::OutputFormat::Markdown
        );
    let progress = progress::AnalysisProgress::new(show_progress);

    if !config.root.join("node_modules").is_dir() {
        tracing::warn!(
            "node_modules directory not found. Run `npm install` / `pnpm install` first for accurate results."
        );
    }

    // Discover workspaces
    let t = Instant::now();
    let workspaces_vec = discover_workspaces(&config.root);
    let workspaces_ms = t.elapsed().as_secs_f64() * 1000.0;
    if !workspaces_vec.is_empty() {
        tracing::info!(count = workspaces_vec.len(), "workspaces discovered");
    }

    // Stage 1: Discover files (cheap — needed for file registry and resolution)
    let t = Instant::now();
    let pb = progress.stage_spinner("Discovering files...");
    let discovered_files = discover::discover_files(config);
    let discover_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    let project = project::ProjectState::new(discovered_files, workspaces_vec);
    let files = project.files();
    let workspaces = project.workspaces();

    // Stage 1.5: Run plugin system
    let t = Instant::now();
    let pb = progress.stage_spinner("Detecting plugins...");
    let mut plugin_result = run_plugins(config, files, workspaces);
    let plugins_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Stage 1.6: Analyze package.json scripts
    let t = Instant::now();
    let pkg_path = config.root.join("package.json");
    if let Ok(pkg) = PackageJson::load(&pkg_path)
        && let Some(ref pkg_scripts) = pkg.scripts
    {
        // In production mode only start/build-type scripts are considered.
        let scripts_to_analyze = if config.production {
            scripts::filter_production_scripts(pkg_scripts)
        } else {
            pkg_scripts.clone()
        };
        let script_analysis = scripts::analyze_scripts(&scripts_to_analyze, &config.root);
        plugin_result.script_used_packages = script_analysis.used_packages;

        // Config files referenced from scripts become entry-point patterns.
        for config_file in &script_analysis.config_files {
            plugin_result
                .entry_patterns
                .push((config_file.clone(), "scripts".to_string()));
        }
    }
    for ws in workspaces {
        let ws_pkg_path = ws.root.join("package.json");
        if let Ok(ws_pkg) = PackageJson::load(&ws_pkg_path)
            && let Some(ref ws_scripts) = ws_pkg.scripts
        {
            let scripts_to_analyze = if config.production {
                scripts::filter_production_scripts(ws_scripts)
            } else {
                ws_scripts.clone()
            };
            let ws_analysis = scripts::analyze_scripts(&scripts_to_analyze, &ws.root);
            plugin_result
                .script_used_packages
                .extend(ws_analysis.used_packages);

            // Workspace-relative config paths need the workspace prefix to be
            // matchable from the monorepo root.
            let ws_prefix = ws
                .root
                .strip_prefix(&config.root)
                .unwrap_or(&ws.root)
                .to_string_lossy();
            for config_file in &ws_analysis.config_files {
                plugin_result
                    .entry_patterns
                    .push((format!("{ws_prefix}/{config_file}"), "scripts".to_string()));
            }
        }
    }

    // Stage 1.7: Analyze CI config files for binary invocations
    let ci_packages = scripts::ci::analyze_ci_files(&config.root);
    plugin_result.script_used_packages.extend(ci_packages);

    let scripts_ms = t.elapsed().as_secs_f64() * 1000.0;

    // Stage 2: SKIPPED — using pre-parsed modules from caller

    // Stage 3: Discover entry points (static + workspace + plugin + infrastructure)
    let t = Instant::now();
    let mut entry_points = discover::discover_entry_points(config, files);
    let ws_entries: Vec<_> = workspaces
        .par_iter()
        .flat_map(|ws| discover::discover_workspace_entry_points(&ws.root, config, files))
        .collect();
    entry_points.extend(ws_entries);
    let plugin_entries = discover::discover_plugin_entry_points(&plugin_result, config, files);
    entry_points.extend(plugin_entries);
    let infra_entries = discover::discover_infrastructure_entry_points(&config.root);
    entry_points.extend(infra_entries);
    let entry_points_ms = t.elapsed().as_secs_f64() * 1000.0;

    // Stage 4: Resolve imports to file IDs
    let t = Instant::now();
    let pb = progress.stage_spinner("Resolving imports...");
    let resolved = resolve::resolve_all_imports(
        modules,
        files,
        workspaces,
        &plugin_result.active_plugins,
        &plugin_result.path_aliases,
        &config.root,
    );
    let resolve_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Stage 5: Build module graph
    let t = Instant::now();
    let pb = progress.stage_spinner("Building module graph...");
    let graph = graph::ModuleGraph::build(&resolved, &entry_points, files);
    let graph_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Stage 6: Analyze for dead code
    let t = Instant::now();
    let pb = progress.stage_spinner("Analyzing...");
    let result = analyze::find_dead_code_full(
        &graph,
        config,
        &resolved,
        Some(&plugin_result),
        workspaces,
        modules,
        false,
    );
    let analyze_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();
    progress.finish();

    let total_ms = pipeline_start.elapsed().as_secs_f64() * 1000.0;

    tracing::debug!(
        "\n┌─ Pipeline Profile (reuse) ─────────────────────\n\
         │  discover files:   {:>8.1}ms  ({} files)\n\
         │  workspaces:       {:>8.1}ms\n\
         │  plugins:          {:>8.1}ms\n\
         │  script analysis:  {:>8.1}ms\n\
         │  parse/extract:    SKIPPED (reused {} modules)\n\
         │  entry points:     {:>8.1}ms  ({} entries)\n\
         │  resolve imports:  {:>8.1}ms\n\
         │  build graph:      {:>8.1}ms\n\
         │  analyze:          {:>8.1}ms\n\
         │  ────────────────────────────────────────────\n\
         │  TOTAL:            {:>8.1}ms\n\
         └─────────────────────────────────────────────────",
        discover_ms,
        files.len(),
        workspaces_ms,
        plugins_ms,
        scripts_ms,
        modules.len(),
        entry_points_ms,
        entry_points.len(),
        resolve_ms,
        graph_ms,
        analyze_ms,
        total_ms,
    );

    let timings = Some(PipelineTimings {
        discover_files_ms: discover_ms,
        file_count: files.len(),
        workspaces_ms,
        workspace_count: workspaces.len(),
        plugins_ms,
        script_analysis_ms: scripts_ms,
        parse_extract_ms: 0.0, // Skipped — modules were reused
        module_count: modules.len(),
        cache_hits: 0,
        cache_misses: 0,
        cache_update_ms: 0.0,
        entry_points_ms,
        entry_point_count: entry_points.len(),
        resolve_imports_ms: resolve_ms,
        build_graph_ms: graph_ms,
        analyze_ms,
        total_ms,
    });

    Ok(AnalysisOutput {
        results: result,
        timings,
        graph: Some(graph),
    })
}
310
/// Core analysis pipeline shared by all public entry points.
///
/// * `retain` — when true, keep per-stage timings and the module graph in the
///   returned [`AnalysisOutput`] (otherwise both are `None`).
/// * `collect_usages` — when true, collect export usage data during dead code
///   analysis (used by the LSP Code Lens path via `analyze_with_usages`).
#[expect(clippy::unnecessary_wraps)] // Result kept for future error handling
fn analyze_full(
    config: &ResolvedConfig,
    retain: bool,
    collect_usages: bool,
) -> Result<AnalysisOutput, FallowError> {
    let _span = tracing::info_span!("fallow_analyze").entered();
    let pipeline_start = Instant::now();

    // Progress bars: enabled when not quiet, stderr is a terminal, and output is human-readable.
    // Structured formats (JSON, SARIF) suppress spinners even on TTY — users piping structured
    // output don't expect progress noise on stderr.
    let show_progress = !config.quiet
        && std::io::IsTerminal::is_terminal(&std::io::stderr())
        && matches!(
            config.output,
            fallow_config::OutputFormat::Human
                | fallow_config::OutputFormat::Compact
                | fallow_config::OutputFormat::Markdown
        );
    let progress = progress::AnalysisProgress::new(show_progress);

    // Warn if node_modules is missing — resolution will be severely degraded
    if !config.root.join("node_modules").is_dir() {
        tracing::warn!(
            "node_modules directory not found. Run `npm install` / `pnpm install` first for accurate results."
        );
    }

    // Discover workspaces if in a monorepo
    let t = Instant::now();
    let workspaces_vec = discover_workspaces(&config.root);
    let workspaces_ms = t.elapsed().as_secs_f64() * 1000.0;
    if !workspaces_vec.is_empty() {
        tracing::info!(count = workspaces_vec.len(), "workspaces discovered");
    }

    // Stage 1: Discover all source files
    let t = Instant::now();
    let pb = progress.stage_spinner("Discovering files...");
    let discovered_files = discover::discover_files(config);
    let discover_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Build ProjectState: owns the file registry with stable FileIds and workspace metadata.
    // This is the foundation for cross-workspace resolution and future incremental analysis.
    let project = project::ProjectState::new(discovered_files, workspaces_vec);
    let files = project.files();
    let workspaces = project.workspaces();

    // Stage 1.5: Run plugin system — parse config files, discover dynamic entries
    let t = Instant::now();
    let pb = progress.stage_spinner("Detecting plugins...");
    let mut plugin_result = run_plugins(config, files, workspaces);
    let plugins_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Stage 1.6: Analyze package.json scripts for binary usage and config file refs
    let t = Instant::now();
    let pkg_path = config.root.join("package.json");
    if let Ok(pkg) = PackageJson::load(&pkg_path)
        && let Some(ref pkg_scripts) = pkg.scripts
    {
        // In production mode, only analyze start/build scripts
        let scripts_to_analyze = if config.production {
            scripts::filter_production_scripts(pkg_scripts)
        } else {
            pkg_scripts.clone()
        };
        let script_analysis = scripts::analyze_scripts(&scripts_to_analyze, &config.root);
        plugin_result.script_used_packages = script_analysis.used_packages;

        // Add config files from scripts as entry points (resolved later)
        for config_file in &script_analysis.config_files {
            plugin_result
                .entry_patterns
                .push((config_file.clone(), "scripts".to_string()));
        }
    }
    // Also analyze workspace package.json scripts
    for ws in workspaces {
        let ws_pkg_path = ws.root.join("package.json");
        if let Ok(ws_pkg) = PackageJson::load(&ws_pkg_path)
            && let Some(ref ws_scripts) = ws_pkg.scripts
        {
            let scripts_to_analyze = if config.production {
                scripts::filter_production_scripts(ws_scripts)
            } else {
                ws_scripts.clone()
            };
            let ws_analysis = scripts::analyze_scripts(&scripts_to_analyze, &ws.root);
            plugin_result
                .script_used_packages
                .extend(ws_analysis.used_packages);

            // Workspace-relative config paths need the workspace prefix to be
            // matchable from the monorepo root.
            let ws_prefix = ws
                .root
                .strip_prefix(&config.root)
                .unwrap_or(&ws.root)
                .to_string_lossy();
            for config_file in &ws_analysis.config_files {
                plugin_result
                    .entry_patterns
                    .push((format!("{ws_prefix}/{config_file}"), "scripts".to_string()));
            }
        }
    }

    // Stage 1.7: Analyze CI config files for binary invocations
    let ci_packages = scripts::ci::analyze_ci_files(&config.root);
    plugin_result.script_used_packages.extend(ci_packages);

    let scripts_ms = t.elapsed().as_secs_f64() * 1000.0;

    // Stage 2: Parse all files in parallel and extract imports/exports
    let t = Instant::now();
    let pb = progress.stage_spinner(&format!("Parsing {} files...", files.len()));
    let mut cache_store = if config.no_cache {
        None
    } else {
        cache::CacheStore::load(&config.cache_dir)
    };

    let parse_result = extract::parse_all_files(files, cache_store.as_ref());
    let modules = parse_result.modules;
    let cache_hits = parse_result.cache_hits;
    let cache_misses = parse_result.cache_misses;
    let parse_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Update cache with freshly parsed modules and refresh stale mtime/size entries.
    let t = Instant::now();
    if !config.no_cache {
        let store = cache_store.get_or_insert_with(cache::CacheStore::new);
        update_cache(store, &modules, files);
        // A failed cache save is non-fatal — results are already computed.
        if let Err(e) = store.save(&config.cache_dir) {
            tracing::warn!("Failed to save cache: {e}");
        }
    }
    let cache_ms = t.elapsed().as_secs_f64() * 1000.0;

    // Stage 3: Discover entry points (static patterns + plugin-discovered patterns)
    let t = Instant::now();
    let mut entry_points = discover::discover_entry_points(config, files);
    let ws_entries: Vec<_> = workspaces
        .par_iter()
        .flat_map(|ws| discover::discover_workspace_entry_points(&ws.root, config, files))
        .collect();
    entry_points.extend(ws_entries);
    let plugin_entries = discover::discover_plugin_entry_points(&plugin_result, config, files);
    entry_points.extend(plugin_entries);
    let infra_entries = discover::discover_infrastructure_entry_points(&config.root);
    entry_points.extend(infra_entries);
    let entry_points_ms = t.elapsed().as_secs_f64() * 1000.0;

    // Stage 4: Resolve imports to file IDs
    let t = Instant::now();
    let pb = progress.stage_spinner("Resolving imports...");
    let resolved = resolve::resolve_all_imports(
        &modules,
        files,
        workspaces,
        &plugin_result.active_plugins,
        &plugin_result.path_aliases,
        &config.root,
    );
    let resolve_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Stage 5: Build module graph
    let t = Instant::now();
    let pb = progress.stage_spinner("Building module graph...");
    let graph = graph::ModuleGraph::build(&resolved, &entry_points, files);
    let graph_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Stage 6: Analyze for dead code (with plugin context and workspace info)
    let t = Instant::now();
    let pb = progress.stage_spinner("Analyzing...");
    let result = analyze::find_dead_code_full(
        &graph,
        config,
        &resolved,
        Some(&plugin_result),
        workspaces,
        &modules,
        collect_usages,
    );
    let analyze_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();
    progress.finish();

    let total_ms = pipeline_start.elapsed().as_secs_f64() * 1000.0;

    let cache_summary = if cache_hits > 0 {
        format!(" ({cache_hits} cached, {cache_misses} parsed)")
    } else {
        String::new()
    };

    tracing::debug!(
        "\n┌─ Pipeline Profile ─────────────────────────────\n\
         │  discover files:   {:>8.1}ms  ({} files)\n\
         │  workspaces:       {:>8.1}ms\n\
         │  plugins:          {:>8.1}ms\n\
         │  script analysis:  {:>8.1}ms\n\
         │  parse/extract:    {:>8.1}ms  ({} modules{})\n\
         │  cache update:     {:>8.1}ms\n\
         │  entry points:     {:>8.1}ms  ({} entries)\n\
         │  resolve imports:  {:>8.1}ms\n\
         │  build graph:      {:>8.1}ms\n\
         │  analyze:          {:>8.1}ms\n\
         │  ────────────────────────────────────────────\n\
         │  TOTAL:            {:>8.1}ms\n\
         └─────────────────────────────────────────────────",
        discover_ms,
        files.len(),
        workspaces_ms,
        plugins_ms,
        scripts_ms,
        parse_ms,
        modules.len(),
        cache_summary,
        cache_ms,
        entry_points_ms,
        entry_points.len(),
        resolve_ms,
        graph_ms,
        analyze_ms,
        total_ms,
    );

    let timings = if retain {
        Some(PipelineTimings {
            discover_files_ms: discover_ms,
            file_count: files.len(),
            workspaces_ms,
            workspace_count: workspaces.len(),
            plugins_ms,
            script_analysis_ms: scripts_ms,
            parse_extract_ms: parse_ms,
            module_count: modules.len(),
            cache_hits,
            cache_misses,
            cache_update_ms: cache_ms,
            entry_points_ms,
            entry_point_count: entry_points.len(),
            resolve_imports_ms: resolve_ms,
            build_graph_ms: graph_ms,
            analyze_ms,
            total_ms,
        })
    } else {
        None
    };

    Ok(AnalysisOutput {
        results: result,
        timings,
        graph: if retain { Some(graph) } else { None },
    })
}
573
/// Run plugins for root project and all workspace packages.
///
/// The root project gets a full plugin run (external plugins, inline config);
/// workspace packages use the faster `run_workspace_fast` path with config
/// matchers and relative file paths pre-computed once. Workspace results are
/// merged into the root result, with workspace-relative patterns prefixed so
/// they remain matchable from the monorepo root.
fn run_plugins(
    config: &ResolvedConfig,
    files: &[discover::DiscoveredFile],
    workspaces: &[fallow_config::WorkspaceInfo],
) -> plugins::AggregatedPluginResult {
    let registry = plugins::PluginRegistry::new(config.external_plugins.clone());
    let file_paths: Vec<std::path::PathBuf> = files.iter().map(|f| f.path.clone()).collect();

    // Run plugins for root project (full run with external plugins, inline config, etc.)
    let pkg_path = config.root.join("package.json");
    let mut result = PackageJson::load(&pkg_path).map_or_else(
        |_| plugins::AggregatedPluginResult::default(),
        |pkg| registry.run(&pkg, &config.root, &file_paths),
    );

    if workspaces.is_empty() {
        return result;
    }

    // Pre-compile config matchers and relative files once for all workspace runs.
    // This avoids re-compiling glob patterns and re-computing relative paths per workspace
    // (previously O(workspaces × plugins × files) glob compilations).
    let precompiled_matchers = registry.precompile_config_matchers();
    let relative_files: Vec<(&std::path::PathBuf, String)> = file_paths
        .iter()
        .map(|f| {
            let rel = f
                .strip_prefix(&config.root)
                .unwrap_or(f)
                .to_string_lossy()
                .into_owned();
            (f, rel)
        })
        .collect();

    // Run plugins for each workspace package in parallel, then merge results.
    let ws_results: Vec<_> = workspaces
        .par_iter()
        .filter_map(|ws| {
            // Workspaces without a loadable package.json are skipped entirely.
            let ws_pkg_path = ws.root.join("package.json");
            let ws_pkg = PackageJson::load(&ws_pkg_path).ok()?;
            let ws_result = registry.run_workspace_fast(
                &ws_pkg,
                &ws.root,
                &config.root,
                &precompiled_matchers,
                &relative_files,
            );
            // No active plugins means nothing to merge — drop early.
            if ws_result.active_plugins.is_empty() {
                return None;
            }
            let ws_prefix = ws
                .root
                .strip_prefix(&config.root)
                .unwrap_or(&ws.root)
                .to_string_lossy()
                .into_owned();
            Some((ws_result, ws_prefix))
        })
        .collect();

    // Merge workspace results sequentially (deterministic order via par_iter index stability)
    // Track seen names for O(1) dedup instead of O(n) Vec::contains
    let mut seen_plugins: rustc_hash::FxHashSet<String> =
        result.active_plugins.iter().cloned().collect();
    let mut seen_prefixes: rustc_hash::FxHashSet<String> =
        result.virtual_module_prefixes.iter().cloned().collect();
    for (ws_result, ws_prefix) in ws_results {
        // Prefix helper: workspace-relative patterns need the workspace prefix
        // to be matchable from the monorepo root. But patterns that are already
        // project-root-relative (e.g., from angular.json which uses absolute paths
        // like "apps/client/src/styles.css") should not be double-prefixed.
        let prefix_if_needed = |pat: &str| -> String {
            if pat.starts_with(ws_prefix.as_str()) || pat.starts_with('/') {
                pat.to_string()
            } else {
                format!("{ws_prefix}/{pat}")
            }
        };

        for (pat, pname) in &ws_result.entry_patterns {
            result
                .entry_patterns
                .push((prefix_if_needed(pat), pname.clone()));
        }
        for (pat, pname) in &ws_result.always_used {
            result
                .always_used
                .push((prefix_if_needed(pat), pname.clone()));
        }
        for (pat, pname) in &ws_result.discovered_always_used {
            result
                .discovered_always_used
                .push((prefix_if_needed(pat), pname.clone()));
        }
        for (file_pat, exports) in &ws_result.used_exports {
            result
                .used_exports
                .push((prefix_if_needed(file_pat), exports.clone()));
        }
        // Merge active plugin names (deduplicated via HashSet)
        for plugin_name in ws_result.active_plugins {
            if !seen_plugins.contains(&plugin_name) {
                seen_plugins.insert(plugin_name.clone());
                result.active_plugins.push(plugin_name);
            }
        }
        // These don't need prefixing (absolute paths / package names)
        result
            .referenced_dependencies
            .extend(ws_result.referenced_dependencies);
        result.setup_files.extend(ws_result.setup_files);
        result
            .tooling_dependencies
            .extend(ws_result.tooling_dependencies);
        // Virtual module prefixes (e.g., Docusaurus @theme/, @site/) are
        // package-name prefixes, not file paths — no workspace prefix needed.
        for prefix in ws_result.virtual_module_prefixes {
            if !seen_prefixes.contains(&prefix) {
                seen_prefixes.insert(prefix.clone());
                result.virtual_module_prefixes.push(prefix);
            }
        }
    }

    result
}
702
703/// Run analysis on a project directory (with export usages for LSP Code Lens).
704pub fn analyze_project(root: &Path) -> Result<AnalysisResults, FallowError> {
705    let config = default_config(root);
706    analyze_with_usages(&config)
707}
708
709/// Create a default config for a project root.
710pub(crate) fn default_config(root: &Path) -> ResolvedConfig {
711    let user_config = fallow_config::FallowConfig::find_and_load(root)
712        .ok()
713        .flatten();
714    match user_config {
715        Some((config, _path)) => config.resolve(
716            root.to_path_buf(),
717            fallow_config::OutputFormat::Human,
718            num_cpus(),
719            false,
720            true, // quiet: LSP/programmatic callers don't need progress bars
721        ),
722        None => fallow_config::FallowConfig::default().resolve(
723            root.to_path_buf(),
724            fallow_config::OutputFormat::Human,
725            num_cpus(),
726            false,
727            true,
728        ),
729    }
730}
731
/// Number of worker threads to use: available parallelism, or 4 if unknown.
fn num_cpus() -> usize {
    std::thread::available_parallelism().map_or(4, std::num::NonZeroUsize::get)
}