//! `fallow_core` — library root for the Fallow analysis pipeline.
//!
//! Wires together discovery, parsing, plugin detection, import resolution,
//! graph construction, and dead-code analysis.
1pub mod analyze;
2pub mod cache;
3pub mod churn;
4pub mod cross_reference;
5pub mod discover;
6pub mod duplicates;
7pub mod errors;
8pub mod extract;
9pub mod plugins;
10pub mod progress;
11pub mod results;
12pub mod scripts;
13pub mod suppress;
14pub mod trace;
15
16// Re-export from fallow-graph for backwards compatibility
17pub use fallow_graph::graph;
18pub use fallow_graph::project;
19pub use fallow_graph::resolve;
20
21use std::path::Path;
22use std::time::Instant;
23
24use errors::FallowError;
25use fallow_config::{PackageJson, ResolvedConfig, discover_workspaces};
26use rayon::prelude::*;
27use results::AnalysisResults;
28use trace::PipelineTimings;
29
/// Result of the full analysis pipeline, including optional performance timings.
pub struct AnalysisOutput {
    /// Dead-code findings and related analysis results.
    pub results: AnalysisResults,
    /// Per-stage wall-clock timings; populated only when tracing was requested
    /// (always populated by `analyze_with_parse_result`).
    pub timings: Option<PipelineTimings>,
    /// The built module graph; retained only when the caller asked for it.
    pub graph: Option<graph::ModuleGraph>,
}
36
37/// Update cache: write freshly parsed modules and refresh stale mtime/size entries.
38fn update_cache(
39    store: &mut cache::CacheStore,
40    modules: &[extract::ModuleInfo],
41    files: &[discover::DiscoveredFile],
42) {
43    for module in modules {
44        if let Some(file) = files.get(module.file_id.0 as usize) {
45            let (mt, sz) = file_mtime_and_size(&file.path);
46            // If content hash matches, just refresh mtime/size if stale (e.g. `touch`ed file)
47            if let Some(cached) = store.get_by_path_only(&file.path)
48                && cached.content_hash == module.content_hash
49            {
50                if cached.mtime_secs != mt || cached.file_size != sz {
51                    store.insert(&file.path, cache::module_to_cached(module, mt, sz));
52                }
53                continue;
54            }
55            store.insert(&file.path, cache::module_to_cached(module, mt, sz));
56        }
57    }
58    store.retain_paths(files);
59}
60
/// Extract mtime (seconds since epoch) and file size from a path.
///
/// Degrades gracefully: a missing file yields `(0, 0)`, and an unavailable or
/// pre-epoch modification time yields `0` for the mtime component only.
fn file_mtime_and_size(path: &std::path::Path) -> (u64, u64) {
    let Ok(meta) = std::fs::metadata(path) else {
        return (0, 0);
    };
    let mtime = meta
        .modified()
        .ok()
        .and_then(|t| t.duration_since(std::time::SystemTime::UNIX_EPOCH).ok())
        .map(|d| d.as_secs())
        .unwrap_or(0);
    (mtime, meta.len())
}
74
75/// Run the full analysis pipeline.
76pub fn analyze(config: &ResolvedConfig) -> Result<AnalysisResults, FallowError> {
77    let output = analyze_full(config, false, false)?;
78    Ok(output.results)
79}
80
81/// Run the full analysis pipeline with export usage collection (for LSP Code Lens).
82pub fn analyze_with_usages(config: &ResolvedConfig) -> Result<AnalysisResults, FallowError> {
83    let output = analyze_full(config, false, true)?;
84    Ok(output.results)
85}
86
/// Run the full analysis pipeline with optional performance timings and graph retention.
///
/// Unlike [`analyze`], the returned [`AnalysisOutput`] carries per-stage
/// [`PipelineTimings`] and the built module graph.
///
/// # Errors
///
/// Propagates any [`FallowError`] from the underlying pipeline.
pub fn analyze_with_trace(config: &ResolvedConfig) -> Result<AnalysisOutput, FallowError> {
    analyze_full(config, true, false)
}
91
/// Run the analysis pipeline using pre-parsed modules, skipping the parsing stage.
///
/// This avoids re-parsing files when the caller already has a `ParseResult` (e.g., from
/// `fallow_core::extract::parse_all_files`). Discovery, plugins, scripts, entry points,
/// import resolution, graph construction, and dead code detection still run normally.
/// The graph is always retained (needed for file scores).
///
/// # Errors
///
/// No error path exists in the current implementation; the `Result` mirrors
/// the signature of the other pipeline entry points.
pub fn analyze_with_parse_result(
    config: &ResolvedConfig,
    modules: &[extract::ModuleInfo],
) -> Result<AnalysisOutput, FallowError> {
    let _span = tracing::info_span!("fallow_analyze_with_parse_result").entered();
    let pipeline_start = Instant::now();

    // Progress bars only when not quiet, stderr is a TTY, and output is a
    // human-readable format — structured formats (JSON, SARIF) stay silent.
    let show_progress = !config.quiet
        && std::io::IsTerminal::is_terminal(&std::io::stderr())
        && matches!(
            config.output,
            fallow_config::OutputFormat::Human
                | fallow_config::OutputFormat::Compact
                | fallow_config::OutputFormat::Markdown
        );
    let progress = progress::AnalysisProgress::new(show_progress);

    // Missing node_modules degrades import resolution accuracy — warn up front.
    if !config.root.join("node_modules").is_dir() {
        tracing::warn!(
            "node_modules directory not found. Run `npm install` / `pnpm install` first for accurate results."
        );
    }

    // Discover workspaces
    let t = Instant::now();
    let workspaces_vec = discover_workspaces(&config.root);
    let workspaces_ms = t.elapsed().as_secs_f64() * 1000.0;
    if !workspaces_vec.is_empty() {
        tracing::info!(count = workspaces_vec.len(), "workspaces discovered");
    }

    // Stage 1: Discover files (cheap — needed for file registry and resolution)
    let t = Instant::now();
    let pb = progress.stage_spinner("Discovering files...");
    let discovered_files = discover::discover_files(config);
    let discover_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // ProjectState owns the file registry (stable FileIds) and workspace metadata.
    let project = project::ProjectState::new(discovered_files, workspaces_vec);
    let files = project.files();
    let workspaces = project.workspaces();

    // Stage 1.5: Run plugin system
    let t = Instant::now();
    let pb = progress.stage_spinner("Detecting plugins...");
    let mut plugin_result = run_plugins(config, files, workspaces);
    let plugins_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Stage 1.6: Analyze package.json scripts (root first, then each workspace)
    let t = Instant::now();
    let pkg_path = config.root.join("package.json");
    if let Ok(pkg) = PackageJson::load(&pkg_path)
        && let Some(ref pkg_scripts) = pkg.scripts
    {
        // Production mode narrows analysis to production-relevant scripts.
        let scripts_to_analyze = if config.production {
            scripts::filter_production_scripts(pkg_scripts)
        } else {
            pkg_scripts.clone()
        };
        let script_analysis = scripts::analyze_scripts(&scripts_to_analyze, &config.root);
        plugin_result.script_used_packages = script_analysis.used_packages;

        // Config files referenced by scripts become entry patterns (resolved later).
        for config_file in &script_analysis.config_files {
            plugin_result
                .entry_patterns
                .push((config_file.clone(), "scripts".to_string()));
        }
    }
    for ws in workspaces {
        let ws_pkg_path = ws.root.join("package.json");
        if let Ok(ws_pkg) = PackageJson::load(&ws_pkg_path)
            && let Some(ref ws_scripts) = ws_pkg.scripts
        {
            let scripts_to_analyze = if config.production {
                scripts::filter_production_scripts(ws_scripts)
            } else {
                ws_scripts.clone()
            };
            let ws_analysis = scripts::analyze_scripts(&scripts_to_analyze, &ws.root);
            plugin_result
                .script_used_packages
                .extend(ws_analysis.used_packages);

            // Workspace-relative config paths must be prefixed so they match
            // from the monorepo root.
            let ws_prefix = ws
                .root
                .strip_prefix(&config.root)
                .unwrap_or(&ws.root)
                .to_string_lossy();
            for config_file in &ws_analysis.config_files {
                plugin_result
                    .entry_patterns
                    .push((format!("{ws_prefix}/{config_file}"), "scripts".to_string()));
            }
        }
    }
    let scripts_ms = t.elapsed().as_secs_f64() * 1000.0;

    // Stage 2: SKIPPED — using pre-parsed modules from caller

    // Stage 3: Discover entry points (static + workspace + plugin + infrastructure)
    let t = Instant::now();
    let mut entry_points = discover::discover_entry_points(config, files);
    let ws_entries: Vec<_> = workspaces
        .par_iter()
        .flat_map(|ws| discover::discover_workspace_entry_points(&ws.root, config, files))
        .collect();
    entry_points.extend(ws_entries);
    let plugin_entries = discover::discover_plugin_entry_points(&plugin_result, config, files);
    entry_points.extend(plugin_entries);
    let infra_entries = discover::discover_infrastructure_entry_points(&config.root);
    entry_points.extend(infra_entries);
    let entry_points_ms = t.elapsed().as_secs_f64() * 1000.0;

    // Stage 4: Resolve imports to file IDs
    let t = Instant::now();
    let pb = progress.stage_spinner("Resolving imports...");
    let resolved = resolve::resolve_all_imports(
        modules,
        files,
        workspaces,
        &plugin_result.active_plugins,
        &plugin_result.path_aliases,
        &config.root,
    );
    let resolve_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Stage 5: Build module graph
    let t = Instant::now();
    let pb = progress.stage_spinner("Building module graph...");
    let graph = graph::ModuleGraph::build(&resolved, &entry_points, files);
    let graph_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Stage 6: Analyze for dead code
    let t = Instant::now();
    let pb = progress.stage_spinner("Analyzing...");
    let result = analyze::find_dead_code_full(
        &graph,
        config,
        &resolved,
        Some(&plugin_result),
        workspaces,
        modules,
        false, // collect_usages: not needed in this path
    );
    let analyze_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();
    progress.finish();

    let total_ms = pipeline_start.elapsed().as_secs_f64() * 1000.0;

    tracing::debug!(
        "\n┌─ Pipeline Profile (reuse) ─────────────────────\n\
         │  discover files:   {:>8.1}ms  ({} files)\n\
         │  workspaces:       {:>8.1}ms\n\
         │  plugins:          {:>8.1}ms\n\
         │  script analysis:  {:>8.1}ms\n\
         │  parse/extract:    SKIPPED (reused {} modules)\n\
         │  entry points:     {:>8.1}ms  ({} entries)\n\
         │  resolve imports:  {:>8.1}ms\n\
         │  build graph:      {:>8.1}ms\n\
         │  analyze:          {:>8.1}ms\n\
         │  ────────────────────────────────────────────\n\
         │  TOTAL:            {:>8.1}ms\n\
         └─────────────────────────────────────────────────",
        discover_ms,
        files.len(),
        workspaces_ms,
        plugins_ms,
        scripts_ms,
        modules.len(),
        entry_points_ms,
        entry_points.len(),
        resolve_ms,
        graph_ms,
        analyze_ms,
        total_ms,
    );

    // Timings are always populated here (unlike analyze_full's `retain` gate);
    // parse/cache fields are zeroed because parsing was skipped.
    let timings = Some(PipelineTimings {
        discover_files_ms: discover_ms,
        file_count: files.len(),
        workspaces_ms,
        workspace_count: workspaces.len(),
        plugins_ms,
        script_analysis_ms: scripts_ms,
        parse_extract_ms: 0.0, // Skipped — modules were reused
        module_count: modules.len(),
        cache_hits: 0,
        cache_misses: 0,
        cache_update_ms: 0.0,
        entry_points_ms,
        entry_point_count: entry_points.len(),
        resolve_imports_ms: resolve_ms,
        build_graph_ms: graph_ms,
        analyze_ms,
        total_ms,
    });

    Ok(AnalysisOutput {
        results: result,
        timings,
        graph: Some(graph),
    })
}
305
/// Core analysis pipeline backing the public `analyze*` entry points.
///
/// Stages: workspace discovery → file discovery → plugin detection →
/// package.json script analysis → parse/extract (with on-disk cache) →
/// entry-point discovery → import resolution → graph construction →
/// dead-code analysis.
///
/// * `retain` — when `true`, the returned [`AnalysisOutput`] includes the
///   per-stage [`PipelineTimings`] and the built module graph.
/// * `collect_usages` — forwarded to the dead-code stage to gather export
///   usage data (used by the LSP Code Lens).
#[expect(clippy::unnecessary_wraps)] // Result kept for future error handling
fn analyze_full(
    config: &ResolvedConfig,
    retain: bool,
    collect_usages: bool,
) -> Result<AnalysisOutput, FallowError> {
    let _span = tracing::info_span!("fallow_analyze").entered();
    let pipeline_start = Instant::now();

    // Progress bars: enabled when not quiet, stderr is a terminal, and output is human-readable.
    // Structured formats (JSON, SARIF) suppress spinners even on TTY — users piping structured
    // output don't expect progress noise on stderr.
    let show_progress = !config.quiet
        && std::io::IsTerminal::is_terminal(&std::io::stderr())
        && matches!(
            config.output,
            fallow_config::OutputFormat::Human
                | fallow_config::OutputFormat::Compact
                | fallow_config::OutputFormat::Markdown
        );
    let progress = progress::AnalysisProgress::new(show_progress);

    // Warn if node_modules is missing — resolution will be severely degraded
    if !config.root.join("node_modules").is_dir() {
        tracing::warn!(
            "node_modules directory not found. Run `npm install` / `pnpm install` first for accurate results."
        );
    }

    // Discover workspaces if in a monorepo
    let t = Instant::now();
    let workspaces_vec = discover_workspaces(&config.root);
    let workspaces_ms = t.elapsed().as_secs_f64() * 1000.0;
    if !workspaces_vec.is_empty() {
        tracing::info!(count = workspaces_vec.len(), "workspaces discovered");
    }

    // Stage 1: Discover all source files
    let t = Instant::now();
    let pb = progress.stage_spinner("Discovering files...");
    let discovered_files = discover::discover_files(config);
    let discover_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Build ProjectState: owns the file registry with stable FileIds and workspace metadata.
    // This is the foundation for cross-workspace resolution and future incremental analysis.
    let project = project::ProjectState::new(discovered_files, workspaces_vec);
    let files = project.files();
    let workspaces = project.workspaces();

    // Stage 1.5: Run plugin system — parse config files, discover dynamic entries
    let t = Instant::now();
    let pb = progress.stage_spinner("Detecting plugins...");
    let mut plugin_result = run_plugins(config, files, workspaces);
    let plugins_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Stage 1.6: Analyze package.json scripts for binary usage and config file refs
    let t = Instant::now();
    let pkg_path = config.root.join("package.json");
    if let Ok(pkg) = PackageJson::load(&pkg_path)
        && let Some(ref pkg_scripts) = pkg.scripts
    {
        // In production mode, only analyze start/build scripts
        let scripts_to_analyze = if config.production {
            scripts::filter_production_scripts(pkg_scripts)
        } else {
            pkg_scripts.clone()
        };
        let script_analysis = scripts::analyze_scripts(&scripts_to_analyze, &config.root);
        plugin_result.script_used_packages = script_analysis.used_packages;

        // Add config files from scripts as entry points (resolved later)
        for config_file in &script_analysis.config_files {
            plugin_result
                .entry_patterns
                .push((config_file.clone(), "scripts".to_string()));
        }
    }
    // Also analyze workspace package.json scripts
    for ws in workspaces {
        let ws_pkg_path = ws.root.join("package.json");
        if let Ok(ws_pkg) = PackageJson::load(&ws_pkg_path)
            && let Some(ref ws_scripts) = ws_pkg.scripts
        {
            let scripts_to_analyze = if config.production {
                scripts::filter_production_scripts(ws_scripts)
            } else {
                ws_scripts.clone()
            };
            let ws_analysis = scripts::analyze_scripts(&scripts_to_analyze, &ws.root);
            plugin_result
                .script_used_packages
                .extend(ws_analysis.used_packages);

            // Workspace-relative config paths need the workspace prefix to
            // match from the monorepo root.
            let ws_prefix = ws
                .root
                .strip_prefix(&config.root)
                .unwrap_or(&ws.root)
                .to_string_lossy();
            for config_file in &ws_analysis.config_files {
                plugin_result
                    .entry_patterns
                    .push((format!("{ws_prefix}/{config_file}"), "scripts".to_string()));
            }
        }
    }
    let scripts_ms = t.elapsed().as_secs_f64() * 1000.0;

    // Stage 2: Parse all files in parallel and extract imports/exports
    let t = Instant::now();
    let pb = progress.stage_spinner(&format!("Parsing {} files...", files.len()));
    let mut cache_store = if config.no_cache {
        None
    } else {
        cache::CacheStore::load(&config.cache_dir)
    };

    let parse_result = extract::parse_all_files(files, cache_store.as_ref());
    let modules = parse_result.modules;
    let cache_hits = parse_result.cache_hits;
    let cache_misses = parse_result.cache_misses;
    let parse_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Update cache with freshly parsed modules and refresh stale mtime/size entries.
    // A failed save is non-fatal: the next run just re-parses.
    let t = Instant::now();
    if !config.no_cache {
        let store = cache_store.get_or_insert_with(cache::CacheStore::new);
        update_cache(store, &modules, files);
        if let Err(e) = store.save(&config.cache_dir) {
            tracing::warn!("Failed to save cache: {e}");
        }
    }
    let cache_ms = t.elapsed().as_secs_f64() * 1000.0;

    // Stage 3: Discover entry points (static patterns + plugin-discovered patterns)
    let t = Instant::now();
    let mut entry_points = discover::discover_entry_points(config, files);
    let ws_entries: Vec<_> = workspaces
        .par_iter()
        .flat_map(|ws| discover::discover_workspace_entry_points(&ws.root, config, files))
        .collect();
    entry_points.extend(ws_entries);
    let plugin_entries = discover::discover_plugin_entry_points(&plugin_result, config, files);
    entry_points.extend(plugin_entries);
    let infra_entries = discover::discover_infrastructure_entry_points(&config.root);
    entry_points.extend(infra_entries);
    let entry_points_ms = t.elapsed().as_secs_f64() * 1000.0;

    // Stage 4: Resolve imports to file IDs
    let t = Instant::now();
    let pb = progress.stage_spinner("Resolving imports...");
    let resolved = resolve::resolve_all_imports(
        &modules,
        files,
        workspaces,
        &plugin_result.active_plugins,
        &plugin_result.path_aliases,
        &config.root,
    );
    let resolve_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Stage 5: Build module graph
    let t = Instant::now();
    let pb = progress.stage_spinner("Building module graph...");
    let graph = graph::ModuleGraph::build(&resolved, &entry_points, files);
    let graph_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Stage 6: Analyze for dead code (with plugin context and workspace info)
    let t = Instant::now();
    let pb = progress.stage_spinner("Analyzing...");
    let result = analyze::find_dead_code_full(
        &graph,
        config,
        &resolved,
        Some(&plugin_result),
        workspaces,
        &modules,
        collect_usages,
    );
    let analyze_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();
    progress.finish();

    let total_ms = pipeline_start.elapsed().as_secs_f64() * 1000.0;

    // Cache stats only shown when the cache actually contributed hits.
    let cache_summary = if cache_hits > 0 {
        format!(" ({cache_hits} cached, {cache_misses} parsed)")
    } else {
        String::new()
    };

    tracing::debug!(
        "\n┌─ Pipeline Profile ─────────────────────────────\n\
         │  discover files:   {:>8.1}ms  ({} files)\n\
         │  workspaces:       {:>8.1}ms\n\
         │  plugins:          {:>8.1}ms\n\
         │  script analysis:  {:>8.1}ms\n\
         │  parse/extract:    {:>8.1}ms  ({} modules{})\n\
         │  cache update:     {:>8.1}ms\n\
         │  entry points:     {:>8.1}ms  ({} entries)\n\
         │  resolve imports:  {:>8.1}ms\n\
         │  build graph:      {:>8.1}ms\n\
         │  analyze:          {:>8.1}ms\n\
         │  ────────────────────────────────────────────\n\
         │  TOTAL:            {:>8.1}ms\n\
         └─────────────────────────────────────────────────",
        discover_ms,
        files.len(),
        workspaces_ms,
        plugins_ms,
        scripts_ms,
        parse_ms,
        modules.len(),
        cache_summary,
        cache_ms,
        entry_points_ms,
        entry_points.len(),
        resolve_ms,
        graph_ms,
        analyze_ms,
        total_ms,
    );

    let timings = if retain {
        Some(PipelineTimings {
            discover_files_ms: discover_ms,
            file_count: files.len(),
            workspaces_ms,
            workspace_count: workspaces.len(),
            plugins_ms,
            script_analysis_ms: scripts_ms,
            parse_extract_ms: parse_ms,
            module_count: modules.len(),
            cache_hits,
            cache_misses,
            cache_update_ms: cache_ms,
            entry_points_ms,
            entry_point_count: entry_points.len(),
            resolve_imports_ms: resolve_ms,
            build_graph_ms: graph_ms,
            analyze_ms,
            total_ms,
        })
    } else {
        None
    };

    Ok(AnalysisOutput {
        results: result,
        timings,
        graph: if retain { Some(graph) } else { None },
    })
}
563
/// Run plugins for root project and all workspace packages.
///
/// The root project gets a full plugin run; workspace packages use a
/// precompiled fast path (`run_workspace_fast`) and run in parallel. Their
/// results are merged into the root result, with workspace-relative patterns
/// prefixed so they are matchable from the monorepo root.
fn run_plugins(
    config: &ResolvedConfig,
    files: &[discover::DiscoveredFile],
    workspaces: &[fallow_config::WorkspaceInfo],
) -> plugins::AggregatedPluginResult {
    let registry = plugins::PluginRegistry::new(config.external_plugins.clone());
    let file_paths: Vec<std::path::PathBuf> = files.iter().map(|f| f.path.clone()).collect();

    // Run plugins for root project (full run with external plugins, inline config, etc.)
    // A missing/unreadable root package.json degrades to an empty result.
    let pkg_path = config.root.join("package.json");
    let mut result = PackageJson::load(&pkg_path).map_or_else(
        |_| plugins::AggregatedPluginResult::default(),
        |pkg| registry.run(&pkg, &config.root, &file_paths),
    );

    if workspaces.is_empty() {
        return result;
    }

    // Pre-compile config matchers and relative files once for all workspace runs.
    // This avoids re-compiling glob patterns and re-computing relative paths per workspace
    // (previously O(workspaces × plugins × files) glob compilations).
    let precompiled_matchers = registry.precompile_config_matchers();
    let relative_files: Vec<(&std::path::PathBuf, String)> = file_paths
        .iter()
        .map(|f| {
            let rel = f
                .strip_prefix(&config.root)
                .unwrap_or(f)
                .to_string_lossy()
                .into_owned();
            (f, rel)
        })
        .collect();

    // Run plugins for each workspace package in parallel, then merge results.
    // Workspaces without a loadable package.json or with no active plugins are skipped.
    let ws_results: Vec<_> = workspaces
        .par_iter()
        .filter_map(|ws| {
            let ws_pkg_path = ws.root.join("package.json");
            let ws_pkg = PackageJson::load(&ws_pkg_path).ok()?;
            let ws_result = registry.run_workspace_fast(
                &ws_pkg,
                &ws.root,
                &config.root,
                &precompiled_matchers,
                &relative_files,
            );
            if ws_result.active_plugins.is_empty() {
                return None;
            }
            let ws_prefix = ws
                .root
                .strip_prefix(&config.root)
                .unwrap_or(&ws.root)
                .to_string_lossy()
                .into_owned();
            Some((ws_result, ws_prefix))
        })
        .collect();

    // Merge workspace results sequentially (deterministic order via par_iter index stability)
    // Track seen names for O(1) dedup instead of O(n) Vec::contains
    let mut seen_plugins: rustc_hash::FxHashSet<String> =
        result.active_plugins.iter().cloned().collect();
    let mut seen_prefixes: rustc_hash::FxHashSet<String> =
        result.virtual_module_prefixes.iter().cloned().collect();
    for (ws_result, ws_prefix) in ws_results {
        // Prefix helper: workspace-relative patterns need the workspace prefix
        // to be matchable from the monorepo root. But patterns that are already
        // project-root-relative (e.g., from angular.json which uses absolute paths
        // like "apps/client/src/styles.css") should not be double-prefixed.
        let prefix_if_needed = |pat: &str| -> String {
            if pat.starts_with(ws_prefix.as_str()) || pat.starts_with('/') {
                pat.to_string()
            } else {
                format!("{ws_prefix}/{pat}")
            }
        };

        for (pat, pname) in &ws_result.entry_patterns {
            result
                .entry_patterns
                .push((prefix_if_needed(pat), pname.clone()));
        }
        for (pat, pname) in &ws_result.always_used {
            result
                .always_used
                .push((prefix_if_needed(pat), pname.clone()));
        }
        for (pat, pname) in &ws_result.discovered_always_used {
            result
                .discovered_always_used
                .push((prefix_if_needed(pat), pname.clone()));
        }
        for (file_pat, exports) in &ws_result.used_exports {
            result
                .used_exports
                .push((prefix_if_needed(file_pat), exports.clone()));
        }
        // Merge active plugin names (deduplicated via HashSet)
        for plugin_name in ws_result.active_plugins {
            if !seen_plugins.contains(&plugin_name) {
                seen_plugins.insert(plugin_name.clone());
                result.active_plugins.push(plugin_name);
            }
        }
        // These don't need prefixing (absolute paths / package names)
        result
            .referenced_dependencies
            .extend(ws_result.referenced_dependencies);
        result.setup_files.extend(ws_result.setup_files);
        result
            .tooling_dependencies
            .extend(ws_result.tooling_dependencies);
        // Virtual module prefixes (e.g., Docusaurus @theme/, @site/) are
        // package-name prefixes, not file paths — no workspace prefix needed.
        for prefix in ws_result.virtual_module_prefixes {
            if !seen_prefixes.contains(&prefix) {
                seen_prefixes.insert(prefix.clone());
                result.virtual_module_prefixes.push(prefix);
            }
        }
    }

    result
}
692
693/// Run analysis on a project directory (with export usages for LSP Code Lens).
694pub fn analyze_project(root: &Path) -> Result<AnalysisResults, FallowError> {
695    let config = default_config(root);
696    analyze_with_usages(&config)
697}
698
699/// Create a default config for a project root.
700pub(crate) fn default_config(root: &Path) -> ResolvedConfig {
701    let user_config = fallow_config::FallowConfig::find_and_load(root)
702        .ok()
703        .flatten();
704    match user_config {
705        Some((config, _path)) => config.resolve(
706            root.to_path_buf(),
707            fallow_config::OutputFormat::Human,
708            num_cpus(),
709            false,
710            true, // quiet: LSP/programmatic callers don't need progress bars
711        ),
712        None => fallow_config::FallowConfig::default().resolve(
713            root.to_path_buf(),
714            fallow_config::OutputFormat::Human,
715            num_cpus(),
716            false,
717            true,
718        ),
719    }
720}
721
/// Number of logical CPUs to use for parallel work, defaulting to 4 when the
/// parallelism query fails (e.g., unsupported platform).
fn num_cpus() -> usize {
    match std::thread::available_parallelism() {
        Ok(n) => n.get(),
        Err(_) => 4,
    }
}