//! `fallow_core` — analysis pipeline entry points (crate root, `lib.rs`).
pub mod analyze;
pub mod cache;
pub mod churn;
pub mod cross_reference;
pub mod discover;
pub mod duplicates;
pub mod errors;
pub mod extract;
pub mod plugins;
pub mod progress;
pub mod results;
pub mod scripts;
pub mod suppress;
pub mod trace;

// Re-export from fallow-graph for backwards compatibility
pub use fallow_graph::graph;
pub use fallow_graph::project;
pub use fallow_graph::resolve;

use std::path::Path;
use std::time::Instant;

use errors::FallowError;
use fallow_config::{PackageJson, ResolvedConfig, discover_workspaces};
use rayon::prelude::*;
use results::AnalysisResults;
use trace::PipelineTimings;

/// Result of the full analysis pipeline, including optional performance timings.
pub struct AnalysisOutput {
    /// Dead-code findings and related analysis results.
    pub results: AnalysisResults,
    /// Per-stage pipeline timings; `None` unless the caller asked for tracing.
    pub timings: Option<PipelineTimings>,
    /// The module graph; `None` unless the caller asked for it to be retained.
    pub graph: Option<graph::ModuleGraph>,
}

37/// Update cache: write freshly parsed modules and refresh stale mtime/size entries.
38fn update_cache(
39    store: &mut cache::CacheStore,
40    modules: &[extract::ModuleInfo],
41    files: &[discover::DiscoveredFile],
42) {
43    for module in modules {
44        if let Some(file) = files.get(module.file_id.0 as usize) {
45            let (mt, sz) = file_mtime_and_size(&file.path);
46            // If content hash matches, just refresh mtime/size if stale (e.g. `touch`ed file)
47            if let Some(cached) = store.get_by_path_only(&file.path)
48                && cached.content_hash == module.content_hash
49            {
50                if cached.mtime_secs != mt || cached.file_size != sz {
51                    store.insert(&file.path, cache::module_to_cached(module, mt, sz));
52                }
53                continue;
54            }
55            store.insert(&file.path, cache::module_to_cached(module, mt, sz));
56        }
57    }
58    store.retain_paths(files);
59}
60
/// Extract mtime (seconds since epoch) and file size from a path.
///
/// Any I/O or clock error degrades gracefully to `0` for the affected value.
fn file_mtime_and_size(path: &std::path::Path) -> (u64, u64) {
    let Ok(meta) = std::fs::metadata(path) else {
        return (0, 0);
    };
    let mtime = meta
        .modified()
        .ok()
        .and_then(|t| t.duration_since(std::time::SystemTime::UNIX_EPOCH).ok())
        .map_or(0, |d| d.as_secs());
    (mtime, meta.len())
}

75/// Run the full analysis pipeline.
76///
77/// # Errors
78///
79/// Returns an error if file discovery, parsing, or analysis fails.
80pub fn analyze(config: &ResolvedConfig) -> Result<AnalysisResults, FallowError> {
81    let output = analyze_full(config, false, false)?;
82    Ok(output.results)
83}
84
85/// Run the full analysis pipeline with export usage collection (for LSP Code Lens).
86///
87/// # Errors
88///
89/// Returns an error if file discovery, parsing, or analysis fails.
90pub fn analyze_with_usages(config: &ResolvedConfig) -> Result<AnalysisResults, FallowError> {
91    let output = analyze_full(config, false, true)?;
92    Ok(output.results)
93}
94
95/// Run the full analysis pipeline with optional performance timings and graph retention.
96///
97/// # Errors
98///
99/// Returns an error if file discovery, parsing, or analysis fails.
100pub fn analyze_with_trace(config: &ResolvedConfig) -> Result<AnalysisOutput, FallowError> {
101    analyze_full(config, true, false)
102}
103
/// Run the analysis pipeline using pre-parsed modules, skipping the parsing stage.
///
/// This avoids re-parsing files when the caller already has a `ParseResult` (e.g., from
/// `fallow_core::extract::parse_all_files`). Discovery, plugins, scripts, entry points,
/// import resolution, graph construction, and dead code detection still run normally.
/// The graph is always retained (needed for file scores).
///
/// Unlike [`analyze_full`], timings are always populated and export usages are
/// never collected on this path.
///
/// # Errors
///
/// Returns an error if discovery, graph construction, or analysis fails.
pub fn analyze_with_parse_result(
    config: &ResolvedConfig,
    modules: &[extract::ModuleInfo],
) -> Result<AnalysisOutput, FallowError> {
    let _span = tracing::info_span!("fallow_analyze_with_parse_result").entered();
    let pipeline_start = Instant::now();

    // Spinners only when not quiet, stderr is a TTY, and output is human-readable
    // (mirrors the progress logic in `analyze_full`).
    let show_progress = !config.quiet
        && std::io::IsTerminal::is_terminal(&std::io::stderr())
        && matches!(
            config.output,
            fallow_config::OutputFormat::Human
                | fallow_config::OutputFormat::Compact
                | fallow_config::OutputFormat::Markdown
        );
    let progress = progress::AnalysisProgress::new(show_progress);

    // Missing node_modules severely degrades import resolution — warn early.
    if !config.root.join("node_modules").is_dir() {
        tracing::warn!(
            "node_modules directory not found. Run `npm install` / `pnpm install` first for accurate results."
        );
    }

    // Discover workspaces
    let t = Instant::now();
    let workspaces_vec = discover_workspaces(&config.root);
    let workspaces_ms = t.elapsed().as_secs_f64() * 1000.0;
    if !workspaces_vec.is_empty() {
        tracing::info!(count = workspaces_vec.len(), "workspaces discovered");
    }

    // Stage 1: Discover files (cheap — needed for file registry and resolution)
    let t = Instant::now();
    let pb = progress.stage_spinner("Discovering files...");
    let discovered_files = discover::discover_files(config);
    let discover_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    let project = project::ProjectState::new(discovered_files, workspaces_vec);
    let files = project.files();
    let workspaces = project.workspaces();

    // Stage 1.5: Run plugin system
    let t = Instant::now();
    let pb = progress.stage_spinner("Detecting plugins...");
    let mut plugin_result = run_plugins(config, files, workspaces);
    let plugins_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Stage 1.6: Analyze package.json scripts
    let t = Instant::now();
    analyze_all_scripts(config, workspaces, &mut plugin_result);
    let scripts_ms = t.elapsed().as_secs_f64() * 1000.0;

    // Stage 2: SKIPPED — using pre-parsed modules from caller

    // Stage 3: Discover entry points
    let t = Instant::now();
    let entry_points = discover_all_entry_points(config, files, workspaces, &plugin_result);
    let entry_points_ms = t.elapsed().as_secs_f64() * 1000.0;

    // Stage 4: Resolve imports to file IDs
    let t = Instant::now();
    let pb = progress.stage_spinner("Resolving imports...");
    let resolved = resolve::resolve_all_imports(
        modules,
        files,
        workspaces,
        &plugin_result.active_plugins,
        &plugin_result.path_aliases,
        &config.root,
    );
    let resolve_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Stage 5: Build module graph
    let t = Instant::now();
    let pb = progress.stage_spinner("Building module graph...");
    let graph = graph::ModuleGraph::build(&resolved, &entry_points, files);
    let graph_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Stage 6: Analyze for dead code
    // collect_usages is hard-wired to `false` here — export usage collection
    // is only needed by the LSP entry points.
    let t = Instant::now();
    let pb = progress.stage_spinner("Analyzing...");
    let result = analyze::find_dead_code_full(
        &graph,
        config,
        &resolved,
        Some(&plugin_result),
        workspaces,
        modules,
        false,
    );
    let analyze_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();
    progress.finish();

    let total_ms = pipeline_start.elapsed().as_secs_f64() * 1000.0;

    tracing::debug!(
        "\n┌─ Pipeline Profile (reuse) ─────────────────────\n\
         │  discover files:   {:>8.1}ms  ({} files)\n\
         │  workspaces:       {:>8.1}ms\n\
         │  plugins:          {:>8.1}ms\n\
         │  script analysis:  {:>8.1}ms\n\
         │  parse/extract:    SKIPPED (reused {} modules)\n\
         │  entry points:     {:>8.1}ms  ({} entries)\n\
         │  resolve imports:  {:>8.1}ms\n\
         │  build graph:      {:>8.1}ms\n\
         │  analyze:          {:>8.1}ms\n\
         │  ────────────────────────────────────────────\n\
         │  TOTAL:            {:>8.1}ms\n\
         └─────────────────────────────────────────────────",
        discover_ms,
        files.len(),
        workspaces_ms,
        plugins_ms,
        scripts_ms,
        modules.len(),
        entry_points_ms,
        entry_points.len(),
        resolve_ms,
        graph_ms,
        analyze_ms,
        total_ms,
    );

    // Timings are unconditionally returned on this path (no `retain` flag);
    // parse/cache fields are zeroed since parsing was skipped.
    let timings = Some(PipelineTimings {
        discover_files_ms: discover_ms,
        file_count: files.len(),
        workspaces_ms,
        workspace_count: workspaces.len(),
        plugins_ms,
        script_analysis_ms: scripts_ms,
        parse_extract_ms: 0.0, // Skipped — modules were reused
        module_count: modules.len(),
        cache_hits: 0,
        cache_misses: 0,
        cache_update_ms: 0.0,
        entry_points_ms,
        entry_point_count: entry_points.len(),
        resolve_imports_ms: resolve_ms,
        build_graph_ms: graph_ms,
        analyze_ms,
        total_ms,
    });

    Ok(AnalysisOutput {
        results: result,
        timings,
        graph: Some(graph),
    })
}

/// Shared pipeline implementation behind [`analyze`], [`analyze_with_usages`],
/// and [`analyze_with_trace`].
///
/// `retain` keeps per-stage timings and the module graph in the returned
/// [`AnalysisOutput`]; `collect_usages` is forwarded to dead-code analysis
/// (export usage collection for LSP Code Lens).
#[expect(
    clippy::unnecessary_wraps,
    reason = "Result kept for future error handling"
)]
fn analyze_full(
    config: &ResolvedConfig,
    retain: bool,
    collect_usages: bool,
) -> Result<AnalysisOutput, FallowError> {
    let _span = tracing::info_span!("fallow_analyze").entered();
    let pipeline_start = Instant::now();

    // Progress bars: enabled when not quiet, stderr is a terminal, and output is human-readable.
    // Structured formats (JSON, SARIF) suppress spinners even on TTY — users piping structured
    // output don't expect progress noise on stderr.
    let show_progress = !config.quiet
        && std::io::IsTerminal::is_terminal(&std::io::stderr())
        && matches!(
            config.output,
            fallow_config::OutputFormat::Human
                | fallow_config::OutputFormat::Compact
                | fallow_config::OutputFormat::Markdown
        );
    let progress = progress::AnalysisProgress::new(show_progress);

    // Warn if node_modules is missing — resolution will be severely degraded
    if !config.root.join("node_modules").is_dir() {
        tracing::warn!(
            "node_modules directory not found. Run `npm install` / `pnpm install` first for accurate results."
        );
    }

    // Discover workspaces if in a monorepo
    let t = Instant::now();
    let workspaces_vec = discover_workspaces(&config.root);
    let workspaces_ms = t.elapsed().as_secs_f64() * 1000.0;
    if !workspaces_vec.is_empty() {
        tracing::info!(count = workspaces_vec.len(), "workspaces discovered");
    }

    // Stage 1: Discover all source files
    let t = Instant::now();
    let pb = progress.stage_spinner("Discovering files...");
    let discovered_files = discover::discover_files(config);
    let discover_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Build ProjectState: owns the file registry with stable FileIds and workspace metadata.
    // This is the foundation for cross-workspace resolution and future incremental analysis.
    let project = project::ProjectState::new(discovered_files, workspaces_vec);
    let files = project.files();
    let workspaces = project.workspaces();

    // Stage 1.5: Run plugin system — parse config files, discover dynamic entries
    let t = Instant::now();
    let pb = progress.stage_spinner("Detecting plugins...");
    let mut plugin_result = run_plugins(config, files, workspaces);
    let plugins_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Stage 1.6: Analyze package.json scripts for binary usage and config file refs
    let t = Instant::now();
    analyze_all_scripts(config, workspaces, &mut plugin_result);
    let scripts_ms = t.elapsed().as_secs_f64() * 1000.0;

    // Stage 2: Parse all files in parallel and extract imports/exports
    let t = Instant::now();
    let pb = progress.stage_spinner(&format!("Parsing {} files...", files.len()));
    // `no_cache` skips both loading and (below) saving the parse cache.
    let mut cache_store = if config.no_cache {
        None
    } else {
        cache::CacheStore::load(&config.cache_dir)
    };

    let parse_result = extract::parse_all_files(files, cache_store.as_ref());
    let modules = parse_result.modules;
    let cache_hits = parse_result.cache_hits;
    let cache_misses = parse_result.cache_misses;
    let parse_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Update cache with freshly parsed modules and refresh stale mtime/size entries.
    let t = Instant::now();
    if !config.no_cache {
        let store = cache_store.get_or_insert_with(cache::CacheStore::new);
        update_cache(store, &modules, files);
        // A failed save only loses the speedup on the next run — warn, don't abort.
        if let Err(e) = store.save(&config.cache_dir) {
            tracing::warn!("Failed to save cache: {e}");
        }
    }
    let cache_ms = t.elapsed().as_secs_f64() * 1000.0;

    // Stage 3: Discover entry points (static patterns + plugin-discovered patterns)
    let t = Instant::now();
    let entry_points = discover_all_entry_points(config, files, workspaces, &plugin_result);
    let entry_points_ms = t.elapsed().as_secs_f64() * 1000.0;

    // Stage 4: Resolve imports to file IDs
    let t = Instant::now();
    let pb = progress.stage_spinner("Resolving imports...");
    let resolved = resolve::resolve_all_imports(
        &modules,
        files,
        workspaces,
        &plugin_result.active_plugins,
        &plugin_result.path_aliases,
        &config.root,
    );
    let resolve_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Stage 5: Build module graph
    let t = Instant::now();
    let pb = progress.stage_spinner("Building module graph...");
    let graph = graph::ModuleGraph::build(&resolved, &entry_points, files);
    let graph_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Stage 6: Analyze for dead code (with plugin context and workspace info)
    let t = Instant::now();
    let pb = progress.stage_spinner("Analyzing...");
    let result = analyze::find_dead_code_full(
        &graph,
        config,
        &resolved,
        Some(&plugin_result),
        workspaces,
        &modules,
        collect_usages,
    );
    let analyze_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();
    progress.finish();

    let total_ms = pipeline_start.elapsed().as_secs_f64() * 1000.0;

    // Only mention cache figures in the profile when the cache actually hit.
    let cache_summary = if cache_hits > 0 {
        format!(" ({cache_hits} cached, {cache_misses} parsed)")
    } else {
        String::new()
    };

    tracing::debug!(
        "\n┌─ Pipeline Profile ─────────────────────────────\n\
         │  discover files:   {:>8.1}ms  ({} files)\n\
         │  workspaces:       {:>8.1}ms\n\
         │  plugins:          {:>8.1}ms\n\
         │  script analysis:  {:>8.1}ms\n\
         │  parse/extract:    {:>8.1}ms  ({} modules{})\n\
         │  cache update:     {:>8.1}ms\n\
         │  entry points:     {:>8.1}ms  ({} entries)\n\
         │  resolve imports:  {:>8.1}ms\n\
         │  build graph:      {:>8.1}ms\n\
         │  analyze:          {:>8.1}ms\n\
         │  ────────────────────────────────────────────\n\
         │  TOTAL:            {:>8.1}ms\n\
         └─────────────────────────────────────────────────",
        discover_ms,
        files.len(),
        workspaces_ms,
        plugins_ms,
        scripts_ms,
        parse_ms,
        modules.len(),
        cache_summary,
        cache_ms,
        entry_points_ms,
        entry_points.len(),
        resolve_ms,
        graph_ms,
        analyze_ms,
        total_ms,
    );

    // Timings and graph are only materialized when the caller asked for them.
    let timings = if retain {
        Some(PipelineTimings {
            discover_files_ms: discover_ms,
            file_count: files.len(),
            workspaces_ms,
            workspace_count: workspaces.len(),
            plugins_ms,
            script_analysis_ms: scripts_ms,
            parse_extract_ms: parse_ms,
            module_count: modules.len(),
            cache_hits,
            cache_misses,
            cache_update_ms: cache_ms,
            entry_points_ms,
            entry_point_count: entry_points.len(),
            resolve_imports_ms: resolve_ms,
            build_graph_ms: graph_ms,
            analyze_ms,
            total_ms,
        })
    } else {
        None
    };

    Ok(AnalysisOutput {
        results: result,
        timings,
        graph: if retain { Some(graph) } else { None },
    })
}

474/// Analyze package.json scripts from root and all workspace packages.
475///
476/// Populates the plugin result with script-used packages and config file
477/// entry patterns. Also scans CI config files for binary invocations.
478fn analyze_all_scripts(
479    config: &ResolvedConfig,
480    workspaces: &[fallow_config::WorkspaceInfo],
481    plugin_result: &mut plugins::AggregatedPluginResult,
482) {
483    let pkg_path = config.root.join("package.json");
484    if let Ok(pkg) = PackageJson::load(&pkg_path)
485        && let Some(ref pkg_scripts) = pkg.scripts
486    {
487        let scripts_to_analyze = if config.production {
488            scripts::filter_production_scripts(pkg_scripts)
489        } else {
490            pkg_scripts.clone()
491        };
492        let script_analysis = scripts::analyze_scripts(&scripts_to_analyze, &config.root);
493        plugin_result.script_used_packages = script_analysis.used_packages;
494
495        for config_file in &script_analysis.config_files {
496            plugin_result
497                .entry_patterns
498                .push((config_file.clone(), "scripts".to_string()));
499        }
500    }
501    for ws in workspaces {
502        let ws_pkg_path = ws.root.join("package.json");
503        if let Ok(ws_pkg) = PackageJson::load(&ws_pkg_path)
504            && let Some(ref ws_scripts) = ws_pkg.scripts
505        {
506            let scripts_to_analyze = if config.production {
507                scripts::filter_production_scripts(ws_scripts)
508            } else {
509                ws_scripts.clone()
510            };
511            let ws_analysis = scripts::analyze_scripts(&scripts_to_analyze, &ws.root);
512            plugin_result
513                .script_used_packages
514                .extend(ws_analysis.used_packages);
515
516            let ws_prefix = ws
517                .root
518                .strip_prefix(&config.root)
519                .unwrap_or(&ws.root)
520                .to_string_lossy();
521            for config_file in &ws_analysis.config_files {
522                plugin_result
523                    .entry_patterns
524                    .push((format!("{ws_prefix}/{config_file}"), "scripts".to_string()));
525            }
526        }
527    }
528
529    // Scan CI config files for binary invocations
530    let ci_packages = scripts::ci::analyze_ci_files(&config.root);
531    plugin_result.script_used_packages.extend(ci_packages);
532}
533
534/// Discover all entry points from static patterns, workspaces, plugins, and infrastructure.
535fn discover_all_entry_points(
536    config: &ResolvedConfig,
537    files: &[discover::DiscoveredFile],
538    workspaces: &[fallow_config::WorkspaceInfo],
539    plugin_result: &plugins::AggregatedPluginResult,
540) -> Vec<discover::EntryPoint> {
541    let mut entry_points = discover::discover_entry_points(config, files);
542    let ws_entries: Vec<_> = workspaces
543        .par_iter()
544        .flat_map(|ws| discover::discover_workspace_entry_points(&ws.root, config, files))
545        .collect();
546    entry_points.extend(ws_entries);
547    let plugin_entries = discover::discover_plugin_entry_points(plugin_result, config, files);
548    entry_points.extend(plugin_entries);
549    let infra_entries = discover::discover_infrastructure_entry_points(&config.root);
550    entry_points.extend(infra_entries);
551    entry_points
552}
553
554/// Run plugins for root project and all workspace packages.
555fn run_plugins(
556    config: &ResolvedConfig,
557    files: &[discover::DiscoveredFile],
558    workspaces: &[fallow_config::WorkspaceInfo],
559) -> plugins::AggregatedPluginResult {
560    let registry = plugins::PluginRegistry::new(config.external_plugins.clone());
561    let file_paths: Vec<std::path::PathBuf> = files.iter().map(|f| f.path.clone()).collect();
562
563    // Run plugins for root project (full run with external plugins, inline config, etc.)
564    let pkg_path = config.root.join("package.json");
565    let mut result = PackageJson::load(&pkg_path).map_or_else(
566        |_| plugins::AggregatedPluginResult::default(),
567        |pkg| registry.run(&pkg, &config.root, &file_paths),
568    );
569
570    if workspaces.is_empty() {
571        return result;
572    }
573
574    // Pre-compile config matchers and relative files once for all workspace runs.
575    // This avoids re-compiling glob patterns and re-computing relative paths per workspace
576    // (previously O(workspaces × plugins × files) glob compilations).
577    let precompiled_matchers = registry.precompile_config_matchers();
578    let relative_files: Vec<(&std::path::PathBuf, String)> = file_paths
579        .iter()
580        .map(|f| {
581            let rel = f
582                .strip_prefix(&config.root)
583                .unwrap_or(f)
584                .to_string_lossy()
585                .into_owned();
586            (f, rel)
587        })
588        .collect();
589
590    // Run plugins for each workspace package in parallel, then merge results.
591    let ws_results: Vec<_> = workspaces
592        .par_iter()
593        .filter_map(|ws| {
594            let ws_pkg_path = ws.root.join("package.json");
595            let ws_pkg = PackageJson::load(&ws_pkg_path).ok()?;
596            let ws_result = registry.run_workspace_fast(
597                &ws_pkg,
598                &ws.root,
599                &config.root,
600                &precompiled_matchers,
601                &relative_files,
602            );
603            if ws_result.active_plugins.is_empty() {
604                return None;
605            }
606            let ws_prefix = ws
607                .root
608                .strip_prefix(&config.root)
609                .unwrap_or(&ws.root)
610                .to_string_lossy()
611                .into_owned();
612            Some((ws_result, ws_prefix))
613        })
614        .collect();
615
616    // Merge workspace results sequentially (deterministic order via par_iter index stability)
617    // Track seen names for O(1) dedup instead of O(n) Vec::contains
618    let mut seen_plugins: rustc_hash::FxHashSet<String> =
619        result.active_plugins.iter().cloned().collect();
620    let mut seen_prefixes: rustc_hash::FxHashSet<String> =
621        result.virtual_module_prefixes.iter().cloned().collect();
622    for (ws_result, ws_prefix) in ws_results {
623        // Prefix helper: workspace-relative patterns need the workspace prefix
624        // to be matchable from the monorepo root. But patterns that are already
625        // project-root-relative (e.g., from angular.json which uses absolute paths
626        // like "apps/client/src/styles.css") should not be double-prefixed.
627        let prefix_if_needed = |pat: &str| -> String {
628            if pat.starts_with(ws_prefix.as_str()) || pat.starts_with('/') {
629                pat.to_string()
630            } else {
631                format!("{ws_prefix}/{pat}")
632            }
633        };
634
635        for (pat, pname) in &ws_result.entry_patterns {
636            result
637                .entry_patterns
638                .push((prefix_if_needed(pat), pname.clone()));
639        }
640        for (pat, pname) in &ws_result.always_used {
641            result
642                .always_used
643                .push((prefix_if_needed(pat), pname.clone()));
644        }
645        for (pat, pname) in &ws_result.discovered_always_used {
646            result
647                .discovered_always_used
648                .push((prefix_if_needed(pat), pname.clone()));
649        }
650        for (file_pat, exports) in &ws_result.used_exports {
651            result
652                .used_exports
653                .push((prefix_if_needed(file_pat), exports.clone()));
654        }
655        // Merge active plugin names (deduplicated via HashSet)
656        for plugin_name in ws_result.active_plugins {
657            if !seen_plugins.contains(&plugin_name) {
658                seen_plugins.insert(plugin_name.clone());
659                result.active_plugins.push(plugin_name);
660            }
661        }
662        // These don't need prefixing (absolute paths / package names)
663        result
664            .referenced_dependencies
665            .extend(ws_result.referenced_dependencies);
666        result.setup_files.extend(ws_result.setup_files);
667        result
668            .tooling_dependencies
669            .extend(ws_result.tooling_dependencies);
670        // Virtual module prefixes (e.g., Docusaurus @theme/, @site/) are
671        // package-name prefixes, not file paths — no workspace prefix needed.
672        for prefix in ws_result.virtual_module_prefixes {
673            if !seen_prefixes.contains(&prefix) {
674                seen_prefixes.insert(prefix.clone());
675                result.virtual_module_prefixes.push(prefix);
676            }
677        }
678    }
679
680    result
681}
682
683/// Run analysis on a project directory (with export usages for LSP Code Lens).
684///
685/// # Errors
686///
687/// Returns an error if config loading, file discovery, parsing, or analysis fails.
688pub fn analyze_project(root: &Path) -> Result<AnalysisResults, FallowError> {
689    let config = default_config(root);
690    analyze_with_usages(&config)
691}
692
693/// Create a default config for a project root.
694pub(crate) fn default_config(root: &Path) -> ResolvedConfig {
695    let user_config = fallow_config::FallowConfig::find_and_load(root)
696        .ok()
697        .flatten();
698    match user_config {
699        Some((config, _path)) => config.resolve(
700            root.to_path_buf(),
701            fallow_config::OutputFormat::Human,
702            num_cpus(),
703            false,
704            true, // quiet: LSP/programmatic callers don't need progress bars
705        ),
706        None => fallow_config::FallowConfig::default().resolve(
707            root.to_path_buf(),
708            fallow_config::OutputFormat::Human,
709            num_cpus(),
710            false,
711            true,
712        ),
713    }
714}
715
/// Number of logical CPUs, falling back to 4 when it cannot be determined.
fn num_cpus() -> usize {
    match std::thread::available_parallelism() {
        Ok(n) => n.get(),
        Err(_) => 4,
    }
}