//! fallow_core — `lib.rs`: analysis pipeline entry points and orchestration.
1pub mod analyze;
2pub mod cache;
3pub mod churn;
4pub mod cross_reference;
5pub mod discover;
6pub mod duplicates;
7pub(crate) mod errors;
8pub mod extract;
9pub mod plugins;
10pub(crate) mod progress;
11pub mod results;
12pub(crate) mod scripts;
13pub mod suppress;
14pub mod trace;
15
16// Re-export from fallow-graph for backwards compatibility
17pub use fallow_graph::graph;
18pub use fallow_graph::project;
19pub use fallow_graph::resolve;
20
21use std::path::Path;
22use std::time::Instant;
23
24use errors::FallowError;
25use fallow_config::{
26    EntryPointRole, PackageJson, ResolvedConfig, discover_workspaces, find_undeclared_workspaces,
27};
28use rayon::prelude::*;
29use results::AnalysisResults;
30use trace::PipelineTimings;
31
/// Result of the full analysis pipeline, including optional performance timings.
pub struct AnalysisOutput {
    /// Dead-code analysis results (always present).
    pub results: AnalysisResults,
    /// Per-stage timing breakdown, populated when the caller requested tracing.
    pub timings: Option<PipelineTimings>,
    /// The module graph, retained when the caller asked to keep it
    /// (e.g. for file scores or further graph queries).
    pub graph: Option<graph::ModuleGraph>,
    /// Parsed modules from the pipeline, available when `retain_modules` is true.
    /// Used by the combined command to share a single parse across dead-code and health.
    pub modules: Option<Vec<extract::ModuleInfo>>,
    /// Discovered files from the pipeline, available when `retain_modules` is true.
    pub files: Option<Vec<discover::DiscoveredFile>>,
}
43
44/// Update cache: write freshly parsed modules and refresh stale mtime/size entries.
45fn update_cache(
46    store: &mut cache::CacheStore,
47    modules: &[extract::ModuleInfo],
48    files: &[discover::DiscoveredFile],
49) {
50    for module in modules {
51        if let Some(file) = files.get(module.file_id.0 as usize) {
52            let (mt, sz) = file_mtime_and_size(&file.path);
53            // If content hash matches, just refresh mtime/size if stale (e.g. `touch`ed file)
54            if let Some(cached) = store.get_by_path_only(&file.path)
55                && cached.content_hash == module.content_hash
56            {
57                if cached.mtime_secs != mt || cached.file_size != sz {
58                    store.insert(&file.path, cache::module_to_cached(module, mt, sz));
59                }
60                continue;
61            }
62            store.insert(&file.path, cache::module_to_cached(module, mt, sz));
63        }
64    }
65    store.retain_paths(files);
66}
67
/// Extract mtime (seconds since epoch) and file size from a path.
///
/// Returns `(0, 0)` when metadata cannot be read; the mtime alone is `0`
/// when the modification time is unavailable or predates the Unix epoch.
fn file_mtime_and_size(path: &std::path::Path) -> (u64, u64) {
    let Ok(meta) = std::fs::metadata(path) else {
        return (0, 0);
    };
    let mtime = meta
        .modified()
        .ok()
        .and_then(|t| t.duration_since(std::time::SystemTime::UNIX_EPOCH).ok())
        .map(|d| d.as_secs())
        .unwrap_or(0);
    (mtime, meta.len())
}
79
80/// Run the full analysis pipeline.
81///
82/// # Errors
83///
84/// Returns an error if file discovery, parsing, or analysis fails.
85pub fn analyze(config: &ResolvedConfig) -> Result<AnalysisResults, FallowError> {
86    let output = analyze_full(config, false, false, false, false)?;
87    Ok(output.results)
88}
89
90/// Run the full analysis pipeline with export usage collection (for LSP Code Lens).
91///
92/// # Errors
93///
94/// Returns an error if file discovery, parsing, or analysis fails.
95pub fn analyze_with_usages(config: &ResolvedConfig) -> Result<AnalysisResults, FallowError> {
96    let output = analyze_full(config, false, true, false, false)?;
97    Ok(output.results)
98}
99
100/// Run the full analysis pipeline with optional performance timings and graph retention.
101///
102/// # Errors
103///
104/// Returns an error if file discovery, parsing, or analysis fails.
105pub fn analyze_with_trace(config: &ResolvedConfig) -> Result<AnalysisOutput, FallowError> {
106    analyze_full(config, true, false, false, false)
107}
108
109/// Run the full analysis pipeline, retaining parsed modules and discovered files.
110///
111/// Used by the combined command to share a single parse across dead-code and health.
112/// When `need_complexity` is true, the `ComplexityVisitor` runs during parsing so
113/// the returned modules contain per-function complexity data.
114///
115/// # Errors
116///
117/// Returns an error if file discovery, parsing, or analysis fails.
118pub fn analyze_retaining_modules(
119    config: &ResolvedConfig,
120    need_complexity: bool,
121    retain_graph: bool,
122) -> Result<AnalysisOutput, FallowError> {
123    analyze_full(config, retain_graph, false, need_complexity, true)
124}
125
/// Run the analysis pipeline using pre-parsed modules, skipping the parsing stage.
///
/// This avoids re-parsing files when the caller already has a `ParseResult` (e.g., from
/// `fallow_core::extract::parse_all_files`). Discovery, plugins, scripts, entry points,
/// import resolution, graph construction, and dead code detection still run normally.
/// The graph is always retained (needed for file scores).
///
/// # Errors
///
/// Returns an error if discovery, graph construction, or analysis fails.
pub fn analyze_with_parse_result(
    config: &ResolvedConfig,
    modules: &[extract::ModuleInfo],
) -> Result<AnalysisOutput, FallowError> {
    let _span = tracing::info_span!("fallow_analyze_with_parse_result").entered();
    let pipeline_start = Instant::now();

    // Progress spinners only for interactive, human-readable output
    // (mirrors the same gate in `analyze_full`).
    let show_progress = !config.quiet
        && std::io::IsTerminal::is_terminal(&std::io::stderr())
        && matches!(
            config.output,
            fallow_config::OutputFormat::Human
                | fallow_config::OutputFormat::Compact
                | fallow_config::OutputFormat::Markdown
        );
    let progress = progress::AnalysisProgress::new(show_progress);

    // Warn if node_modules is missing — import resolution will be degraded.
    if !config.root.join("node_modules").is_dir() {
        tracing::warn!(
            "node_modules directory not found. Run `npm install` / `pnpm install` first for accurate results."
        );
    }

    // Discover workspaces
    let t = Instant::now();
    let workspaces_vec = discover_workspaces(&config.root);
    let workspaces_ms = t.elapsed().as_secs_f64() * 1000.0;
    if !workspaces_vec.is_empty() {
        tracing::info!(count = workspaces_vec.len(), "workspaces discovered");
    }

    // Warn about directories with package.json not declared as workspaces
    if !config.quiet {
        let undeclared = find_undeclared_workspaces(&config.root, &workspaces_vec);
        for diag in &undeclared {
            tracing::warn!("{}", diag.message);
        }
    }

    // Stage 1: Discover files (cheap — needed for file registry and resolution)
    let t = Instant::now();
    let pb = progress.stage_spinner("Discovering files...");
    let discovered_files = discover::discover_files(config);
    let discover_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    let project = project::ProjectState::new(discovered_files, workspaces_vec);
    let files = project.files();
    let workspaces = project.workspaces();

    // Stage 1.5: Run plugin system
    let t = Instant::now();
    let pb = progress.stage_spinner("Detecting plugins...");
    let mut plugin_result = run_plugins(config, files, workspaces);
    let plugins_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Stage 1.6: Analyze package.json scripts
    let t = Instant::now();
    analyze_all_scripts(config, workspaces, &mut plugin_result);
    let scripts_ms = t.elapsed().as_secs_f64() * 1000.0;

    // Stage 2: SKIPPED — using pre-parsed modules from caller

    // Stage 3: Discover entry points
    let t = Instant::now();
    let entry_points = discover_all_entry_points(config, files, workspaces, &plugin_result);
    let entry_points_ms = t.elapsed().as_secs_f64() * 1000.0;

    // Summarize entry points up front for user-facing output.
    let ep_summary = summarize_entry_points(&entry_points.all);

    // Stage 4: Resolve imports to file IDs
    let t = Instant::now();
    let pb = progress.stage_spinner("Resolving imports...");
    let resolved = resolve::resolve_all_imports(
        modules,
        files,
        workspaces,
        &plugin_result.active_plugins,
        &plugin_result.path_aliases,
        &plugin_result.scss_include_paths,
        &config.root,
    );
    let resolve_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Stage 5: Build module graph
    let t = Instant::now();
    let pb = progress.stage_spinner("Building module graph...");
    let graph = graph::ModuleGraph::build_with_reachability_roots(
        &resolved,
        &entry_points.all,
        &entry_points.runtime,
        &entry_points.test,
        files,
    );
    let graph_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Stage 6: Analyze for dead code
    let t = Instant::now();
    let pb = progress.stage_spinner("Analyzing...");
    let mut result = analyze::find_dead_code_full(
        &graph,
        config,
        &resolved,
        Some(&plugin_result),
        workspaces,
        modules,
        false,
    );
    let analyze_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();
    progress.finish();

    result.entry_point_summary = Some(ep_summary);

    let total_ms = pipeline_start.elapsed().as_secs_f64() * 1000.0;

    tracing::debug!(
        "\n┌─ Pipeline Profile (reuse) ─────────────────────\n\
         │  discover files:   {:>8.1}ms  ({} files)\n\
         │  workspaces:       {:>8.1}ms\n\
         │  plugins:          {:>8.1}ms\n\
         │  script analysis:  {:>8.1}ms\n\
         │  parse/extract:    SKIPPED (reused {} modules)\n\
         │  entry points:     {:>8.1}ms  ({} entries)\n\
         │  resolve imports:  {:>8.1}ms\n\
         │  build graph:      {:>8.1}ms\n\
         │  analyze:          {:>8.1}ms\n\
         │  ────────────────────────────────────────────\n\
         │  TOTAL:            {:>8.1}ms\n\
         └─────────────────────────────────────────────────",
        discover_ms,
        files.len(),
        workspaces_ms,
        plugins_ms,
        scripts_ms,
        modules.len(),
        entry_points_ms,
        entry_points.all.len(),
        resolve_ms,
        graph_ms,
        analyze_ms,
        total_ms,
    );

    // Timings are always collected on this path (there is no `retain` flag here).
    let timings = Some(PipelineTimings {
        discover_files_ms: discover_ms,
        file_count: files.len(),
        workspaces_ms,
        workspace_count: workspaces.len(),
        plugins_ms,
        script_analysis_ms: scripts_ms,
        parse_extract_ms: 0.0, // Skipped — modules were reused
        module_count: modules.len(),
        cache_hits: 0,
        cache_misses: 0,
        cache_update_ms: 0.0,
        entry_points_ms,
        entry_point_count: entry_points.all.len(),
        resolve_imports_ms: resolve_ms,
        build_graph_ms: graph_ms,
        analyze_ms,
        total_ms,
    });

    Ok(AnalysisOutput {
        results: result,
        timings,
        graph: Some(graph),
        // The caller already owns the modules/files; no need to return copies.
        modules: None,
        files: None,
    })
}
312
/// Core pipeline implementation behind the public `analyze*` entry points.
///
/// Flags:
/// * `retain` — keep pipeline timings and the module graph in the output.
/// * `collect_usages` — collect export usage data (for LSP Code Lens).
/// * `need_complexity` — run the complexity visitor during parsing.
/// * `retain_modules` — return parsed modules and discovered files to the caller.
#[expect(
    clippy::unnecessary_wraps,
    reason = "Result kept for future error handling"
)]
#[expect(
    clippy::too_many_lines,
    reason = "main pipeline function; split candidate for sig-audit-loop"
)]
fn analyze_full(
    config: &ResolvedConfig,
    retain: bool,
    collect_usages: bool,
    need_complexity: bool,
    retain_modules: bool,
) -> Result<AnalysisOutput, FallowError> {
    let _span = tracing::info_span!("fallow_analyze").entered();
    let pipeline_start = Instant::now();

    // Progress bars: enabled when not quiet, stderr is a terminal, and output is human-readable.
    // Structured formats (JSON, SARIF) suppress spinners even on TTY — users piping structured
    // output don't expect progress noise on stderr.
    let show_progress = !config.quiet
        && std::io::IsTerminal::is_terminal(&std::io::stderr())
        && matches!(
            config.output,
            fallow_config::OutputFormat::Human
                | fallow_config::OutputFormat::Compact
                | fallow_config::OutputFormat::Markdown
        );
    let progress = progress::AnalysisProgress::new(show_progress);

    // Warn if node_modules is missing — resolution will be severely degraded
    if !config.root.join("node_modules").is_dir() {
        tracing::warn!(
            "node_modules directory not found. Run `npm install` / `pnpm install` first for accurate results."
        );
    }

    // Discover workspaces if in a monorepo
    let t = Instant::now();
    let workspaces_vec = discover_workspaces(&config.root);
    let workspaces_ms = t.elapsed().as_secs_f64() * 1000.0;
    if !workspaces_vec.is_empty() {
        tracing::info!(count = workspaces_vec.len(), "workspaces discovered");
    }

    // Warn about directories with package.json not declared as workspaces
    if !config.quiet {
        let undeclared = find_undeclared_workspaces(&config.root, &workspaces_vec);
        for diag in &undeclared {
            tracing::warn!("{}", diag.message);
        }
    }

    // Stage 1: Discover all source files
    let t = Instant::now();
    let pb = progress.stage_spinner("Discovering files...");
    let discovered_files = discover::discover_files(config);
    let discover_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Build ProjectState: owns the file registry with stable FileIds and workspace metadata.
    // This is the foundation for cross-workspace resolution and future incremental analysis.
    let project = project::ProjectState::new(discovered_files, workspaces_vec);
    let files = project.files();
    let workspaces = project.workspaces();

    // Stage 1.5: Run plugin system — parse config files, discover dynamic entries
    let t = Instant::now();
    let pb = progress.stage_spinner("Detecting plugins...");
    let mut plugin_result = run_plugins(config, files, workspaces);
    let plugins_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Stage 1.6: Analyze package.json scripts for binary usage and config file refs
    let t = Instant::now();
    analyze_all_scripts(config, workspaces, &mut plugin_result);
    let scripts_ms = t.elapsed().as_secs_f64() * 1000.0;

    // Stage 2: Parse all files in parallel and extract imports/exports
    let t = Instant::now();
    let pb = progress.stage_spinner(&format!("Parsing {} files...", files.len()));
    // `no_cache` disables both reading and writing the parse cache.
    let mut cache_store = if config.no_cache {
        None
    } else {
        cache::CacheStore::load(&config.cache_dir)
    };

    let parse_result = extract::parse_all_files(files, cache_store.as_ref(), need_complexity);
    let modules = parse_result.modules;
    let cache_hits = parse_result.cache_hits;
    let cache_misses = parse_result.cache_misses;
    let parse_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Update cache with freshly parsed modules and refresh stale mtime/size entries.
    let t = Instant::now();
    if !config.no_cache {
        let store = cache_store.get_or_insert_with(cache::CacheStore::new);
        update_cache(store, &modules, files);
        if let Err(e) = store.save(&config.cache_dir) {
            // Cache save failures are non-fatal; analysis results are unaffected.
            tracing::warn!("Failed to save cache: {e}");
        }
    }
    let cache_ms = t.elapsed().as_secs_f64() * 1000.0;

    // Stage 3: Discover entry points (static patterns + plugin-discovered patterns)
    let t = Instant::now();
    let entry_points = discover_all_entry_points(config, files, workspaces, &plugin_result);
    let entry_points_ms = t.elapsed().as_secs_f64() * 1000.0;

    // Stage 4: Resolve imports to file IDs
    let t = Instant::now();
    let pb = progress.stage_spinner("Resolving imports...");
    let resolved = resolve::resolve_all_imports(
        &modules,
        files,
        workspaces,
        &plugin_result.active_plugins,
        &plugin_result.path_aliases,
        &plugin_result.scss_include_paths,
        &config.root,
    );
    let resolve_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Stage 5: Build module graph
    let t = Instant::now();
    let pb = progress.stage_spinner("Building module graph...");
    let graph = graph::ModuleGraph::build_with_reachability_roots(
        &resolved,
        &entry_points.all,
        &entry_points.runtime,
        &entry_points.test,
        files,
    );
    let graph_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Summarize entry points for user-facing output (the graph build above
    // only borrows `entry_points`, so this can run either side of it).
    let ep_summary = summarize_entry_points(&entry_points.all);

    // Stage 6: Analyze for dead code (with plugin context and workspace info)
    let t = Instant::now();
    let pb = progress.stage_spinner("Analyzing...");
    let mut result = analyze::find_dead_code_full(
        &graph,
        config,
        &resolved,
        Some(&plugin_result),
        workspaces,
        &modules,
        collect_usages,
    );
    let analyze_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();
    progress.finish();

    result.entry_point_summary = Some(ep_summary);

    let total_ms = pipeline_start.elapsed().as_secs_f64() * 1000.0;

    let cache_summary = if cache_hits > 0 {
        format!(" ({cache_hits} cached, {cache_misses} parsed)")
    } else {
        String::new()
    };

    tracing::debug!(
        "\n┌─ Pipeline Profile ─────────────────────────────\n\
         │  discover files:   {:>8.1}ms  ({} files)\n\
         │  workspaces:       {:>8.1}ms\n\
         │  plugins:          {:>8.1}ms\n\
         │  script analysis:  {:>8.1}ms\n\
         │  parse/extract:    {:>8.1}ms  ({} modules{})\n\
         │  cache update:     {:>8.1}ms\n\
         │  entry points:     {:>8.1}ms  ({} entries)\n\
         │  resolve imports:  {:>8.1}ms\n\
         │  build graph:      {:>8.1}ms\n\
         │  analyze:          {:>8.1}ms\n\
         │  ────────────────────────────────────────────\n\
         │  TOTAL:            {:>8.1}ms\n\
         └─────────────────────────────────────────────────",
        discover_ms,
        files.len(),
        workspaces_ms,
        plugins_ms,
        scripts_ms,
        parse_ms,
        modules.len(),
        cache_summary,
        cache_ms,
        entry_points_ms,
        entry_points.all.len(),
        resolve_ms,
        graph_ms,
        analyze_ms,
        total_ms,
    );

    // Timings only when tracing was requested via `retain`.
    let timings = if retain {
        Some(PipelineTimings {
            discover_files_ms: discover_ms,
            file_count: files.len(),
            workspaces_ms,
            workspace_count: workspaces.len(),
            plugins_ms,
            script_analysis_ms: scripts_ms,
            parse_extract_ms: parse_ms,
            module_count: modules.len(),
            cache_hits,
            cache_misses,
            cache_update_ms: cache_ms,
            entry_points_ms,
            entry_point_count: entry_points.all.len(),
            resolve_imports_ms: resolve_ms,
            build_graph_ms: graph_ms,
            analyze_ms,
            total_ms,
        })
    } else {
        None
    };

    Ok(AnalysisOutput {
        results: result,
        timings,
        graph: if retain { Some(graph) } else { None },
        modules: if retain_modules { Some(modules) } else { None },
        files: if retain_modules {
            Some(files.to_vec())
        } else {
            None
        },
    })
}
549
/// Analyze package.json scripts from root and all workspace packages.
///
/// Populates the plugin result with script-used packages and config file
/// entry patterns. Also scans CI config files for binary invocations.
fn analyze_all_scripts(
    config: &ResolvedConfig,
    workspaces: &[fallow_config::WorkspaceInfo],
    plugin_result: &mut plugins::AggregatedPluginResult,
) {
    // Load all package.jsons once: root + workspaces. Each is reused for both
    // dep name collection (bin map) and script analysis (no double I/O).
    let pkg_path = config.root.join("package.json");
    let root_pkg = PackageJson::load(&pkg_path).ok();

    // Workspaces whose package.json fails to load are silently skipped.
    let ws_pkgs: Vec<_> = workspaces
        .iter()
        .filter_map(|ws| {
            PackageJson::load(&ws.root.join("package.json"))
                .ok()
                .map(|pkg| (ws, pkg))
        })
        .collect();

    // Collect all dependency names to build the bin-name → package-name reverse map.
    // This resolves binaries like "attw" to "@arethetypeswrong/cli" even without
    // node_modules/.bin symlinks.
    let mut all_dep_names: Vec<String> = Vec::new();
    if let Some(ref pkg) = root_pkg {
        all_dep_names.extend(pkg.all_dependency_names());
    }
    for (_, ws_pkg) in &ws_pkgs {
        all_dep_names.extend(ws_pkg.all_dependency_names());
    }
    // Sort + dedup so each dependency name is probed once.
    all_dep_names.sort_unstable();
    all_dep_names.dedup();

    // Probe node_modules/ at project root and each workspace root so non-hoisted
    // deps (pnpm strict, Yarn workspaces) are also discovered.
    let mut nm_roots: Vec<&std::path::Path> = vec![&config.root];
    for ws in workspaces {
        nm_roots.push(&ws.root);
    }
    let bin_map = scripts::build_bin_to_package_map(&nm_roots, &all_dep_names);

    // Root-package scripts. In `--production` mode, only scripts surviving
    // `filter_production_scripts` are analyzed.
    if let Some(ref pkg) = root_pkg
        && let Some(ref pkg_scripts) = pkg.scripts
    {
        let scripts_to_analyze = if config.production {
            scripts::filter_production_scripts(pkg_scripts)
        } else {
            pkg_scripts.clone()
        };
        let script_analysis = scripts::analyze_scripts(&scripts_to_analyze, &config.root, &bin_map);
        plugin_result.script_used_packages = script_analysis.used_packages;

        for config_file in &script_analysis.config_files {
            plugin_result
                .discovered_always_used
                .push((config_file.clone(), "scripts".to_string()));
        }
    }
    // Workspace scripts: discovered config-file paths are re-rooted with the
    // workspace prefix so they are matchable from the monorepo root.
    for (ws, ws_pkg) in &ws_pkgs {
        if let Some(ref ws_scripts) = ws_pkg.scripts {
            let scripts_to_analyze = if config.production {
                scripts::filter_production_scripts(ws_scripts)
            } else {
                ws_scripts.clone()
            };
            let ws_analysis = scripts::analyze_scripts(&scripts_to_analyze, &ws.root, &bin_map);
            plugin_result
                .script_used_packages
                .extend(ws_analysis.used_packages);

            let ws_prefix = ws
                .root
                .strip_prefix(&config.root)
                .unwrap_or(&ws.root)
                .to_string_lossy();
            for config_file in &ws_analysis.config_files {
                plugin_result
                    .discovered_always_used
                    .push((format!("{ws_prefix}/{config_file}"), "scripts".to_string()));
            }
        }
    }

    // Scan CI config files for binary invocations
    let ci_packages = scripts::ci::analyze_ci_files(&config.root, &bin_map);
    plugin_result.script_used_packages.extend(ci_packages);
    // Register the "scripts" source with a Support role unless a plugin
    // already claimed that key.
    plugin_result
        .entry_point_roles
        .entry("scripts".to_string())
        .or_insert(EntryPointRole::Support);
}
644
645/// Discover all entry points from static patterns, workspaces, plugins, and infrastructure.
646fn discover_all_entry_points(
647    config: &ResolvedConfig,
648    files: &[discover::DiscoveredFile],
649    workspaces: &[fallow_config::WorkspaceInfo],
650    plugin_result: &plugins::AggregatedPluginResult,
651) -> discover::CategorizedEntryPoints {
652    let mut entry_points = discover::CategorizedEntryPoints::default();
653    entry_points.extend_runtime(discover::discover_entry_points(config, files));
654
655    let ws_entries: Vec<_> = workspaces
656        .par_iter()
657        .flat_map(|ws| discover::discover_workspace_entry_points(&ws.root, config, files))
658        .collect();
659    entry_points.extend_runtime(ws_entries);
660
661    let plugin_entries = discover::discover_plugin_entry_point_sets(plugin_result, config, files);
662    entry_points.extend(plugin_entries);
663
664    let infra_entries = discover::discover_infrastructure_entry_points(&config.root);
665    entry_points.extend_runtime(infra_entries);
666
667    // Add dynamically loaded files from config as entry points
668    if !config.dynamically_loaded.is_empty() {
669        let dynamic_entries = discover::discover_dynamically_loaded_entry_points(config, files);
670        entry_points.extend_runtime(dynamic_entries);
671    }
672
673    entry_points.dedup()
674}
675
676/// Summarize entry points by source category for user-facing output.
677fn summarize_entry_points(entry_points: &[discover::EntryPoint]) -> results::EntryPointSummary {
678    let mut counts: rustc_hash::FxHashMap<String, usize> = rustc_hash::FxHashMap::default();
679    for ep in entry_points {
680        let category = match &ep.source {
681            discover::EntryPointSource::PackageJsonMain
682            | discover::EntryPointSource::PackageJsonModule
683            | discover::EntryPointSource::PackageJsonExports
684            | discover::EntryPointSource::PackageJsonBin
685            | discover::EntryPointSource::PackageJsonScript => "package.json",
686            discover::EntryPointSource::Plugin { .. } => "plugin",
687            discover::EntryPointSource::TestFile => "test file",
688            discover::EntryPointSource::DefaultIndex => "default index",
689            discover::EntryPointSource::ManualEntry => "manual entry",
690            discover::EntryPointSource::InfrastructureConfig => "config",
691            discover::EntryPointSource::DynamicallyLoaded => "dynamically loaded",
692        };
693        *counts.entry(category.to_string()).or_insert(0) += 1;
694    }
695    let mut by_source: Vec<(String, usize)> = counts.into_iter().collect();
696    by_source.sort_by(|a, b| b.1.cmp(&a.1).then_with(|| a.0.cmp(&b.0)));
697    results::EntryPointSummary {
698        total: entry_points.len(),
699        by_source,
700    }
701}
702
/// Run plugins for root project and all workspace packages.
fn run_plugins(
    config: &ResolvedConfig,
    files: &[discover::DiscoveredFile],
    workspaces: &[fallow_config::WorkspaceInfo],
) -> plugins::AggregatedPluginResult {
    let registry = plugins::PluginRegistry::new(config.external_plugins.clone());
    let file_paths: Vec<std::path::PathBuf> = files.iter().map(|f| f.path.clone()).collect();

    // Run plugins for root project (full run with external plugins, inline config, etc.)
    // A missing/unreadable root package.json yields an empty (default) result.
    let pkg_path = config.root.join("package.json");
    let mut result = PackageJson::load(&pkg_path).map_or_else(
        |_| plugins::AggregatedPluginResult::default(),
        |pkg| registry.run(&pkg, &config.root, &file_paths),
    );

    if workspaces.is_empty() {
        return result;
    }

    // Pre-compile config matchers and relative files once for all workspace runs.
    // This avoids re-compiling glob patterns and re-computing relative paths per workspace
    // (previously O(workspaces × plugins × files) glob compilations).
    let precompiled_matchers = registry.precompile_config_matchers();
    let relative_files: Vec<(&std::path::PathBuf, String)> = file_paths
        .iter()
        .map(|f| {
            let rel = f
                .strip_prefix(&config.root)
                .unwrap_or(f)
                .to_string_lossy()
                .into_owned();
            (f, rel)
        })
        .collect();

    // Run plugins for each workspace package in parallel, then merge results.
    // Workspaces with no package.json or no active plugins are skipped.
    let ws_results: Vec<_> = workspaces
        .par_iter()
        .filter_map(|ws| {
            let ws_pkg_path = ws.root.join("package.json");
            let ws_pkg = PackageJson::load(&ws_pkg_path).ok()?;
            let ws_result = registry.run_workspace_fast(
                &ws_pkg,
                &ws.root,
                &config.root,
                &precompiled_matchers,
                &relative_files,
            );
            if ws_result.active_plugins.is_empty() {
                return None;
            }
            let ws_prefix = ws
                .root
                .strip_prefix(&config.root)
                .unwrap_or(&ws.root)
                .to_string_lossy()
                .into_owned();
            Some((ws_result, ws_prefix))
        })
        .collect();

    // Merge workspace results sequentially (deterministic order via par_iter index stability)
    // Track seen names for O(1) dedup instead of O(n) Vec::contains
    let mut seen_plugins: rustc_hash::FxHashSet<String> =
        result.active_plugins.iter().cloned().collect();
    let mut seen_prefixes: rustc_hash::FxHashSet<String> =
        result.virtual_module_prefixes.iter().cloned().collect();
    let mut seen_generated: rustc_hash::FxHashSet<String> =
        result.generated_import_patterns.iter().cloned().collect();
    for (ws_result, ws_prefix) in ws_results {
        // Prefix helper: workspace-relative patterns need the workspace prefix
        // to be matchable from the monorepo root. But patterns that are already
        // project-root-relative (e.g., from angular.json which uses absolute paths
        // like "apps/client/src/styles.css") should not be double-prefixed.
        let prefix_if_needed = |pat: &str| -> String {
            if pat.starts_with(ws_prefix.as_str()) || pat.starts_with('/') {
                pat.to_string()
            } else {
                format!("{ws_prefix}/{pat}")
            }
        };

        for (rule, pname) in &ws_result.entry_patterns {
            result
                .entry_patterns
                .push((rule.prefixed(&ws_prefix), pname.clone()));
        }
        // First plugin to claim a role for a name wins (root result has priority).
        for (plugin_name, role) in ws_result.entry_point_roles {
            result.entry_point_roles.entry(plugin_name).or_insert(role);
        }
        for (pat, pname) in &ws_result.always_used {
            result
                .always_used
                .push((prefix_if_needed(pat), pname.clone()));
        }
        for (pat, pname) in &ws_result.discovered_always_used {
            result
                .discovered_always_used
                .push((prefix_if_needed(pat), pname.clone()));
        }
        for (pat, pname) in &ws_result.fixture_patterns {
            result
                .fixture_patterns
                .push((prefix_if_needed(pat), pname.clone()));
        }
        for rule in &ws_result.used_exports {
            result.used_exports.push(rule.prefixed(&ws_prefix));
        }
        // Merge active plugin names (deduplicated via HashSet)
        for plugin_name in ws_result.active_plugins {
            if !seen_plugins.contains(&plugin_name) {
                seen_plugins.insert(plugin_name.clone());
                result.active_plugins.push(plugin_name);
            }
        }
        // These don't need prefixing (absolute paths / package names)
        result
            .referenced_dependencies
            .extend(ws_result.referenced_dependencies);
        result.setup_files.extend(ws_result.setup_files);
        result
            .tooling_dependencies
            .extend(ws_result.tooling_dependencies);
        // Virtual module prefixes (e.g., Docusaurus @theme/, @site/) are
        // package-name prefixes, not file paths — no workspace prefix needed.
        for prefix in ws_result.virtual_module_prefixes {
            if !seen_prefixes.contains(&prefix) {
                seen_prefixes.insert(prefix.clone());
                result.virtual_module_prefixes.push(prefix);
            }
        }
        // Generated import patterns (e.g., SvelteKit /$types) are suffix
        // matches on specifiers, not file paths — no workspace prefix needed.
        for pattern in ws_result.generated_import_patterns {
            if !seen_generated.contains(&pattern) {
                seen_generated.insert(pattern.clone());
                result.generated_import_patterns.push(pattern);
            }
        }
        // Path aliases from workspace plugins (e.g., SvelteKit $lib/ → src/lib).
        // Prefix the replacement directory so it resolves from the monorepo root.
        for (prefix, replacement) in ws_result.path_aliases {
            result
                .path_aliases
                .push((prefix, format!("{ws_prefix}/{replacement}")));
        }
    }

    result
}
854
855/// Run analysis on a project directory (with export usages for LSP Code Lens).
856///
857/// # Errors
858///
859/// Returns an error if config loading, file discovery, parsing, or analysis fails.
860pub fn analyze_project(root: &Path) -> Result<AnalysisResults, FallowError> {
861    let config = default_config(root);
862    analyze_with_usages(&config)
863}
864
865/// Create a default config for a project root.
866pub(crate) fn default_config(root: &Path) -> ResolvedConfig {
867    let user_config = fallow_config::FallowConfig::find_and_load(root)
868        .ok()
869        .flatten();
870    match user_config {
871        Some((config, _path)) => config.resolve(
872            root.to_path_buf(),
873            fallow_config::OutputFormat::Human,
874            num_cpus(),
875            false,
876            true, // quiet: LSP/programmatic callers don't need progress bars
877        ),
878        None => fallow_config::FallowConfig::default().resolve(
879            root.to_path_buf(),
880            fallow_config::OutputFormat::Human,
881            num_cpus(),
882            false,
883            true,
884        ),
885    }
886}
887
/// Degree of parallelism to use: the system's available parallelism,
/// falling back to 4 when it cannot be queried.
fn num_cpus() -> usize {
    match std::thread::available_parallelism() {
        Ok(n) => n.get(),
        Err(_) => 4,
    }
}