//! `fallow_core` — crate root (`lib.rs`): the analysis pipeline and its
//! public `analyze*` entry points.
1pub mod analyze;
2pub mod cache;
3pub mod churn;
4pub mod cross_reference;
5pub mod discover;
6pub mod duplicates;
7pub(crate) mod errors;
8pub mod extract;
9pub mod plugins;
10pub(crate) mod progress;
11pub mod results;
12pub(crate) mod scripts;
13pub mod suppress;
14pub mod trace;
15
16// Re-export from fallow-graph for backwards compatibility
17pub use fallow_graph::graph;
18pub use fallow_graph::project;
19pub use fallow_graph::resolve;
20
21use std::path::Path;
22use std::time::Instant;
23
24use errors::FallowError;
25use fallow_config::{
26    EntryPointRole, PackageJson, ResolvedConfig, discover_workspaces, find_undeclared_workspaces,
27};
28use rayon::prelude::*;
29use results::AnalysisResults;
30use trace::PipelineTimings;
31
/// Result of the full analysis pipeline, including optional performance timings.
pub struct AnalysisOutput {
    /// Dead-code findings and related analysis results.
    pub results: AnalysisResults,
    /// Per-stage timings, populated when the caller requested retention/tracing.
    pub timings: Option<PipelineTimings>,
    /// The module graph, retained when the caller requested it.
    pub graph: Option<graph::ModuleGraph>,
    /// Parsed modules from the pipeline, available when `retain_modules` is true.
    /// Used by the combined command to share a single parse across dead-code and health.
    pub modules: Option<Vec<extract::ModuleInfo>>,
    /// Discovered files from the pipeline, available when `retain_modules` is true.
    pub files: Option<Vec<discover::DiscoveredFile>>,
}
43
44/// Update cache: write freshly parsed modules and refresh stale mtime/size entries.
45fn update_cache(
46    store: &mut cache::CacheStore,
47    modules: &[extract::ModuleInfo],
48    files: &[discover::DiscoveredFile],
49) {
50    for module in modules {
51        if let Some(file) = files.get(module.file_id.0 as usize) {
52            let (mt, sz) = file_mtime_and_size(&file.path);
53            // If content hash matches, just refresh mtime/size if stale (e.g. `touch`ed file)
54            if let Some(cached) = store.get_by_path_only(&file.path)
55                && cached.content_hash == module.content_hash
56            {
57                if cached.mtime_secs != mt || cached.file_size != sz {
58                    store.insert(&file.path, cache::module_to_cached(module, mt, sz));
59                }
60                continue;
61            }
62            store.insert(&file.path, cache::module_to_cached(module, mt, sz));
63        }
64    }
65    store.retain_paths(files);
66}
67
/// Extract mtime (seconds since epoch) and file size from a path.
///
/// Returns `(0, 0)` when the metadata cannot be read; the mtime alone is 0 when
/// the platform reports no modification time or one before the Unix epoch.
fn file_mtime_and_size(path: &std::path::Path) -> (u64, u64) {
    let Ok(meta) = std::fs::metadata(path) else {
        return (0, 0);
    };
    let mtime = meta
        .modified()
        .ok()
        .and_then(|t| t.duration_since(std::time::SystemTime::UNIX_EPOCH).ok())
        .map(|d| d.as_secs())
        .unwrap_or(0);
    (mtime, meta.len())
}
81
82/// Run the full analysis pipeline.
83///
84/// # Errors
85///
86/// Returns an error if file discovery, parsing, or analysis fails.
87pub fn analyze(config: &ResolvedConfig) -> Result<AnalysisResults, FallowError> {
88    let output = analyze_full(config, false, false, false, false)?;
89    Ok(output.results)
90}
91
92/// Run the full analysis pipeline with export usage collection (for LSP Code Lens).
93///
94/// # Errors
95///
96/// Returns an error if file discovery, parsing, or analysis fails.
97pub fn analyze_with_usages(config: &ResolvedConfig) -> Result<AnalysisResults, FallowError> {
98    let output = analyze_full(config, false, true, false, false)?;
99    Ok(output.results)
100}
101
102/// Run the full analysis pipeline with optional performance timings and graph retention.
103///
104/// # Errors
105///
106/// Returns an error if file discovery, parsing, or analysis fails.
107pub fn analyze_with_trace(config: &ResolvedConfig) -> Result<AnalysisOutput, FallowError> {
108    analyze_full(config, true, false, false, false)
109}
110
111/// Run the full analysis pipeline, retaining parsed modules and discovered files.
112///
113/// Used by the combined command to share a single parse across dead-code and health.
114/// When `need_complexity` is true, the `ComplexityVisitor` runs during parsing so
115/// the returned modules contain per-function complexity data.
116///
117/// # Errors
118///
119/// Returns an error if file discovery, parsing, or analysis fails.
120pub fn analyze_retaining_modules(
121    config: &ResolvedConfig,
122    need_complexity: bool,
123    retain_graph: bool,
124) -> Result<AnalysisOutput, FallowError> {
125    analyze_full(config, retain_graph, false, need_complexity, true)
126}
127
/// Run the analysis pipeline using pre-parsed modules, skipping the parsing stage.
///
/// This avoids re-parsing files when the caller already has a `ParseResult` (e.g., from
/// `fallow_core::extract::parse_all_files`). Discovery, plugins, scripts, entry points,
/// import resolution, graph construction, and dead code detection still run normally.
/// The graph is always retained (needed for file scores).
///
/// The parse cache is neither read nor written on this path — the caller owns
/// parsing, so cache hits/misses/update time are reported as zero in the timings.
///
/// # Errors
///
/// Returns an error if discovery, graph construction, or analysis fails.
pub fn analyze_with_parse_result(
    config: &ResolvedConfig,
    modules: &[extract::ModuleInfo],
) -> Result<AnalysisOutput, FallowError> {
    let _span = tracing::info_span!("fallow_analyze_with_parse_result").entered();
    let pipeline_start = Instant::now();

    // Progress spinners only when not quiet, stderr is a TTY, and the output
    // format is human-readable (structured formats suppress progress noise).
    let show_progress = !config.quiet
        && std::io::IsTerminal::is_terminal(&std::io::stderr())
        && matches!(
            config.output,
            fallow_config::OutputFormat::Human
                | fallow_config::OutputFormat::Compact
                | fallow_config::OutputFormat::Markdown
        );
    let progress = progress::AnalysisProgress::new(show_progress);

    // Missing node_modules degrades import resolution — warn up front.
    if !config.root.join("node_modules").is_dir() {
        tracing::warn!(
            "node_modules directory not found. Run `npm install` / `pnpm install` first for accurate results."
        );
    }

    // Discover workspaces
    let t = Instant::now();
    let workspaces_vec = discover_workspaces(&config.root);
    let workspaces_ms = t.elapsed().as_secs_f64() * 1000.0;
    if !workspaces_vec.is_empty() {
        tracing::info!(count = workspaces_vec.len(), "workspaces discovered");
    }

    // Warn about directories with package.json not declared as workspaces
    if !config.quiet {
        let undeclared = find_undeclared_workspaces(&config.root, &workspaces_vec);
        for diag in &undeclared {
            tracing::warn!("{}", diag.message);
        }
    }

    // Stage 1: Discover files (cheap — needed for file registry and resolution)
    let t = Instant::now();
    let pb = progress.stage_spinner("Discovering files...");
    let discovered_files = discover::discover_files(config);
    let discover_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // ProjectState owns the file registry (stable FileIds) and workspace metadata.
    let project = project::ProjectState::new(discovered_files, workspaces_vec);
    let files = project.files();
    let workspaces = project.workspaces();

    // Stage 1.5: Run plugin system
    let t = Instant::now();
    let pb = progress.stage_spinner("Detecting plugins...");
    let mut plugin_result = run_plugins(config, files, workspaces);
    let plugins_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Stage 1.6: Analyze package.json scripts
    let t = Instant::now();
    analyze_all_scripts(config, workspaces, &mut plugin_result);
    let scripts_ms = t.elapsed().as_secs_f64() * 1000.0;

    // Stage 2: SKIPPED — using pre-parsed modules from caller

    // Stage 3: Discover entry points
    let t = Instant::now();
    let entry_points = discover_all_entry_points(config, files, workspaces, &plugin_result);
    let entry_points_ms = t.elapsed().as_secs_f64() * 1000.0;

    // Compute entry-point summary before the graph consumes the entry_points vec
    let ep_summary = summarize_entry_points(&entry_points.all);

    // Stage 4: Resolve imports to file IDs
    let t = Instant::now();
    let pb = progress.stage_spinner("Resolving imports...");
    let resolved = resolve::resolve_all_imports(
        modules,
        files,
        workspaces,
        &plugin_result.active_plugins,
        &plugin_result.path_aliases,
        &plugin_result.scss_include_paths,
        &config.root,
    );
    let resolve_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Stage 5: Build module graph
    let t = Instant::now();
    let pb = progress.stage_spinner("Building module graph...");
    let graph = graph::ModuleGraph::build_with_reachability_roots(
        &resolved,
        &entry_points.all,
        &entry_points.runtime,
        &entry_points.test,
        files,
    );
    let graph_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Stage 6: Analyze for dead code
    let t = Instant::now();
    let pb = progress.stage_spinner("Analyzing...");
    let mut result = analyze::find_dead_code_full(
        &graph,
        config,
        &resolved,
        Some(&plugin_result),
        workspaces,
        modules,
        // collect_usages: not needed on this path
        false,
    );
    let analyze_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();
    progress.finish();

    result.entry_point_summary = Some(ep_summary);

    let total_ms = pipeline_start.elapsed().as_secs_f64() * 1000.0;

    tracing::debug!(
        "\n┌─ Pipeline Profile (reuse) ─────────────────────\n\
         │  discover files:   {:>8.1}ms  ({} files)\n\
         │  workspaces:       {:>8.1}ms\n\
         │  plugins:          {:>8.1}ms\n\
         │  script analysis:  {:>8.1}ms\n\
         │  parse/extract:    SKIPPED (reused {} modules)\n\
         │  entry points:     {:>8.1}ms  ({} entries)\n\
         │  resolve imports:  {:>8.1}ms\n\
         │  build graph:      {:>8.1}ms\n\
         │  analyze:          {:>8.1}ms\n\
         │  ────────────────────────────────────────────\n\
         │  TOTAL:            {:>8.1}ms\n\
         └─────────────────────────────────────────────────",
        discover_ms,
        files.len(),
        workspaces_ms,
        plugins_ms,
        scripts_ms,
        modules.len(),
        entry_points_ms,
        entry_points.all.len(),
        resolve_ms,
        graph_ms,
        analyze_ms,
        total_ms,
    );

    // Timings are always populated on this path (callers use it for profiling).
    let timings = Some(PipelineTimings {
        discover_files_ms: discover_ms,
        file_count: files.len(),
        workspaces_ms,
        workspace_count: workspaces.len(),
        plugins_ms,
        script_analysis_ms: scripts_ms,
        parse_extract_ms: 0.0, // Skipped — modules were reused
        module_count: modules.len(),
        // No cache interaction on this path — the caller owns parsing.
        cache_hits: 0,
        cache_misses: 0,
        cache_update_ms: 0.0,
        entry_points_ms,
        entry_point_count: entry_points.all.len(),
        resolve_imports_ms: resolve_ms,
        build_graph_ms: graph_ms,
        analyze_ms,
        total_ms,
    });

    Ok(AnalysisOutput {
        results: result,
        timings,
        graph: Some(graph),
        // The caller already owns the parsed modules/files — don't echo them back.
        modules: None,
        files: None,
    })
}
314
/// Core pipeline shared by all public `analyze*` entry points:
/// discovery → plugins → scripts → parse → entry points → resolve → graph → analyze.
///
/// Flags:
/// - `retain`: keep per-stage timings and the module graph in the output.
/// - `collect_usages`: collect export usages (used by the LSP Code Lens path).
/// - `need_complexity`: run the complexity visitor during parsing.
/// - `retain_modules`: return parsed modules and discovered files to the caller.
#[expect(
    clippy::unnecessary_wraps,
    reason = "Result kept for future error handling"
)]
#[expect(
    clippy::too_many_lines,
    reason = "main pipeline function; split candidate for sig-audit-loop"
)]
fn analyze_full(
    config: &ResolvedConfig,
    retain: bool,
    collect_usages: bool,
    need_complexity: bool,
    retain_modules: bool,
) -> Result<AnalysisOutput, FallowError> {
    let _span = tracing::info_span!("fallow_analyze").entered();
    let pipeline_start = Instant::now();

    // Progress bars: enabled when not quiet, stderr is a terminal, and output is human-readable.
    // Structured formats (JSON, SARIF) suppress spinners even on TTY — users piping structured
    // output don't expect progress noise on stderr.
    let show_progress = !config.quiet
        && std::io::IsTerminal::is_terminal(&std::io::stderr())
        && matches!(
            config.output,
            fallow_config::OutputFormat::Human
                | fallow_config::OutputFormat::Compact
                | fallow_config::OutputFormat::Markdown
        );
    let progress = progress::AnalysisProgress::new(show_progress);

    // Warn if node_modules is missing — resolution will be severely degraded
    if !config.root.join("node_modules").is_dir() {
        tracing::warn!(
            "node_modules directory not found. Run `npm install` / `pnpm install` first for accurate results."
        );
    }

    // Discover workspaces if in a monorepo
    let t = Instant::now();
    let workspaces_vec = discover_workspaces(&config.root);
    let workspaces_ms = t.elapsed().as_secs_f64() * 1000.0;
    if !workspaces_vec.is_empty() {
        tracing::info!(count = workspaces_vec.len(), "workspaces discovered");
    }

    // Warn about directories with package.json not declared as workspaces
    if !config.quiet {
        let undeclared = find_undeclared_workspaces(&config.root, &workspaces_vec);
        for diag in &undeclared {
            tracing::warn!("{}", diag.message);
        }
    }

    // Stage 1: Discover all source files
    let t = Instant::now();
    let pb = progress.stage_spinner("Discovering files...");
    let discovered_files = discover::discover_files(config);
    let discover_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Build ProjectState: owns the file registry with stable FileIds and workspace metadata.
    // This is the foundation for cross-workspace resolution and future incremental analysis.
    let project = project::ProjectState::new(discovered_files, workspaces_vec);
    let files = project.files();
    let workspaces = project.workspaces();

    // Stage 1.5: Run plugin system — parse config files, discover dynamic entries
    let t = Instant::now();
    let pb = progress.stage_spinner("Detecting plugins...");
    let mut plugin_result = run_plugins(config, files, workspaces);
    let plugins_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Stage 1.6: Analyze package.json scripts for binary usage and config file refs
    let t = Instant::now();
    analyze_all_scripts(config, workspaces, &mut plugin_result);
    let scripts_ms = t.elapsed().as_secs_f64() * 1000.0;

    // Stage 2: Parse all files in parallel and extract imports/exports
    let t = Instant::now();
    let pb = progress.stage_spinner(&format!("Parsing {} files...", files.len()));
    // Cache is skipped entirely in no-cache mode; otherwise loaded from disk.
    let mut cache_store = if config.no_cache {
        None
    } else {
        cache::CacheStore::load(&config.cache_dir)
    };

    let parse_result = extract::parse_all_files(files, cache_store.as_ref(), need_complexity);
    let modules = parse_result.modules;
    let cache_hits = parse_result.cache_hits;
    let cache_misses = parse_result.cache_misses;
    let parse_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Update cache with freshly parsed modules and refresh stale mtime/size entries.
    let t = Instant::now();
    if !config.no_cache {
        // A failed load (or first run) starts from an empty store.
        let store = cache_store.get_or_insert_with(cache::CacheStore::new);
        update_cache(store, &modules, files);
        // Cache save failure is non-fatal — the analysis results are unaffected.
        if let Err(e) = store.save(&config.cache_dir) {
            tracing::warn!("Failed to save cache: {e}");
        }
    }
    let cache_ms = t.elapsed().as_secs_f64() * 1000.0;

    // Stage 3: Discover entry points (static patterns + plugin-discovered patterns)
    let t = Instant::now();
    let entry_points = discover_all_entry_points(config, files, workspaces, &plugin_result);
    let entry_points_ms = t.elapsed().as_secs_f64() * 1000.0;

    // Stage 4: Resolve imports to file IDs
    let t = Instant::now();
    let pb = progress.stage_spinner("Resolving imports...");
    let resolved = resolve::resolve_all_imports(
        &modules,
        files,
        workspaces,
        &plugin_result.active_plugins,
        &plugin_result.path_aliases,
        &plugin_result.scss_include_paths,
        &config.root,
    );
    let resolve_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Stage 5: Build module graph
    let t = Instant::now();
    let pb = progress.stage_spinner("Building module graph...");
    let graph = graph::ModuleGraph::build_with_reachability_roots(
        &resolved,
        &entry_points.all,
        &entry_points.runtime,
        &entry_points.test,
        files,
    );
    let graph_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Compute entry-point summary before the graph consumes the entry_points vec
    let ep_summary = summarize_entry_points(&entry_points.all);

    // Stage 6: Analyze for dead code (with plugin context and workspace info)
    let t = Instant::now();
    let pb = progress.stage_spinner("Analyzing...");
    let mut result = analyze::find_dead_code_full(
        &graph,
        config,
        &resolved,
        Some(&plugin_result),
        workspaces,
        &modules,
        collect_usages,
    );
    let analyze_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();
    progress.finish();

    result.entry_point_summary = Some(ep_summary);

    let total_ms = pipeline_start.elapsed().as_secs_f64() * 1000.0;

    // Only mention the cache in the profile output when it actually contributed.
    let cache_summary = if cache_hits > 0 {
        format!(" ({cache_hits} cached, {cache_misses} parsed)")
    } else {
        String::new()
    };

    tracing::debug!(
        "\n┌─ Pipeline Profile ─────────────────────────────\n\
         │  discover files:   {:>8.1}ms  ({} files)\n\
         │  workspaces:       {:>8.1}ms\n\
         │  plugins:          {:>8.1}ms\n\
         │  script analysis:  {:>8.1}ms\n\
         │  parse/extract:    {:>8.1}ms  ({} modules{})\n\
         │  cache update:     {:>8.1}ms\n\
         │  entry points:     {:>8.1}ms  ({} entries)\n\
         │  resolve imports:  {:>8.1}ms\n\
         │  build graph:      {:>8.1}ms\n\
         │  analyze:          {:>8.1}ms\n\
         │  ────────────────────────────────────────────\n\
         │  TOTAL:            {:>8.1}ms\n\
         └─────────────────────────────────────────────────",
        discover_ms,
        files.len(),
        workspaces_ms,
        plugins_ms,
        scripts_ms,
        parse_ms,
        modules.len(),
        cache_summary,
        cache_ms,
        entry_points_ms,
        entry_points.all.len(),
        resolve_ms,
        graph_ms,
        analyze_ms,
        total_ms,
    );

    // Timings are only materialized when the caller asked for retention.
    let timings = if retain {
        Some(PipelineTimings {
            discover_files_ms: discover_ms,
            file_count: files.len(),
            workspaces_ms,
            workspace_count: workspaces.len(),
            plugins_ms,
            script_analysis_ms: scripts_ms,
            parse_extract_ms: parse_ms,
            module_count: modules.len(),
            cache_hits,
            cache_misses,
            cache_update_ms: cache_ms,
            entry_points_ms,
            entry_point_count: entry_points.all.len(),
            resolve_imports_ms: resolve_ms,
            build_graph_ms: graph_ms,
            analyze_ms,
            total_ms,
        })
    } else {
        None
    };

    Ok(AnalysisOutput {
        results: result,
        timings,
        graph: if retain { Some(graph) } else { None },
        modules: if retain_modules { Some(modules) } else { None },
        files: if retain_modules {
            Some(files.to_vec())
        } else {
            None
        },
    })
}
551
552/// Analyze package.json scripts from root and all workspace packages.
553///
554/// Populates the plugin result with script-used packages and config file
555/// entry patterns. Also scans CI config files for binary invocations.
556fn analyze_all_scripts(
557    config: &ResolvedConfig,
558    workspaces: &[fallow_config::WorkspaceInfo],
559    plugin_result: &mut plugins::AggregatedPluginResult,
560) {
561    // Load all package.jsons once: root + workspaces. Each is reused for both
562    // dep name collection (bin map) and script analysis (no double I/O).
563    let pkg_path = config.root.join("package.json");
564    let root_pkg = PackageJson::load(&pkg_path).ok();
565
566    let ws_pkgs: Vec<_> = workspaces
567        .iter()
568        .filter_map(|ws| {
569            PackageJson::load(&ws.root.join("package.json"))
570                .ok()
571                .map(|pkg| (ws, pkg))
572        })
573        .collect();
574
575    // Collect all dependency names to build the bin-name → package-name reverse map.
576    // This resolves binaries like "attw" to "@arethetypeswrong/cli" even without
577    // node_modules/.bin symlinks.
578    let mut all_dep_names: Vec<String> = Vec::new();
579    if let Some(ref pkg) = root_pkg {
580        all_dep_names.extend(pkg.all_dependency_names());
581    }
582    for (_, ws_pkg) in &ws_pkgs {
583        all_dep_names.extend(ws_pkg.all_dependency_names());
584    }
585    all_dep_names.sort_unstable();
586    all_dep_names.dedup();
587
588    // Probe node_modules/ at project root and each workspace root so non-hoisted
589    // deps (pnpm strict, Yarn workspaces) are also discovered.
590    let mut nm_roots: Vec<&std::path::Path> = vec![&config.root];
591    for ws in workspaces {
592        nm_roots.push(&ws.root);
593    }
594    let bin_map = scripts::build_bin_to_package_map(&nm_roots, &all_dep_names);
595
596    if let Some(ref pkg) = root_pkg
597        && let Some(ref pkg_scripts) = pkg.scripts
598    {
599        let scripts_to_analyze = if config.production {
600            scripts::filter_production_scripts(pkg_scripts)
601        } else {
602            pkg_scripts.clone()
603        };
604        let script_analysis = scripts::analyze_scripts(&scripts_to_analyze, &config.root, &bin_map);
605        plugin_result.script_used_packages = script_analysis.used_packages;
606
607        for config_file in &script_analysis.config_files {
608            plugin_result
609                .discovered_always_used
610                .push((config_file.clone(), "scripts".to_string()));
611        }
612    }
613    for (ws, ws_pkg) in &ws_pkgs {
614        if let Some(ref ws_scripts) = ws_pkg.scripts {
615            let scripts_to_analyze = if config.production {
616                scripts::filter_production_scripts(ws_scripts)
617            } else {
618                ws_scripts.clone()
619            };
620            let ws_analysis = scripts::analyze_scripts(&scripts_to_analyze, &ws.root, &bin_map);
621            plugin_result
622                .script_used_packages
623                .extend(ws_analysis.used_packages);
624
625            let ws_prefix = ws
626                .root
627                .strip_prefix(&config.root)
628                .unwrap_or(&ws.root)
629                .to_string_lossy();
630            for config_file in &ws_analysis.config_files {
631                plugin_result
632                    .discovered_always_used
633                    .push((format!("{ws_prefix}/{config_file}"), "scripts".to_string()));
634            }
635        }
636    }
637
638    // Scan CI config files for binary invocations
639    let ci_packages = scripts::ci::analyze_ci_files(&config.root, &bin_map);
640    plugin_result.script_used_packages.extend(ci_packages);
641    plugin_result
642        .entry_point_roles
643        .entry("scripts".to_string())
644        .or_insert(EntryPointRole::Support);
645}
646
647/// Discover all entry points from static patterns, workspaces, plugins, and infrastructure.
648fn discover_all_entry_points(
649    config: &ResolvedConfig,
650    files: &[discover::DiscoveredFile],
651    workspaces: &[fallow_config::WorkspaceInfo],
652    plugin_result: &plugins::AggregatedPluginResult,
653) -> discover::CategorizedEntryPoints {
654    let mut entry_points = discover::CategorizedEntryPoints::default();
655    entry_points.extend_runtime(discover::discover_entry_points(config, files));
656
657    let ws_entries: Vec<_> = workspaces
658        .par_iter()
659        .flat_map(|ws| discover::discover_workspace_entry_points(&ws.root, config, files))
660        .collect();
661    entry_points.extend_runtime(ws_entries);
662
663    let plugin_entries = discover::discover_plugin_entry_point_sets(plugin_result, config, files);
664    entry_points.extend(plugin_entries);
665
666    let infra_entries = discover::discover_infrastructure_entry_points(&config.root);
667    entry_points.extend_runtime(infra_entries);
668
669    // Add dynamically loaded files from config as entry points
670    if !config.dynamically_loaded.is_empty() {
671        let dynamic_entries = discover::discover_dynamically_loaded_entry_points(config, files);
672        entry_points.extend_runtime(dynamic_entries);
673    }
674
675    entry_points.dedup()
676}
677
678/// Summarize entry points by source category for user-facing output.
679fn summarize_entry_points(entry_points: &[discover::EntryPoint]) -> results::EntryPointSummary {
680    let mut counts: rustc_hash::FxHashMap<String, usize> = rustc_hash::FxHashMap::default();
681    for ep in entry_points {
682        let category = match &ep.source {
683            discover::EntryPointSource::PackageJsonMain
684            | discover::EntryPointSource::PackageJsonModule
685            | discover::EntryPointSource::PackageJsonExports
686            | discover::EntryPointSource::PackageJsonBin
687            | discover::EntryPointSource::PackageJsonScript => "package.json",
688            discover::EntryPointSource::Plugin { .. } => "plugin",
689            discover::EntryPointSource::TestFile => "test file",
690            discover::EntryPointSource::DefaultIndex => "default index",
691            discover::EntryPointSource::ManualEntry => "manual entry",
692            discover::EntryPointSource::InfrastructureConfig => "config",
693            discover::EntryPointSource::DynamicallyLoaded => "dynamically loaded",
694        };
695        *counts.entry(category.to_string()).or_insert(0) += 1;
696    }
697    let mut by_source: Vec<(String, usize)> = counts.into_iter().collect();
698    by_source.sort_by(|a, b| b.1.cmp(&a.1).then_with(|| a.0.cmp(&b.0)));
699    results::EntryPointSummary {
700        total: entry_points.len(),
701        by_source,
702    }
703}
704
/// Run plugins for root project and all workspace packages.
///
/// The root project gets a full run; workspaces use the fast path with
/// pre-compiled matchers, and their path-like results are prefixed with the
/// workspace directory so they match from the monorepo root.
fn run_plugins(
    config: &ResolvedConfig,
    files: &[discover::DiscoveredFile],
    workspaces: &[fallow_config::WorkspaceInfo],
) -> plugins::AggregatedPluginResult {
    let registry = plugins::PluginRegistry::new(config.external_plugins.clone());
    let file_paths: Vec<std::path::PathBuf> = files.iter().map(|f| f.path.clone()).collect();

    // Run plugins for root project (full run with external plugins, inline config, etc.)
    let pkg_path = config.root.join("package.json");
    let mut result = PackageJson::load(&pkg_path).map_or_else(
        |_| plugins::AggregatedPluginResult::default(),
        |pkg| registry.run(&pkg, &config.root, &file_paths),
    );

    // Single-package repo: nothing to merge.
    if workspaces.is_empty() {
        return result;
    }

    // Pre-compile config matchers and relative files once for all workspace runs.
    // This avoids re-compiling glob patterns and re-computing relative paths per workspace
    // (previously O(workspaces × plugins × files) glob compilations).
    let precompiled_matchers = registry.precompile_config_matchers();
    let relative_files: Vec<(&std::path::PathBuf, String)> = file_paths
        .iter()
        .map(|f| {
            let rel = f
                .strip_prefix(&config.root)
                .unwrap_or(f)
                .to_string_lossy()
                .into_owned();
            (f, rel)
        })
        .collect();

    // Run plugins for each workspace package in parallel, then merge results.
    // Workspaces with no package.json or no active plugins are skipped.
    let ws_results: Vec<_> = workspaces
        .par_iter()
        .filter_map(|ws| {
            let ws_pkg_path = ws.root.join("package.json");
            let ws_pkg = PackageJson::load(&ws_pkg_path).ok()?;
            let ws_result = registry.run_workspace_fast(
                &ws_pkg,
                &ws.root,
                &config.root,
                &precompiled_matchers,
                &relative_files,
            );
            if ws_result.active_plugins.is_empty() {
                return None;
            }
            let ws_prefix = ws
                .root
                .strip_prefix(&config.root)
                .unwrap_or(&ws.root)
                .to_string_lossy()
                .into_owned();
            Some((ws_result, ws_prefix))
        })
        .collect();

    // Merge workspace results sequentially (deterministic order via par_iter index stability)
    // Track seen names for O(1) dedup instead of O(n) Vec::contains
    let mut seen_plugins: rustc_hash::FxHashSet<String> =
        result.active_plugins.iter().cloned().collect();
    let mut seen_prefixes: rustc_hash::FxHashSet<String> =
        result.virtual_module_prefixes.iter().cloned().collect();
    let mut seen_generated: rustc_hash::FxHashSet<String> =
        result.generated_import_patterns.iter().cloned().collect();
    for (ws_result, ws_prefix) in ws_results {
        // Prefix helper: workspace-relative patterns need the workspace prefix
        // to be matchable from the monorepo root. But patterns that are already
        // project-root-relative (e.g., from angular.json which uses absolute paths
        // like "apps/client/src/styles.css") should not be double-prefixed.
        let prefix_if_needed = |pat: &str| -> String {
            if pat.starts_with(ws_prefix.as_str()) || pat.starts_with('/') {
                pat.to_string()
            } else {
                format!("{ws_prefix}/{pat}")
            }
        };

        for (rule, pname) in &ws_result.entry_patterns {
            result
                .entry_patterns
                .push((rule.prefixed(&ws_prefix), pname.clone()));
        }
        // `or_insert` keeps the first-seen role: root's, then earlier workspaces'.
        for (plugin_name, role) in ws_result.entry_point_roles {
            result.entry_point_roles.entry(plugin_name).or_insert(role);
        }
        for (pat, pname) in &ws_result.always_used {
            result
                .always_used
                .push((prefix_if_needed(pat), pname.clone()));
        }
        for (pat, pname) in &ws_result.discovered_always_used {
            result
                .discovered_always_used
                .push((prefix_if_needed(pat), pname.clone()));
        }
        for (pat, pname) in &ws_result.fixture_patterns {
            result
                .fixture_patterns
                .push((prefix_if_needed(pat), pname.clone()));
        }
        for rule in &ws_result.used_exports {
            result.used_exports.push(rule.prefixed(&ws_prefix));
        }
        // Merge active plugin names (deduplicated via HashSet)
        for plugin_name in ws_result.active_plugins {
            if !seen_plugins.contains(&plugin_name) {
                seen_plugins.insert(plugin_name.clone());
                result.active_plugins.push(plugin_name);
            }
        }
        // These don't need prefixing (absolute paths / package names)
        result
            .referenced_dependencies
            .extend(ws_result.referenced_dependencies);
        result.setup_files.extend(ws_result.setup_files);
        result
            .tooling_dependencies
            .extend(ws_result.tooling_dependencies);
        // Virtual module prefixes (e.g., Docusaurus @theme/, @site/) are
        // package-name prefixes, not file paths — no workspace prefix needed.
        for prefix in ws_result.virtual_module_prefixes {
            if !seen_prefixes.contains(&prefix) {
                seen_prefixes.insert(prefix.clone());
                result.virtual_module_prefixes.push(prefix);
            }
        }
        // Generated import patterns (e.g., SvelteKit /$types) are suffix
        // matches on specifiers, not file paths — no workspace prefix needed.
        for pattern in ws_result.generated_import_patterns {
            if !seen_generated.contains(&pattern) {
                seen_generated.insert(pattern.clone());
                result.generated_import_patterns.push(pattern);
            }
        }
        // Path aliases from workspace plugins (e.g., SvelteKit $lib/ → src/lib).
        // Prefix the replacement directory so it resolves from the monorepo root.
        for (prefix, replacement) in ws_result.path_aliases {
            result
                .path_aliases
                .push((prefix, format!("{ws_prefix}/{replacement}")));
        }
    }

    result
}
856
857/// Run analysis on a project directory (with export usages for LSP Code Lens).
858///
859/// # Errors
860///
861/// Returns an error if config loading, file discovery, parsing, or analysis fails.
862pub fn analyze_project(root: &Path) -> Result<AnalysisResults, FallowError> {
863    let config = default_config(root);
864    analyze_with_usages(&config)
865}
866
867/// Create a default config for a project root.
868pub(crate) fn default_config(root: &Path) -> ResolvedConfig {
869    let user_config = fallow_config::FallowConfig::find_and_load(root)
870        .ok()
871        .flatten();
872    match user_config {
873        Some((config, _path)) => config.resolve(
874            root.to_path_buf(),
875            fallow_config::OutputFormat::Human,
876            num_cpus(),
877            false,
878            true, // quiet: LSP/programmatic callers don't need progress bars
879        ),
880        None => fallow_config::FallowConfig::default().resolve(
881            root.to_path_buf(),
882            fallow_config::OutputFormat::Human,
883            num_cpus(),
884            false,
885            true,
886        ),
887    }
888}
889
/// Worker-thread count: the system's available parallelism, or 4 when the
/// query fails.
fn num_cpus() -> usize {
    std::thread::available_parallelism().map_or(4, std::num::NonZeroUsize::get)
}