//! `fallow_core` — library root (`lib.rs`).
1pub mod analyze;
2pub mod cache;
3pub mod churn;
4pub mod cross_reference;
5pub mod discover;
6pub mod duplicates;
7pub(crate) mod errors;
8mod external_style_usage;
9pub mod extract;
10pub mod plugins;
11pub(crate) mod progress;
12pub mod results;
13pub(crate) mod scripts;
14pub mod suppress;
15pub mod trace;
16
17// Re-export from fallow-graph for backwards compatibility
18pub use fallow_graph::graph;
19pub use fallow_graph::project;
20pub use fallow_graph::resolve;
21
22use std::path::Path;
23use std::time::Instant;
24
25use errors::FallowError;
26use fallow_config::{
27    EntryPointRole, PackageJson, ResolvedConfig, discover_workspaces, find_undeclared_workspaces,
28};
29use rayon::prelude::*;
30use results::AnalysisResults;
31use trace::PipelineTimings;
32
/// Maximum number of undeclared-workspace paths listed verbatim in the warning message.
const UNDECLARED_WORKSPACE_WARNING_PREVIEW: usize = 5;
/// A workspace paired with its successfully parsed `package.json`.
type LoadedWorkspacePackage<'a> = (&'a fallow_config::WorkspaceInfo, PackageJson);
35
/// Result of the full analysis pipeline, including optional performance timings.
pub struct AnalysisOutput {
    /// Dead-code findings and related analysis results.
    pub results: AnalysisResults,
    /// Per-stage timings, populated only when the caller requested tracing.
    pub timings: Option<PipelineTimings>,
    /// The module graph, retained only when the caller requested it.
    pub graph: Option<graph::ModuleGraph>,
    /// Parsed modules from the pipeline, available when `retain_modules` is true.
    /// Used by the combined command to share a single parse across dead-code and health.
    pub modules: Option<Vec<extract::ModuleInfo>>,
    /// Discovered files from the pipeline, available when `retain_modules` is true.
    pub files: Option<Vec<discover::DiscoveredFile>>,
    /// Package names invoked from package.json scripts and CI configs, mirroring
    /// what the unused-deps detector consults. Populated for every pipeline run;
    /// trace tooling reads it so `trace_dependency` agrees with `unused-deps` on
    /// "used vs unused" instead of returning false-negatives for script-only deps.
    pub script_used_packages: rustc_hash::FxHashSet<String>,
}
52
53/// Update cache: write freshly parsed modules and refresh stale mtime/size entries.
54fn update_cache(
55    store: &mut cache::CacheStore,
56    modules: &[extract::ModuleInfo],
57    files: &[discover::DiscoveredFile],
58) {
59    for module in modules {
60        if let Some(file) = files.get(module.file_id.0 as usize) {
61            let (mt, sz) = file_mtime_and_size(&file.path);
62            // If content hash matches, just refresh mtime/size if stale (e.g. `touch`ed file)
63            if let Some(cached) = store.get_by_path_only(&file.path)
64                && cached.content_hash == module.content_hash
65            {
66                if cached.mtime_secs != mt || cached.file_size != sz {
67                    store.insert(&file.path, cache::module_to_cached(module, mt, sz));
68                }
69                continue;
70            }
71            store.insert(&file.path, cache::module_to_cached(module, mt, sz));
72        }
73    }
74    store.retain_paths(files);
75}
76
/// Extract mtime (seconds since epoch) and file size from a path.
///
/// Returns `(0, 0)` when the metadata cannot be read (e.g. missing file),
/// and an mtime of `0` when the modification time is unavailable or predates
/// the Unix epoch.
fn file_mtime_and_size(path: &std::path::Path) -> (u64, u64) {
    let Ok(meta) = std::fs::metadata(path) else {
        return (0, 0);
    };
    let mtime = meta
        .modified()
        .ok()
        .and_then(|t| t.duration_since(std::time::SystemTime::UNIX_EPOCH).ok())
        .map_or(0, |d| d.as_secs());
    (mtime, meta.len())
}
88
89fn format_undeclared_workspace_warning(
90    root: &Path,
91    undeclared: &[fallow_config::WorkspaceDiagnostic],
92) -> Option<String> {
93    if undeclared.is_empty() {
94        return None;
95    }
96
97    let preview = undeclared
98        .iter()
99        .take(UNDECLARED_WORKSPACE_WARNING_PREVIEW)
100        .map(|diag| {
101            diag.path
102                .strip_prefix(root)
103                .unwrap_or(&diag.path)
104                .display()
105                .to_string()
106                .replace('\\', "/")
107        })
108        .collect::<Vec<_>>();
109    let remaining = undeclared
110        .len()
111        .saturating_sub(UNDECLARED_WORKSPACE_WARNING_PREVIEW);
112    let tail = if remaining > 0 {
113        format!(" (and {remaining} more)")
114    } else {
115        String::new()
116    };
117    let noun = if undeclared.len() == 1 {
118        "directory with package.json is"
119    } else {
120        "directories with package.json are"
121    };
122    let guidance = if undeclared.len() == 1 {
123        "Add that path to package.json workspaces or pnpm-workspace.yaml if it should be analyzed as a workspace."
124    } else {
125        "Add those paths to package.json workspaces or pnpm-workspace.yaml if they should be analyzed as workspaces."
126    };
127
128    Some(format!(
129        "{} {} not declared as {}: {}{}. {}",
130        undeclared.len(),
131        noun,
132        if undeclared.len() == 1 {
133            "a workspace"
134        } else {
135            "workspaces"
136        },
137        preview.join(", "),
138        tail,
139        guidance
140    ))
141}
142
143fn warn_undeclared_workspaces(
144    root: &Path,
145    workspaces_vec: &[fallow_config::WorkspaceInfo],
146    quiet: bool,
147) {
148    if quiet {
149        return;
150    }
151
152    let undeclared = find_undeclared_workspaces(root, workspaces_vec);
153    if let Some(message) = format_undeclared_workspace_warning(root, &undeclared) {
154        tracing::warn!("{message}");
155    }
156}
157
158/// Run the full analysis pipeline.
159///
160/// # Errors
161///
162/// Returns an error if file discovery, parsing, or analysis fails.
163pub fn analyze(config: &ResolvedConfig) -> Result<AnalysisResults, FallowError> {
164    let output = analyze_full(config, false, false, false, false)?;
165    Ok(output.results)
166}
167
168/// Run the full analysis pipeline with export usage collection (for LSP Code Lens).
169///
170/// # Errors
171///
172/// Returns an error if file discovery, parsing, or analysis fails.
173pub fn analyze_with_usages(config: &ResolvedConfig) -> Result<AnalysisResults, FallowError> {
174    let output = analyze_full(config, false, true, false, false)?;
175    Ok(output.results)
176}
177
178/// Run the full analysis pipeline with optional performance timings and graph retention.
179///
180/// # Errors
181///
182/// Returns an error if file discovery, parsing, or analysis fails.
183pub fn analyze_with_trace(config: &ResolvedConfig) -> Result<AnalysisOutput, FallowError> {
184    analyze_full(config, true, false, false, false)
185}
186
187/// Run the full analysis pipeline, retaining parsed modules and discovered files.
188///
189/// Used by the combined command to share a single parse across dead-code and health.
190/// When `need_complexity` is true, the `ComplexityVisitor` runs during parsing so
191/// the returned modules contain per-function complexity data.
192///
193/// # Errors
194///
195/// Returns an error if file discovery, parsing, or analysis fails.
196pub fn analyze_retaining_modules(
197    config: &ResolvedConfig,
198    need_complexity: bool,
199    retain_graph: bool,
200) -> Result<AnalysisOutput, FallowError> {
201    analyze_full(config, retain_graph, false, need_complexity, true)
202}
203
204/// Run the analysis pipeline using pre-parsed modules, skipping the parsing stage.
205///
206/// This avoids re-parsing files when the caller already has a `ParseResult` (e.g., from
207/// `fallow_core::extract::parse_all_files`). Discovery, plugins, scripts, entry points,
208/// import resolution, graph construction, and dead code detection still run normally.
209/// The graph is always retained (needed for file scores).
210///
211/// # Errors
212///
213/// Returns an error if discovery, graph construction, or analysis fails.
214#[allow(
215    clippy::too_many_lines,
216    reason = "pipeline orchestration stays easier to audit in one place"
217)]
218pub fn analyze_with_parse_result(
219    config: &ResolvedConfig,
220    modules: &[extract::ModuleInfo],
221) -> Result<AnalysisOutput, FallowError> {
222    let _span = tracing::info_span!("fallow_analyze_with_parse_result").entered();
223    let pipeline_start = Instant::now();
224
225    let show_progress = !config.quiet
226        && std::io::IsTerminal::is_terminal(&std::io::stderr())
227        && matches!(
228            config.output,
229            fallow_config::OutputFormat::Human
230                | fallow_config::OutputFormat::Compact
231                | fallow_config::OutputFormat::Markdown
232        );
233    let progress = progress::AnalysisProgress::new(show_progress);
234
235    if !config.root.join("node_modules").is_dir() {
236        tracing::warn!(
237            "node_modules directory not found. Run `npm install` / `pnpm install` first for accurate results."
238        );
239    }
240
241    // Discover workspaces
242    let t = Instant::now();
243    let workspaces_vec = discover_workspaces(&config.root);
244    let workspaces_ms = t.elapsed().as_secs_f64() * 1000.0;
245    if !workspaces_vec.is_empty() {
246        tracing::info!(count = workspaces_vec.len(), "workspaces discovered");
247    }
248
249    // Warn about directories with package.json not declared as workspaces
250    warn_undeclared_workspaces(&config.root, &workspaces_vec, config.quiet);
251
252    // Stage 1: Discover files (cheap — needed for file registry and resolution)
253    let t = Instant::now();
254    let pb = progress.stage_spinner("Discovering files...");
255    let discovered_files = discover::discover_files(config);
256    let discover_ms = t.elapsed().as_secs_f64() * 1000.0;
257    pb.finish_and_clear();
258
259    let project = project::ProjectState::new(discovered_files, workspaces_vec);
260    let files = project.files();
261    let workspaces = project.workspaces();
262    let root_pkg = load_root_package_json(config);
263    let workspace_pkgs = load_workspace_packages(workspaces);
264
265    // Stage 1.5: Run plugin system
266    let t = Instant::now();
267    let pb = progress.stage_spinner("Detecting plugins...");
268    let mut plugin_result = run_plugins(
269        config,
270        files,
271        workspaces,
272        root_pkg.as_ref(),
273        &workspace_pkgs,
274    );
275    let plugins_ms = t.elapsed().as_secs_f64() * 1000.0;
276    pb.finish_and_clear();
277
278    // Stage 1.6: Analyze package.json scripts
279    let t = Instant::now();
280    analyze_all_scripts(
281        config,
282        workspaces,
283        root_pkg.as_ref(),
284        &workspace_pkgs,
285        &mut plugin_result,
286    );
287    let scripts_ms = t.elapsed().as_secs_f64() * 1000.0;
288
289    // Stage 2: SKIPPED — using pre-parsed modules from caller
290
291    // Stage 3: Discover entry points
292    let t = Instant::now();
293    let entry_points = discover_all_entry_points(
294        config,
295        files,
296        workspaces,
297        root_pkg.as_ref(),
298        &workspace_pkgs,
299        &plugin_result,
300    );
301    let entry_points_ms = t.elapsed().as_secs_f64() * 1000.0;
302
303    // Compute entry-point summary before the graph consumes the entry_points vec
304    let ep_summary = summarize_entry_points(&entry_points.all);
305
306    // Stage 4: Resolve imports to file IDs
307    let t = Instant::now();
308    let pb = progress.stage_spinner("Resolving imports...");
309    let mut resolved = resolve::resolve_all_imports(
310        modules,
311        files,
312        workspaces,
313        &plugin_result.active_plugins,
314        &plugin_result.path_aliases,
315        &plugin_result.scss_include_paths,
316        &config.root,
317        &config.resolve.conditions,
318    );
319    external_style_usage::augment_external_style_package_usage(
320        &mut resolved,
321        config,
322        workspaces,
323        &plugin_result,
324    );
325    let resolve_ms = t.elapsed().as_secs_f64() * 1000.0;
326    pb.finish_and_clear();
327
328    // Stage 5: Build module graph
329    let t = Instant::now();
330    let pb = progress.stage_spinner("Building module graph...");
331    let graph = graph::ModuleGraph::build_with_reachability_roots(
332        &resolved,
333        &entry_points.all,
334        &entry_points.runtime,
335        &entry_points.test,
336        files,
337    );
338    let graph_ms = t.elapsed().as_secs_f64() * 1000.0;
339    pb.finish_and_clear();
340
341    // Stage 6: Analyze for dead code
342    let t = Instant::now();
343    let pb = progress.stage_spinner("Analyzing...");
344    let mut result = analyze::find_dead_code_full(
345        &graph,
346        config,
347        &resolved,
348        Some(&plugin_result),
349        workspaces,
350        modules,
351        false,
352    );
353    let analyze_ms = t.elapsed().as_secs_f64() * 1000.0;
354    pb.finish_and_clear();
355    progress.finish();
356
357    result.entry_point_summary = Some(ep_summary);
358
359    let total_ms = pipeline_start.elapsed().as_secs_f64() * 1000.0;
360
361    tracing::debug!(
362        "\n┌─ Pipeline Profile (reuse) ─────────────────────\n\
363         │  discover files:   {:>8.1}ms  ({} files)\n\
364         │  workspaces:       {:>8.1}ms\n\
365         │  plugins:          {:>8.1}ms\n\
366         │  script analysis:  {:>8.1}ms\n\
367         │  parse/extract:    SKIPPED (reused {} modules)\n\
368         │  entry points:     {:>8.1}ms  ({} entries)\n\
369         │  resolve imports:  {:>8.1}ms\n\
370         │  build graph:      {:>8.1}ms\n\
371         │  analyze:          {:>8.1}ms\n\
372         │  ────────────────────────────────────────────\n\
373         │  TOTAL:            {:>8.1}ms\n\
374         └─────────────────────────────────────────────────",
375        discover_ms,
376        files.len(),
377        workspaces_ms,
378        plugins_ms,
379        scripts_ms,
380        modules.len(),
381        entry_points_ms,
382        entry_points.all.len(),
383        resolve_ms,
384        graph_ms,
385        analyze_ms,
386        total_ms,
387    );
388
389    let timings = Some(PipelineTimings {
390        discover_files_ms: discover_ms,
391        file_count: files.len(),
392        workspaces_ms,
393        workspace_count: workspaces.len(),
394        plugins_ms,
395        script_analysis_ms: scripts_ms,
396        parse_extract_ms: 0.0, // Skipped — modules were reused
397        module_count: modules.len(),
398        cache_hits: 0,
399        cache_misses: 0,
400        cache_update_ms: 0.0,
401        entry_points_ms,
402        entry_point_count: entry_points.all.len(),
403        resolve_imports_ms: resolve_ms,
404        build_graph_ms: graph_ms,
405        analyze_ms,
406        total_ms,
407    });
408
409    Ok(AnalysisOutput {
410        results: result,
411        timings,
412        graph: Some(graph),
413        modules: None,
414        files: None,
415        script_used_packages: plugin_result.script_used_packages.clone(),
416    })
417}
418
/// Core implementation shared by every public `analyze*` entry point:
/// discover → plugins → scripts → parse → entry points → resolve → graph → analyze.
///
/// Flag meanings:
/// - `retain`: keep pipeline timings and the module graph in the output.
/// - `collect_usages`: collect export usage sites (for LSP Code Lens).
/// - `need_complexity`: run the complexity visitor during parsing.
/// - `retain_modules`: return parsed modules and discovered files to the caller.
#[expect(
    clippy::unnecessary_wraps,
    reason = "Result kept for future error handling"
)]
#[expect(
    clippy::too_many_lines,
    reason = "main pipeline function; sequential phases are held together for clarity"
)]
fn analyze_full(
    config: &ResolvedConfig,
    retain: bool,
    collect_usages: bool,
    need_complexity: bool,
    retain_modules: bool,
) -> Result<AnalysisOutput, FallowError> {
    let _span = tracing::info_span!("fallow_analyze").entered();
    let pipeline_start = Instant::now();

    // Progress bars: enabled when not quiet, stderr is a terminal, and output is human-readable.
    // Structured formats (JSON, SARIF) suppress spinners even on TTY — users piping structured
    // output don't expect progress noise on stderr.
    let show_progress = !config.quiet
        && std::io::IsTerminal::is_terminal(&std::io::stderr())
        && matches!(
            config.output,
            fallow_config::OutputFormat::Human
                | fallow_config::OutputFormat::Compact
                | fallow_config::OutputFormat::Markdown
        );
    let progress = progress::AnalysisProgress::new(show_progress);

    // Warn if node_modules is missing — resolution will be severely degraded
    if !config.root.join("node_modules").is_dir() {
        tracing::warn!(
            "node_modules directory not found. Run `npm install` / `pnpm install` first for accurate results."
        );
    }

    // Discover workspaces if in a monorepo
    let t = Instant::now();
    let workspaces_vec = discover_workspaces(&config.root);
    let workspaces_ms = t.elapsed().as_secs_f64() * 1000.0;
    if !workspaces_vec.is_empty() {
        tracing::info!(count = workspaces_vec.len(), "workspaces discovered");
    }

    // Warn about directories with package.json not declared as workspaces
    warn_undeclared_workspaces(&config.root, &workspaces_vec, config.quiet);

    // Stage 1: Discover all source files
    let t = Instant::now();
    let pb = progress.stage_spinner("Discovering files...");
    let discovered_files = discover::discover_files(config);
    let discover_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Build ProjectState: owns the file registry with stable FileIds and workspace metadata.
    // This is the foundation for cross-workspace resolution and future incremental analysis.
    let project = project::ProjectState::new(discovered_files, workspaces_vec);
    let files = project.files();
    let workspaces = project.workspaces();
    let root_pkg = load_root_package_json(config);
    let workspace_pkgs = load_workspace_packages(workspaces);

    // Stage 1.5: Run plugin system — parse config files, discover dynamic entries
    let t = Instant::now();
    let pb = progress.stage_spinner("Detecting plugins...");
    let mut plugin_result = run_plugins(
        config,
        files,
        workspaces,
        root_pkg.as_ref(),
        &workspace_pkgs,
    );
    let plugins_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Stage 1.6: Analyze package.json scripts for binary usage and config file refs
    let t = Instant::now();
    analyze_all_scripts(
        config,
        workspaces,
        root_pkg.as_ref(),
        &workspace_pkgs,
        &mut plugin_result,
    );
    let scripts_ms = t.elapsed().as_secs_f64() * 1000.0;

    // Stage 2: Parse all files in parallel and extract imports/exports
    let t = Instant::now();
    let pb = progress.stage_spinner(&format!("Parsing {} files...", files.len()));
    let mut cache_store = if config.no_cache {
        None
    } else {
        cache::CacheStore::load(&config.cache_dir)
    };

    let parse_result = extract::parse_all_files(files, cache_store.as_ref(), need_complexity);
    let modules = parse_result.modules;
    let cache_hits = parse_result.cache_hits;
    let cache_misses = parse_result.cache_misses;
    let parse_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Update cache with freshly parsed modules and refresh stale mtime/size entries.
    let t = Instant::now();
    if !config.no_cache {
        let store = cache_store.get_or_insert_with(cache::CacheStore::new);
        update_cache(store, &modules, files);
        // A failed cache save is non-fatal: the next run just re-parses.
        if let Err(e) = store.save(&config.cache_dir) {
            tracing::warn!("Failed to save cache: {e}");
        }
    }
    let cache_ms = t.elapsed().as_secs_f64() * 1000.0;

    // Stage 3: Discover entry points (static patterns + plugin-discovered patterns)
    let t = Instant::now();
    let entry_points = discover_all_entry_points(
        config,
        files,
        workspaces,
        root_pkg.as_ref(),
        &workspace_pkgs,
        &plugin_result,
    );
    let entry_points_ms = t.elapsed().as_secs_f64() * 1000.0;

    // Stage 4: Resolve imports to file IDs
    let t = Instant::now();
    let pb = progress.stage_spinner("Resolving imports...");
    let mut resolved = resolve::resolve_all_imports(
        &modules,
        files,
        workspaces,
        &plugin_result.active_plugins,
        &plugin_result.path_aliases,
        &plugin_result.scss_include_paths,
        &config.root,
        &config.resolve.conditions,
    );
    external_style_usage::augment_external_style_package_usage(
        &mut resolved,
        config,
        workspaces,
        &plugin_result,
    );
    let resolve_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Stage 5: Build module graph
    let t = Instant::now();
    let pb = progress.stage_spinner("Building module graph...");
    let graph = graph::ModuleGraph::build_with_reachability_roots(
        &resolved,
        &entry_points.all,
        &entry_points.runtime,
        &entry_points.test,
        files,
    );
    let graph_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Compute entry-point summary before the graph consumes the entry_points vec
    let ep_summary = summarize_entry_points(&entry_points.all);

    // Stage 6: Analyze for dead code (with plugin context and workspace info)
    let t = Instant::now();
    let pb = progress.stage_spinner("Analyzing...");
    let mut result = analyze::find_dead_code_full(
        &graph,
        config,
        &resolved,
        Some(&plugin_result),
        workspaces,
        &modules,
        collect_usages,
    );
    let analyze_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();
    progress.finish();

    result.entry_point_summary = Some(ep_summary);

    let total_ms = pipeline_start.elapsed().as_secs_f64() * 1000.0;

    let cache_summary = if cache_hits > 0 {
        format!(" ({cache_hits} cached, {cache_misses} parsed)")
    } else {
        String::new()
    };

    tracing::debug!(
        "\n┌─ Pipeline Profile ─────────────────────────────\n\
         │  discover files:   {:>8.1}ms  ({} files)\n\
         │  workspaces:       {:>8.1}ms\n\
         │  plugins:          {:>8.1}ms\n\
         │  script analysis:  {:>8.1}ms\n\
         │  parse/extract:    {:>8.1}ms  ({} modules{})\n\
         │  cache update:     {:>8.1}ms\n\
         │  entry points:     {:>8.1}ms  ({} entries)\n\
         │  resolve imports:  {:>8.1}ms\n\
         │  build graph:      {:>8.1}ms\n\
         │  analyze:          {:>8.1}ms\n\
         │  ────────────────────────────────────────────\n\
         │  TOTAL:            {:>8.1}ms\n\
         └─────────────────────────────────────────────────",
        discover_ms,
        files.len(),
        workspaces_ms,
        plugins_ms,
        scripts_ms,
        parse_ms,
        modules.len(),
        cache_summary,
        cache_ms,
        entry_points_ms,
        entry_points.all.len(),
        resolve_ms,
        graph_ms,
        analyze_ms,
        total_ms,
    );

    let timings = if retain {
        Some(PipelineTimings {
            discover_files_ms: discover_ms,
            file_count: files.len(),
            workspaces_ms,
            workspace_count: workspaces.len(),
            plugins_ms,
            script_analysis_ms: scripts_ms,
            parse_extract_ms: parse_ms,
            module_count: modules.len(),
            cache_hits,
            cache_misses,
            cache_update_ms: cache_ms,
            entry_points_ms,
            entry_point_count: entry_points.all.len(),
            resolve_imports_ms: resolve_ms,
            build_graph_ms: graph_ms,
            analyze_ms,
            total_ms,
        })
    } else {
        None
    };

    Ok(AnalysisOutput {
        results: result,
        timings,
        graph: if retain { Some(graph) } else { None },
        modules: if retain_modules { Some(modules) } else { None },
        files: if retain_modules {
            Some(files.to_vec())
        } else {
            None
        },
        script_used_packages: plugin_result.script_used_packages,
    })
}
679
/// Load the root `package.json`, returning `None` when it is missing or fails to parse.
///
/// NOTE(review): the previous doc comment here described script analysis; that
/// behavior lives in `analyze_all_scripts`, not in this loader.
fn load_root_package_json(config: &ResolvedConfig) -> Option<PackageJson> {
    PackageJson::load(&config.root.join("package.json")).ok()
}
687
688fn load_workspace_packages(
689    workspaces: &[fallow_config::WorkspaceInfo],
690) -> Vec<LoadedWorkspacePackage<'_>> {
691    workspaces
692        .iter()
693        .filter_map(|ws| {
694            PackageJson::load(&ws.root.join("package.json"))
695                .ok()
696                .map(|pkg| (ws, pkg))
697        })
698        .collect()
699}
700
/// Analyze package.json scripts from root and all workspace packages.
///
/// Populates `plugin_result` with script-used packages and config-file entry
/// patterns, and scans CI config files for binary invocations. Note the
/// asymmetry: the root analysis *assigns* `script_used_packages`, while
/// workspace and CI analyses *extend* it — so the root pass must run first.
fn analyze_all_scripts(
    config: &ResolvedConfig,
    workspaces: &[fallow_config::WorkspaceInfo],
    root_pkg: Option<&PackageJson>,
    workspace_pkgs: &[LoadedWorkspacePackage<'_>],
    plugin_result: &mut plugins::AggregatedPluginResult,
) {
    // Collect all dependency names to build the bin-name → package-name reverse map.
    // This resolves binaries like "attw" to "@arethetypeswrong/cli" even without
    // node_modules/.bin symlinks.
    let mut all_dep_names: Vec<String> = Vec::new();
    if let Some(pkg) = root_pkg {
        all_dep_names.extend(pkg.all_dependency_names());
    }
    for (_, ws_pkg) in workspace_pkgs {
        all_dep_names.extend(ws_pkg.all_dependency_names());
    }
    all_dep_names.sort_unstable();
    all_dep_names.dedup();

    // Probe node_modules/ at project root and each workspace root so non-hoisted
    // deps (pnpm strict, Yarn workspaces) are also discovered.
    let mut nm_roots: Vec<&std::path::Path> = Vec::new();
    if config.root.join("node_modules").is_dir() {
        nm_roots.push(&config.root);
    }
    for ws in workspaces {
        if ws.root.join("node_modules").is_dir() {
            nm_roots.push(&ws.root);
        }
    }
    let bin_map = scripts::build_bin_to_package_map(&nm_roots, &all_dep_names);

    if let Some(pkg) = root_pkg
        && let Some(ref pkg_scripts) = pkg.scripts
    {
        // In production mode, dev-only scripts are filtered out before analysis.
        let scripts_to_analyze = if config.production {
            scripts::filter_production_scripts(pkg_scripts)
        } else {
            pkg_scripts.clone()
        };
        let script_analysis = scripts::analyze_scripts(&scripts_to_analyze, &config.root, &bin_map);
        plugin_result.script_used_packages = script_analysis.used_packages;

        for config_file in &script_analysis.config_files {
            plugin_result
                .discovered_always_used
                .push((config_file.clone(), "scripts".to_string()));
        }
    }
    for (ws, ws_pkg) in workspace_pkgs {
        if let Some(ref ws_scripts) = ws_pkg.scripts {
            let scripts_to_analyze = if config.production {
                scripts::filter_production_scripts(ws_scripts)
            } else {
                ws_scripts.clone()
            };
            let ws_analysis = scripts::analyze_scripts(&scripts_to_analyze, &ws.root, &bin_map);
            plugin_result
                .script_used_packages
                .extend(ws_analysis.used_packages);

            // Config-file refs from workspaces are recorded relative to the project root.
            let ws_prefix = ws
                .root
                .strip_prefix(&config.root)
                .unwrap_or(&ws.root)
                .to_string_lossy();
            for config_file in &ws_analysis.config_files {
                plugin_result
                    .discovered_always_used
                    .push((format!("{ws_prefix}/{config_file}"), "scripts".to_string()));
            }
        }
    }

    // Scan CI config files for binary invocations
    let ci_packages = scripts::ci::analyze_ci_files(&config.root, &bin_map);
    plugin_result.script_used_packages.extend(ci_packages);
    plugin_result
        .entry_point_roles
        .entry("scripts".to_string())
        .or_insert(EntryPointRole::Support);
}
784
/// Discover all entry points from static patterns, workspaces, plugins, and infrastructure.
///
/// Workspace discovery runs in parallel via rayon; `par_iter().map().collect()`
/// preserves workspace order, so accumulation below is deterministic. The final
/// `dedup()` removes entries contributed by more than one source.
fn discover_all_entry_points(
    config: &ResolvedConfig,
    files: &[discover::DiscoveredFile],
    workspaces: &[fallow_config::WorkspaceInfo],
    root_pkg: Option<&PackageJson>,
    workspace_pkgs: &[LoadedWorkspacePackage<'_>],
    plugin_result: &plugins::AggregatedPluginResult,
) -> discover::CategorizedEntryPoints {
    let mut entry_points = discover::CategorizedEntryPoints::default();
    let root_discovery = discover::discover_entry_points_with_warnings_from_pkg(
        config,
        files,
        root_pkg,
        workspaces.is_empty(),
    );

    // Index workspace manifests by workspace root for O(1) lookup inside the parallel map.
    let workspace_pkg_by_root: rustc_hash::FxHashMap<std::path::PathBuf, &PackageJson> =
        workspace_pkgs
            .iter()
            .map(|(ws, pkg)| (ws.root.clone(), pkg))
            .collect();

    let workspace_discovery: Vec<discover::EntryPointDiscovery> = workspaces
        .par_iter()
        .map(|ws| {
            let pkg = workspace_pkg_by_root.get(&ws.root).copied();
            discover::discover_workspace_entry_points_with_warnings_from_pkg(&ws.root, files, pkg)
        })
        .collect();
    // Merge skipped-entry counts from root and every workspace into one summary.
    let mut skipped_entries = rustc_hash::FxHashMap::default();
    entry_points.extend_runtime(root_discovery.entries);
    for (path, count) in root_discovery.skipped_entries {
        *skipped_entries.entry(path).or_insert(0) += count;
    }
    let mut ws_entries = Vec::new();
    for workspace in workspace_discovery {
        ws_entries.extend(workspace.entries);
        for (path, count) in workspace.skipped_entries {
            *skipped_entries.entry(path).or_insert(0) += count;
        }
    }
    discover::warn_skipped_entry_summary(&skipped_entries);
    entry_points.extend_runtime(ws_entries);

    let plugin_entries = discover::discover_plugin_entry_point_sets(plugin_result, config, files);
    entry_points.extend(plugin_entries);

    let infra_entries = discover::discover_infrastructure_entry_points(&config.root);
    entry_points.extend_runtime(infra_entries);

    // Add dynamically loaded files from config as entry points
    if !config.dynamically_loaded.is_empty() {
        let dynamic_entries = discover::discover_dynamically_loaded_entry_points(config, files);
        entry_points.extend_runtime(dynamic_entries);
    }

    entry_points.dedup()
}
844
845/// Summarize entry points by source category for user-facing output.
846fn summarize_entry_points(entry_points: &[discover::EntryPoint]) -> results::EntryPointSummary {
847    let mut counts: rustc_hash::FxHashMap<String, usize> = rustc_hash::FxHashMap::default();
848    for ep in entry_points {
849        let category = match &ep.source {
850            discover::EntryPointSource::PackageJsonMain
851            | discover::EntryPointSource::PackageJsonModule
852            | discover::EntryPointSource::PackageJsonExports
853            | discover::EntryPointSource::PackageJsonBin
854            | discover::EntryPointSource::PackageJsonScript => "package.json",
855            discover::EntryPointSource::Plugin { .. } => "plugin",
856            discover::EntryPointSource::TestFile => "test file",
857            discover::EntryPointSource::DefaultIndex => "default index",
858            discover::EntryPointSource::ManualEntry => "manual entry",
859            discover::EntryPointSource::InfrastructureConfig => "config",
860            discover::EntryPointSource::DynamicallyLoaded => "dynamically loaded",
861        };
862        *counts.entry(category.to_string()).or_insert(0) += 1;
863    }
864    let mut by_source: Vec<(String, usize)> = counts.into_iter().collect();
865    by_source.sort_by(|a, b| b.1.cmp(&a.1).then_with(|| a.0.cmp(&b.0)));
866    results::EntryPointSummary {
867        total: entry_points.len(),
868        by_source,
869    }
870}
871
/// Run plugins for root project and all workspace packages.
///
/// The root project gets a full plugin run (external plugins, config-file
/// discovery across every directory returned by `collect_config_search_roots`).
/// Each workspace package then gets a faster precompiled run in parallel, and
/// its results are merged into the root result, with workspace-relative file
/// patterns re-prefixed so they match from the monorepo root.
///
/// # Parameters
/// - `config`: resolved configuration (provides the project root and the list
///   of external plugins to register).
/// - `files`: discovered source files; their paths feed plugin matchers.
/// - `workspaces`: declared workspace packages; when empty, only the root run
///   happens and its result is returned directly.
/// - `root_pkg`: root `package.json` if present; `None` skips the root run and
///   starts from an empty aggregated result.
/// - `workspace_pkgs`: pre-loaded `package.json` for each workspace.
fn run_plugins(
    config: &ResolvedConfig,
    files: &[discover::DiscoveredFile],
    workspaces: &[fallow_config::WorkspaceInfo],
    root_pkg: Option<&PackageJson>,
    workspace_pkgs: &[LoadedWorkspacePackage<'_>],
) -> plugins::AggregatedPluginResult {
    let registry = plugins::PluginRegistry::new(config.external_plugins.clone());
    let file_paths: Vec<std::path::PathBuf> = files.iter().map(|f| f.path.clone()).collect();
    // Every directory between the root and a discovered file is a candidate
    // location for a plugin config file.
    let root_config_search_roots = collect_config_search_roots(&config.root, &file_paths);
    let root_config_search_root_refs: Vec<&Path> = root_config_search_roots
        .iter()
        .map(std::path::PathBuf::as_path)
        .collect();

    // Run plugins for root project (full run with external plugins, inline config, etc.)
    let mut result = root_pkg.map_or_else(plugins::AggregatedPluginResult::default, |pkg| {
        registry.run_with_search_roots(
            pkg,
            &config.root,
            &file_paths,
            &root_config_search_root_refs,
        )
    });

    if workspaces.is_empty() {
        return result;
    }

    // Plugin names already active at the root, passed to each workspace run
    // (run_workspace_fast decides how these influence workspace activation).
    let root_active_plugins: rustc_hash::FxHashSet<&str> =
        result.active_plugins.iter().map(String::as_str).collect();

    // Pre-compile config matchers and relative files once for all workspace runs.
    // This avoids re-compiling glob patterns and re-computing relative paths per workspace
    // (previously O(workspaces × plugins × files) glob compilations).
    let precompiled_matchers = registry.precompile_config_matchers();
    let relative_files: Vec<(&std::path::PathBuf, String)> = file_paths
        .iter()
        .map(|f| {
            let rel = f
                .strip_prefix(&config.root)
                .unwrap_or(f)
                .to_string_lossy()
                .into_owned();
            (f, rel)
        })
        .collect();

    // Run plugins for each workspace package in parallel, then merge results.
    let ws_results: Vec<_> = workspace_pkgs
        .par_iter()
        .filter_map(|(ws, ws_pkg)| {
            let ws_result = registry.run_workspace_fast(
                ws_pkg,
                &ws.root,
                &config.root,
                &precompiled_matchers,
                &relative_files,
                &root_active_plugins,
            );
            // Workspaces where no plugin activated contribute nothing to the merge.
            if ws_result.active_plugins.is_empty() {
                return None;
            }
            // Workspace root relative to the monorepo root; used below to
            // re-prefix workspace-relative patterns.
            let ws_prefix = ws
                .root
                .strip_prefix(&config.root)
                .unwrap_or(&ws.root)
                .to_string_lossy()
                .into_owned();
            Some((ws_result, ws_prefix))
        })
        .collect();

    // Merge workspace results sequentially (deterministic order via par_iter index stability)
    // Track seen names for O(1) dedup instead of O(n) Vec::contains
    let mut seen_plugins: rustc_hash::FxHashSet<String> =
        result.active_plugins.iter().cloned().collect();
    let mut seen_prefixes: rustc_hash::FxHashSet<String> =
        result.virtual_module_prefixes.iter().cloned().collect();
    let mut seen_generated: rustc_hash::FxHashSet<String> =
        result.generated_import_patterns.iter().cloned().collect();
    for (ws_result, ws_prefix) in ws_results {
        // Prefix helper: workspace-relative patterns need the workspace prefix
        // to be matchable from the monorepo root. But patterns that are already
        // project-root-relative (e.g., from angular.json which uses absolute paths
        // like "apps/client/src/styles.css") should not be double-prefixed.
        let prefix_if_needed = |pat: &str| -> String {
            if pat.starts_with(ws_prefix.as_str()) || pat.starts_with('/') {
                pat.to_string()
            } else {
                format!("{ws_prefix}/{pat}")
            }
        };

        for (rule, pname) in &ws_result.entry_patterns {
            result
                .entry_patterns
                .push((rule.prefixed(&ws_prefix), pname.clone()));
        }
        // First workspace to declare a role for a plugin wins; later duplicates
        // are ignored via or_insert.
        for (plugin_name, role) in ws_result.entry_point_roles {
            result.entry_point_roles.entry(plugin_name).or_insert(role);
        }
        for (pat, pname) in &ws_result.always_used {
            result
                .always_used
                .push((prefix_if_needed(pat), pname.clone()));
        }
        for (pat, pname) in &ws_result.discovered_always_used {
            result
                .discovered_always_used
                .push((prefix_if_needed(pat), pname.clone()));
        }
        for (pat, pname) in &ws_result.fixture_patterns {
            result
                .fixture_patterns
                .push((prefix_if_needed(pat), pname.clone()));
        }
        for rule in &ws_result.used_exports {
            result.used_exports.push(rule.prefixed(&ws_prefix));
        }
        // Merge active plugin names (deduplicated via HashSet)
        for plugin_name in ws_result.active_plugins {
            if !seen_plugins.contains(&plugin_name) {
                seen_plugins.insert(plugin_name.clone());
                result.active_plugins.push(plugin_name);
            }
        }
        // These don't need prefixing (absolute paths / package names)
        result
            .referenced_dependencies
            .extend(ws_result.referenced_dependencies);
        result.setup_files.extend(ws_result.setup_files);
        result
            .tooling_dependencies
            .extend(ws_result.tooling_dependencies);
        // Virtual module prefixes (e.g., Docusaurus @theme/, @site/) are
        // package-name prefixes, not file paths — no workspace prefix needed.
        for prefix in ws_result.virtual_module_prefixes {
            if !seen_prefixes.contains(&prefix) {
                seen_prefixes.insert(prefix.clone());
                result.virtual_module_prefixes.push(prefix);
            }
        }
        // Generated import patterns (e.g., SvelteKit /$types) are suffix
        // matches on specifiers, not file paths — no workspace prefix needed.
        for pattern in ws_result.generated_import_patterns {
            if !seen_generated.contains(&pattern) {
                seen_generated.insert(pattern.clone());
                result.generated_import_patterns.push(pattern);
            }
        }
        // Path aliases from workspace plugins (e.g., SvelteKit $lib/ → src/lib).
        // Prefix the replacement directory so it resolves from the monorepo root.
        for (prefix, replacement) in ws_result.path_aliases {
            result
                .path_aliases
                .push((prefix, format!("{ws_prefix}/{replacement}")));
        }
    }

    result
}
1035
/// Collect every directory from the project `root` down to each discovered
/// file's parent (inclusive of `root` itself), deduplicated and sorted.
/// These are the directories searched for plugin config files.
fn collect_config_search_roots(
    root: &Path,
    file_paths: &[std::path::PathBuf],
) -> Vec<std::path::PathBuf> {
    // A BTreeSet handles both deduplication and lexicographic ordering,
    // so no separate sort pass is needed at the end.
    let mut unique: std::collections::BTreeSet<std::path::PathBuf> =
        std::collections::BTreeSet::new();
    unique.insert(root.to_path_buf());

    for path in file_paths {
        // ancestors() yields the path itself first; skip it so iteration
        // starts at the containing directory and climbs upward.
        for dir in path.ancestors().skip(1) {
            // Anything outside the project root is ignored entirely.
            if !dir.starts_with(root) {
                break;
            }
            unique.insert(dir.to_path_buf());
            // Stop once the project root itself has been recorded.
            if dir == root {
                break;
            }
        }
    }

    unique.into_iter().collect()
}
1061
1062/// Run analysis on a project directory (with export usages for LSP Code Lens).
1063///
1064/// # Errors
1065///
1066/// Returns an error if config loading, file discovery, parsing, or analysis fails.
1067pub fn analyze_project(root: &Path) -> Result<AnalysisResults, FallowError> {
1068    let config = default_config(root);
1069    analyze_with_usages(&config)
1070}
1071
1072/// Create a default config for a project root.
1073pub(crate) fn default_config(root: &Path) -> ResolvedConfig {
1074    let user_config = fallow_config::FallowConfig::find_and_load(root)
1075        .ok()
1076        .flatten();
1077    match user_config {
1078        Some((config, _path)) => config.resolve(
1079            root.to_path_buf(),
1080            fallow_config::OutputFormat::Human,
1081            num_cpus(),
1082            false,
1083            true, // quiet: LSP/programmatic callers don't need progress bars
1084        ),
1085        None => fallow_config::FallowConfig::default().resolve(
1086            root.to_path_buf(),
1087            fallow_config::OutputFormat::Human,
1088            num_cpus(),
1089            false,
1090            true,
1091        ),
1092    }
1093}
1094
/// Number of worker threads to use for parallel analysis.
fn num_cpus() -> usize {
    // Fall back to 4 when the parallelism query fails (e.g. restricted
    // environments where the CPU count cannot be determined).
    match std::thread::available_parallelism() {
        Ok(n) => n.get(),
        Err(_) => 4,
    }
}
1098
#[cfg(test)]
mod tests {
    use super::{collect_config_search_roots, format_undeclared_workspace_warning};
    use std::path::{Path, PathBuf};

    use fallow_config::WorkspaceDiagnostic;

    /// Build a workspace diagnostic at `root`/`relative`. The message is not
    /// consulted by the warning formatter under test, so it is left empty.
    fn diag(root: &Path, relative: &str) -> WorkspaceDiagnostic {
        WorkspaceDiagnostic {
            path: root.join(relative),
            message: String::new(),
        }
    }

    // One undeclared directory: singular phrasing, path listed in full.
    #[test]
    fn undeclared_workspace_warning_is_singular_for_one_path() {
        let root = Path::new("/repo");
        let warning = format_undeclared_workspace_warning(root, &[diag(root, "packages/api")])
            .expect("warning should be rendered");

        assert_eq!(
            warning,
            "1 directory with package.json is not declared as a workspace: packages/api. Add that path to package.json workspaces or pnpm-workspace.yaml if it should be analyzed as a workspace."
        );
    }

    // Six undeclared directories: plural phrasing, preview truncated to the
    // first five (UNDECLARED_WORKSPACE_WARNING_PREVIEW) with an "(and N more)"
    // suffix for the remainder.
    #[test]
    fn undeclared_workspace_warning_summarizes_many_paths() {
        let root = PathBuf::from("/repo");
        let diagnostics = [
            "examples/a",
            "examples/b",
            "examples/c",
            "examples/d",
            "examples/e",
            "examples/f",
        ]
        .into_iter()
        .map(|path| diag(&root, path))
        .collect::<Vec<_>>();

        let warning = format_undeclared_workspace_warning(&root, &diagnostics)
            .expect("warning should be rendered");

        assert_eq!(
            warning,
            "6 directories with package.json are not declared as workspaces: examples/a, examples/b, examples/c, examples/d, examples/e (and 1 more). Add those paths to package.json workspaces or pnpm-workspace.yaml if they should be analyzed as workspaces."
        );
    }

    // Shared ancestors (the root itself) appear exactly once, and the result
    // is sorted lexicographically.
    #[test]
    fn collect_config_search_roots_includes_file_ancestors_once() {
        let root = PathBuf::from("/repo");
        let search_roots = collect_config_search_roots(
            &root,
            &[
                root.join("apps/query/src/main.ts"),
                root.join("packages/shared/lib/index.ts"),
            ],
        );

        assert_eq!(
            search_roots,
            vec![
                root.clone(),
                root.join("apps"),
                root.join("apps/query"),
                root.join("apps/query/src"),
                root.join("packages"),
                root.join("packages/shared"),
                root.join("packages/shared/lib"),
            ]
        );
    }
}
1173}