//! Crate root for `fallow_core`: analysis pipeline orchestration, parse caching,
//! entry-point discovery, and re-exports from `fallow_graph`.

1pub mod analyze;
2pub mod cache;
3pub mod changed_files;
4pub mod churn;
5pub mod cross_reference;
6pub mod discover;
7pub mod duplicates;
8pub(crate) mod errors;
9mod external_style_usage;
10pub mod extract;
11pub mod plugins;
12pub(crate) mod progress;
13pub mod results;
14pub(crate) mod scripts;
15pub mod suppress;
16pub mod trace;
17
18// Re-export from fallow-graph for backwards compatibility
19pub use fallow_graph::graph;
20pub use fallow_graph::project;
21pub use fallow_graph::resolve;
22
23use std::path::Path;
24use std::time::Instant;
25
26use errors::FallowError;
27use fallow_config::{
28    EntryPointRole, PackageJson, ResolvedConfig, discover_workspaces, find_undeclared_workspaces,
29};
30use rayon::prelude::*;
31use results::AnalysisResults;
32use trace::PipelineTimings;
33
34const UNDECLARED_WORKSPACE_WARNING_PREVIEW: usize = 5;
35type LoadedWorkspacePackage<'a> = (&'a fallow_config::WorkspaceInfo, PackageJson);
36
/// Result of the full analysis pipeline, including optional performance timings.
pub struct AnalysisOutput {
    /// Dead-code findings and related analysis output.
    pub results: AnalysisResults,
    /// Per-stage timing breakdown. `None` unless the run was asked to retain
    /// trace data (always populated by `analyze_with_parse_result`).
    pub timings: Option<PipelineTimings>,
    /// The built module graph. `None` unless graph retention was requested
    /// (always populated by `analyze_with_parse_result`).
    pub graph: Option<graph::ModuleGraph>,
    /// Parsed modules from the pipeline, available when `retain_modules` is true.
    /// Used by the combined command to share a single parse across dead-code and health.
    pub modules: Option<Vec<extract::ModuleInfo>>,
    /// Discovered files from the pipeline, available when `retain_modules` is true.
    pub files: Option<Vec<discover::DiscoveredFile>>,
    /// Package names invoked from package.json scripts and CI configs, mirroring
    /// what the unused-deps detector consults. Populated for every pipeline run;
    /// trace tooling reads it so `trace_dependency` agrees with `unused-deps` on
    /// "used vs unused" instead of returning false-negatives for script-only deps.
    pub script_used_packages: rustc_hash::FxHashSet<String>,
}
53
54/// Update cache: write freshly parsed modules and refresh stale mtime/size entries.
55fn update_cache(
56    store: &mut cache::CacheStore,
57    modules: &[extract::ModuleInfo],
58    files: &[discover::DiscoveredFile],
59) {
60    for module in modules {
61        if let Some(file) = files.get(module.file_id.0 as usize) {
62            let (mt, sz) = file_mtime_and_size(&file.path);
63            // If content hash matches, just refresh mtime/size if stale (e.g. `touch`ed file)
64            if let Some(cached) = store.get_by_path_only(&file.path)
65                && cached.content_hash == module.content_hash
66            {
67                if cached.mtime_secs != mt || cached.file_size != sz {
68                    store.insert(&file.path, cache::module_to_cached(module, mt, sz));
69                }
70                continue;
71            }
72            store.insert(&file.path, cache::module_to_cached(module, mt, sz));
73        }
74    }
75    store.retain_paths(files);
76}
77
/// Extract mtime (seconds since epoch) and file size from a path.
///
/// Returns `(0, 0)` when the file's metadata cannot be read, and an mtime of
/// `0` when the modification time is unavailable or predates the Unix epoch.
fn file_mtime_and_size(path: &std::path::Path) -> (u64, u64) {
    let Ok(meta) = std::fs::metadata(path) else {
        return (0, 0);
    };
    let mtime = meta
        .modified()
        .ok()
        .and_then(|t| t.duration_since(std::time::SystemTime::UNIX_EPOCH).ok())
        .map_or(0, |d| d.as_secs());
    (mtime, meta.len())
}
89
90fn format_undeclared_workspace_warning(
91    root: &Path,
92    undeclared: &[fallow_config::WorkspaceDiagnostic],
93) -> Option<String> {
94    if undeclared.is_empty() {
95        return None;
96    }
97
98    let preview = undeclared
99        .iter()
100        .take(UNDECLARED_WORKSPACE_WARNING_PREVIEW)
101        .map(|diag| {
102            diag.path
103                .strip_prefix(root)
104                .unwrap_or(&diag.path)
105                .display()
106                .to_string()
107                .replace('\\', "/")
108        })
109        .collect::<Vec<_>>();
110    let remaining = undeclared
111        .len()
112        .saturating_sub(UNDECLARED_WORKSPACE_WARNING_PREVIEW);
113    let tail = if remaining > 0 {
114        format!(" (and {remaining} more)")
115    } else {
116        String::new()
117    };
118    let noun = if undeclared.len() == 1 {
119        "directory with package.json is"
120    } else {
121        "directories with package.json are"
122    };
123    let guidance = if undeclared.len() == 1 {
124        "Add that path to package.json workspaces or pnpm-workspace.yaml if it should be analyzed as a workspace."
125    } else {
126        "Add those paths to package.json workspaces or pnpm-workspace.yaml if they should be analyzed as workspaces."
127    };
128
129    Some(format!(
130        "{} {} not declared as {}: {}{}. {}",
131        undeclared.len(),
132        noun,
133        if undeclared.len() == 1 {
134            "a workspace"
135        } else {
136            "workspaces"
137        },
138        preview.join(", "),
139        tail,
140        guidance
141    ))
142}
143
144fn warn_undeclared_workspaces(
145    root: &Path,
146    workspaces_vec: &[fallow_config::WorkspaceInfo],
147    quiet: bool,
148) {
149    if quiet {
150        return;
151    }
152
153    let undeclared = find_undeclared_workspaces(root, workspaces_vec);
154    if let Some(message) = format_undeclared_workspace_warning(root, &undeclared) {
155        tracing::warn!("{message}");
156    }
157}
158
159/// Run the full analysis pipeline.
160///
161/// # Errors
162///
163/// Returns an error if file discovery, parsing, or analysis fails.
164pub fn analyze(config: &ResolvedConfig) -> Result<AnalysisResults, FallowError> {
165    let output = analyze_full(config, false, false, false, false)?;
166    Ok(output.results)
167}
168
169/// Run the full analysis pipeline with export usage collection (for LSP Code Lens).
170///
171/// # Errors
172///
173/// Returns an error if file discovery, parsing, or analysis fails.
174pub fn analyze_with_usages(config: &ResolvedConfig) -> Result<AnalysisResults, FallowError> {
175    let output = analyze_full(config, false, true, false, false)?;
176    Ok(output.results)
177}
178
179/// Run the full analysis pipeline with optional performance timings and graph retention.
180///
181/// # Errors
182///
183/// Returns an error if file discovery, parsing, or analysis fails.
184pub fn analyze_with_trace(config: &ResolvedConfig) -> Result<AnalysisOutput, FallowError> {
185    analyze_full(config, true, false, false, false)
186}
187
188/// Run the full analysis pipeline, retaining parsed modules and discovered files.
189///
190/// Used by the combined command to share a single parse across dead-code and health.
191/// When `need_complexity` is true, the `ComplexityVisitor` runs during parsing so
192/// the returned modules contain per-function complexity data.
193///
194/// # Errors
195///
196/// Returns an error if file discovery, parsing, or analysis fails.
197pub fn analyze_retaining_modules(
198    config: &ResolvedConfig,
199    need_complexity: bool,
200    retain_graph: bool,
201) -> Result<AnalysisOutput, FallowError> {
202    analyze_full(config, retain_graph, false, need_complexity, true)
203}
204
/// Run the analysis pipeline using pre-parsed modules, skipping the parsing stage.
///
/// This avoids re-parsing files when the caller already has a `ParseResult` (e.g., from
/// `fallow_core::extract::parse_all_files`). Discovery, plugins, scripts, entry points,
/// import resolution, graph construction, and dead code detection still run normally.
/// The graph is always retained (needed for file scores).
///
/// The parse cache is neither consulted nor updated on this path: the returned
/// timings report `parse_extract_ms` of 0.0 and zero cache hits/misses.
///
/// # Errors
///
/// Returns an error if discovery, graph construction, or analysis fails.
#[allow(
    clippy::too_many_lines,
    reason = "pipeline orchestration stays easier to audit in one place"
)]
pub fn analyze_with_parse_result(
    config: &ResolvedConfig,
    modules: &[extract::ModuleInfo],
) -> Result<AnalysisOutput, FallowError> {
    let _span = tracing::info_span!("fallow_analyze_with_parse_result").entered();
    let pipeline_start = Instant::now();

    // Spinners only when not quiet, stderr is a TTY, and the output format is
    // human-readable — structured formats (JSON/SARIF) get no stderr noise.
    let show_progress = !config.quiet
        && std::io::IsTerminal::is_terminal(&std::io::stderr())
        && matches!(
            config.output,
            fallow_config::OutputFormat::Human
                | fallow_config::OutputFormat::Compact
                | fallow_config::OutputFormat::Markdown
        );
    let progress = progress::AnalysisProgress::new(show_progress);

    // Missing node_modules severely degrades import resolution — warn up front.
    if !config.root.join("node_modules").is_dir() {
        tracing::warn!(
            "node_modules directory not found. Run `npm install` / `pnpm install` first for accurate results."
        );
    }

    // Discover workspaces
    let t = Instant::now();
    let workspaces_vec = discover_workspaces(&config.root);
    let workspaces_ms = t.elapsed().as_secs_f64() * 1000.0;
    if !workspaces_vec.is_empty() {
        tracing::info!(count = workspaces_vec.len(), "workspaces discovered");
    }

    // Warn about directories with package.json not declared as workspaces
    warn_undeclared_workspaces(&config.root, &workspaces_vec, config.quiet);

    // Stage 1: Discover files (cheap — needed for file registry and resolution)
    let t = Instant::now();
    let pb = progress.stage_spinner("Discovering files...");
    let discovered_files = discover::discover_files(config);
    let discover_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // ProjectState owns the file registry (stable FileIds) and workspace metadata.
    let project = project::ProjectState::new(discovered_files, workspaces_vec);
    let files = project.files();
    let workspaces = project.workspaces();
    let root_pkg = load_root_package_json(config);
    let workspace_pkgs = load_workspace_packages(workspaces);

    // Stage 1.5: Run plugin system
    let t = Instant::now();
    let pb = progress.stage_spinner("Detecting plugins...");
    let mut plugin_result = run_plugins(
        config,
        files,
        workspaces,
        root_pkg.as_ref(),
        &workspace_pkgs,
    );
    let plugins_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Stage 1.6: Analyze package.json scripts
    let t = Instant::now();
    analyze_all_scripts(
        config,
        workspaces,
        root_pkg.as_ref(),
        &workspace_pkgs,
        &mut plugin_result,
    );
    let scripts_ms = t.elapsed().as_secs_f64() * 1000.0;

    // Stage 2: SKIPPED — using pre-parsed modules from caller

    // Stage 3: Discover entry points
    let t = Instant::now();
    let entry_points = discover_all_entry_points(
        config,
        files,
        workspaces,
        root_pkg.as_ref(),
        &workspace_pkgs,
        &plugin_result,
    );
    let entry_points_ms = t.elapsed().as_secs_f64() * 1000.0;

    // Compute entry-point summary before the graph consumes the entry_points vec
    let ep_summary = summarize_entry_points(&entry_points.all);

    // Stage 4: Resolve imports to file IDs
    let t = Instant::now();
    let pb = progress.stage_spinner("Resolving imports...");
    let mut resolved = resolve::resolve_all_imports(
        modules,
        files,
        workspaces,
        &plugin_result.active_plugins,
        &plugin_result.path_aliases,
        &plugin_result.scss_include_paths,
        &config.root,
        &config.resolve.conditions,
    );
    external_style_usage::augment_external_style_package_usage(
        &mut resolved,
        config,
        workspaces,
        &plugin_result,
    );
    let resolve_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Stage 5: Build module graph
    let t = Instant::now();
    let pb = progress.stage_spinner("Building module graph...");
    let graph = graph::ModuleGraph::build_with_reachability_roots(
        &resolved,
        &entry_points.all,
        &entry_points.runtime,
        &entry_points.test,
        files,
    );
    let graph_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Stage 6: Analyze for dead code
    let t = Instant::now();
    let pb = progress.stage_spinner("Analyzing...");
    let mut result = analyze::find_dead_code_full(
        &graph,
        config,
        &resolved,
        Some(&plugin_result),
        workspaces,
        modules,
        false,
    );
    let analyze_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();
    progress.finish();

    result.entry_point_summary = Some(ep_summary);

    let total_ms = pipeline_start.elapsed().as_secs_f64() * 1000.0;

    tracing::debug!(
        "\n┌─ Pipeline Profile (reuse) ─────────────────────\n\
         │  discover files:   {:>8.1}ms  ({} files)\n\
         │  workspaces:       {:>8.1}ms\n\
         │  plugins:          {:>8.1}ms\n\
         │  script analysis:  {:>8.1}ms\n\
         │  parse/extract:    SKIPPED (reused {} modules)\n\
         │  entry points:     {:>8.1}ms  ({} entries)\n\
         │  resolve imports:  {:>8.1}ms\n\
         │  build graph:      {:>8.1}ms\n\
         │  analyze:          {:>8.1}ms\n\
         │  ────────────────────────────────────────────\n\
         │  TOTAL:            {:>8.1}ms\n\
         └─────────────────────────────────────────────────",
        discover_ms,
        files.len(),
        workspaces_ms,
        plugins_ms,
        scripts_ms,
        modules.len(),
        entry_points_ms,
        entry_points.all.len(),
        resolve_ms,
        graph_ms,
        analyze_ms,
        total_ms,
    );

    // Timings are always produced here (unlike `analyze_full`, which gates on `retain`).
    let timings = Some(PipelineTimings {
        discover_files_ms: discover_ms,
        file_count: files.len(),
        workspaces_ms,
        workspace_count: workspaces.len(),
        plugins_ms,
        script_analysis_ms: scripts_ms,
        parse_extract_ms: 0.0, // Skipped — modules were reused
        module_count: modules.len(),
        cache_hits: 0,
        cache_misses: 0,
        cache_update_ms: 0.0,
        entry_points_ms,
        entry_point_count: entry_points.all.len(),
        resolve_imports_ms: resolve_ms,
        build_graph_ms: graph_ms,
        analyze_ms,
        total_ms,
    });

    Ok(AnalysisOutput {
        results: result,
        timings,
        graph: Some(graph),
        modules: None,
        files: None,
        script_used_packages: plugin_result.script_used_packages.clone(),
    })
}
419
/// Core pipeline shared by every public `analyze*` entry point.
///
/// Flag semantics:
/// * `retain` — keep `PipelineTimings` and the built `ModuleGraph` on the output.
/// * `collect_usages` — gather export usage data during dead-code analysis.
/// * `need_complexity` — run the complexity visitor while parsing.
/// * `retain_modules` — keep parsed modules and discovered files on the output.
#[expect(
    clippy::unnecessary_wraps,
    reason = "Result kept for future error handling"
)]
#[expect(
    clippy::too_many_lines,
    reason = "main pipeline function; sequential phases are held together for clarity"
)]
fn analyze_full(
    config: &ResolvedConfig,
    retain: bool,
    collect_usages: bool,
    need_complexity: bool,
    retain_modules: bool,
) -> Result<AnalysisOutput, FallowError> {
    let _span = tracing::info_span!("fallow_analyze").entered();
    let pipeline_start = Instant::now();

    // Progress bars: enabled when not quiet, stderr is a terminal, and output is human-readable.
    // Structured formats (JSON, SARIF) suppress spinners even on TTY — users piping structured
    // output don't expect progress noise on stderr.
    let show_progress = !config.quiet
        && std::io::IsTerminal::is_terminal(&std::io::stderr())
        && matches!(
            config.output,
            fallow_config::OutputFormat::Human
                | fallow_config::OutputFormat::Compact
                | fallow_config::OutputFormat::Markdown
        );
    let progress = progress::AnalysisProgress::new(show_progress);

    // Warn if node_modules is missing — resolution will be severely degraded
    if !config.root.join("node_modules").is_dir() {
        tracing::warn!(
            "node_modules directory not found. Run `npm install` / `pnpm install` first for accurate results."
        );
    }

    // Discover workspaces if in a monorepo
    let t = Instant::now();
    let workspaces_vec = discover_workspaces(&config.root);
    let workspaces_ms = t.elapsed().as_secs_f64() * 1000.0;
    if !workspaces_vec.is_empty() {
        tracing::info!(count = workspaces_vec.len(), "workspaces discovered");
    }

    // Warn about directories with package.json not declared as workspaces
    warn_undeclared_workspaces(&config.root, &workspaces_vec, config.quiet);

    // Stage 1: Discover all source files
    let t = Instant::now();
    let pb = progress.stage_spinner("Discovering files...");
    let discovered_files = discover::discover_files(config);
    let discover_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Build ProjectState: owns the file registry with stable FileIds and workspace metadata.
    // This is the foundation for cross-workspace resolution and future incremental analysis.
    let project = project::ProjectState::new(discovered_files, workspaces_vec);
    let files = project.files();
    let workspaces = project.workspaces();
    let root_pkg = load_root_package_json(config);
    let workspace_pkgs = load_workspace_packages(workspaces);

    // Stage 1.5: Run plugin system — parse config files, discover dynamic entries
    let t = Instant::now();
    let pb = progress.stage_spinner("Detecting plugins...");
    let mut plugin_result = run_plugins(
        config,
        files,
        workspaces,
        root_pkg.as_ref(),
        &workspace_pkgs,
    );
    let plugins_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Stage 1.6: Analyze package.json scripts for binary usage and config file refs
    let t = Instant::now();
    analyze_all_scripts(
        config,
        workspaces,
        root_pkg.as_ref(),
        &workspace_pkgs,
        &mut plugin_result,
    );
    let scripts_ms = t.elapsed().as_secs_f64() * 1000.0;

    // Stage 2: Parse all files in parallel and extract imports/exports
    let t = Instant::now();
    let pb = progress.stage_spinner(&format!("Parsing {} files...", files.len()));
    // `no_cache` skips both cache reads here and the cache write-back below.
    let mut cache_store = if config.no_cache {
        None
    } else {
        cache::CacheStore::load(&config.cache_dir)
    };

    let parse_result = extract::parse_all_files(files, cache_store.as_ref(), need_complexity);
    let modules = parse_result.modules;
    let cache_hits = parse_result.cache_hits;
    let cache_misses = parse_result.cache_misses;
    let parse_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Update cache with freshly parsed modules and refresh stale mtime/size entries.
    let t = Instant::now();
    if !config.no_cache {
        let store = cache_store.get_or_insert_with(cache::CacheStore::new);
        update_cache(store, &modules, files);
        // A failed cache save is non-fatal — the analysis results are unaffected.
        if let Err(e) = store.save(&config.cache_dir) {
            tracing::warn!("Failed to save cache: {e}");
        }
    }
    let cache_ms = t.elapsed().as_secs_f64() * 1000.0;

    // Stage 3: Discover entry points (static patterns + plugin-discovered patterns)
    let t = Instant::now();
    let entry_points = discover_all_entry_points(
        config,
        files,
        workspaces,
        root_pkg.as_ref(),
        &workspace_pkgs,
        &plugin_result,
    );
    let entry_points_ms = t.elapsed().as_secs_f64() * 1000.0;

    // Stage 4: Resolve imports to file IDs
    let t = Instant::now();
    let pb = progress.stage_spinner("Resolving imports...");
    let mut resolved = resolve::resolve_all_imports(
        &modules,
        files,
        workspaces,
        &plugin_result.active_plugins,
        &plugin_result.path_aliases,
        &plugin_result.scss_include_paths,
        &config.root,
        &config.resolve.conditions,
    );
    external_style_usage::augment_external_style_package_usage(
        &mut resolved,
        config,
        workspaces,
        &plugin_result,
    );
    let resolve_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Stage 5: Build module graph
    let t = Instant::now();
    let pb = progress.stage_spinner("Building module graph...");
    let graph = graph::ModuleGraph::build_with_reachability_roots(
        &resolved,
        &entry_points.all,
        &entry_points.runtime,
        &entry_points.test,
        files,
    );
    let graph_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Compute entry-point summary before the graph consumes the entry_points vec
    let ep_summary = summarize_entry_points(&entry_points.all);

    // Stage 6: Analyze for dead code (with plugin context and workspace info)
    let t = Instant::now();
    let pb = progress.stage_spinner("Analyzing...");
    let mut result = analyze::find_dead_code_full(
        &graph,
        config,
        &resolved,
        Some(&plugin_result),
        workspaces,
        &modules,
        collect_usages,
    );
    let analyze_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();
    progress.finish();

    result.entry_point_summary = Some(ep_summary);

    let total_ms = pipeline_start.elapsed().as_secs_f64() * 1000.0;

    let cache_summary = if cache_hits > 0 {
        format!(" ({cache_hits} cached, {cache_misses} parsed)")
    } else {
        String::new()
    };

    tracing::debug!(
        "\n┌─ Pipeline Profile ─────────────────────────────\n\
         │  discover files:   {:>8.1}ms  ({} files)\n\
         │  workspaces:       {:>8.1}ms\n\
         │  plugins:          {:>8.1}ms\n\
         │  script analysis:  {:>8.1}ms\n\
         │  parse/extract:    {:>8.1}ms  ({} modules{})\n\
         │  cache update:     {:>8.1}ms\n\
         │  entry points:     {:>8.1}ms  ({} entries)\n\
         │  resolve imports:  {:>8.1}ms\n\
         │  build graph:      {:>8.1}ms\n\
         │  analyze:          {:>8.1}ms\n\
         │  ────────────────────────────────────────────\n\
         │  TOTAL:            {:>8.1}ms\n\
         └─────────────────────────────────────────────────",
        discover_ms,
        files.len(),
        workspaces_ms,
        plugins_ms,
        scripts_ms,
        parse_ms,
        modules.len(),
        cache_summary,
        cache_ms,
        entry_points_ms,
        entry_points.all.len(),
        resolve_ms,
        graph_ms,
        analyze_ms,
        total_ms,
    );

    let timings = if retain {
        Some(PipelineTimings {
            discover_files_ms: discover_ms,
            file_count: files.len(),
            workspaces_ms,
            workspace_count: workspaces.len(),
            plugins_ms,
            script_analysis_ms: scripts_ms,
            parse_extract_ms: parse_ms,
            module_count: modules.len(),
            cache_hits,
            cache_misses,
            cache_update_ms: cache_ms,
            entry_points_ms,
            entry_point_count: entry_points.all.len(),
            resolve_imports_ms: resolve_ms,
            build_graph_ms: graph_ms,
            analyze_ms,
            total_ms,
        })
    } else {
        None
    };

    Ok(AnalysisOutput {
        results: result,
        timings,
        graph: if retain { Some(graph) } else { None },
        modules: if retain_modules { Some(modules) } else { None },
        files: if retain_modules {
            Some(files.to_vec())
        } else {
            None
        },
        script_used_packages: plugin_result.script_used_packages,
    })
}
680
/// Load the project root's `package.json`.
///
/// Returns `None` if the file is missing or fails to parse. (The previous doc
/// comment here described `analyze_all_scripts` and was misattributed.)
fn load_root_package_json(config: &ResolvedConfig) -> Option<PackageJson> {
    PackageJson::load(&config.root.join("package.json")).ok()
}
688
689fn load_workspace_packages(
690    workspaces: &[fallow_config::WorkspaceInfo],
691) -> Vec<LoadedWorkspacePackage<'_>> {
692    workspaces
693        .iter()
694        .filter_map(|ws| {
695            PackageJson::load(&ws.root.join("package.json"))
696                .ok()
697                .map(|pkg| (ws, pkg))
698        })
699        .collect()
700}
701
/// Analyze package.json scripts from root and all workspace packages.
///
/// Populates `plugin_result` with script-used package names and config-file
/// entry patterns, then scans CI config files for binary invocations.
/// (This doc was previously misplaced above `load_root_package_json`.)
fn analyze_all_scripts(
    config: &ResolvedConfig,
    workspaces: &[fallow_config::WorkspaceInfo],
    root_pkg: Option<&PackageJson>,
    workspace_pkgs: &[LoadedWorkspacePackage<'_>],
    plugin_result: &mut plugins::AggregatedPluginResult,
) {
    // Collect all dependency names to build the bin-name → package-name reverse map.
    // This resolves binaries like "attw" to "@arethetypeswrong/cli" even without
    // node_modules/.bin symlinks.
    let mut all_dep_names: Vec<String> = Vec::new();
    if let Some(pkg) = root_pkg {
        all_dep_names.extend(pkg.all_dependency_names());
    }
    for (_, ws_pkg) in workspace_pkgs {
        all_dep_names.extend(ws_pkg.all_dependency_names());
    }
    all_dep_names.sort_unstable();
    all_dep_names.dedup();

    // Probe node_modules/ at project root and each workspace root so non-hoisted
    // deps (pnpm strict, Yarn workspaces) are also discovered.
    let mut nm_roots: Vec<&std::path::Path> = Vec::new();
    if config.root.join("node_modules").is_dir() {
        nm_roots.push(&config.root);
    }
    for ws in workspaces {
        if ws.root.join("node_modules").is_dir() {
            nm_roots.push(&ws.root);
        }
    }
    let bin_map = scripts::build_bin_to_package_map(&nm_roots, &all_dep_names);

    if let Some(pkg) = root_pkg
        && let Some(ref pkg_scripts) = pkg.scripts
    {
        // In production mode, only production-relevant scripts are analyzed.
        let scripts_to_analyze = if config.production {
            scripts::filter_production_scripts(pkg_scripts)
        } else {
            pkg_scripts.clone()
        };
        let script_analysis = scripts::analyze_scripts(&scripts_to_analyze, &config.root, &bin_map);
        // Root analysis *assigns* (not extends) — it establishes the baseline set.
        plugin_result.script_used_packages = script_analysis.used_packages;

        for config_file in &script_analysis.config_files {
            plugin_result
                .discovered_always_used
                .push((config_file.clone(), "scripts".to_string()));
        }
    }
    for (ws, ws_pkg) in workspace_pkgs {
        if let Some(ref ws_scripts) = ws_pkg.scripts {
            let scripts_to_analyze = if config.production {
                scripts::filter_production_scripts(ws_scripts)
            } else {
                ws_scripts.clone()
            };
            let ws_analysis = scripts::analyze_scripts(&scripts_to_analyze, &ws.root, &bin_map);
            plugin_result
                .script_used_packages
                .extend(ws_analysis.used_packages);

            // Config files found in workspace scripts are recorded root-relative.
            let ws_prefix = ws
                .root
                .strip_prefix(&config.root)
                .unwrap_or(&ws.root)
                .to_string_lossy();
            for config_file in &ws_analysis.config_files {
                plugin_result
                    .discovered_always_used
                    .push((format!("{ws_prefix}/{config_file}"), "scripts".to_string()));
            }
        }
    }

    // Scan CI config files for binary invocations
    let ci_packages = scripts::ci::analyze_ci_files(&config.root, &bin_map);
    plugin_result.script_used_packages.extend(ci_packages);
    plugin_result
        .entry_point_roles
        .entry("scripts".to_string())
        .or_insert(EntryPointRole::Support);
}
785
/// Discover all entry points from static patterns, workspaces, plugins, and infrastructure.
///
/// Root discovery runs first; per-workspace discovery fans out in parallel via
/// rayon. "Skipped entry" counts from every discovery are merged so the user
/// gets one aggregated warning. The combined set is deduplicated before return.
fn discover_all_entry_points(
    config: &ResolvedConfig,
    files: &[discover::DiscoveredFile],
    workspaces: &[fallow_config::WorkspaceInfo],
    root_pkg: Option<&PackageJson>,
    workspace_pkgs: &[LoadedWorkspacePackage<'_>],
    plugin_result: &plugins::AggregatedPluginResult,
) -> discover::CategorizedEntryPoints {
    let mut entry_points = discover::CategorizedEntryPoints::default();
    let root_discovery = discover::discover_entry_points_with_warnings_from_pkg(
        config,
        files,
        root_pkg,
        workspaces.is_empty(),
    );

    // Index pre-loaded manifests by workspace root to avoid re-reading them below.
    let workspace_pkg_by_root: rustc_hash::FxHashMap<std::path::PathBuf, &PackageJson> =
        workspace_pkgs
            .iter()
            .map(|(ws, pkg)| (ws.root.clone(), pkg))
            .collect();

    let workspace_discovery: Vec<discover::EntryPointDiscovery> = workspaces
        .par_iter()
        .map(|ws| {
            let pkg = workspace_pkg_by_root.get(&ws.root).copied();
            discover::discover_workspace_entry_points_with_warnings_from_pkg(&ws.root, files, pkg)
        })
        .collect();
    // Merge skipped-entry counts from root and workspace discoveries by path.
    let mut skipped_entries = rustc_hash::FxHashMap::default();
    entry_points.extend_runtime(root_discovery.entries);
    for (path, count) in root_discovery.skipped_entries {
        *skipped_entries.entry(path).or_insert(0) += count;
    }
    let mut ws_entries = Vec::new();
    for workspace in workspace_discovery {
        ws_entries.extend(workspace.entries);
        for (path, count) in workspace.skipped_entries {
            *skipped_entries.entry(path).or_insert(0) += count;
        }
    }
    discover::warn_skipped_entry_summary(&skipped_entries);
    entry_points.extend_runtime(ws_entries);

    let plugin_entries = discover::discover_plugin_entry_point_sets(plugin_result, config, files);
    entry_points.extend(plugin_entries);

    let infra_entries = discover::discover_infrastructure_entry_points(&config.root);
    entry_points.extend_runtime(infra_entries);

    // Add dynamically loaded files from config as entry points
    if !config.dynamically_loaded.is_empty() {
        let dynamic_entries = discover::discover_dynamically_loaded_entry_points(config, files);
        entry_points.extend_runtime(dynamic_entries);
    }

    entry_points.dedup()
}
845
846/// Summarize entry points by source category for user-facing output.
847fn summarize_entry_points(entry_points: &[discover::EntryPoint]) -> results::EntryPointSummary {
848    let mut counts: rustc_hash::FxHashMap<String, usize> = rustc_hash::FxHashMap::default();
849    for ep in entry_points {
850        let category = match &ep.source {
851            discover::EntryPointSource::PackageJsonMain
852            | discover::EntryPointSource::PackageJsonModule
853            | discover::EntryPointSource::PackageJsonExports
854            | discover::EntryPointSource::PackageJsonBin
855            | discover::EntryPointSource::PackageJsonScript => "package.json",
856            discover::EntryPointSource::Plugin { .. } => "plugin",
857            discover::EntryPointSource::TestFile => "test file",
858            discover::EntryPointSource::DefaultIndex => "default index",
859            discover::EntryPointSource::ManualEntry => "manual entry",
860            discover::EntryPointSource::InfrastructureConfig => "config",
861            discover::EntryPointSource::DynamicallyLoaded => "dynamically loaded",
862        };
863        *counts.entry(category.to_string()).or_insert(0) += 1;
864    }
865    let mut by_source: Vec<(String, usize)> = counts.into_iter().collect();
866    by_source.sort_by(|a, b| b.1.cmp(&a.1).then_with(|| a.0.cmp(&b.0)));
867    results::EntryPointSummary {
868        total: entry_points.len(),
869        by_source,
870    }
871}
872
873/// Run plugins for root project and all workspace packages.
874fn run_plugins(
875    config: &ResolvedConfig,
876    files: &[discover::DiscoveredFile],
877    workspaces: &[fallow_config::WorkspaceInfo],
878    root_pkg: Option<&PackageJson>,
879    workspace_pkgs: &[LoadedWorkspacePackage<'_>],
880) -> plugins::AggregatedPluginResult {
881    let registry = plugins::PluginRegistry::new(config.external_plugins.clone());
882    let file_paths: Vec<std::path::PathBuf> = files.iter().map(|f| f.path.clone()).collect();
883    let root_config_search_roots = collect_config_search_roots(&config.root, &file_paths);
884    let root_config_search_root_refs: Vec<&Path> = root_config_search_roots
885        .iter()
886        .map(std::path::PathBuf::as_path)
887        .collect();
888
889    // Run plugins for root project (full run with external plugins, inline config, etc.)
890    let mut result = root_pkg.map_or_else(plugins::AggregatedPluginResult::default, |pkg| {
891        registry.run_with_search_roots(
892            pkg,
893            &config.root,
894            &file_paths,
895            &root_config_search_root_refs,
896        )
897    });
898
899    if workspaces.is_empty() {
900        return result;
901    }
902
903    let root_active_plugins: rustc_hash::FxHashSet<&str> =
904        result.active_plugins.iter().map(String::as_str).collect();
905
906    // Pre-compile config matchers and relative files once for all workspace runs.
907    // This avoids re-compiling glob patterns and re-computing relative paths per workspace
908    // (previously O(workspaces × plugins × files) glob compilations).
909    let precompiled_matchers = registry.precompile_config_matchers();
910    let relative_files: Vec<(&std::path::PathBuf, String)> = file_paths
911        .iter()
912        .map(|f| {
913            let rel = f
914                .strip_prefix(&config.root)
915                .unwrap_or(f)
916                .to_string_lossy()
917                .into_owned();
918            (f, rel)
919        })
920        .collect();
921
922    // Run plugins for each workspace package in parallel, then merge results.
923    let ws_results: Vec<_> = workspace_pkgs
924        .par_iter()
925        .filter_map(|(ws, ws_pkg)| {
926            let ws_result = registry.run_workspace_fast(
927                ws_pkg,
928                &ws.root,
929                &config.root,
930                &precompiled_matchers,
931                &relative_files,
932                &root_active_plugins,
933            );
934            if ws_result.active_plugins.is_empty() {
935                return None;
936            }
937            let ws_prefix = ws
938                .root
939                .strip_prefix(&config.root)
940                .unwrap_or(&ws.root)
941                .to_string_lossy()
942                .into_owned();
943            Some((ws_result, ws_prefix))
944        })
945        .collect();
946
947    // Merge workspace results sequentially (deterministic order via par_iter index stability)
948    // Track seen names for O(1) dedup instead of O(n) Vec::contains
949    let mut seen_plugins: rustc_hash::FxHashSet<String> =
950        result.active_plugins.iter().cloned().collect();
951    let mut seen_prefixes: rustc_hash::FxHashSet<String> =
952        result.virtual_module_prefixes.iter().cloned().collect();
953    let mut seen_generated: rustc_hash::FxHashSet<String> =
954        result.generated_import_patterns.iter().cloned().collect();
955    for (ws_result, ws_prefix) in ws_results {
956        // Prefix helper: workspace-relative patterns need the workspace prefix
957        // to be matchable from the monorepo root. But patterns that are already
958        // project-root-relative (e.g., from angular.json which uses absolute paths
959        // like "apps/client/src/styles.css") should not be double-prefixed.
960        let prefix_if_needed = |pat: &str| -> String {
961            if pat.starts_with(ws_prefix.as_str()) || pat.starts_with('/') {
962                pat.to_string()
963            } else {
964                format!("{ws_prefix}/{pat}")
965            }
966        };
967
968        for (rule, pname) in &ws_result.entry_patterns {
969            result
970                .entry_patterns
971                .push((rule.prefixed(&ws_prefix), pname.clone()));
972        }
973        for (plugin_name, role) in ws_result.entry_point_roles {
974            result.entry_point_roles.entry(plugin_name).or_insert(role);
975        }
976        for (pat, pname) in &ws_result.always_used {
977            result
978                .always_used
979                .push((prefix_if_needed(pat), pname.clone()));
980        }
981        for (pat, pname) in &ws_result.discovered_always_used {
982            result
983                .discovered_always_used
984                .push((prefix_if_needed(pat), pname.clone()));
985        }
986        for (pat, pname) in &ws_result.fixture_patterns {
987            result
988                .fixture_patterns
989                .push((prefix_if_needed(pat), pname.clone()));
990        }
991        for rule in &ws_result.used_exports {
992            result.used_exports.push(rule.prefixed(&ws_prefix));
993        }
994        // Merge active plugin names (deduplicated via HashSet)
995        for plugin_name in ws_result.active_plugins {
996            if !seen_plugins.contains(&plugin_name) {
997                seen_plugins.insert(plugin_name.clone());
998                result.active_plugins.push(plugin_name);
999            }
1000        }
1001        // These don't need prefixing (absolute paths / package names)
1002        result
1003            .referenced_dependencies
1004            .extend(ws_result.referenced_dependencies);
1005        result.setup_files.extend(ws_result.setup_files);
1006        result
1007            .tooling_dependencies
1008            .extend(ws_result.tooling_dependencies);
1009        // Virtual module prefixes (e.g., Docusaurus @theme/, @site/) are
1010        // package-name prefixes, not file paths — no workspace prefix needed.
1011        for prefix in ws_result.virtual_module_prefixes {
1012            if !seen_prefixes.contains(&prefix) {
1013                seen_prefixes.insert(prefix.clone());
1014                result.virtual_module_prefixes.push(prefix);
1015            }
1016        }
1017        // Generated import patterns (e.g., SvelteKit /$types) are suffix
1018        // matches on specifiers, not file paths — no workspace prefix needed.
1019        for pattern in ws_result.generated_import_patterns {
1020            if !seen_generated.contains(&pattern) {
1021                seen_generated.insert(pattern.clone());
1022                result.generated_import_patterns.push(pattern);
1023            }
1024        }
1025        // Path aliases from workspace plugins (e.g., SvelteKit $lib/ → src/lib).
1026        // Prefix the replacement directory so it resolves from the monorepo root.
1027        for (prefix, replacement) in ws_result.path_aliases {
1028            result
1029                .path_aliases
1030                .push((prefix, format!("{ws_prefix}/{replacement}")));
1031        }
1032    }
1033
1034    result
1035}
1036
/// Collect every directory between each discovered file and the project root
/// (inclusive of the root itself), deduplicated and sorted.
///
/// Used as the set of directories searched for plugin config files. Files
/// that live outside `root` contribute nothing. A `BTreeSet` both dedups
/// and keeps the paths ordered, so no separate sort pass is needed.
fn collect_config_search_roots(
    root: &Path,
    file_paths: &[std::path::PathBuf],
) -> Vec<std::path::PathBuf> {
    let mut roots: std::collections::BTreeSet<std::path::PathBuf> =
        std::collections::BTreeSet::new();
    // The root itself is always a search root, even with no files.
    roots.insert(root.to_path_buf());

    for file_path in file_paths {
        // `ancestors().skip(1)` walks the parent-directory chain of the file;
        // stop as soon as we step outside `root`, and don't climb above it.
        for dir in file_path.ancestors().skip(1) {
            if !dir.starts_with(root) {
                break;
            }
            roots.insert(dir.to_path_buf());
            if dir == root {
                break;
            }
        }
    }

    roots.into_iter().collect()
}
1062
1063/// Run analysis on a project directory (with export usages for LSP Code Lens).
1064///
1065/// # Errors
1066///
1067/// Returns an error if config loading, file discovery, parsing, or analysis fails.
1068pub fn analyze_project(root: &Path) -> Result<AnalysisResults, FallowError> {
1069    let config = default_config(root);
1070    analyze_with_usages(&config)
1071}
1072
1073/// Create a default config for a project root.
1074///
1075/// `analyze_project` is the dead-code entry point used by the LSP and other
1076/// programmatic embedders. When the loaded config uses the per-analysis
1077/// production form (`production: { deadCode: true, ... }`), the production
1078/// flag must be flattened to the dead-code analysis here. Otherwise
1079/// `ResolvedConfig::resolve` calls `.global()` which returns false for the
1080/// per-analysis variant and the production-mode rule overrides
1081/// (`unused_dev_dependencies: off`, etc.) plus `resolved.production = true`
1082/// are silently dropped.
1083pub(crate) fn default_config(root: &Path) -> ResolvedConfig {
1084    let user_config = fallow_config::FallowConfig::find_and_load(root)
1085        .ok()
1086        .flatten();
1087    match user_config {
1088        Some((mut config, _path)) => {
1089            let dead_code_production = config
1090                .production
1091                .for_analysis(fallow_config::ProductionAnalysis::DeadCode);
1092            config.production = dead_code_production.into();
1093            config.resolve(
1094                root.to_path_buf(),
1095                fallow_config::OutputFormat::Human,
1096                num_cpus(),
1097                false,
1098                true, // quiet: LSP/programmatic callers don't need progress bars
1099            )
1100        }
1101        None => fallow_config::FallowConfig::default().resolve(
1102            root.to_path_buf(),
1103            fallow_config::OutputFormat::Human,
1104            num_cpus(),
1105            false,
1106            true,
1107        ),
1108    }
1109}
1110
/// Number of worker threads to use: the detected hardware parallelism, or 4
/// when detection is unavailable on the current platform.
fn num_cpus() -> usize {
    match std::thread::available_parallelism() {
        Ok(n) => n.get(),
        Err(_) => 4,
    }
}
1114
#[cfg(test)]
mod tests {
    use super::{collect_config_search_roots, format_undeclared_workspace_warning};
    use std::path::{Path, PathBuf};

    use fallow_config::WorkspaceDiagnostic;

    // Build a WorkspaceDiagnostic rooted at `root`/`relative` with an empty
    // message; only the path matters for the warning-formatting tests below.
    fn diag(root: &Path, relative: &str) -> WorkspaceDiagnostic {
        WorkspaceDiagnostic {
            path: root.join(relative),
            message: String::new(),
        }
    }

    // A single undeclared workspace must use singular phrasing
    // ("1 directory ... is not declared as a workspace").
    #[test]
    fn undeclared_workspace_warning_is_singular_for_one_path() {
        let root = Path::new("/repo");
        let warning = format_undeclared_workspace_warning(root, &[diag(root, "packages/api")])
            .expect("warning should be rendered");

        assert_eq!(
            warning,
            "1 directory with package.json is not declared as a workspace: packages/api. Add that path to package.json workspaces or pnpm-workspace.yaml if it should be analyzed as a workspace."
        );
    }

    // With more paths than the preview cap (UNDECLARED_WORKSPACE_WARNING_PREVIEW = 5),
    // the warning lists the first five and summarizes the rest as "(and N more)".
    #[test]
    fn undeclared_workspace_warning_summarizes_many_paths() {
        let root = PathBuf::from("/repo");
        let diagnostics = [
            "examples/a",
            "examples/b",
            "examples/c",
            "examples/d",
            "examples/e",
            "examples/f",
        ]
        .into_iter()
        .map(|path| diag(&root, path))
        .collect::<Vec<_>>();

        let warning = format_undeclared_workspace_warning(&root, &diagnostics)
            .expect("warning should be rendered");

        assert_eq!(
            warning,
            "6 directories with package.json are not declared as workspaces: examples/a, examples/b, examples/c, examples/d, examples/e (and 1 more). Add those paths to package.json workspaces or pnpm-workspace.yaml if they should be analyzed as workspaces."
        );
    }

    // Every ancestor directory of each file (up to and including the root)
    // appears exactly once, and the result is sorted.
    #[test]
    fn collect_config_search_roots_includes_file_ancestors_once() {
        let root = PathBuf::from("/repo");
        let search_roots = collect_config_search_roots(
            &root,
            &[
                root.join("apps/query/src/main.ts"),
                root.join("packages/shared/lib/index.ts"),
            ],
        );

        assert_eq!(
            search_roots,
            vec![
                root.clone(),
                root.join("apps"),
                root.join("apps/query"),
                root.join("apps/query/src"),
                root.join("packages"),
                root.join("packages/shared"),
                root.join("packages/shared/lib"),
            ]
        );
    }
}