// fallow_core — lib.rs
1pub mod analyze;
2pub mod cache;
3pub mod churn;
4pub mod cross_reference;
5pub mod discover;
6pub mod duplicates;
7pub(crate) mod errors;
8mod external_style_usage;
9pub mod extract;
10pub mod plugins;
11pub(crate) mod progress;
12pub mod results;
13pub(crate) mod scripts;
14pub mod suppress;
15pub mod trace;
16
17// Re-export from fallow-graph for backwards compatibility
18pub use fallow_graph::graph;
19pub use fallow_graph::project;
20pub use fallow_graph::resolve;
21
22use std::path::Path;
23use std::time::Instant;
24
25use errors::FallowError;
26use fallow_config::{
27    EntryPointRole, PackageJson, ResolvedConfig, discover_workspaces, find_undeclared_workspaces,
28};
29use rayon::prelude::*;
30use results::AnalysisResults;
31use trace::PipelineTimings;
32
/// Maximum number of undeclared-workspace paths listed in the warning message
/// before collapsing the rest into "(and N more)".
const UNDECLARED_WORKSPACE_WARNING_PREVIEW: usize = 5;
34
/// Result of the full analysis pipeline, including optional performance timings.
pub struct AnalysisOutput {
    /// Dead-code findings and related analysis results.
    pub results: AnalysisResults,
    /// Per-stage pipeline timings; `Some` when the pipeline was asked to retain them.
    pub timings: Option<PipelineTimings>,
    /// Module graph; `Some` when the pipeline was asked to retain it.
    pub graph: Option<graph::ModuleGraph>,
    /// Parsed modules from the pipeline, available when `retain_modules` is true.
    /// Used by the combined command to share a single parse across dead-code and health.
    pub modules: Option<Vec<extract::ModuleInfo>>,
    /// Discovered files from the pipeline, available when `retain_modules` is true.
    pub files: Option<Vec<discover::DiscoveredFile>>,
}
46
47/// Update cache: write freshly parsed modules and refresh stale mtime/size entries.
48fn update_cache(
49    store: &mut cache::CacheStore,
50    modules: &[extract::ModuleInfo],
51    files: &[discover::DiscoveredFile],
52) {
53    for module in modules {
54        if let Some(file) = files.get(module.file_id.0 as usize) {
55            let (mt, sz) = file_mtime_and_size(&file.path);
56            // If content hash matches, just refresh mtime/size if stale (e.g. `touch`ed file)
57            if let Some(cached) = store.get_by_path_only(&file.path)
58                && cached.content_hash == module.content_hash
59            {
60                if cached.mtime_secs != mt || cached.file_size != sz {
61                    store.insert(&file.path, cache::module_to_cached(module, mt, sz));
62                }
63                continue;
64            }
65            store.insert(&file.path, cache::module_to_cached(module, mt, sz));
66        }
67    }
68    store.retain_paths(files);
69}
70
/// Return `(mtime_seconds_since_epoch, byte_size)` for `path`, or `(0, 0)`
/// when the file's metadata cannot be read (missing file, permissions, ...).
fn file_mtime_and_size(path: &std::path::Path) -> (u64, u64) {
    let Ok(meta) = std::fs::metadata(path) else {
        return (0, 0);
    };
    // An mtime before the epoch, or a platform without mtime support, reports 0.
    let mtime = meta
        .modified()
        .ok()
        .and_then(|t| t.duration_since(std::time::SystemTime::UNIX_EPOCH).ok())
        .map_or(0, |d| d.as_secs());
    (mtime, meta.len())
}
82
83fn format_undeclared_workspace_warning(
84    root: &Path,
85    undeclared: &[fallow_config::WorkspaceDiagnostic],
86) -> Option<String> {
87    if undeclared.is_empty() {
88        return None;
89    }
90
91    let preview = undeclared
92        .iter()
93        .take(UNDECLARED_WORKSPACE_WARNING_PREVIEW)
94        .map(|diag| {
95            diag.path
96                .strip_prefix(root)
97                .unwrap_or(&diag.path)
98                .display()
99                .to_string()
100                .replace('\\', "/")
101        })
102        .collect::<Vec<_>>();
103    let remaining = undeclared
104        .len()
105        .saturating_sub(UNDECLARED_WORKSPACE_WARNING_PREVIEW);
106    let tail = if remaining > 0 {
107        format!(" (and {remaining} more)")
108    } else {
109        String::new()
110    };
111    let noun = if undeclared.len() == 1 {
112        "directory with package.json is"
113    } else {
114        "directories with package.json are"
115    };
116    let guidance = if undeclared.len() == 1 {
117        "Add that path to package.json workspaces or pnpm-workspace.yaml if it should be analyzed as a workspace."
118    } else {
119        "Add those paths to package.json workspaces or pnpm-workspace.yaml if they should be analyzed as workspaces."
120    };
121
122    Some(format!(
123        "{} {} not declared as {}: {}{}. {}",
124        undeclared.len(),
125        noun,
126        if undeclared.len() == 1 {
127            "a workspace"
128        } else {
129            "workspaces"
130        },
131        preview.join(", "),
132        tail,
133        guidance
134    ))
135}
136
137fn warn_undeclared_workspaces(
138    root: &Path,
139    workspaces_vec: &[fallow_config::WorkspaceInfo],
140    quiet: bool,
141) {
142    if quiet {
143        return;
144    }
145
146    let undeclared = find_undeclared_workspaces(root, workspaces_vec);
147    if let Some(message) = format_undeclared_workspace_warning(root, &undeclared) {
148        tracing::warn!("{message}");
149    }
150}
151
152/// Run the full analysis pipeline.
153///
154/// # Errors
155///
156/// Returns an error if file discovery, parsing, or analysis fails.
157pub fn analyze(config: &ResolvedConfig) -> Result<AnalysisResults, FallowError> {
158    let output = analyze_full(config, false, false, false, false)?;
159    Ok(output.results)
160}
161
162/// Run the full analysis pipeline with export usage collection (for LSP Code Lens).
163///
164/// # Errors
165///
166/// Returns an error if file discovery, parsing, or analysis fails.
167pub fn analyze_with_usages(config: &ResolvedConfig) -> Result<AnalysisResults, FallowError> {
168    let output = analyze_full(config, false, true, false, false)?;
169    Ok(output.results)
170}
171
/// Run the full analysis pipeline with optional performance timings and graph retention.
///
/// # Errors
///
/// Returns an error if file discovery, parsing, or analysis fails.
pub fn analyze_with_trace(config: &ResolvedConfig) -> Result<AnalysisOutput, FallowError> {
    // retain = true: keep pipeline timings and the module graph in the output.
    analyze_full(config, true, false, false, false)
}
180
/// Run the full analysis pipeline, retaining parsed modules and discovered files.
///
/// Used by the combined command to share a single parse across dead-code and health.
/// When `need_complexity` is true, the `ComplexityVisitor` runs during parsing so
/// the returned modules contain per-function complexity data.
///
/// # Errors
///
/// Returns an error if file discovery, parsing, or analysis fails.
pub fn analyze_retaining_modules(
    config: &ResolvedConfig,
    need_complexity: bool,
    retain_graph: bool,
) -> Result<AnalysisOutput, FallowError> {
    // retain_modules = true so parsed modules and discovered files come back to the caller.
    analyze_full(config, retain_graph, false, need_complexity, true)
}
197
/// Run the analysis pipeline using pre-parsed modules, skipping the parsing stage.
///
/// This avoids re-parsing files when the caller already has a `ParseResult` (e.g., from
/// `fallow_core::extract::parse_all_files`). Discovery, plugins, scripts, entry points,
/// import resolution, graph construction, and dead code detection still run normally.
/// The graph is always retained (needed for file scores).
///
/// NOTE(review): assumes `modules` were parsed from the same file set that
/// `discover::discover_files(config)` returns below — `module.file_id` values must
/// line up with the discovery order. Confirm with callers.
///
/// # Errors
///
/// Returns an error if discovery, graph construction, or analysis fails.
pub fn analyze_with_parse_result(
    config: &ResolvedConfig,
    modules: &[extract::ModuleInfo],
) -> Result<AnalysisOutput, FallowError> {
    let _span = tracing::info_span!("fallow_analyze_with_parse_result").entered();
    let pipeline_start = Instant::now();

    // Spinners only when not quiet, stderr is a TTY, and the output format is
    // human-readable; structured formats (JSON, SARIF) stay free of progress noise.
    let show_progress = !config.quiet
        && std::io::IsTerminal::is_terminal(&std::io::stderr())
        && matches!(
            config.output,
            fallow_config::OutputFormat::Human
                | fallow_config::OutputFormat::Compact
                | fallow_config::OutputFormat::Markdown
        );
    let progress = progress::AnalysisProgress::new(show_progress);

    // Missing node_modules degrades import resolution — warn up front.
    if !config.root.join("node_modules").is_dir() {
        tracing::warn!(
            "node_modules directory not found. Run `npm install` / `pnpm install` first for accurate results."
        );
    }

    // Discover workspaces
    let t = Instant::now();
    let workspaces_vec = discover_workspaces(&config.root);
    let workspaces_ms = t.elapsed().as_secs_f64() * 1000.0;
    if !workspaces_vec.is_empty() {
        tracing::info!(count = workspaces_vec.len(), "workspaces discovered");
    }

    // Warn about directories with package.json not declared as workspaces
    warn_undeclared_workspaces(&config.root, &workspaces_vec, config.quiet);

    // Stage 1: Discover files (cheap — needed for file registry and resolution)
    let t = Instant::now();
    let pb = progress.stage_spinner("Discovering files...");
    let discovered_files = discover::discover_files(config);
    let discover_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    let project = project::ProjectState::new(discovered_files, workspaces_vec);
    let files = project.files();
    let workspaces = project.workspaces();

    // Stage 1.5: Run plugin system
    let t = Instant::now();
    let pb = progress.stage_spinner("Detecting plugins...");
    let mut plugin_result = run_plugins(config, files, workspaces);
    let plugins_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Stage 1.6: Analyze package.json scripts
    let t = Instant::now();
    analyze_all_scripts(config, workspaces, &mut plugin_result);
    let scripts_ms = t.elapsed().as_secs_f64() * 1000.0;

    // Stage 2: SKIPPED — using pre-parsed modules from caller

    // Stage 3: Discover entry points
    let t = Instant::now();
    let entry_points = discover_all_entry_points(config, files, workspaces, &plugin_result);
    let entry_points_ms = t.elapsed().as_secs_f64() * 1000.0;

    // Compute entry-point summary before the graph consumes the entry_points vec
    let ep_summary = summarize_entry_points(&entry_points.all);

    // Stage 4: Resolve imports to file IDs
    let t = Instant::now();
    let pb = progress.stage_spinner("Resolving imports...");
    let mut resolved = resolve::resolve_all_imports(
        modules,
        files,
        workspaces,
        &plugin_result.active_plugins,
        &plugin_result.path_aliases,
        &plugin_result.scss_include_paths,
        &config.root,
        &config.resolve.conditions,
    );
    // Augment the resolution with external style package usage (see external_style_usage).
    external_style_usage::augment_external_style_package_usage(
        &mut resolved,
        config,
        workspaces,
        &plugin_result,
    );
    let resolve_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Stage 5: Build module graph
    let t = Instant::now();
    let pb = progress.stage_spinner("Building module graph...");
    let graph = graph::ModuleGraph::build_with_reachability_roots(
        &resolved,
        &entry_points.all,
        &entry_points.runtime,
        &entry_points.test,
        files,
    );
    let graph_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Stage 6: Analyze for dead code
    let t = Instant::now();
    let pb = progress.stage_spinner("Analyzing...");
    let mut result = analyze::find_dead_code_full(
        &graph,
        config,
        &resolved,
        Some(&plugin_result),
        workspaces,
        modules,
        false, // collect_usages: not needed on this path
    );
    let analyze_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();
    progress.finish();

    result.entry_point_summary = Some(ep_summary);

    let total_ms = pipeline_start.elapsed().as_secs_f64() * 1000.0;

    tracing::debug!(
        "\n┌─ Pipeline Profile (reuse) ─────────────────────\n\
         │  discover files:   {:>8.1}ms  ({} files)\n\
         │  workspaces:       {:>8.1}ms\n\
         │  plugins:          {:>8.1}ms\n\
         │  script analysis:  {:>8.1}ms\n\
         │  parse/extract:    SKIPPED (reused {} modules)\n\
         │  entry points:     {:>8.1}ms  ({} entries)\n\
         │  resolve imports:  {:>8.1}ms\n\
         │  build graph:      {:>8.1}ms\n\
         │  analyze:          {:>8.1}ms\n\
         │  ────────────────────────────────────────────\n\
         │  TOTAL:            {:>8.1}ms\n\
         └─────────────────────────────────────────────────",
        discover_ms,
        files.len(),
        workspaces_ms,
        plugins_ms,
        scripts_ms,
        modules.len(),
        entry_points_ms,
        entry_points.all.len(),
        resolve_ms,
        graph_ms,
        analyze_ms,
        total_ms,
    );

    // Timings are always reported on this path (no `retain` flag to consult).
    let timings = Some(PipelineTimings {
        discover_files_ms: discover_ms,
        file_count: files.len(),
        workspaces_ms,
        workspace_count: workspaces.len(),
        plugins_ms,
        script_analysis_ms: scripts_ms,
        parse_extract_ms: 0.0, // Skipped — modules were reused
        module_count: modules.len(),
        cache_hits: 0,
        cache_misses: 0,
        cache_update_ms: 0.0,
        entry_points_ms,
        entry_point_count: entry_points.all.len(),
        resolve_imports_ms: resolve_ms,
        build_graph_ms: graph_ms,
        analyze_ms,
        total_ms,
    });

    Ok(AnalysisOutput {
        results: result,
        timings,
        graph: Some(graph), // always retained on this path
        modules: None,
        files: None,
    })
}
386
/// Core analysis pipeline shared by every public `analyze*` entry point.
///
/// Flags:
/// * `retain` — populate `timings` and keep the module graph in the output.
/// * `collect_usages` — collect export usage data (used by LSP Code Lens).
/// * `need_complexity` — run the complexity visitor during parsing.
/// * `retain_modules` — return parsed modules and discovered files for reuse.
#[expect(
    clippy::unnecessary_wraps,
    reason = "Result kept for future error handling"
)]
#[expect(
    clippy::too_many_lines,
    reason = "main pipeline function; split candidate for sig-audit-loop"
)]
fn analyze_full(
    config: &ResolvedConfig,
    retain: bool,
    collect_usages: bool,
    need_complexity: bool,
    retain_modules: bool,
) -> Result<AnalysisOutput, FallowError> {
    let _span = tracing::info_span!("fallow_analyze").entered();
    let pipeline_start = Instant::now();

    // Progress bars: enabled when not quiet, stderr is a terminal, and output is human-readable.
    // Structured formats (JSON, SARIF) suppress spinners even on TTY — users piping structured
    // output don't expect progress noise on stderr.
    let show_progress = !config.quiet
        && std::io::IsTerminal::is_terminal(&std::io::stderr())
        && matches!(
            config.output,
            fallow_config::OutputFormat::Human
                | fallow_config::OutputFormat::Compact
                | fallow_config::OutputFormat::Markdown
        );
    let progress = progress::AnalysisProgress::new(show_progress);

    // Warn if node_modules is missing — resolution will be severely degraded
    if !config.root.join("node_modules").is_dir() {
        tracing::warn!(
            "node_modules directory not found. Run `npm install` / `pnpm install` first for accurate results."
        );
    }

    // Discover workspaces if in a monorepo
    let t = Instant::now();
    let workspaces_vec = discover_workspaces(&config.root);
    let workspaces_ms = t.elapsed().as_secs_f64() * 1000.0;
    if !workspaces_vec.is_empty() {
        tracing::info!(count = workspaces_vec.len(), "workspaces discovered");
    }

    // Warn about directories with package.json not declared as workspaces
    warn_undeclared_workspaces(&config.root, &workspaces_vec, config.quiet);

    // Stage 1: Discover all source files
    let t = Instant::now();
    let pb = progress.stage_spinner("Discovering files...");
    let discovered_files = discover::discover_files(config);
    let discover_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Build ProjectState: owns the file registry with stable FileIds and workspace metadata.
    // This is the foundation for cross-workspace resolution and future incremental analysis.
    let project = project::ProjectState::new(discovered_files, workspaces_vec);
    let files = project.files();
    let workspaces = project.workspaces();

    // Stage 1.5: Run plugin system — parse config files, discover dynamic entries
    let t = Instant::now();
    let pb = progress.stage_spinner("Detecting plugins...");
    let mut plugin_result = run_plugins(config, files, workspaces);
    let plugins_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Stage 1.6: Analyze package.json scripts for binary usage and config file refs
    let t = Instant::now();
    analyze_all_scripts(config, workspaces, &mut plugin_result);
    let scripts_ms = t.elapsed().as_secs_f64() * 1000.0;

    // Stage 2: Parse all files in parallel and extract imports/exports
    let t = Instant::now();
    let pb = progress.stage_spinner(&format!("Parsing {} files...", files.len()));
    // `no_cache` skips both reading and writing the parse cache.
    let mut cache_store = if config.no_cache {
        None
    } else {
        cache::CacheStore::load(&config.cache_dir)
    };

    let parse_result = extract::parse_all_files(files, cache_store.as_ref(), need_complexity);
    let modules = parse_result.modules;
    let cache_hits = parse_result.cache_hits;
    let cache_misses = parse_result.cache_misses;
    let parse_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Update cache with freshly parsed modules and refresh stale mtime/size entries.
    let t = Instant::now();
    if !config.no_cache {
        let store = cache_store.get_or_insert_with(cache::CacheStore::new);
        update_cache(store, &modules, files);
        // Cache persistence failure is non-fatal — analysis results are unaffected.
        if let Err(e) = store.save(&config.cache_dir) {
            tracing::warn!("Failed to save cache: {e}");
        }
    }
    let cache_ms = t.elapsed().as_secs_f64() * 1000.0;

    // Stage 3: Discover entry points (static patterns + plugin-discovered patterns)
    let t = Instant::now();
    let entry_points = discover_all_entry_points(config, files, workspaces, &plugin_result);
    let entry_points_ms = t.elapsed().as_secs_f64() * 1000.0;

    // Stage 4: Resolve imports to file IDs
    let t = Instant::now();
    let pb = progress.stage_spinner("Resolving imports...");
    let mut resolved = resolve::resolve_all_imports(
        &modules,
        files,
        workspaces,
        &plugin_result.active_plugins,
        &plugin_result.path_aliases,
        &plugin_result.scss_include_paths,
        &config.root,
        &config.resolve.conditions,
    );
    // Augment the resolution with external style package usage (see external_style_usage).
    external_style_usage::augment_external_style_package_usage(
        &mut resolved,
        config,
        workspaces,
        &plugin_result,
    );
    let resolve_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Stage 5: Build module graph
    let t = Instant::now();
    let pb = progress.stage_spinner("Building module graph...");
    let graph = graph::ModuleGraph::build_with_reachability_roots(
        &resolved,
        &entry_points.all,
        &entry_points.runtime,
        &entry_points.test,
        files,
    );
    let graph_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Compute entry-point summary before the graph consumes the entry_points vec
    let ep_summary = summarize_entry_points(&entry_points.all);

    // Stage 6: Analyze for dead code (with plugin context and workspace info)
    let t = Instant::now();
    let pb = progress.stage_spinner("Analyzing...");
    let mut result = analyze::find_dead_code_full(
        &graph,
        config,
        &resolved,
        Some(&plugin_result),
        workspaces,
        &modules,
        collect_usages,
    );
    let analyze_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();
    progress.finish();

    result.entry_point_summary = Some(ep_summary);

    let total_ms = pipeline_start.elapsed().as_secs_f64() * 1000.0;

    // Only mention cache stats in the profile when the cache produced any hits.
    let cache_summary = if cache_hits > 0 {
        format!(" ({cache_hits} cached, {cache_misses} parsed)")
    } else {
        String::new()
    };

    tracing::debug!(
        "\n┌─ Pipeline Profile ─────────────────────────────\n\
         │  discover files:   {:>8.1}ms  ({} files)\n\
         │  workspaces:       {:>8.1}ms\n\
         │  plugins:          {:>8.1}ms\n\
         │  script analysis:  {:>8.1}ms\n\
         │  parse/extract:    {:>8.1}ms  ({} modules{})\n\
         │  cache update:     {:>8.1}ms\n\
         │  entry points:     {:>8.1}ms  ({} entries)\n\
         │  resolve imports:  {:>8.1}ms\n\
         │  build graph:      {:>8.1}ms\n\
         │  analyze:          {:>8.1}ms\n\
         │  ────────────────────────────────────────────\n\
         │  TOTAL:            {:>8.1}ms\n\
         └─────────────────────────────────────────────────",
        discover_ms,
        files.len(),
        workspaces_ms,
        plugins_ms,
        scripts_ms,
        parse_ms,
        modules.len(),
        cache_summary,
        cache_ms,
        entry_points_ms,
        entry_points.all.len(),
        resolve_ms,
        graph_ms,
        analyze_ms,
        total_ms,
    );

    let timings = if retain {
        Some(PipelineTimings {
            discover_files_ms: discover_ms,
            file_count: files.len(),
            workspaces_ms,
            workspace_count: workspaces.len(),
            plugins_ms,
            script_analysis_ms: scripts_ms,
            parse_extract_ms: parse_ms,
            module_count: modules.len(),
            cache_hits,
            cache_misses,
            cache_update_ms: cache_ms,
            entry_points_ms,
            entry_point_count: entry_points.all.len(),
            resolve_imports_ms: resolve_ms,
            build_graph_ms: graph_ms,
            analyze_ms,
            total_ms,
        })
    } else {
        None
    };

    Ok(AnalysisOutput {
        results: result,
        timings,
        graph: if retain { Some(graph) } else { None },
        modules: if retain_modules { Some(modules) } else { None },
        files: if retain_modules {
            Some(files.to_vec())
        } else {
            None
        },
    })
}
625
626/// Analyze package.json scripts from root and all workspace packages.
627///
628/// Populates the plugin result with script-used packages and config file
629/// entry patterns. Also scans CI config files for binary invocations.
630fn analyze_all_scripts(
631    config: &ResolvedConfig,
632    workspaces: &[fallow_config::WorkspaceInfo],
633    plugin_result: &mut plugins::AggregatedPluginResult,
634) {
635    // Load all package.jsons once: root + workspaces. Each is reused for both
636    // dep name collection (bin map) and script analysis (no double I/O).
637    let pkg_path = config.root.join("package.json");
638    let root_pkg = PackageJson::load(&pkg_path).ok();
639
640    let ws_pkgs: Vec<_> = workspaces
641        .iter()
642        .filter_map(|ws| {
643            PackageJson::load(&ws.root.join("package.json"))
644                .ok()
645                .map(|pkg| (ws, pkg))
646        })
647        .collect();
648
649    // Collect all dependency names to build the bin-name → package-name reverse map.
650    // This resolves binaries like "attw" to "@arethetypeswrong/cli" even without
651    // node_modules/.bin symlinks.
652    let mut all_dep_names: Vec<String> = Vec::new();
653    if let Some(ref pkg) = root_pkg {
654        all_dep_names.extend(pkg.all_dependency_names());
655    }
656    for (_, ws_pkg) in &ws_pkgs {
657        all_dep_names.extend(ws_pkg.all_dependency_names());
658    }
659    all_dep_names.sort_unstable();
660    all_dep_names.dedup();
661
662    // Probe node_modules/ at project root and each workspace root so non-hoisted
663    // deps (pnpm strict, Yarn workspaces) are also discovered.
664    let mut nm_roots: Vec<&std::path::Path> = vec![&config.root];
665    for ws in workspaces {
666        nm_roots.push(&ws.root);
667    }
668    let bin_map = scripts::build_bin_to_package_map(&nm_roots, &all_dep_names);
669
670    if let Some(ref pkg) = root_pkg
671        && let Some(ref pkg_scripts) = pkg.scripts
672    {
673        let scripts_to_analyze = if config.production {
674            scripts::filter_production_scripts(pkg_scripts)
675        } else {
676            pkg_scripts.clone()
677        };
678        let script_analysis = scripts::analyze_scripts(&scripts_to_analyze, &config.root, &bin_map);
679        plugin_result.script_used_packages = script_analysis.used_packages;
680
681        for config_file in &script_analysis.config_files {
682            plugin_result
683                .discovered_always_used
684                .push((config_file.clone(), "scripts".to_string()));
685        }
686    }
687    for (ws, ws_pkg) in &ws_pkgs {
688        if let Some(ref ws_scripts) = ws_pkg.scripts {
689            let scripts_to_analyze = if config.production {
690                scripts::filter_production_scripts(ws_scripts)
691            } else {
692                ws_scripts.clone()
693            };
694            let ws_analysis = scripts::analyze_scripts(&scripts_to_analyze, &ws.root, &bin_map);
695            plugin_result
696                .script_used_packages
697                .extend(ws_analysis.used_packages);
698
699            let ws_prefix = ws
700                .root
701                .strip_prefix(&config.root)
702                .unwrap_or(&ws.root)
703                .to_string_lossy();
704            for config_file in &ws_analysis.config_files {
705                plugin_result
706                    .discovered_always_used
707                    .push((format!("{ws_prefix}/{config_file}"), "scripts".to_string()));
708            }
709        }
710    }
711
712    // Scan CI config files for binary invocations
713    let ci_packages = scripts::ci::analyze_ci_files(&config.root, &bin_map);
714    plugin_result.script_used_packages.extend(ci_packages);
715    plugin_result
716        .entry_point_roles
717        .entry("scripts".to_string())
718        .or_insert(EntryPointRole::Support);
719}
720
721/// Discover all entry points from static patterns, workspaces, plugins, and infrastructure.
722fn discover_all_entry_points(
723    config: &ResolvedConfig,
724    files: &[discover::DiscoveredFile],
725    workspaces: &[fallow_config::WorkspaceInfo],
726    plugin_result: &plugins::AggregatedPluginResult,
727) -> discover::CategorizedEntryPoints {
728    let mut entry_points = discover::CategorizedEntryPoints::default();
729    let root_discovery = discover::discover_entry_points_with_warnings(config, files);
730
731    let workspace_discovery: Vec<discover::EntryPointDiscovery> = workspaces
732        .par_iter()
733        .map(|ws| discover::discover_workspace_entry_points_with_warnings(&ws.root, config, files))
734        .collect();
735    let mut skipped_entries = rustc_hash::FxHashMap::default();
736    entry_points.extend_runtime(root_discovery.entries);
737    for (path, count) in root_discovery.skipped_entries {
738        *skipped_entries.entry(path).or_insert(0) += count;
739    }
740    let mut ws_entries = Vec::new();
741    for workspace in workspace_discovery {
742        ws_entries.extend(workspace.entries);
743        for (path, count) in workspace.skipped_entries {
744            *skipped_entries.entry(path).or_insert(0) += count;
745        }
746    }
747    discover::warn_skipped_entry_summary(&skipped_entries);
748    entry_points.extend_runtime(ws_entries);
749
750    let plugin_entries = discover::discover_plugin_entry_point_sets(plugin_result, config, files);
751    entry_points.extend(plugin_entries);
752
753    let infra_entries = discover::discover_infrastructure_entry_points(&config.root);
754    entry_points.extend_runtime(infra_entries);
755
756    // Add dynamically loaded files from config as entry points
757    if !config.dynamically_loaded.is_empty() {
758        let dynamic_entries = discover::discover_dynamically_loaded_entry_points(config, files);
759        entry_points.extend_runtime(dynamic_entries);
760    }
761
762    entry_points.dedup()
763}
764
765/// Summarize entry points by source category for user-facing output.
766fn summarize_entry_points(entry_points: &[discover::EntryPoint]) -> results::EntryPointSummary {
767    let mut counts: rustc_hash::FxHashMap<String, usize> = rustc_hash::FxHashMap::default();
768    for ep in entry_points {
769        let category = match &ep.source {
770            discover::EntryPointSource::PackageJsonMain
771            | discover::EntryPointSource::PackageJsonModule
772            | discover::EntryPointSource::PackageJsonExports
773            | discover::EntryPointSource::PackageJsonBin
774            | discover::EntryPointSource::PackageJsonScript => "package.json",
775            discover::EntryPointSource::Plugin { .. } => "plugin",
776            discover::EntryPointSource::TestFile => "test file",
777            discover::EntryPointSource::DefaultIndex => "default index",
778            discover::EntryPointSource::ManualEntry => "manual entry",
779            discover::EntryPointSource::InfrastructureConfig => "config",
780            discover::EntryPointSource::DynamicallyLoaded => "dynamically loaded",
781        };
782        *counts.entry(category.to_string()).or_insert(0) += 1;
783    }
784    let mut by_source: Vec<(String, usize)> = counts.into_iter().collect();
785    by_source.sort_by(|a, b| b.1.cmp(&a.1).then_with(|| a.0.cmp(&b.0)));
786    results::EntryPointSummary {
787        total: entry_points.len(),
788        by_source,
789    }
790}
791
/// Run plugins for root project and all workspace packages.
///
/// The root package gets a full `registry.run` (external plugins, inline config,
/// etc.); each workspace package gets the cheaper `run_workspace_fast` using
/// matchers and relative paths precompiled once up front. Workspace results are
/// merged into the root result, with file-path patterns re-prefixed so they
/// match from the monorepo root.
///
/// Returns the merged `AggregatedPluginResult`; if the root `package.json`
/// cannot be loaded, starts from an empty default result.
fn run_plugins(
    config: &ResolvedConfig,
    files: &[discover::DiscoveredFile],
    workspaces: &[fallow_config::WorkspaceInfo],
) -> plugins::AggregatedPluginResult {
    let registry = plugins::PluginRegistry::new(config.external_plugins.clone());
    let file_paths: Vec<std::path::PathBuf> = files.iter().map(|f| f.path.clone()).collect();

    // Run plugins for root project (full run with external plugins, inline config, etc.)
    let pkg_path = config.root.join("package.json");
    let mut result = PackageJson::load(&pkg_path).map_or_else(
        |_| plugins::AggregatedPluginResult::default(),
        |pkg| registry.run(&pkg, &config.root, &file_paths),
    );

    // Single-package repos have nothing further to merge.
    if workspaces.is_empty() {
        return result;
    }

    // Pre-compile config matchers and relative files once for all workspace runs.
    // This avoids re-compiling glob patterns and re-computing relative paths per workspace
    // (previously O(workspaces × plugins × files) glob compilations).
    let precompiled_matchers = registry.precompile_config_matchers();
    let relative_files: Vec<(&std::path::PathBuf, String)> = file_paths
        .iter()
        .map(|f| {
            // Paths outside the root (strip_prefix fails) keep their absolute form.
            let rel = f
                .strip_prefix(&config.root)
                .unwrap_or(f)
                .to_string_lossy()
                .into_owned();
            (f, rel)
        })
        .collect();

    // Run plugins for each workspace package in parallel, then merge results.
    // Workspaces without a loadable package.json or with no active plugins are skipped.
    let ws_results: Vec<_> = workspaces
        .par_iter()
        .filter_map(|ws| {
            let ws_pkg_path = ws.root.join("package.json");
            let ws_pkg = PackageJson::load(&ws_pkg_path).ok()?;
            let ws_result = registry.run_workspace_fast(
                &ws_pkg,
                &ws.root,
                &config.root,
                &precompiled_matchers,
                &relative_files,
            );
            if ws_result.active_plugins.is_empty() {
                return None;
            }
            // Root-relative workspace prefix, e.g. "packages/api"; a workspace
            // outside the root keeps its full path.
            let ws_prefix = ws
                .root
                .strip_prefix(&config.root)
                .unwrap_or(&ws.root)
                .to_string_lossy()
                .into_owned();
            Some((ws_result, ws_prefix))
        })
        .collect();

    // Merge workspace results sequentially (deterministic order via par_iter index stability)
    // Track seen names for O(1) dedup instead of O(n) Vec::contains
    let mut seen_plugins: rustc_hash::FxHashSet<String> =
        result.active_plugins.iter().cloned().collect();
    let mut seen_prefixes: rustc_hash::FxHashSet<String> =
        result.virtual_module_prefixes.iter().cloned().collect();
    let mut seen_generated: rustc_hash::FxHashSet<String> =
        result.generated_import_patterns.iter().cloned().collect();
    for (ws_result, ws_prefix) in ws_results {
        // Prefix helper: workspace-relative patterns need the workspace prefix
        // to be matchable from the monorepo root. But patterns that are already
        // project-root-relative (e.g., from angular.json which uses absolute paths
        // like "apps/client/src/styles.css") should not be double-prefixed.
        let prefix_if_needed = |pat: &str| -> String {
            if pat.starts_with(ws_prefix.as_str()) || pat.starts_with('/') {
                pat.to_string()
            } else {
                format!("{ws_prefix}/{pat}")
            }
        };

        // Entry patterns carry their own prefixing logic via `rule.prefixed`.
        for (rule, pname) in &ws_result.entry_patterns {
            result
                .entry_patterns
                .push((rule.prefixed(&ws_prefix), pname.clone()));
        }
        // First workspace to claim a role for a plugin wins (or_insert keeps existing).
        for (plugin_name, role) in ws_result.entry_point_roles {
            result.entry_point_roles.entry(plugin_name).or_insert(role);
        }
        for (pat, pname) in &ws_result.always_used {
            result
                .always_used
                .push((prefix_if_needed(pat), pname.clone()));
        }
        for (pat, pname) in &ws_result.discovered_always_used {
            result
                .discovered_always_used
                .push((prefix_if_needed(pat), pname.clone()));
        }
        for (pat, pname) in &ws_result.fixture_patterns {
            result
                .fixture_patterns
                .push((prefix_if_needed(pat), pname.clone()));
        }
        for rule in &ws_result.used_exports {
            result.used_exports.push(rule.prefixed(&ws_prefix));
        }
        // Merge active plugin names (deduplicated via HashSet)
        for plugin_name in ws_result.active_plugins {
            if !seen_plugins.contains(&plugin_name) {
                seen_plugins.insert(plugin_name.clone());
                result.active_plugins.push(plugin_name);
            }
        }
        // These don't need prefixing (absolute paths / package names)
        result
            .referenced_dependencies
            .extend(ws_result.referenced_dependencies);
        result.setup_files.extend(ws_result.setup_files);
        result
            .tooling_dependencies
            .extend(ws_result.tooling_dependencies);
        // Virtual module prefixes (e.g., Docusaurus @theme/, @site/) are
        // package-name prefixes, not file paths — no workspace prefix needed.
        for prefix in ws_result.virtual_module_prefixes {
            if !seen_prefixes.contains(&prefix) {
                seen_prefixes.insert(prefix.clone());
                result.virtual_module_prefixes.push(prefix);
            }
        }
        // Generated import patterns (e.g., SvelteKit /$types) are suffix
        // matches on specifiers, not file paths — no workspace prefix needed.
        for pattern in ws_result.generated_import_patterns {
            if !seen_generated.contains(&pattern) {
                seen_generated.insert(pattern.clone());
                result.generated_import_patterns.push(pattern);
            }
        }
        // Path aliases from workspace plugins (e.g., SvelteKit $lib/ → src/lib).
        // Prefix the replacement directory so it resolves from the monorepo root.
        for (prefix, replacement) in ws_result.path_aliases {
            result
                .path_aliases
                .push((prefix, format!("{ws_prefix}/{replacement}")));
        }
    }

    result
}
943
944/// Run analysis on a project directory (with export usages for LSP Code Lens).
945///
946/// # Errors
947///
948/// Returns an error if config loading, file discovery, parsing, or analysis fails.
949pub fn analyze_project(root: &Path) -> Result<AnalysisResults, FallowError> {
950    let config = default_config(root);
951    analyze_with_usages(&config)
952}
953
954/// Create a default config for a project root.
955pub(crate) fn default_config(root: &Path) -> ResolvedConfig {
956    let user_config = fallow_config::FallowConfig::find_and_load(root)
957        .ok()
958        .flatten();
959    match user_config {
960        Some((config, _path)) => config.resolve(
961            root.to_path_buf(),
962            fallow_config::OutputFormat::Human,
963            num_cpus(),
964            false,
965            true, // quiet: LSP/programmatic callers don't need progress bars
966        ),
967        None => fallow_config::FallowConfig::default().resolve(
968            root.to_path_buf(),
969            fallow_config::OutputFormat::Human,
970            num_cpus(),
971            false,
972            true,
973        ),
974    }
975}
976
/// Number of logical CPUs to use, falling back to 4 when the
/// parallelism query is unavailable.
fn num_cpus() -> usize {
    match std::thread::available_parallelism() {
        Ok(count) => count.get(),
        Err(_) => 4,
    }
}
980
#[cfg(test)]
mod tests {
    use super::format_undeclared_workspace_warning;
    use std::path::{Path, PathBuf};

    use fallow_config::WorkspaceDiagnostic;

    /// Build a diagnostic for `relative` under `root` with an empty message.
    fn make_diagnostic(root: &Path, relative: &str) -> WorkspaceDiagnostic {
        WorkspaceDiagnostic {
            path: root.join(relative),
            message: String::new(),
        }
    }

    #[test]
    fn undeclared_workspace_warning_is_singular_for_one_path() {
        let root = Path::new("/repo");
        let diagnostics = vec![make_diagnostic(root, "packages/api")];

        let warning = format_undeclared_workspace_warning(root, &diagnostics)
            .expect("warning should be rendered");

        assert_eq!(
            warning,
            "1 directory with package.json is not declared as a workspace: packages/api. Add that path to package.json workspaces or pnpm-workspace.yaml if it should be analyzed as a workspace."
        );
    }

    #[test]
    fn undeclared_workspace_warning_summarizes_many_paths() {
        let root = PathBuf::from("/repo");
        let paths = [
            "examples/a",
            "examples/b",
            "examples/c",
            "examples/d",
            "examples/e",
            "examples/f",
        ];
        let diagnostics: Vec<_> = paths
            .iter()
            .map(|relative| make_diagnostic(&root, relative))
            .collect();

        let warning = format_undeclared_workspace_warning(&root, &diagnostics)
            .expect("warning should be rendered");

        assert_eq!(
            warning,
            "6 directories with package.json are not declared as workspaces: examples/a, examples/b, examples/c, examples/d, examples/e (and 1 more). Add those paths to package.json workspaces or pnpm-workspace.yaml if they should be analyzed as workspaces."
        );
    }
}