//! `fallow_core` — library root (`lib.rs`): orchestrates the analysis pipeline.
1pub mod analyze;
2pub mod cache;
3pub mod changed_files;
4pub mod churn;
5pub mod cross_reference;
6pub mod discover;
7pub mod duplicates;
8pub(crate) mod errors;
9mod external_style_usage;
10pub mod extract;
11pub mod plugins;
12pub(crate) mod progress;
13pub mod results;
14pub(crate) mod scripts;
15pub mod suppress;
16pub mod trace;
17
18// Re-export from fallow-graph for backwards compatibility
19pub use fallow_graph::graph;
20pub use fallow_graph::project;
21pub use fallow_graph::resolve;
22
23use std::path::Path;
24use std::time::Instant;
25
26use errors::FallowError;
27use fallow_config::{
28    EntryPointRole, PackageJson, ResolvedConfig, discover_workspaces,
29    find_undeclared_workspaces_with_ignores,
30};
31use rayon::prelude::*;
32use results::AnalysisResults;
33use rustc_hash::FxHashSet;
34use trace::PipelineTimings;
35
/// Maximum number of undeclared-workspace paths listed in the warning message
/// before collapsing the rest into "(and N more)".
const UNDECLARED_WORKSPACE_WARNING_PREVIEW: usize = 5;
/// A workspace paired with its successfully parsed `package.json`.
type LoadedWorkspacePackage<'a> = (&'a fallow_config::WorkspaceInfo, PackageJson);
38
39fn record_graph_package_usage(
40    graph: &mut graph::ModuleGraph,
41    package_name: &str,
42    file_id: discover::FileId,
43    is_type_only: bool,
44) {
45    graph
46        .package_usage
47        .entry(package_name.to_owned())
48        .or_default()
49        .push(file_id);
50    if is_type_only {
51        graph
52            .type_only_package_usage
53            .entry(package_name.to_owned())
54            .or_default()
55            .push(file_id);
56    }
57}
58
59fn workspace_package_name<'a>(
60    source: &str,
61    workspace_names: &'a FxHashSet<&str>,
62) -> Option<&'a str> {
63    if !resolve::is_bare_specifier(source) {
64        return None;
65    }
66    let package_name = resolve::extract_package_name(source);
67    workspace_names.get(package_name.as_str()).copied()
68}
69
70fn credit_workspace_package_usage(
71    graph: &mut graph::ModuleGraph,
72    resolved: &[resolve::ResolvedModule],
73    workspaces: &[fallow_config::WorkspaceInfo],
74) {
75    if workspaces.is_empty() {
76        return;
77    }
78
79    let workspace_names: FxHashSet<&str> = workspaces.iter().map(|ws| ws.name.as_str()).collect();
80    for module in resolved {
81        for import in module
82            .resolved_imports
83            .iter()
84            .chain(module.resolved_dynamic_imports.iter())
85        {
86            if matches!(import.target, resolve::ResolveResult::InternalModule(_))
87                && let Some(package_name) =
88                    workspace_package_name(&import.info.source, &workspace_names)
89            {
90                record_graph_package_usage(
91                    graph,
92                    package_name,
93                    module.file_id,
94                    import.info.is_type_only,
95                );
96            }
97        }
98
99        for re_export in &module.re_exports {
100            if matches!(re_export.target, resolve::ResolveResult::InternalModule(_))
101                && let Some(package_name) =
102                    workspace_package_name(&re_export.info.source, &workspace_names)
103            {
104                record_graph_package_usage(
105                    graph,
106                    package_name,
107                    module.file_id,
108                    re_export.info.is_type_only,
109                );
110            }
111        }
112    }
113}
114
/// Result of the full analysis pipeline, including optional performance timings.
pub struct AnalysisOutput {
    /// Findings produced by the dead-code analysis stage.
    pub results: AnalysisResults,
    /// Per-stage timings, populated only when the caller requested tracing.
    pub timings: Option<PipelineTimings>,
    /// Module graph, populated only when the caller requested graph retention.
    pub graph: Option<graph::ModuleGraph>,
    /// Parsed modules from the pipeline, available when `retain_modules` is true.
    /// Used by the combined command to share a single parse across dead-code and health.
    pub modules: Option<Vec<extract::ModuleInfo>>,
    /// Discovered files from the pipeline, available when `retain_modules` is true.
    pub files: Option<Vec<discover::DiscoveredFile>>,
    /// Package names invoked from package.json scripts and CI configs, mirroring
    /// what the unused-deps detector consults. Populated for every pipeline run;
    /// trace tooling reads it so `trace_dependency` agrees with `unused-deps` on
    /// "used vs unused" instead of returning false-negatives for script-only deps.
    pub script_used_packages: rustc_hash::FxHashSet<String>,
}
131
132/// Update cache: write freshly parsed modules and refresh stale mtime/size entries.
133fn update_cache(
134    store: &mut cache::CacheStore,
135    modules: &[extract::ModuleInfo],
136    files: &[discover::DiscoveredFile],
137) {
138    for module in modules {
139        if let Some(file) = files.get(module.file_id.0 as usize) {
140            let (mt, sz) = file_mtime_and_size(&file.path);
141            // If content hash matches, just refresh mtime/size if stale (e.g. `touch`ed file)
142            if let Some(cached) = store.get_by_path_only(&file.path)
143                && cached.content_hash == module.content_hash
144            {
145                if cached.mtime_secs != mt || cached.file_size != sz {
146                    store.insert(&file.path, cache::module_to_cached(module, mt, sz));
147                }
148                continue;
149            }
150            store.insert(&file.path, cache::module_to_cached(module, mt, sz));
151        }
152    }
153    store.retain_paths(files);
154}
155
/// Extract mtime (seconds since epoch) and file size from a path.
///
/// Any failure — missing file, unreadable metadata, or a modification time
/// before the Unix epoch — degrades the affected value to `0`.
fn file_mtime_and_size(path: &std::path::Path) -> (u64, u64) {
    let Ok(meta) = std::fs::metadata(path) else {
        return (0, 0);
    };
    let mtime_secs = meta
        .modified()
        .ok()
        .and_then(|t| t.duration_since(std::time::SystemTime::UNIX_EPOCH).ok())
        .map_or(0, |d| d.as_secs());
    (mtime_secs, meta.len())
}
167
168fn format_undeclared_workspace_warning(
169    root: &Path,
170    undeclared: &[fallow_config::WorkspaceDiagnostic],
171) -> Option<String> {
172    if undeclared.is_empty() {
173        return None;
174    }
175
176    let preview = undeclared
177        .iter()
178        .take(UNDECLARED_WORKSPACE_WARNING_PREVIEW)
179        .map(|diag| {
180            diag.path
181                .strip_prefix(root)
182                .unwrap_or(&diag.path)
183                .display()
184                .to_string()
185                .replace('\\', "/")
186        })
187        .collect::<Vec<_>>();
188    let remaining = undeclared
189        .len()
190        .saturating_sub(UNDECLARED_WORKSPACE_WARNING_PREVIEW);
191    let tail = if remaining > 0 {
192        format!(" (and {remaining} more)")
193    } else {
194        String::new()
195    };
196    let noun = if undeclared.len() == 1 {
197        "directory with package.json is"
198    } else {
199        "directories with package.json are"
200    };
201    let guidance = if undeclared.len() == 1 {
202        "Add that path to package.json workspaces or pnpm-workspace.yaml if it should be analyzed as a workspace."
203    } else {
204        "Add those paths to package.json workspaces or pnpm-workspace.yaml if they should be analyzed as workspaces."
205    };
206
207    Some(format!(
208        "{} {} not declared as {}: {}{}. {}",
209        undeclared.len(),
210        noun,
211        if undeclared.len() == 1 {
212            "a workspace"
213        } else {
214            "workspaces"
215        },
216        preview.join(", "),
217        tail,
218        guidance
219    ))
220}
221
222fn warn_undeclared_workspaces(
223    root: &Path,
224    workspaces_vec: &[fallow_config::WorkspaceInfo],
225    ignore_patterns: &globset::GlobSet,
226    quiet: bool,
227) {
228    if quiet {
229        return;
230    }
231
232    let undeclared = find_undeclared_workspaces_with_ignores(root, workspaces_vec, ignore_patterns);
233    if let Some(message) = format_undeclared_workspace_warning(root, &undeclared) {
234        tracing::warn!("{message}");
235    }
236}
237
238/// Run the full analysis pipeline.
239///
240/// # Errors
241///
242/// Returns an error if file discovery, parsing, or analysis fails.
243pub fn analyze(config: &ResolvedConfig) -> Result<AnalysisResults, FallowError> {
244    let output = analyze_full(config, false, false, false, false)?;
245    Ok(output.results)
246}
247
248/// Run the full analysis pipeline with export usage collection (for LSP Code Lens).
249///
250/// # Errors
251///
252/// Returns an error if file discovery, parsing, or analysis fails.
253pub fn analyze_with_usages(config: &ResolvedConfig) -> Result<AnalysisResults, FallowError> {
254    let output = analyze_full(config, false, true, false, false)?;
255    Ok(output.results)
256}
257
/// Run the full analysis pipeline with optional performance timings and graph retention.
///
/// Both per-stage timings and the module graph are retained in the returned
/// [`AnalysisOutput`]; export usages and complexity data are not collected.
///
/// # Errors
///
/// Returns an error if file discovery, parsing, or analysis fails.
pub fn analyze_with_trace(config: &ResolvedConfig) -> Result<AnalysisOutput, FallowError> {
    analyze_full(config, true, false, false, false)
}
266
/// Run the full analysis pipeline, retaining parsed modules and discovered files.
///
/// Used by the combined command to share a single parse across dead-code and health.
/// When `need_complexity` is true, the `ComplexityVisitor` runs during parsing so
/// the returned modules contain per-function complexity data. When `retain_graph`
/// is true, the module graph (and timings) are also kept in the output.
///
/// # Errors
///
/// Returns an error if file discovery, parsing, or analysis fails.
pub fn analyze_retaining_modules(
    config: &ResolvedConfig,
    need_complexity: bool,
    retain_graph: bool,
) -> Result<AnalysisOutput, FallowError> {
    analyze_full(config, retain_graph, false, need_complexity, true)
}
283
/// Run the analysis pipeline using pre-parsed modules, skipping the parsing stage.
///
/// This avoids re-parsing files when the caller already has a `ParseResult` (e.g., from
/// `fallow_core::extract::parse_all_files`). Discovery, plugins, scripts, entry points,
/// import resolution, graph construction, and dead code detection still run normally.
/// The graph is always retained (needed for file scores).
///
/// Timings are always populated; `modules` and `files` in the output are `None`
/// because the caller already owns the parsed modules.
///
/// # Errors
///
/// Returns an error if discovery, graph construction, or analysis fails.
#[allow(
    clippy::too_many_lines,
    reason = "pipeline orchestration stays easier to audit in one place"
)]
pub fn analyze_with_parse_result(
    config: &ResolvedConfig,
    modules: &[extract::ModuleInfo],
) -> Result<AnalysisOutput, FallowError> {
    let _span = tracing::info_span!("fallow_analyze_with_parse_result").entered();
    let pipeline_start = Instant::now();

    // Progress spinners only for interactive, human-readable output formats.
    let show_progress = !config.quiet
        && std::io::IsTerminal::is_terminal(&std::io::stderr())
        && matches!(
            config.output,
            fallow_config::OutputFormat::Human
                | fallow_config::OutputFormat::Compact
                | fallow_config::OutputFormat::Markdown
        );
    let progress = progress::AnalysisProgress::new(show_progress);

    // Warn if node_modules is missing — resolution quality degrades without it.
    if !config.root.join("node_modules").is_dir() {
        tracing::warn!(
            "node_modules directory not found. Run `npm install` / `pnpm install` first for accurate results."
        );
    }

    // Discover workspaces
    let t = Instant::now();
    let workspaces_vec = discover_workspaces(&config.root);
    let workspaces_ms = t.elapsed().as_secs_f64() * 1000.0;
    if !workspaces_vec.is_empty() {
        tracing::info!(count = workspaces_vec.len(), "workspaces discovered");
    }

    // Warn about directories with package.json not declared as workspaces
    warn_undeclared_workspaces(
        &config.root,
        &workspaces_vec,
        &config.ignore_patterns,
        config.quiet,
    );

    // Stage 1: Discover files (cheap — needed for file registry and resolution)
    let t = Instant::now();
    let pb = progress.stage_spinner("Discovering files...");
    let discovered_files = discover::discover_files(config);
    let discover_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    let project = project::ProjectState::new(discovered_files, workspaces_vec);
    let files = project.files();
    let workspaces = project.workspaces();
    let root_pkg = load_root_package_json(config);
    let workspace_pkgs = load_workspace_packages(workspaces);

    // Stage 1.5: Run plugin system
    let t = Instant::now();
    let pb = progress.stage_spinner("Detecting plugins...");
    let mut plugin_result = run_plugins(
        config,
        files,
        workspaces,
        root_pkg.as_ref(),
        &workspace_pkgs,
    );
    let plugins_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Stage 1.6: Analyze package.json scripts
    let t = Instant::now();
    analyze_all_scripts(
        config,
        workspaces,
        root_pkg.as_ref(),
        &workspace_pkgs,
        &mut plugin_result,
    );
    let scripts_ms = t.elapsed().as_secs_f64() * 1000.0;

    // Stage 2: SKIPPED — using pre-parsed modules from caller

    // Stage 3: Discover entry points
    let t = Instant::now();
    let entry_points = discover_all_entry_points(
        config,
        files,
        workspaces,
        root_pkg.as_ref(),
        &workspace_pkgs,
        &plugin_result,
    );
    let entry_points_ms = t.elapsed().as_secs_f64() * 1000.0;

    // Compute entry-point summary before the graph consumes the entry_points vec
    let ep_summary = summarize_entry_points(&entry_points.all);

    // Stage 4: Resolve imports to file IDs
    let t = Instant::now();
    let pb = progress.stage_spinner("Resolving imports...");
    let mut resolved = resolve::resolve_all_imports(
        modules,
        files,
        workspaces,
        &plugin_result.active_plugins,
        &plugin_result.path_aliases,
        &plugin_result.scss_include_paths,
        &config.root,
        &config.resolve.conditions,
    );
    external_style_usage::augment_external_style_package_usage(
        &mut resolved,
        config,
        workspaces,
        &plugin_result,
    );
    let resolve_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Stage 5: Build module graph
    let t = Instant::now();
    let pb = progress.stage_spinner("Building module graph...");
    let mut graph = graph::ModuleGraph::build_with_reachability_roots(
        &resolved,
        &entry_points.all,
        &entry_points.runtime,
        &entry_points.test,
        files,
    );
    credit_workspace_package_usage(&mut graph, &resolved, workspaces);
    let graph_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Stage 6: Analyze for dead code
    let t = Instant::now();
    let pb = progress.stage_spinner("Analyzing...");
    let mut result = analyze::find_dead_code_full(
        &graph,
        config,
        &resolved,
        Some(&plugin_result),
        workspaces,
        modules,
        false,
    );
    let analyze_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();
    progress.finish();

    result.entry_point_summary = Some(ep_summary);

    let total_ms = pipeline_start.elapsed().as_secs_f64() * 1000.0;

    tracing::debug!(
        "\n┌─ Pipeline Profile (reuse) ─────────────────────\n\
         │  discover files:   {:>8.1}ms  ({} files)\n\
         │  workspaces:       {:>8.1}ms\n\
         │  plugins:          {:>8.1}ms\n\
         │  script analysis:  {:>8.1}ms\n\
         │  parse/extract:    SKIPPED (reused {} modules)\n\
         │  entry points:     {:>8.1}ms  ({} entries)\n\
         │  resolve imports:  {:>8.1}ms\n\
         │  build graph:      {:>8.1}ms\n\
         │  analyze:          {:>8.1}ms\n\
         │  ────────────────────────────────────────────\n\
         │  TOTAL:            {:>8.1}ms\n\
         └─────────────────────────────────────────────────",
        discover_ms,
        files.len(),
        workspaces_ms,
        plugins_ms,
        scripts_ms,
        modules.len(),
        entry_points_ms,
        entry_points.all.len(),
        resolve_ms,
        graph_ms,
        analyze_ms,
        total_ms,
    );

    let timings = Some(PipelineTimings {
        discover_files_ms: discover_ms,
        file_count: files.len(),
        workspaces_ms,
        workspace_count: workspaces.len(),
        plugins_ms,
        script_analysis_ms: scripts_ms,
        parse_extract_ms: 0.0, // Skipped — modules were reused
        module_count: modules.len(),
        cache_hits: 0,
        cache_misses: 0,
        cache_update_ms: 0.0,
        entry_points_ms,
        entry_point_count: entry_points.all.len(),
        resolve_imports_ms: resolve_ms,
        build_graph_ms: graph_ms,
        analyze_ms,
        duplication_ms: None,
        total_ms,
    });

    Ok(AnalysisOutput {
        results: result,
        timings,
        graph: Some(graph),
        modules: None,
        files: None,
        script_used_packages: plugin_result.script_used_packages.clone(),
    })
}
505
/// Core pipeline shared by every public `analyze*` entry point.
///
/// Flags: `retain` keeps timings and the module graph in the output;
/// `collect_usages` gathers export usages (LSP Code Lens); `need_complexity`
/// runs the complexity visitor during parsing; `retain_modules` keeps the
/// parsed modules and discovered files for reuse by the caller.
#[expect(
    clippy::unnecessary_wraps,
    reason = "Result kept for future error handling"
)]
#[expect(
    clippy::too_many_lines,
    reason = "main pipeline function; sequential phases are held together for clarity"
)]
fn analyze_full(
    config: &ResolvedConfig,
    retain: bool,
    collect_usages: bool,
    need_complexity: bool,
    retain_modules: bool,
) -> Result<AnalysisOutput, FallowError> {
    let _span = tracing::info_span!("fallow_analyze").entered();
    let pipeline_start = Instant::now();

    // Progress bars: enabled when not quiet, stderr is a terminal, and output is human-readable.
    // Structured formats (JSON, SARIF) suppress spinners even on TTY — users piping structured
    // output don't expect progress noise on stderr.
    let show_progress = !config.quiet
        && std::io::IsTerminal::is_terminal(&std::io::stderr())
        && matches!(
            config.output,
            fallow_config::OutputFormat::Human
                | fallow_config::OutputFormat::Compact
                | fallow_config::OutputFormat::Markdown
        );
    let progress = progress::AnalysisProgress::new(show_progress);

    // Warn if node_modules is missing — resolution will be severely degraded
    if !config.root.join("node_modules").is_dir() {
        tracing::warn!(
            "node_modules directory not found. Run `npm install` / `pnpm install` first for accurate results."
        );
    }

    // Discover workspaces if in a monorepo
    let t = Instant::now();
    let workspaces_vec = discover_workspaces(&config.root);
    let workspaces_ms = t.elapsed().as_secs_f64() * 1000.0;
    if !workspaces_vec.is_empty() {
        tracing::info!(count = workspaces_vec.len(), "workspaces discovered");
    }

    // Warn about directories with package.json not declared as workspaces
    warn_undeclared_workspaces(
        &config.root,
        &workspaces_vec,
        &config.ignore_patterns,
        config.quiet,
    );

    // Stage 1: Discover all source files
    let t = Instant::now();
    let pb = progress.stage_spinner("Discovering files...");
    let discovered_files = discover::discover_files(config);
    let discover_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Build ProjectState: owns the file registry with stable FileIds and workspace metadata.
    // This is the foundation for cross-workspace resolution and future incremental analysis.
    let project = project::ProjectState::new(discovered_files, workspaces_vec);
    let files = project.files();
    let workspaces = project.workspaces();
    let root_pkg = load_root_package_json(config);
    let workspace_pkgs = load_workspace_packages(workspaces);

    // Stage 1.5: Run plugin system — parse config files, discover dynamic entries
    let t = Instant::now();
    let pb = progress.stage_spinner("Detecting plugins...");
    let mut plugin_result = run_plugins(
        config,
        files,
        workspaces,
        root_pkg.as_ref(),
        &workspace_pkgs,
    );
    let plugins_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Stage 1.6: Analyze package.json scripts for binary usage and config file refs
    let t = Instant::now();
    analyze_all_scripts(
        config,
        workspaces,
        root_pkg.as_ref(),
        &workspace_pkgs,
        &mut plugin_result,
    );
    let scripts_ms = t.elapsed().as_secs_f64() * 1000.0;

    // Stage 2: Parse all files in parallel and extract imports/exports
    let t = Instant::now();
    let pb = progress.stage_spinner(&format!("Parsing {} files...", files.len()));
    let mut cache_store = if config.no_cache {
        None
    } else {
        cache::CacheStore::load(&config.cache_dir)
    };

    let parse_result = extract::parse_all_files(files, cache_store.as_ref(), need_complexity);
    let modules = parse_result.modules;
    let cache_hits = parse_result.cache_hits;
    let cache_misses = parse_result.cache_misses;
    let parse_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Update cache with freshly parsed modules and refresh stale mtime/size entries.
    let t = Instant::now();
    if !config.no_cache {
        let store = cache_store.get_or_insert_with(cache::CacheStore::new);
        update_cache(store, &modules, files);
        if let Err(e) = store.save(&config.cache_dir) {
            // Cache failures are non-fatal: analysis results are still valid.
            tracing::warn!("Failed to save cache: {e}");
        }
    }
    let cache_ms = t.elapsed().as_secs_f64() * 1000.0;

    // Stage 3: Discover entry points (static patterns + plugin-discovered patterns)
    let t = Instant::now();
    let entry_points = discover_all_entry_points(
        config,
        files,
        workspaces,
        root_pkg.as_ref(),
        &workspace_pkgs,
        &plugin_result,
    );
    let entry_points_ms = t.elapsed().as_secs_f64() * 1000.0;

    // Stage 4: Resolve imports to file IDs
    let t = Instant::now();
    let pb = progress.stage_spinner("Resolving imports...");
    let mut resolved = resolve::resolve_all_imports(
        &modules,
        files,
        workspaces,
        &plugin_result.active_plugins,
        &plugin_result.path_aliases,
        &plugin_result.scss_include_paths,
        &config.root,
        &config.resolve.conditions,
    );
    external_style_usage::augment_external_style_package_usage(
        &mut resolved,
        config,
        workspaces,
        &plugin_result,
    );
    let resolve_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Stage 5: Build module graph
    let t = Instant::now();
    let pb = progress.stage_spinner("Building module graph...");
    let mut graph = graph::ModuleGraph::build_with_reachability_roots(
        &resolved,
        &entry_points.all,
        &entry_points.runtime,
        &entry_points.test,
        files,
    );
    credit_workspace_package_usage(&mut graph, &resolved, workspaces);
    let graph_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Compute entry-point summary before the graph consumes the entry_points vec
    let ep_summary = summarize_entry_points(&entry_points.all);

    // Stage 6: Analyze for dead code (with plugin context and workspace info)
    let t = Instant::now();
    let pb = progress.stage_spinner("Analyzing...");
    let mut result = analyze::find_dead_code_full(
        &graph,
        config,
        &resolved,
        Some(&plugin_result),
        workspaces,
        &modules,
        collect_usages,
    );
    let analyze_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();
    progress.finish();

    result.entry_point_summary = Some(ep_summary);

    let total_ms = pipeline_start.elapsed().as_secs_f64() * 1000.0;

    let cache_summary = if cache_hits > 0 {
        format!(" ({cache_hits} cached, {cache_misses} parsed)")
    } else {
        String::new()
    };

    tracing::debug!(
        "\n┌─ Pipeline Profile ─────────────────────────────\n\
         │  discover files:   {:>8.1}ms  ({} files)\n\
         │  workspaces:       {:>8.1}ms\n\
         │  plugins:          {:>8.1}ms\n\
         │  script analysis:  {:>8.1}ms\n\
         │  parse/extract:    {:>8.1}ms  ({} modules{})\n\
         │  cache update:     {:>8.1}ms\n\
         │  entry points:     {:>8.1}ms  ({} entries)\n\
         │  resolve imports:  {:>8.1}ms\n\
         │  build graph:      {:>8.1}ms\n\
         │  analyze:          {:>8.1}ms\n\
         │  ────────────────────────────────────────────\n\
         │  TOTAL:            {:>8.1}ms\n\
         └─────────────────────────────────────────────────",
        discover_ms,
        files.len(),
        workspaces_ms,
        plugins_ms,
        scripts_ms,
        parse_ms,
        modules.len(),
        cache_summary,
        cache_ms,
        entry_points_ms,
        entry_points.all.len(),
        resolve_ms,
        graph_ms,
        analyze_ms,
        total_ms,
    );

    let timings = if retain {
        Some(PipelineTimings {
            discover_files_ms: discover_ms,
            file_count: files.len(),
            workspaces_ms,
            workspace_count: workspaces.len(),
            plugins_ms,
            script_analysis_ms: scripts_ms,
            parse_extract_ms: parse_ms,
            module_count: modules.len(),
            cache_hits,
            cache_misses,
            cache_update_ms: cache_ms,
            entry_points_ms,
            entry_point_count: entry_points.all.len(),
            resolve_imports_ms: resolve_ms,
            build_graph_ms: graph_ms,
            analyze_ms,
            duplication_ms: None,
            total_ms,
        })
    } else {
        None
    };

    Ok(AnalysisOutput {
        results: result,
        timings,
        graph: if retain { Some(graph) } else { None },
        modules: if retain_modules { Some(modules) } else { None },
        files: if retain_modules {
            Some(files.to_vec())
        } else {
            None
        },
        script_used_packages: plugin_result.script_used_packages,
    })
}
773
/// Load the project root's `package.json`, returning `None` when it is
/// missing or fails to parse.
///
/// NOTE(review): the doc comment previously attached here described script
/// analysis; it belonged on `analyze_all_scripts` below.
fn load_root_package_json(config: &ResolvedConfig) -> Option<PackageJson> {
    PackageJson::load(&config.root.join("package.json")).ok()
}
781
782fn load_workspace_packages(
783    workspaces: &[fallow_config::WorkspaceInfo],
784) -> Vec<LoadedWorkspacePackage<'_>> {
785    workspaces
786        .iter()
787        .filter_map(|ws| {
788            PackageJson::load(&ws.root.join("package.json"))
789                .ok()
790                .map(|pkg| (ws, pkg))
791        })
792        .collect()
793}
794
/// Analyze package.json scripts from root and all workspace packages.
///
/// Populates `plugin_result` with script-used packages and config-file entry
/// patterns, and also scans CI config files for binary invocations.
fn analyze_all_scripts(
    config: &ResolvedConfig,
    workspaces: &[fallow_config::WorkspaceInfo],
    root_pkg: Option<&PackageJson>,
    workspace_pkgs: &[LoadedWorkspacePackage<'_>],
    plugin_result: &mut plugins::AggregatedPluginResult,
) {
    // Collect all dependency names to build the bin-name → package-name reverse map.
    // This resolves binaries like "attw" to "@arethetypeswrong/cli" even without
    // node_modules/.bin symlinks.
    let mut all_dep_names: Vec<String> = Vec::new();
    if let Some(pkg) = root_pkg {
        all_dep_names.extend(pkg.all_dependency_names());
    }
    for (_, ws_pkg) in workspace_pkgs {
        all_dep_names.extend(ws_pkg.all_dependency_names());
    }
    all_dep_names.sort_unstable();
    all_dep_names.dedup();

    // Probe node_modules/ at project root and each workspace root so non-hoisted
    // deps (pnpm strict, Yarn workspaces) are also discovered.
    let mut nm_roots: Vec<&std::path::Path> = Vec::new();
    if config.root.join("node_modules").is_dir() {
        nm_roots.push(&config.root);
    }
    for ws in workspaces {
        if ws.root.join("node_modules").is_dir() {
            nm_roots.push(&ws.root);
        }
    }
    let bin_map = scripts::build_bin_to_package_map(&nm_roots, &all_dep_names);

    // Root package scripts: in production mode only production-relevant scripts count.
    if let Some(pkg) = root_pkg
        && let Some(ref pkg_scripts) = pkg.scripts
    {
        let scripts_to_analyze = if config.production {
            scripts::filter_production_scripts(pkg_scripts)
        } else {
            pkg_scripts.clone()
        };
        let script_analysis = scripts::analyze_scripts(&scripts_to_analyze, &config.root, &bin_map);
        plugin_result.script_used_packages = script_analysis.used_packages;

        for config_file in &script_analysis.config_files {
            plugin_result
                .discovered_always_used
                .push((config_file.clone(), "scripts".to_string()));
        }
        for entry in &script_analysis.entry_files {
            if let Some(pat) = scripts::normalize_script_entry_pattern("", entry) {
                plugin_result
                    .entry_patterns
                    .push((plugins::PathRule::new(pat), "scripts".to_string()));
            }
        }
    }
    // Workspace scripts: same treatment, with paths prefixed by the workspace dir.
    for (ws, ws_pkg) in workspace_pkgs {
        if let Some(ref ws_scripts) = ws_pkg.scripts {
            let scripts_to_analyze = if config.production {
                scripts::filter_production_scripts(ws_scripts)
            } else {
                ws_scripts.clone()
            };
            let ws_analysis = scripts::analyze_scripts(&scripts_to_analyze, &ws.root, &bin_map);
            plugin_result
                .script_used_packages
                .extend(ws_analysis.used_packages);

            let ws_prefix = ws
                .root
                .strip_prefix(&config.root)
                .unwrap_or(&ws.root)
                .to_string_lossy();
            for config_file in &ws_analysis.config_files {
                plugin_result
                    .discovered_always_used
                    .push((format!("{ws_prefix}/{config_file}"), "scripts".to_string()));
            }
            for entry in &ws_analysis.entry_files {
                if let Some(pat) = scripts::normalize_script_entry_pattern(&ws_prefix, entry) {
                    plugin_result
                        .entry_patterns
                        .push((plugins::PathRule::new(pat), "scripts".to_string()));
                }
            }
        }
    }

    // Scan CI config files for binary invocations and positional file references.
    // Returns both packages used by CI tooling AND project-relative file paths
    // referenced as command-line arguments (e.g., `node scripts/deploy.ts` in a
    // GitHub Actions `run:` block) so the referenced files become reachable
    // entry points. CI files always live at the project root, so file paths
    // need no workspace-prefix transformation. See issue #195 (Case D).
    let ci_analysis = scripts::ci::analyze_ci_files(&config.root, &bin_map);
    plugin_result
        .script_used_packages
        .extend(ci_analysis.used_packages);
    for entry in &ci_analysis.entry_files {
        if let Some(pat) = scripts::normalize_script_entry_pattern("", entry) {
            plugin_result
                .entry_patterns
                .push((plugins::PathRule::new(pat), "scripts".to_string()));
        }
    }
    plugin_result
        .entry_point_roles
        .entry("scripts".to_string())
        .or_insert(EntryPointRole::Support);
}
906
907/// Discover all entry points from static patterns, workspaces, plugins, and infrastructure.
908fn discover_all_entry_points(
909    config: &ResolvedConfig,
910    files: &[discover::DiscoveredFile],
911    workspaces: &[fallow_config::WorkspaceInfo],
912    root_pkg: Option<&PackageJson>,
913    workspace_pkgs: &[LoadedWorkspacePackage<'_>],
914    plugin_result: &plugins::AggregatedPluginResult,
915) -> discover::CategorizedEntryPoints {
916    let mut entry_points = discover::CategorizedEntryPoints::default();
917    let root_discovery = discover::discover_entry_points_with_warnings_from_pkg(
918        config,
919        files,
920        root_pkg,
921        workspaces.is_empty(),
922    );
923
924    let workspace_pkg_by_root: rustc_hash::FxHashMap<std::path::PathBuf, &PackageJson> =
925        workspace_pkgs
926            .iter()
927            .map(|(ws, pkg)| (ws.root.clone(), pkg))
928            .collect();
929
930    let workspace_discovery: Vec<discover::EntryPointDiscovery> = workspaces
931        .par_iter()
932        .map(|ws| {
933            let pkg = workspace_pkg_by_root.get(&ws.root).copied();
934            discover::discover_workspace_entry_points_with_warnings_from_pkg(&ws.root, files, pkg)
935        })
936        .collect();
937    let mut skipped_entries = rustc_hash::FxHashMap::default();
938    entry_points.extend_runtime(root_discovery.entries);
939    for (path, count) in root_discovery.skipped_entries {
940        *skipped_entries.entry(path).or_insert(0) += count;
941    }
942    let mut ws_entries = Vec::new();
943    for workspace in workspace_discovery {
944        ws_entries.extend(workspace.entries);
945        for (path, count) in workspace.skipped_entries {
946            *skipped_entries.entry(path).or_insert(0) += count;
947        }
948    }
949    discover::warn_skipped_entry_summary(&skipped_entries);
950    entry_points.extend_runtime(ws_entries);
951
952    let plugin_entries = discover::discover_plugin_entry_point_sets(plugin_result, config, files);
953    entry_points.extend(plugin_entries);
954
955    let infra_entries = discover::discover_infrastructure_entry_points(&config.root);
956    entry_points.extend_runtime(infra_entries);
957
958    // Add dynamically loaded files from config as entry points
959    if !config.dynamically_loaded.is_empty() {
960        let dynamic_entries = discover::discover_dynamically_loaded_entry_points(config, files);
961        entry_points.extend_runtime(dynamic_entries);
962    }
963
964    entry_points.dedup()
965}
966
967/// Summarize entry points by source category for user-facing output.
968fn summarize_entry_points(entry_points: &[discover::EntryPoint]) -> results::EntryPointSummary {
969    let mut counts: rustc_hash::FxHashMap<String, usize> = rustc_hash::FxHashMap::default();
970    for ep in entry_points {
971        let category = match &ep.source {
972            discover::EntryPointSource::PackageJsonMain
973            | discover::EntryPointSource::PackageJsonModule
974            | discover::EntryPointSource::PackageJsonExports
975            | discover::EntryPointSource::PackageJsonBin
976            | discover::EntryPointSource::PackageJsonScript => "package.json",
977            discover::EntryPointSource::Plugin { .. } => "plugin",
978            discover::EntryPointSource::TestFile => "test file",
979            discover::EntryPointSource::DefaultIndex => "default index",
980            discover::EntryPointSource::ManualEntry => "manual entry",
981            discover::EntryPointSource::InfrastructureConfig => "config",
982            discover::EntryPointSource::DynamicallyLoaded => "dynamically loaded",
983        };
984        *counts.entry(category.to_string()).or_insert(0) += 1;
985    }
986    let mut by_source: Vec<(String, usize)> = counts.into_iter().collect();
987    by_source.sort_by(|a, b| b.1.cmp(&a.1).then_with(|| a.0.cmp(&b.0)));
988    results::EntryPointSummary {
989        total: entry_points.len(),
990        by_source,
991    }
992}
993
994/// Run plugins for root project and all workspace packages.
995fn run_plugins(
996    config: &ResolvedConfig,
997    files: &[discover::DiscoveredFile],
998    workspaces: &[fallow_config::WorkspaceInfo],
999    root_pkg: Option<&PackageJson>,
1000    workspace_pkgs: &[LoadedWorkspacePackage<'_>],
1001) -> plugins::AggregatedPluginResult {
1002    let registry = plugins::PluginRegistry::new(config.external_plugins.clone());
1003    let file_paths: Vec<std::path::PathBuf> = files.iter().map(|f| f.path.clone()).collect();
1004    let root_config_search_roots = collect_config_search_roots(&config.root, &file_paths);
1005    let root_config_search_root_refs: Vec<&Path> = root_config_search_roots
1006        .iter()
1007        .map(std::path::PathBuf::as_path)
1008        .collect();
1009
1010    // Run plugins for root project (full run with external plugins, inline config, etc.)
1011    let mut result = root_pkg.map_or_else(plugins::AggregatedPluginResult::default, |pkg| {
1012        registry.run_with_search_roots(
1013            pkg,
1014            &config.root,
1015            &file_paths,
1016            &root_config_search_root_refs,
1017        )
1018    });
1019
1020    if workspaces.is_empty() {
1021        return result;
1022    }
1023
1024    let root_active_plugins: rustc_hash::FxHashSet<&str> =
1025        result.active_plugins.iter().map(String::as_str).collect();
1026
1027    // Pre-compile config matchers once and bucket source files by workspace.
1028    // Workspace config matching can then scan only files below that workspace
1029    // instead of every project file for every active matcher.
1030    let precompiled_matchers = registry.precompile_config_matchers();
1031    let workspace_relative_files = bucket_files_by_workspace(workspace_pkgs, &file_paths);
1032
1033    // Run plugins for each workspace package in parallel, then merge results.
1034    let ws_results: Vec<_> = workspace_pkgs
1035        .par_iter()
1036        .zip(workspace_relative_files.par_iter())
1037        .filter_map(|((ws, ws_pkg), relative_files)| {
1038            let ws_result = registry.run_workspace_fast(
1039                ws_pkg,
1040                &ws.root,
1041                &config.root,
1042                &precompiled_matchers,
1043                relative_files,
1044                &root_active_plugins,
1045            );
1046            if ws_result.active_plugins.is_empty() {
1047                return None;
1048            }
1049            let ws_prefix = ws
1050                .root
1051                .strip_prefix(&config.root)
1052                .unwrap_or(&ws.root)
1053                .to_string_lossy()
1054                .into_owned();
1055            Some((ws_result, ws_prefix))
1056        })
1057        .collect();
1058
1059    // Merge workspace results sequentially (deterministic order via par_iter index stability)
1060    // Track seen names for O(1) dedup instead of O(n) Vec::contains
1061    let mut seen_plugins: rustc_hash::FxHashSet<String> =
1062        result.active_plugins.iter().cloned().collect();
1063    let mut seen_prefixes: rustc_hash::FxHashSet<String> =
1064        result.virtual_module_prefixes.iter().cloned().collect();
1065    let mut seen_generated: rustc_hash::FxHashSet<String> =
1066        result.generated_import_patterns.iter().cloned().collect();
1067    for (ws_result, ws_prefix) in ws_results {
1068        // Prefix helper: workspace-relative patterns need the workspace prefix
1069        // to be matchable from the monorepo root. But patterns that are already
1070        // project-root-relative (e.g., from angular.json which uses absolute paths
1071        // like "apps/client/src/styles.css") should not be double-prefixed.
1072        let prefix_if_needed = |pat: &str| -> String {
1073            if pat.starts_with(ws_prefix.as_str()) || pat.starts_with('/') {
1074                pat.to_string()
1075            } else {
1076                format!("{ws_prefix}/{pat}")
1077            }
1078        };
1079
1080        for (rule, pname) in &ws_result.entry_patterns {
1081            result
1082                .entry_patterns
1083                .push((rule.prefixed(&ws_prefix), pname.clone()));
1084        }
1085        for (plugin_name, role) in ws_result.entry_point_roles {
1086            result.entry_point_roles.entry(plugin_name).or_insert(role);
1087        }
1088        for (pat, pname) in &ws_result.always_used {
1089            result
1090                .always_used
1091                .push((prefix_if_needed(pat), pname.clone()));
1092        }
1093        for (pat, pname) in &ws_result.discovered_always_used {
1094            result
1095                .discovered_always_used
1096                .push((prefix_if_needed(pat), pname.clone()));
1097        }
1098        for (pat, pname) in &ws_result.fixture_patterns {
1099            result
1100                .fixture_patterns
1101                .push((prefix_if_needed(pat), pname.clone()));
1102        }
1103        for rule in &ws_result.used_exports {
1104            result.used_exports.push(rule.prefixed(&ws_prefix));
1105        }
1106        // Merge active plugin names (deduplicated via HashSet)
1107        for plugin_name in ws_result.active_plugins {
1108            if !seen_plugins.contains(&plugin_name) {
1109                seen_plugins.insert(plugin_name.clone());
1110                result.active_plugins.push(plugin_name);
1111            }
1112        }
1113        // These don't need prefixing (absolute paths / package names)
1114        result
1115            .referenced_dependencies
1116            .extend(ws_result.referenced_dependencies);
1117        result.setup_files.extend(ws_result.setup_files);
1118        result
1119            .tooling_dependencies
1120            .extend(ws_result.tooling_dependencies);
1121        // Virtual module prefixes (e.g., Docusaurus @theme/, @site/) are
1122        // package-name prefixes, not file paths — no workspace prefix needed.
1123        for prefix in ws_result.virtual_module_prefixes {
1124            if !seen_prefixes.contains(&prefix) {
1125                seen_prefixes.insert(prefix.clone());
1126                result.virtual_module_prefixes.push(prefix);
1127            }
1128        }
1129        // Generated import patterns (e.g., SvelteKit /$types) are suffix
1130        // matches on specifiers, not file paths — no workspace prefix needed.
1131        for pattern in ws_result.generated_import_patterns {
1132            if !seen_generated.contains(&pattern) {
1133                seen_generated.insert(pattern.clone());
1134                result.generated_import_patterns.push(pattern);
1135            }
1136        }
1137        // Path aliases from workspace plugins (e.g., SvelteKit $lib/ → src/lib).
1138        // Prefix the replacement directory so it resolves from the monorepo root.
1139        for (prefix, replacement) in ws_result.path_aliases {
1140            result
1141                .path_aliases
1142                .push((prefix, format!("{ws_prefix}/{replacement}")));
1143        }
1144    }
1145
1146    result
1147}
1148
1149fn bucket_files_by_workspace(
1150    workspace_pkgs: &[LoadedWorkspacePackage<'_>],
1151    file_paths: &[std::path::PathBuf],
1152) -> Vec<Vec<(std::path::PathBuf, String)>> {
1153    let mut buckets = vec![Vec::new(); workspace_pkgs.len()];
1154
1155    for file_path in file_paths {
1156        for (idx, (ws, _)) in workspace_pkgs.iter().enumerate() {
1157            if let Ok(relative) = file_path.strip_prefix(&ws.root) {
1158                buckets[idx].push((file_path.clone(), relative.to_string_lossy().into_owned()));
1159                break;
1160            }
1161        }
1162    }
1163
1164    buckets
1165}
1166
/// Collect candidate directories for plugin config-file searches: the project
/// `root` plus every directory between `root` and each file's parent
/// (inclusive of `root`, exclusive of the file itself). The result is
/// deduplicated and sorted for deterministic ordering.
fn collect_config_search_roots(
    root: &Path,
    file_paths: &[std::path::PathBuf],
) -> Vec<std::path::PathBuf> {
    // BTreeSet gives dedup and sorted iteration in a single structure.
    let mut roots: std::collections::BTreeSet<std::path::PathBuf> =
        std::collections::BTreeSet::new();
    roots.insert(root.to_path_buf());

    for file_path in file_paths {
        // Walk the ancestor chain starting at the file's parent directory
        // (skip(1) drops the file itself), stopping once we leave the root.
        for dir in file_path
            .ancestors()
            .skip(1)
            .take_while(|dir| dir.starts_with(root))
        {
            roots.insert(dir.to_path_buf());
        }
    }

    roots.into_iter().collect()
}
1192
1193/// Run analysis on a project directory (with export usages for LSP Code Lens).
1194///
1195/// # Errors
1196///
1197/// Returns an error if config loading, file discovery, parsing, or analysis fails.
1198pub fn analyze_project(root: &Path) -> Result<AnalysisResults, FallowError> {
1199    let config = default_config(root);
1200    analyze_with_usages(&config)
1201}
1202
1203/// Create a default config for a project root.
1204///
1205/// `analyze_project` is the dead-code entry point used by the LSP and other
1206/// programmatic embedders. When the loaded config uses the per-analysis
1207/// production form (`production: { deadCode: true, ... }`), the production
1208/// flag must be flattened to the dead-code analysis here. Otherwise
1209/// `ResolvedConfig::resolve` calls `.global()` which returns false for the
1210/// per-analysis variant and the production-mode rule overrides
1211/// (`unused_dev_dependencies: off`, etc.) plus `resolved.production = true`
1212/// are silently dropped.
1213pub(crate) fn default_config(root: &Path) -> ResolvedConfig {
1214    let user_config = fallow_config::FallowConfig::find_and_load(root)
1215        .ok()
1216        .flatten();
1217    match user_config {
1218        Some((mut config, _path)) => {
1219            let dead_code_production = config
1220                .production
1221                .for_analysis(fallow_config::ProductionAnalysis::DeadCode);
1222            config.production = dead_code_production.into();
1223            config.resolve(
1224                root.to_path_buf(),
1225                fallow_config::OutputFormat::Human,
1226                num_cpus(),
1227                false,
1228                true, // quiet: LSP/programmatic callers don't need progress bars
1229            )
1230        }
1231        None => fallow_config::FallowConfig::default().resolve(
1232            root.to_path_buf(),
1233            fallow_config::OutputFormat::Human,
1234            num_cpus(),
1235            false,
1236            true,
1237        ),
1238    }
1239}
1240
/// Worker-thread count for analysis: the system's available parallelism, or
/// a fallback of 4 when the platform cannot report it.
fn num_cpus() -> usize {
    match std::thread::available_parallelism() {
        Ok(count) => count.get(),
        Err(_) => 4,
    }
}
1244
#[cfg(test)]
mod tests {
    use super::{
        bucket_files_by_workspace, collect_config_search_roots, format_undeclared_workspace_warning,
    };
    use std::path::{Path, PathBuf};

    use fallow_config::WorkspaceDiagnostic;

    /// Build a minimal diagnostic for a path below `root`; the message is
    /// irrelevant to the warning formatter under test, so it stays empty.
    fn diag(root: &Path, relative: &str) -> WorkspaceDiagnostic {
        WorkspaceDiagnostic {
            path: root.join(relative),
            message: String::new(),
        }
    }

    // A single undeclared directory should render with singular phrasing
    // ("1 directory", "that path") and list the path with no truncation.
    #[test]
    fn undeclared_workspace_warning_is_singular_for_one_path() {
        let root = Path::new("/repo");
        let warning = format_undeclared_workspace_warning(root, &[diag(root, "packages/api")])
            .expect("warning should be rendered");

        assert_eq!(
            warning,
            "1 directory with package.json is not declared as a workspace: packages/api. Add that path to package.json workspaces or pnpm-workspace.yaml if it should be analyzed as a workspace."
        );
    }

    // Six paths exceed UNDECLARED_WORKSPACE_WARNING_PREVIEW (5), so the warning
    // should pluralize, list only the first five, and append "(and 1 more)".
    #[test]
    fn undeclared_workspace_warning_summarizes_many_paths() {
        let root = PathBuf::from("/repo");
        let diagnostics = [
            "examples/a",
            "examples/b",
            "examples/c",
            "examples/d",
            "examples/e",
            "examples/f",
        ]
        .into_iter()
        .map(|path| diag(&root, path))
        .collect::<Vec<_>>();

        let warning = format_undeclared_workspace_warning(&root, &diagnostics)
            .expect("warning should be rendered");

        assert_eq!(
            warning,
            "6 directories with package.json are not declared as workspaces: examples/a, examples/b, examples/c, examples/d, examples/e (and 1 more). Add those paths to package.json workspaces or pnpm-workspace.yaml if they should be analyzed as workspaces."
        );
    }

    // Every directory between the root and each file should appear exactly
    // once (shared ancestors deduplicated), and the result must be sorted.
    #[test]
    fn collect_config_search_roots_includes_file_ancestors_once() {
        let root = PathBuf::from("/repo");
        let search_roots = collect_config_search_roots(
            &root,
            &[
                root.join("apps/query/src/main.ts"),
                root.join("packages/shared/lib/index.ts"),
            ],
        );

        assert_eq!(
            search_roots,
            vec![
                root.clone(),
                root.join("apps"),
                root.join("apps/query"),
                root.join("apps/query/src"),
                root.join("packages"),
                root.join("packages/shared"),
                root.join("packages/shared/lib"),
            ]
        );
    }

    // Files should be bucketed per workspace with workspace-relative paths;
    // files outside every workspace (tools/build.ts) appear in no bucket.
    #[test]
    fn bucket_files_by_workspace_uses_workspace_relative_paths() {
        let root = PathBuf::from("/repo");
        let ui = fallow_config::WorkspaceInfo {
            root: root.join("apps/ui"),
            name: "ui".to_string(),
            is_internal_dependency: false,
        };
        let api = fallow_config::WorkspaceInfo {
            root: root.join("apps/api"),
            name: "api".to_string(),
            is_internal_dependency: false,
        };
        let workspace_pkgs = vec![
            (
                &ui,
                fallow_config::PackageJson {
                    name: Some("ui".to_string()),
                    ..Default::default()
                },
            ),
            (
                &api,
                fallow_config::PackageJson {
                    name: Some("api".to_string()),
                    ..Default::default()
                },
            ),
        ];
        let files = vec![
            root.join("apps/ui/vite.config.ts"),
            root.join("apps/ui/src/main.ts"),
            root.join("apps/api/src/server.ts"),
            root.join("tools/build.ts"),
        ];

        let buckets = bucket_files_by_workspace(&workspace_pkgs, &files);

        assert_eq!(
            buckets[0],
            vec![
                (
                    root.join("apps/ui/vite.config.ts"),
                    "vite.config.ts".to_string()
                ),
                (root.join("apps/ui/src/main.ts"), "src/main.ts".to_string()),
            ]
        );
        assert_eq!(
            buckets[1],
            vec![(
                root.join("apps/api/src/server.ts"),
                "src/server.ts".to_string()
            )]
        );
    }
}