//! fallow_core crate root (lib.rs)

1pub mod analyze;
2pub mod cache;
3pub mod changed_files;
4pub mod churn;
5pub mod cross_reference;
6pub mod discover;
7pub mod duplicates;
8pub(crate) mod errors;
9mod external_style_usage;
10pub mod extract;
11pub mod git_env;
12pub mod plugins;
13pub(crate) mod progress;
14pub mod results;
15pub(crate) mod scripts;
16pub mod suppress;
17pub mod trace;
18
19// Re-export from fallow-graph for backwards compatibility
20pub use fallow_graph::graph;
21pub use fallow_graph::project;
22pub use fallow_graph::resolve;
23
24use std::path::Path;
25use std::time::Instant;
26
27use errors::FallowError;
28use fallow_config::{
29    EntryPointRole, PackageJson, ResolvedConfig, discover_workspaces,
30    find_undeclared_workspaces_with_ignores,
31};
32use rayon::prelude::*;
33use results::AnalysisResults;
34use rustc_hash::FxHashSet;
35use trace::PipelineTimings;
36
/// Maximum number of undeclared-workspace paths listed in the warning message;
/// any remainder is summarized as "(and N more)".
const UNDECLARED_WORKSPACE_WARNING_PREVIEW: usize = 5;
/// A workspace paired with its successfully parsed `package.json`.
type LoadedWorkspacePackage<'a> = (&'a fallow_config::WorkspaceInfo, PackageJson);
39
40fn record_graph_package_usage(
41    graph: &mut graph::ModuleGraph,
42    package_name: &str,
43    file_id: discover::FileId,
44    is_type_only: bool,
45) {
46    graph
47        .package_usage
48        .entry(package_name.to_owned())
49        .or_default()
50        .push(file_id);
51    if is_type_only {
52        graph
53            .type_only_package_usage
54            .entry(package_name.to_owned())
55            .or_default()
56            .push(file_id);
57    }
58}
59
60fn workspace_package_name<'a>(
61    source: &str,
62    workspace_names: &'a FxHashSet<&str>,
63) -> Option<&'a str> {
64    if !resolve::is_bare_specifier(source) {
65        return None;
66    }
67    let package_name = resolve::extract_package_name(source);
68    workspace_names.get(package_name.as_str()).copied()
69}
70
71fn credit_workspace_package_usage(
72    graph: &mut graph::ModuleGraph,
73    resolved: &[resolve::ResolvedModule],
74    workspaces: &[fallow_config::WorkspaceInfo],
75) {
76    if workspaces.is_empty() {
77        return;
78    }
79
80    let workspace_names: FxHashSet<&str> = workspaces.iter().map(|ws| ws.name.as_str()).collect();
81    for module in resolved {
82        for import in module
83            .resolved_imports
84            .iter()
85            .chain(module.resolved_dynamic_imports.iter())
86        {
87            if matches!(import.target, resolve::ResolveResult::InternalModule(_))
88                && let Some(package_name) =
89                    workspace_package_name(&import.info.source, &workspace_names)
90            {
91                record_graph_package_usage(
92                    graph,
93                    package_name,
94                    module.file_id,
95                    import.info.is_type_only,
96                );
97            }
98        }
99
100        for re_export in &module.re_exports {
101            if matches!(re_export.target, resolve::ResolveResult::InternalModule(_))
102                && let Some(package_name) =
103                    workspace_package_name(&re_export.info.source, &workspace_names)
104            {
105                record_graph_package_usage(
106                    graph,
107                    package_name,
108                    module.file_id,
109                    re_export.info.is_type_only,
110                );
111            }
112        }
113    }
114}
115
/// Result of the full analysis pipeline, including optional performance timings.
pub struct AnalysisOutput {
    /// Dead-code findings and related analysis results.
    pub results: AnalysisResults,
    /// Per-stage timing breakdown; `None` when the run did not collect timings.
    pub timings: Option<PipelineTimings>,
    /// The built module graph; `None` when graph retention was not requested.
    pub graph: Option<graph::ModuleGraph>,
    /// Parsed modules from the pipeline, available when `retain_modules` is true.
    /// Used by the combined command to share a single parse across dead-code and health.
    pub modules: Option<Vec<extract::ModuleInfo>>,
    /// Discovered files from the pipeline, available when `retain_modules` is true.
    pub files: Option<Vec<discover::DiscoveredFile>>,
    /// Package names invoked from package.json scripts and CI configs, mirroring
    /// what the unused-deps detector consults. Populated for every pipeline run;
    /// trace tooling reads it so `trace_dependency` agrees with `unused-deps` on
    /// "used vs unused" instead of returning false-negatives for script-only deps.
    pub script_used_packages: rustc_hash::FxHashSet<String>,
}
132
133/// Update cache: write freshly parsed modules and refresh stale mtime/size entries.
134fn update_cache(
135    store: &mut cache::CacheStore,
136    modules: &[extract::ModuleInfo],
137    files: &[discover::DiscoveredFile],
138) {
139    for module in modules {
140        if let Some(file) = files.get(module.file_id.0 as usize) {
141            let (mt, sz) = file_mtime_and_size(&file.path);
142            // If content hash matches, just refresh mtime/size if stale (e.g. `touch`ed file)
143            if let Some(cached) = store.get_by_path_only(&file.path)
144                && cached.content_hash == module.content_hash
145            {
146                if cached.mtime_secs != mt || cached.file_size != sz {
147                    store.insert(&file.path, cache::module_to_cached(module, mt, sz));
148                }
149                continue;
150            }
151            store.insert(&file.path, cache::module_to_cached(module, mt, sz));
152        }
153    }
154    store.retain_paths(files);
155}
156
/// Extract mtime (seconds since epoch) and file size from a path.
///
/// Returns `(0, 0)` when metadata cannot be read; mtime alone falls back to
/// `0` when the modification time is unavailable or predates the Unix epoch.
fn file_mtime_and_size(path: &std::path::Path) -> (u64, u64) {
    let Ok(meta) = std::fs::metadata(path) else {
        return (0, 0);
    };
    let mtime = meta
        .modified()
        .ok()
        .and_then(|t| t.duration_since(std::time::SystemTime::UNIX_EPOCH).ok())
        .map_or(0, |d| d.as_secs());
    (mtime, meta.len())
}
168
169fn format_undeclared_workspace_warning(
170    root: &Path,
171    undeclared: &[fallow_config::WorkspaceDiagnostic],
172) -> Option<String> {
173    if undeclared.is_empty() {
174        return None;
175    }
176
177    let preview = undeclared
178        .iter()
179        .take(UNDECLARED_WORKSPACE_WARNING_PREVIEW)
180        .map(|diag| {
181            diag.path
182                .strip_prefix(root)
183                .unwrap_or(&diag.path)
184                .display()
185                .to_string()
186                .replace('\\', "/")
187        })
188        .collect::<Vec<_>>();
189    let remaining = undeclared
190        .len()
191        .saturating_sub(UNDECLARED_WORKSPACE_WARNING_PREVIEW);
192    let tail = if remaining > 0 {
193        format!(" (and {remaining} more)")
194    } else {
195        String::new()
196    };
197    let noun = if undeclared.len() == 1 {
198        "directory with package.json is"
199    } else {
200        "directories with package.json are"
201    };
202    let guidance = if undeclared.len() == 1 {
203        "Add that path to package.json workspaces or pnpm-workspace.yaml if it should be analyzed as a workspace."
204    } else {
205        "Add those paths to package.json workspaces or pnpm-workspace.yaml if they should be analyzed as workspaces."
206    };
207
208    Some(format!(
209        "{} {} not declared as {}: {}{}. {}",
210        undeclared.len(),
211        noun,
212        if undeclared.len() == 1 {
213            "a workspace"
214        } else {
215            "workspaces"
216        },
217        preview.join(", "),
218        tail,
219        guidance
220    ))
221}
222
223fn warn_undeclared_workspaces(
224    root: &Path,
225    workspaces_vec: &[fallow_config::WorkspaceInfo],
226    ignore_patterns: &globset::GlobSet,
227    quiet: bool,
228) {
229    if quiet {
230        return;
231    }
232
233    let undeclared = find_undeclared_workspaces_with_ignores(root, workspaces_vec, ignore_patterns);
234    if let Some(message) = format_undeclared_workspace_warning(root, &undeclared) {
235        tracing::warn!("{message}");
236    }
237}
238
239/// Run the full analysis pipeline.
240///
241/// # Errors
242///
243/// Returns an error if file discovery, parsing, or analysis fails.
244pub fn analyze(config: &ResolvedConfig) -> Result<AnalysisResults, FallowError> {
245    let output = analyze_full(config, false, false, false, false)?;
246    Ok(output.results)
247}
248
249/// Run the full analysis pipeline with export usage collection (for LSP Code Lens).
250///
251/// # Errors
252///
253/// Returns an error if file discovery, parsing, or analysis fails.
254pub fn analyze_with_usages(config: &ResolvedConfig) -> Result<AnalysisResults, FallowError> {
255    let output = analyze_full(config, false, true, false, false)?;
256    Ok(output.results)
257}
258
259/// Run the full analysis pipeline with optional performance timings and graph retention.
260///
261/// # Errors
262///
263/// Returns an error if file discovery, parsing, or analysis fails.
264pub fn analyze_with_trace(config: &ResolvedConfig) -> Result<AnalysisOutput, FallowError> {
265    analyze_full(config, true, false, false, false)
266}
267
268/// Run the full analysis pipeline, retaining parsed modules and discovered files.
269///
270/// Used by the combined command to share a single parse across dead-code and health.
271/// When `need_complexity` is true, the `ComplexityVisitor` runs during parsing so
272/// the returned modules contain per-function complexity data.
273///
274/// # Errors
275///
276/// Returns an error if file discovery, parsing, or analysis fails.
277pub fn analyze_retaining_modules(
278    config: &ResolvedConfig,
279    need_complexity: bool,
280    retain_graph: bool,
281) -> Result<AnalysisOutput, FallowError> {
282    analyze_full(config, retain_graph, false, need_complexity, true)
283}
284
285/// Run the analysis pipeline using pre-parsed modules, skipping the parsing stage.
286///
287/// This avoids re-parsing files when the caller already has a `ParseResult` (e.g., from
288/// `fallow_core::extract::parse_all_files`). Discovery, plugins, scripts, entry points,
289/// import resolution, graph construction, and dead code detection still run normally.
290/// The graph is always retained (needed for file scores).
291///
292/// # Errors
293///
294/// Returns an error if discovery, graph construction, or analysis fails.
295#[allow(
296    clippy::too_many_lines,
297    reason = "pipeline orchestration stays easier to audit in one place"
298)]
299pub fn analyze_with_parse_result(
300    config: &ResolvedConfig,
301    modules: &[extract::ModuleInfo],
302) -> Result<AnalysisOutput, FallowError> {
303    let _span = tracing::info_span!("fallow_analyze_with_parse_result").entered();
304    let pipeline_start = Instant::now();
305
306    let show_progress = !config.quiet
307        && std::io::IsTerminal::is_terminal(&std::io::stderr())
308        && matches!(
309            config.output,
310            fallow_config::OutputFormat::Human
311                | fallow_config::OutputFormat::Compact
312                | fallow_config::OutputFormat::Markdown
313        );
314    let progress = progress::AnalysisProgress::new(show_progress);
315
316    if !config.root.join("node_modules").is_dir() {
317        tracing::warn!(
318            "node_modules directory not found. Run `npm install` / `pnpm install` first for accurate results."
319        );
320    }
321
322    // Discover workspaces
323    let t = Instant::now();
324    let workspaces_vec = discover_workspaces(&config.root);
325    let workspaces_ms = t.elapsed().as_secs_f64() * 1000.0;
326    if !workspaces_vec.is_empty() {
327        tracing::info!(count = workspaces_vec.len(), "workspaces discovered");
328    }
329
330    // Warn about directories with package.json not declared as workspaces
331    warn_undeclared_workspaces(
332        &config.root,
333        &workspaces_vec,
334        &config.ignore_patterns,
335        config.quiet,
336    );
337
338    // Stage 1: Discover files (cheap — needed for file registry and resolution)
339    let t = Instant::now();
340    let pb = progress.stage_spinner("Discovering files...");
341    let discovered_files = discover::discover_files(config);
342    let discover_ms = t.elapsed().as_secs_f64() * 1000.0;
343    pb.finish_and_clear();
344
345    let project = project::ProjectState::new(discovered_files, workspaces_vec);
346    let files = project.files();
347    let workspaces = project.workspaces();
348    let root_pkg = load_root_package_json(config);
349    let workspace_pkgs = load_workspace_packages(workspaces);
350
351    // Stage 1.5: Run plugin system
352    let t = Instant::now();
353    let pb = progress.stage_spinner("Detecting plugins...");
354    let mut plugin_result = run_plugins(
355        config,
356        files,
357        workspaces,
358        root_pkg.as_ref(),
359        &workspace_pkgs,
360    );
361    let plugins_ms = t.elapsed().as_secs_f64() * 1000.0;
362    pb.finish_and_clear();
363
364    // Stage 1.6: Analyze package.json scripts
365    let t = Instant::now();
366    analyze_all_scripts(
367        config,
368        workspaces,
369        root_pkg.as_ref(),
370        &workspace_pkgs,
371        &mut plugin_result,
372    );
373    let scripts_ms = t.elapsed().as_secs_f64() * 1000.0;
374
375    // Stage 2: SKIPPED — using pre-parsed modules from caller
376
377    // Stage 3: Discover entry points
378    let t = Instant::now();
379    let entry_points = discover_all_entry_points(
380        config,
381        files,
382        workspaces,
383        root_pkg.as_ref(),
384        &workspace_pkgs,
385        &plugin_result,
386    );
387    let entry_points_ms = t.elapsed().as_secs_f64() * 1000.0;
388
389    // Compute entry-point summary before the graph consumes the entry_points vec
390    let ep_summary = summarize_entry_points(&entry_points.all);
391
392    // Stage 4: Resolve imports to file IDs
393    let t = Instant::now();
394    let pb = progress.stage_spinner("Resolving imports...");
395    let mut resolved = resolve::resolve_all_imports(
396        modules,
397        files,
398        workspaces,
399        &plugin_result.active_plugins,
400        &plugin_result.path_aliases,
401        &plugin_result.scss_include_paths,
402        &config.root,
403        &config.resolve.conditions,
404    );
405    external_style_usage::augment_external_style_package_usage(
406        &mut resolved,
407        config,
408        workspaces,
409        &plugin_result,
410    );
411    let resolve_ms = t.elapsed().as_secs_f64() * 1000.0;
412    pb.finish_and_clear();
413
414    // Stage 5: Build module graph
415    let t = Instant::now();
416    let pb = progress.stage_spinner("Building module graph...");
417    let mut graph = graph::ModuleGraph::build_with_reachability_roots(
418        &resolved,
419        &entry_points.all,
420        &entry_points.runtime,
421        &entry_points.test,
422        files,
423    );
424    credit_workspace_package_usage(&mut graph, &resolved, workspaces);
425    let graph_ms = t.elapsed().as_secs_f64() * 1000.0;
426    pb.finish_and_clear();
427
428    // Stage 6: Analyze for dead code
429    let t = Instant::now();
430    let pb = progress.stage_spinner("Analyzing...");
431    let mut result = analyze::find_dead_code_full(
432        &graph,
433        config,
434        &resolved,
435        Some(&plugin_result),
436        workspaces,
437        modules,
438        false,
439    );
440    let analyze_ms = t.elapsed().as_secs_f64() * 1000.0;
441    pb.finish_and_clear();
442    progress.finish();
443
444    result.entry_point_summary = Some(ep_summary);
445
446    let total_ms = pipeline_start.elapsed().as_secs_f64() * 1000.0;
447
448    tracing::debug!(
449        "\n┌─ Pipeline Profile (reuse) ─────────────────────\n\
450         │  discover files:   {:>8.1}ms  ({} files)\n\
451         │  workspaces:       {:>8.1}ms\n\
452         │  plugins:          {:>8.1}ms\n\
453         │  script analysis:  {:>8.1}ms\n\
454         │  parse/extract:    SKIPPED (reused {} modules)\n\
455         │  entry points:     {:>8.1}ms  ({} entries)\n\
456         │  resolve imports:  {:>8.1}ms\n\
457         │  build graph:      {:>8.1}ms\n\
458         │  analyze:          {:>8.1}ms\n\
459         │  ────────────────────────────────────────────\n\
460         │  TOTAL:            {:>8.1}ms\n\
461         └─────────────────────────────────────────────────",
462        discover_ms,
463        files.len(),
464        workspaces_ms,
465        plugins_ms,
466        scripts_ms,
467        modules.len(),
468        entry_points_ms,
469        entry_points.all.len(),
470        resolve_ms,
471        graph_ms,
472        analyze_ms,
473        total_ms,
474    );
475
476    let timings = Some(PipelineTimings {
477        discover_files_ms: discover_ms,
478        file_count: files.len(),
479        workspaces_ms,
480        workspace_count: workspaces.len(),
481        plugins_ms,
482        script_analysis_ms: scripts_ms,
483        parse_extract_ms: 0.0, // Skipped — modules were reused
484        module_count: modules.len(),
485        cache_hits: 0,
486        cache_misses: 0,
487        cache_update_ms: 0.0,
488        entry_points_ms,
489        entry_point_count: entry_points.all.len(),
490        resolve_imports_ms: resolve_ms,
491        build_graph_ms: graph_ms,
492        analyze_ms,
493        duplication_ms: None,
494        total_ms,
495    });
496
497    Ok(AnalysisOutput {
498        results: result,
499        timings,
500        graph: Some(graph),
501        modules: None,
502        files: None,
503        script_used_packages: plugin_result.script_used_packages.clone(),
504    })
505}
506
/// Core pipeline behind every public `analyze*` entry point.
///
/// Flag meanings:
/// - `retain`: keep pipeline timings and the module graph in the output.
/// - `collect_usages`: collect export usage data (for LSP Code Lens).
/// - `need_complexity`: run complexity extraction while parsing.
/// - `retain_modules`: keep parsed modules and discovered files in the output.
#[expect(
    clippy::unnecessary_wraps,
    reason = "Result kept for future error handling"
)]
#[expect(
    clippy::too_many_lines,
    reason = "main pipeline function; sequential phases are held together for clarity"
)]
fn analyze_full(
    config: &ResolvedConfig,
    retain: bool,
    collect_usages: bool,
    need_complexity: bool,
    retain_modules: bool,
) -> Result<AnalysisOutput, FallowError> {
    let _span = tracing::info_span!("fallow_analyze").entered();
    let pipeline_start = Instant::now();

    // Progress bars: enabled when not quiet, stderr is a terminal, and output is human-readable.
    // Structured formats (JSON, SARIF) suppress spinners even on TTY — users piping structured
    // output don't expect progress noise on stderr.
    let show_progress = !config.quiet
        && std::io::IsTerminal::is_terminal(&std::io::stderr())
        && matches!(
            config.output,
            fallow_config::OutputFormat::Human
                | fallow_config::OutputFormat::Compact
                | fallow_config::OutputFormat::Markdown
        );
    let progress = progress::AnalysisProgress::new(show_progress);

    // Warn if node_modules is missing — resolution will be severely degraded
    if !config.root.join("node_modules").is_dir() {
        tracing::warn!(
            "node_modules directory not found. Run `npm install` / `pnpm install` first for accurate results."
        );
    }

    // Discover workspaces if in a monorepo
    let t = Instant::now();
    let workspaces_vec = discover_workspaces(&config.root);
    let workspaces_ms = t.elapsed().as_secs_f64() * 1000.0;
    if !workspaces_vec.is_empty() {
        tracing::info!(count = workspaces_vec.len(), "workspaces discovered");
    }

    // Warn about directories with package.json not declared as workspaces
    warn_undeclared_workspaces(
        &config.root,
        &workspaces_vec,
        &config.ignore_patterns,
        config.quiet,
    );

    // Stage 1: Discover all source files
    let t = Instant::now();
    let pb = progress.stage_spinner("Discovering files...");
    let discovered_files = discover::discover_files(config);
    let discover_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Build ProjectState: owns the file registry with stable FileIds and workspace metadata.
    // This is the foundation for cross-workspace resolution and future incremental analysis.
    let project = project::ProjectState::new(discovered_files, workspaces_vec);
    let files = project.files();
    let workspaces = project.workspaces();
    let root_pkg = load_root_package_json(config);
    let workspace_pkgs = load_workspace_packages(workspaces);

    // Stage 1.5: Run plugin system — parse config files, discover dynamic entries
    let t = Instant::now();
    let pb = progress.stage_spinner("Detecting plugins...");
    let mut plugin_result = run_plugins(
        config,
        files,
        workspaces,
        root_pkg.as_ref(),
        &workspace_pkgs,
    );
    let plugins_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Stage 1.6: Analyze package.json scripts for binary usage and config file refs
    let t = Instant::now();
    analyze_all_scripts(
        config,
        workspaces,
        root_pkg.as_ref(),
        &workspace_pkgs,
        &mut plugin_result,
    );
    let scripts_ms = t.elapsed().as_secs_f64() * 1000.0;

    // Stage 2: Parse all files in parallel and extract imports/exports
    let t = Instant::now();
    let pb = progress.stage_spinner(&format!("Parsing {} files...", files.len()));
    // Cache is skipped entirely under --no-cache.
    let mut cache_store = if config.no_cache {
        None
    } else {
        cache::CacheStore::load(&config.cache_dir)
    };

    let parse_result = extract::parse_all_files(files, cache_store.as_ref(), need_complexity);
    let modules = parse_result.modules;
    let cache_hits = parse_result.cache_hits;
    let cache_misses = parse_result.cache_misses;
    let parse_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Update cache with freshly parsed modules and refresh stale mtime/size entries.
    let t = Instant::now();
    if !config.no_cache {
        let store = cache_store.get_or_insert_with(cache::CacheStore::new);
        update_cache(store, &modules, files);
        // A failed cache save is non-fatal; the next run just re-parses.
        if let Err(e) = store.save(&config.cache_dir) {
            tracing::warn!("Failed to save cache: {e}");
        }
    }
    let cache_ms = t.elapsed().as_secs_f64() * 1000.0;

    // Stage 3: Discover entry points (static patterns + plugin-discovered patterns)
    let t = Instant::now();
    let entry_points = discover_all_entry_points(
        config,
        files,
        workspaces,
        root_pkg.as_ref(),
        &workspace_pkgs,
        &plugin_result,
    );
    let entry_points_ms = t.elapsed().as_secs_f64() * 1000.0;

    // Stage 4: Resolve imports to file IDs
    let t = Instant::now();
    let pb = progress.stage_spinner("Resolving imports...");
    let mut resolved = resolve::resolve_all_imports(
        &modules,
        files,
        workspaces,
        &plugin_result.active_plugins,
        &plugin_result.path_aliases,
        &plugin_result.scss_include_paths,
        &config.root,
        &config.resolve.conditions,
    );
    external_style_usage::augment_external_style_package_usage(
        &mut resolved,
        config,
        workspaces,
        &plugin_result,
    );
    let resolve_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Stage 5: Build module graph
    let t = Instant::now();
    let pb = progress.stage_spinner("Building module graph...");
    let mut graph = graph::ModuleGraph::build_with_reachability_roots(
        &resolved,
        &entry_points.all,
        &entry_points.runtime,
        &entry_points.test,
        files,
    );
    credit_workspace_package_usage(&mut graph, &resolved, workspaces);
    let graph_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Compute entry-point summary before the graph consumes the entry_points vec
    let ep_summary = summarize_entry_points(&entry_points.all);

    // Stage 6: Analyze for dead code (with plugin context and workspace info)
    let t = Instant::now();
    let pb = progress.stage_spinner("Analyzing...");
    let mut result = analyze::find_dead_code_full(
        &graph,
        config,
        &resolved,
        Some(&plugin_result),
        workspaces,
        &modules,
        collect_usages,
    );
    let analyze_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();
    progress.finish();

    result.entry_point_summary = Some(ep_summary);

    let total_ms = pipeline_start.elapsed().as_secs_f64() * 1000.0;

    let cache_summary = if cache_hits > 0 {
        format!(" ({cache_hits} cached, {cache_misses} parsed)")
    } else {
        String::new()
    };

    tracing::debug!(
        "\n┌─ Pipeline Profile ─────────────────────────────\n\
         │  discover files:   {:>8.1}ms  ({} files)\n\
         │  workspaces:       {:>8.1}ms\n\
         │  plugins:          {:>8.1}ms\n\
         │  script analysis:  {:>8.1}ms\n\
         │  parse/extract:    {:>8.1}ms  ({} modules{})\n\
         │  cache update:     {:>8.1}ms\n\
         │  entry points:     {:>8.1}ms  ({} entries)\n\
         │  resolve imports:  {:>8.1}ms\n\
         │  build graph:      {:>8.1}ms\n\
         │  analyze:          {:>8.1}ms\n\
         │  ────────────────────────────────────────────\n\
         │  TOTAL:            {:>8.1}ms\n\
         └─────────────────────────────────────────────────",
        discover_ms,
        files.len(),
        workspaces_ms,
        plugins_ms,
        scripts_ms,
        parse_ms,
        modules.len(),
        cache_summary,
        cache_ms,
        entry_points_ms,
        entry_points.all.len(),
        resolve_ms,
        graph_ms,
        analyze_ms,
        total_ms,
    );

    // Timings are only materialized when the caller asked to retain them.
    let timings = if retain {
        Some(PipelineTimings {
            discover_files_ms: discover_ms,
            file_count: files.len(),
            workspaces_ms,
            workspace_count: workspaces.len(),
            plugins_ms,
            script_analysis_ms: scripts_ms,
            parse_extract_ms: parse_ms,
            module_count: modules.len(),
            cache_hits,
            cache_misses,
            cache_update_ms: cache_ms,
            entry_points_ms,
            entry_point_count: entry_points.all.len(),
            resolve_imports_ms: resolve_ms,
            build_graph_ms: graph_ms,
            analyze_ms,
            duplication_ms: None,
            total_ms,
        })
    } else {
        None
    };

    Ok(AnalysisOutput {
        results: result,
        timings,
        graph: if retain { Some(graph) } else { None },
        modules: if retain_modules { Some(modules) } else { None },
        files: if retain_modules {
            Some(files.to_vec())
        } else {
            None
        },
        script_used_packages: plugin_result.script_used_packages,
    })
}
774
/// Load the project root `package.json`, returning `None` if it is missing
/// or cannot be parsed.
fn load_root_package_json(config: &ResolvedConfig) -> Option<PackageJson> {
    PackageJson::load(&config.root.join("package.json")).ok()
}
782
783fn load_workspace_packages(
784    workspaces: &[fallow_config::WorkspaceInfo],
785) -> Vec<LoadedWorkspacePackage<'_>> {
786    workspaces
787        .iter()
788        .filter_map(|ws| {
789            PackageJson::load(&ws.root.join("package.json"))
790                .ok()
791                .map(|pkg| (ws, pkg))
792        })
793        .collect()
794}
795
/// Analyze package.json scripts from the root and every workspace package.
///
/// Populates `plugin_result` with script-used packages, always-used config
/// files, and entry patterns; also scans CI config files for binary
/// invocations and positional file references.
fn analyze_all_scripts(
    config: &ResolvedConfig,
    workspaces: &[fallow_config::WorkspaceInfo],
    root_pkg: Option<&PackageJson>,
    workspace_pkgs: &[LoadedWorkspacePackage<'_>],
    plugin_result: &mut plugins::AggregatedPluginResult,
) {
    // Collect all dependency names to build the bin-name → package-name reverse map.
    // This resolves binaries like "attw" to "@arethetypeswrong/cli" even without
    // node_modules/.bin symlinks.
    let mut all_dep_names: Vec<String> = Vec::new();
    if let Some(pkg) = root_pkg {
        all_dep_names.extend(pkg.all_dependency_names());
    }
    for (_, ws_pkg) in workspace_pkgs {
        all_dep_names.extend(ws_pkg.all_dependency_names());
    }
    all_dep_names.sort_unstable();
    all_dep_names.dedup();

    // Probe node_modules/ at project root and each workspace root so non-hoisted
    // deps (pnpm strict, Yarn workspaces) are also discovered.
    let mut nm_roots: Vec<&std::path::Path> = Vec::new();
    if config.root.join("node_modules").is_dir() {
        nm_roots.push(&config.root);
    }
    for ws in workspaces {
        if ws.root.join("node_modules").is_dir() {
            nm_roots.push(&ws.root);
        }
    }
    let bin_map = scripts::build_bin_to_package_map(&nm_roots, &all_dep_names);

    // Root package scripts: in production mode only production-relevant scripts count.
    if let Some(pkg) = root_pkg
        && let Some(ref pkg_scripts) = pkg.scripts
    {
        let scripts_to_analyze = if config.production {
            scripts::filter_production_scripts(pkg_scripts)
        } else {
            pkg_scripts.clone()
        };
        let script_analysis = scripts::analyze_scripts(&scripts_to_analyze, &config.root, &bin_map);
        plugin_result.script_used_packages = script_analysis.used_packages;

        for config_file in &script_analysis.config_files {
            plugin_result
                .discovered_always_used
                .push((config_file.clone(), "scripts".to_string()));
        }
        for entry in &script_analysis.entry_files {
            if let Some(pat) = scripts::normalize_script_entry_pattern("", entry) {
                plugin_result
                    .entry_patterns
                    .push((plugins::PathRule::new(pat), "scripts".to_string()));
            }
        }
    }
    // Workspace package scripts: paths are re-rooted under the workspace prefix.
    for (ws, ws_pkg) in workspace_pkgs {
        if let Some(ref ws_scripts) = ws_pkg.scripts {
            let scripts_to_analyze = if config.production {
                scripts::filter_production_scripts(ws_scripts)
            } else {
                ws_scripts.clone()
            };
            let ws_analysis = scripts::analyze_scripts(&scripts_to_analyze, &ws.root, &bin_map);
            plugin_result
                .script_used_packages
                .extend(ws_analysis.used_packages);

            let ws_prefix = ws
                .root
                .strip_prefix(&config.root)
                .unwrap_or(&ws.root)
                .to_string_lossy();
            for config_file in &ws_analysis.config_files {
                plugin_result
                    .discovered_always_used
                    .push((format!("{ws_prefix}/{config_file}"), "scripts".to_string()));
            }
            for entry in &ws_analysis.entry_files {
                if let Some(pat) = scripts::normalize_script_entry_pattern(&ws_prefix, entry) {
                    plugin_result
                        .entry_patterns
                        .push((plugins::PathRule::new(pat), "scripts".to_string()));
                }
            }
        }
    }

    // Scan CI config files for binary invocations and positional file references.
    // Returns both packages used by CI tooling AND project-relative file paths
    // referenced as command-line arguments (e.g., `node scripts/deploy.ts` in a
    // GitHub Actions `run:` block) so the referenced files become reachable
    // entry points. CI files always live at the project root, so file paths
    // need no workspace-prefix transformation. See issue #195 (Case D).
    let ci_analysis = scripts::ci::analyze_ci_files(&config.root, &bin_map);
    plugin_result
        .script_used_packages
        .extend(ci_analysis.used_packages);
    for entry in &ci_analysis.entry_files {
        if let Some(pat) = scripts::normalize_script_entry_pattern("", entry) {
            plugin_result
                .entry_patterns
                .push((plugins::PathRule::new(pat), "scripts".to_string()));
        }
    }
    // Register the "scripts" source as a support role unless already set.
    plugin_result
        .entry_point_roles
        .entry("scripts".to_string())
        .or_insert(EntryPointRole::Support);
}
907
908/// Discover all entry points from static patterns, workspaces, plugins, and infrastructure.
909fn discover_all_entry_points(
910    config: &ResolvedConfig,
911    files: &[discover::DiscoveredFile],
912    workspaces: &[fallow_config::WorkspaceInfo],
913    root_pkg: Option<&PackageJson>,
914    workspace_pkgs: &[LoadedWorkspacePackage<'_>],
915    plugin_result: &plugins::AggregatedPluginResult,
916) -> discover::CategorizedEntryPoints {
917    let mut entry_points = discover::CategorizedEntryPoints::default();
918    let root_discovery = discover::discover_entry_points_with_warnings_from_pkg(
919        config,
920        files,
921        root_pkg,
922        workspaces.is_empty(),
923    );
924
925    let workspace_pkg_by_root: rustc_hash::FxHashMap<std::path::PathBuf, &PackageJson> =
926        workspace_pkgs
927            .iter()
928            .map(|(ws, pkg)| (ws.root.clone(), pkg))
929            .collect();
930
931    let workspace_discovery: Vec<discover::EntryPointDiscovery> = workspaces
932        .par_iter()
933        .map(|ws| {
934            let pkg = workspace_pkg_by_root.get(&ws.root).copied();
935            discover::discover_workspace_entry_points_with_warnings_from_pkg(&ws.root, files, pkg)
936        })
937        .collect();
938    let mut skipped_entries = rustc_hash::FxHashMap::default();
939    entry_points.extend_runtime(root_discovery.entries);
940    for (path, count) in root_discovery.skipped_entries {
941        *skipped_entries.entry(path).or_insert(0) += count;
942    }
943    let mut ws_entries = Vec::new();
944    for workspace in workspace_discovery {
945        ws_entries.extend(workspace.entries);
946        for (path, count) in workspace.skipped_entries {
947            *skipped_entries.entry(path).or_insert(0) += count;
948        }
949    }
950    discover::warn_skipped_entry_summary(&skipped_entries);
951    entry_points.extend_runtime(ws_entries);
952
953    let plugin_entries = discover::discover_plugin_entry_point_sets(plugin_result, config, files);
954    entry_points.extend(plugin_entries);
955
956    let infra_entries = discover::discover_infrastructure_entry_points(&config.root);
957    entry_points.extend_runtime(infra_entries);
958
959    // Add dynamically loaded files from config as entry points
960    if !config.dynamically_loaded.is_empty() {
961        let dynamic_entries = discover::discover_dynamically_loaded_entry_points(config, files);
962        entry_points.extend_runtime(dynamic_entries);
963    }
964
965    entry_points.dedup()
966}
967
968/// Summarize entry points by source category for user-facing output.
969fn summarize_entry_points(entry_points: &[discover::EntryPoint]) -> results::EntryPointSummary {
970    let mut counts: rustc_hash::FxHashMap<String, usize> = rustc_hash::FxHashMap::default();
971    for ep in entry_points {
972        let category = match &ep.source {
973            discover::EntryPointSource::PackageJsonMain
974            | discover::EntryPointSource::PackageJsonModule
975            | discover::EntryPointSource::PackageJsonExports
976            | discover::EntryPointSource::PackageJsonBin
977            | discover::EntryPointSource::PackageJsonScript => "package.json",
978            discover::EntryPointSource::Plugin { .. } => "plugin",
979            discover::EntryPointSource::TestFile => "test file",
980            discover::EntryPointSource::DefaultIndex => "default index",
981            discover::EntryPointSource::ManualEntry => "manual entry",
982            discover::EntryPointSource::InfrastructureConfig => "config",
983            discover::EntryPointSource::DynamicallyLoaded => "dynamically loaded",
984        };
985        *counts.entry(category.to_string()).or_insert(0) += 1;
986    }
987    let mut by_source: Vec<(String, usize)> = counts.into_iter().collect();
988    by_source.sort_by(|a, b| b.1.cmp(&a.1).then_with(|| a.0.cmp(&b.0)));
989    results::EntryPointSummary {
990        total: entry_points.len(),
991        by_source,
992    }
993}
994
995/// Run plugins for root project and all workspace packages.
996fn run_plugins(
997    config: &ResolvedConfig,
998    files: &[discover::DiscoveredFile],
999    workspaces: &[fallow_config::WorkspaceInfo],
1000    root_pkg: Option<&PackageJson>,
1001    workspace_pkgs: &[LoadedWorkspacePackage<'_>],
1002) -> plugins::AggregatedPluginResult {
1003    let registry = plugins::PluginRegistry::new(config.external_plugins.clone());
1004    let file_paths: Vec<std::path::PathBuf> = files.iter().map(|f| f.path.clone()).collect();
1005    let root_config_search_roots = collect_config_search_roots(&config.root, &file_paths);
1006    let root_config_search_root_refs: Vec<&Path> = root_config_search_roots
1007        .iter()
1008        .map(std::path::PathBuf::as_path)
1009        .collect();
1010
1011    // Run plugins for root project (full run with external plugins, inline config, etc.)
1012    let mut result = root_pkg.map_or_else(plugins::AggregatedPluginResult::default, |pkg| {
1013        registry.run_with_search_roots(
1014            pkg,
1015            &config.root,
1016            &file_paths,
1017            &root_config_search_root_refs,
1018            config.production,
1019        )
1020    });
1021
1022    if workspaces.is_empty() {
1023        return result;
1024    }
1025
1026    let root_active_plugins: rustc_hash::FxHashSet<&str> =
1027        result.active_plugins.iter().map(String::as_str).collect();
1028
1029    // Pre-compile config matchers once and bucket source files by workspace.
1030    // Workspace config matching can then scan only files below that workspace
1031    // instead of every project file for every active matcher.
1032    let precompiled_matchers = registry.precompile_config_matchers();
1033    let workspace_relative_files = bucket_files_by_workspace(workspace_pkgs, &file_paths);
1034
1035    // Run plugins for each workspace package in parallel, then merge results.
1036    let ws_results: Vec<_> = workspace_pkgs
1037        .par_iter()
1038        .zip(workspace_relative_files.par_iter())
1039        .filter_map(|((ws, ws_pkg), relative_files)| {
1040            let ws_result = registry.run_workspace_fast(
1041                ws_pkg,
1042                &ws.root,
1043                &config.root,
1044                &precompiled_matchers,
1045                relative_files,
1046                &root_active_plugins,
1047                config.production,
1048            );
1049            if ws_result.active_plugins.is_empty() {
1050                return None;
1051            }
1052            let ws_prefix = ws
1053                .root
1054                .strip_prefix(&config.root)
1055                .unwrap_or(&ws.root)
1056                .to_string_lossy()
1057                .into_owned();
1058            Some((ws_result, ws_prefix))
1059        })
1060        .collect();
1061
1062    // Merge workspace results sequentially (deterministic order via par_iter index stability)
1063    // Track seen names for O(1) dedup instead of O(n) Vec::contains
1064    let mut seen_plugins: rustc_hash::FxHashSet<String> =
1065        result.active_plugins.iter().cloned().collect();
1066    let mut seen_prefixes: rustc_hash::FxHashSet<String> =
1067        result.virtual_module_prefixes.iter().cloned().collect();
1068    let mut seen_generated: rustc_hash::FxHashSet<String> =
1069        result.generated_import_patterns.iter().cloned().collect();
1070    let mut seen_suffixes: rustc_hash::FxHashSet<String> =
1071        result.virtual_package_suffixes.iter().cloned().collect();
1072
1073    fn extend_unique(
1074        target: &mut Vec<String>,
1075        seen: &mut rustc_hash::FxHashSet<String>,
1076        items: Vec<String>,
1077    ) {
1078        for item in items {
1079            if seen.insert(item.clone()) {
1080                target.push(item);
1081            }
1082        }
1083    }
1084    for (ws_result, ws_prefix) in ws_results {
1085        // Prefix helper: workspace-relative patterns need the workspace prefix
1086        // to be matchable from the monorepo root. But patterns that are already
1087        // project-root-relative (e.g., from angular.json which uses absolute paths
1088        // like "apps/client/src/styles.css") should not be double-prefixed.
1089        let prefix_if_needed = |pat: &str| -> String {
1090            if pat.starts_with(ws_prefix.as_str()) || pat.starts_with('/') {
1091                pat.to_string()
1092            } else {
1093                format!("{ws_prefix}/{pat}")
1094            }
1095        };
1096
1097        for (rule, pname) in &ws_result.entry_patterns {
1098            result
1099                .entry_patterns
1100                .push((rule.prefixed(&ws_prefix), pname.clone()));
1101        }
1102        for (plugin_name, role) in ws_result.entry_point_roles {
1103            result.entry_point_roles.entry(plugin_name).or_insert(role);
1104        }
1105        for (pat, pname) in &ws_result.always_used {
1106            result
1107                .always_used
1108                .push((prefix_if_needed(pat), pname.clone()));
1109        }
1110        for (pat, pname) in &ws_result.discovered_always_used {
1111            result
1112                .discovered_always_used
1113                .push((prefix_if_needed(pat), pname.clone()));
1114        }
1115        for (pat, pname) in &ws_result.fixture_patterns {
1116            result
1117                .fixture_patterns
1118                .push((prefix_if_needed(pat), pname.clone()));
1119        }
1120        for rule in &ws_result.used_exports {
1121            result.used_exports.push(rule.prefixed(&ws_prefix));
1122        }
1123        // Merge active plugin names (deduplicated via HashSet)
1124        for plugin_name in ws_result.active_plugins {
1125            if !seen_plugins.contains(&plugin_name) {
1126                seen_plugins.insert(plugin_name.clone());
1127                result.active_plugins.push(plugin_name);
1128            }
1129        }
1130        // These don't need prefixing (absolute paths / package names)
1131        result
1132            .referenced_dependencies
1133            .extend(ws_result.referenced_dependencies);
1134        result.setup_files.extend(ws_result.setup_files);
1135        result
1136            .tooling_dependencies
1137            .extend(ws_result.tooling_dependencies);
1138        // Virtual import boundaries — prefixes (e.g., Docusaurus `@theme/`),
1139        // generated import patterns (e.g., SvelteKit `/$types`), and package-name
1140        // suffixes (e.g., Vitest `/__mocks__`) — match against import specifiers
1141        // or package names, never file paths, so no workspace prefix is applied.
1142        extend_unique(
1143            &mut result.virtual_module_prefixes,
1144            &mut seen_prefixes,
1145            ws_result.virtual_module_prefixes,
1146        );
1147        extend_unique(
1148            &mut result.generated_import_patterns,
1149            &mut seen_generated,
1150            ws_result.generated_import_patterns,
1151        );
1152        extend_unique(
1153            &mut result.virtual_package_suffixes,
1154            &mut seen_suffixes,
1155            ws_result.virtual_package_suffixes,
1156        );
1157        // Path aliases from workspace plugins (e.g., SvelteKit $lib/ → src/lib).
1158        // Prefix the replacement directory so it resolves from the monorepo root.
1159        for (prefix, replacement) in ws_result.path_aliases {
1160            result
1161                .path_aliases
1162                .push((prefix, format!("{ws_prefix}/{replacement}")));
1163        }
1164    }
1165
1166    result
1167}
1168
1169fn bucket_files_by_workspace(
1170    workspace_pkgs: &[LoadedWorkspacePackage<'_>],
1171    file_paths: &[std::path::PathBuf],
1172) -> Vec<Vec<(std::path::PathBuf, String)>> {
1173    let mut buckets = vec![Vec::new(); workspace_pkgs.len()];
1174
1175    for file_path in file_paths {
1176        for (idx, (ws, _)) in workspace_pkgs.iter().enumerate() {
1177            if let Ok(relative) = file_path.strip_prefix(&ws.root) {
1178                buckets[idx].push((file_path.clone(), relative.to_string_lossy().into_owned()));
1179                break;
1180            }
1181        }
1182    }
1183
1184    buckets
1185}
1186
/// Collect every directory between a discovered file and the project root
/// (inclusive of the root itself), deduplicated and sorted.
///
/// Files outside `root` contribute nothing. A `BTreeSet` keyed on `PathBuf`
/// yields the same sorted, deduplicated output as the original
/// hash-set-then-sort approach.
fn collect_config_search_roots(
    root: &Path,
    file_paths: &[std::path::PathBuf],
) -> Vec<std::path::PathBuf> {
    let mut roots = std::collections::BTreeSet::new();
    roots.insert(root.to_path_buf());

    for file_path in file_paths {
        // Walk from the file's parent up through its ancestors; stop once an
        // ancestor falls outside `root` (the root itself is still included,
        // since its own parent is the first ancestor to fail the check).
        for dir in file_path
            .ancestors()
            .skip(1)
            .take_while(|dir| dir.starts_with(root))
        {
            roots.insert(dir.to_path_buf());
        }
    }

    roots.into_iter().collect()
}
1212
1213/// Run analysis on a project directory (with export usages for LSP Code Lens).
1214///
1215/// # Errors
1216///
1217/// Returns an error if config loading, file discovery, parsing, or analysis fails.
1218pub fn analyze_project(root: &Path) -> Result<AnalysisResults, FallowError> {
1219    let config = default_config(root);
1220    analyze_with_usages(&config)
1221}
1222
1223/// Create a default config for a project root.
1224///
1225/// `analyze_project` is the dead-code entry point used by the LSP and other
1226/// programmatic embedders. When the loaded config uses the per-analysis
1227/// production form (`production: { deadCode: true, ... }`), the production
1228/// flag must be flattened to the dead-code analysis here. Otherwise
1229/// `ResolvedConfig::resolve` calls `.global()` which returns false for the
1230/// per-analysis variant and the production-mode rule overrides
1231/// (`unused_dev_dependencies: off`, etc.) plus `resolved.production = true`
1232/// are silently dropped.
1233pub(crate) fn default_config(root: &Path) -> ResolvedConfig {
1234    let user_config = fallow_config::FallowConfig::find_and_load(root)
1235        .ok()
1236        .flatten();
1237    match user_config {
1238        Some((mut config, _path)) => {
1239            let dead_code_production = config
1240                .production
1241                .for_analysis(fallow_config::ProductionAnalysis::DeadCode);
1242            config.production = dead_code_production.into();
1243            config.resolve(
1244                root.to_path_buf(),
1245                fallow_config::OutputFormat::Human,
1246                num_cpus(),
1247                false,
1248                true, // quiet: LSP/programmatic callers don't need progress bars
1249            )
1250        }
1251        None => fallow_config::FallowConfig::default().resolve(
1252            root.to_path_buf(),
1253            fallow_config::OutputFormat::Human,
1254            num_cpus(),
1255            false,
1256            true,
1257        ),
1258    }
1259}
1260
/// Worker-thread count: the platform's available parallelism, or 4 when the
/// platform cannot report it.
fn num_cpus() -> usize {
    match std::thread::available_parallelism() {
        Ok(count) => count.get(),
        Err(_) => 4,
    }
}
1264
#[cfg(test)]
mod tests {
    use super::{
        bucket_files_by_workspace, collect_config_search_roots, format_undeclared_workspace_warning,
    };
    use std::path::{Path, PathBuf};

    use fallow_config::WorkspaceDiagnostic;

    // Build a diagnostic for `relative` under `root`; the message text is not
    // inspected by the warning formatter, so it stays empty.
    fn make_diagnostic(root: &Path, relative: &str) -> WorkspaceDiagnostic {
        WorkspaceDiagnostic {
            path: root.join(relative),
            message: String::new(),
        }
    }

    #[test]
    fn undeclared_workspace_warning_is_singular_for_one_path() {
        let root = Path::new("/repo");
        let diagnostics = [make_diagnostic(root, "packages/api")];

        let warning = format_undeclared_workspace_warning(root, &diagnostics)
            .expect("warning should be rendered");

        assert_eq!(
            warning,
            "1 directory with package.json is not declared as a workspace: packages/api. Add that path to package.json workspaces or pnpm-workspace.yaml if it should be analyzed as a workspace."
        );
    }

    #[test]
    fn undeclared_workspace_warning_summarizes_many_paths() {
        let root = PathBuf::from("/repo");
        let relative_paths = [
            "examples/a",
            "examples/b",
            "examples/c",
            "examples/d",
            "examples/e",
            "examples/f",
        ];
        let diagnostics: Vec<_> = relative_paths
            .iter()
            .map(|relative| make_diagnostic(&root, relative))
            .collect();

        let warning = format_undeclared_workspace_warning(&root, &diagnostics)
            .expect("warning should be rendered");

        assert_eq!(
            warning,
            "6 directories with package.json are not declared as workspaces: examples/a, examples/b, examples/c, examples/d, examples/e (and 1 more). Add those paths to package.json workspaces or pnpm-workspace.yaml if they should be analyzed as workspaces."
        );
    }

    #[test]
    fn collect_config_search_roots_includes_file_ancestors_once() {
        let root = PathBuf::from("/repo");
        let files = [
            root.join("apps/query/src/main.ts"),
            root.join("packages/shared/lib/index.ts"),
        ];

        let search_roots = collect_config_search_roots(&root, &files);

        let expected = vec![
            root.clone(),
            root.join("apps"),
            root.join("apps/query"),
            root.join("apps/query/src"),
            root.join("packages"),
            root.join("packages/shared"),
            root.join("packages/shared/lib"),
        ];
        assert_eq!(search_roots, expected);
    }

    #[test]
    fn bucket_files_by_workspace_uses_workspace_relative_paths() {
        let root = PathBuf::from("/repo");
        let make_workspace = |relative: &str, name: &str| fallow_config::WorkspaceInfo {
            root: root.join(relative),
            name: name.to_string(),
            is_internal_dependency: false,
        };
        let make_pkg = |name: &str| fallow_config::PackageJson {
            name: Some(name.to_string()),
            ..Default::default()
        };
        let ui = make_workspace("apps/ui", "ui");
        let api = make_workspace("apps/api", "api");
        let workspace_pkgs = vec![(&ui, make_pkg("ui")), (&api, make_pkg("api"))];
        // The last file sits under no workspace and should be dropped.
        let files = vec![
            root.join("apps/ui/vite.config.ts"),
            root.join("apps/ui/src/main.ts"),
            root.join("apps/api/src/server.ts"),
            root.join("tools/build.ts"),
        ];

        let buckets = bucket_files_by_workspace(&workspace_pkgs, &files);

        assert_eq!(
            buckets[0],
            vec![
                (
                    root.join("apps/ui/vite.config.ts"),
                    "vite.config.ts".to_string()
                ),
                (root.join("apps/ui/src/main.ts"), "src/main.ts".to_string()),
            ]
        );
        assert_eq!(
            buckets[1],
            vec![(
                root.join("apps/api/src/server.ts"),
                "src/server.ts".to_string()
            )]
        );
    }
}