//! `fallow_core` — top-level analysis pipeline: file discovery, parsing,
//! import resolution, module-graph construction, and dead-code analysis.
1pub mod analyze;
2pub mod cache;
3pub mod cross_reference;
4pub mod discover;
5pub mod duplicates;
6pub mod errors;
7pub mod extract;
8pub mod plugins;
9pub mod progress;
10pub mod results;
11pub mod scripts;
12pub mod suppress;
13pub mod trace;
14
15// Re-export from fallow-graph for backwards compatibility
16pub use fallow_graph::graph;
17pub use fallow_graph::project;
18pub use fallow_graph::resolve;
19
20use std::path::Path;
21use std::time::Instant;
22
23use errors::FallowError;
24use fallow_config::{PackageJson, ResolvedConfig, discover_workspaces};
25use results::AnalysisResults;
26use trace::PipelineTimings;
27
/// Result of the full analysis pipeline, including optional performance timings.
pub struct AnalysisOutput {
    /// The dead-code analysis findings.
    pub results: AnalysisResults,
    /// Per-stage pipeline timings; `Some` only when the caller requested retention
    /// (see `analyze_with_trace`).
    pub timings: Option<PipelineTimings>,
    /// The constructed module graph; `Some` only when retention was requested.
    pub graph: Option<graph::ModuleGraph>,
}
34
35/// Update cache: write freshly parsed modules and refresh stale mtime/size entries.
36fn update_cache(
37    store: &mut cache::CacheStore,
38    modules: &[extract::ModuleInfo],
39    files: &[discover::DiscoveredFile],
40) {
41    for module in modules {
42        if let Some(file) = files.get(module.file_id.0 as usize) {
43            let (mt, sz) = file_mtime_and_size(&file.path);
44            // If content hash matches, just refresh mtime/size if stale (e.g. `touch`ed file)
45            if let Some(cached) = store.get_by_path_only(&file.path)
46                && cached.content_hash == module.content_hash
47            {
48                if cached.mtime_secs != mt || cached.file_size != sz {
49                    store.insert(&file.path, cache::module_to_cached(module, mt, sz));
50                }
51                continue;
52            }
53            store.insert(&file.path, cache::module_to_cached(module, mt, sz));
54        }
55    }
56    store.retain_paths(files);
57}
58
/// Extract mtime (seconds since epoch) and file size from a path.
///
/// Returns `(0, 0)` when the file's metadata cannot be read, and an mtime of
/// `0` when the modification time is unavailable or predates the Unix epoch.
fn file_mtime_and_size(path: &std::path::Path) -> (u64, u64) {
    let Ok(meta) = std::fs::metadata(path) else {
        return (0, 0);
    };
    let mtime = meta
        .modified()
        .ok()
        .and_then(|t| t.duration_since(std::time::SystemTime::UNIX_EPOCH).ok())
        .map_or(0, |d| d.as_secs());
    (mtime, meta.len())
}
72
73/// Run the full analysis pipeline.
74pub fn analyze(config: &ResolvedConfig) -> Result<AnalysisResults, FallowError> {
75    let output = analyze_full(config, false, false)?;
76    Ok(output.results)
77}
78
79/// Run the full analysis pipeline with export usage collection (for LSP Code Lens).
80pub fn analyze_with_usages(config: &ResolvedConfig) -> Result<AnalysisResults, FallowError> {
81    let output = analyze_full(config, false, true)?;
82    Ok(output.results)
83}
84
85/// Run the full analysis pipeline with optional performance timings and graph retention.
86pub fn analyze_with_trace(config: &ResolvedConfig) -> Result<AnalysisOutput, FallowError> {
87    analyze_full(config, true, false)
88}
89
/// Core pipeline driver shared by all public `analyze*` entry points.
///
/// Runs the stages in order: workspace discovery, file discovery, plugins,
/// package.json script analysis, parse/extract (with cache), entry-point
/// discovery, import resolution, graph construction, and dead-code analysis.
/// Each stage is individually timed; the profile is always logged at debug
/// level, and returned in `AnalysisOutput::timings` when `retain` is set.
///
/// * `retain` — keep per-stage timings and the module graph in the output.
/// * `collect_usages` — have the analyzer collect export usages (LSP Code Lens).
#[expect(clippy::unnecessary_wraps)] // Result kept for future error handling
fn analyze_full(
    config: &ResolvedConfig,
    retain: bool,
    collect_usages: bool,
) -> Result<AnalysisOutput, FallowError> {
    let _span = tracing::info_span!("fallow_analyze").entered();
    let pipeline_start = Instant::now();

    // Progress bars: enabled when not quiet, stderr is a terminal, and output is human-readable.
    // Structured formats (JSON, SARIF) suppress spinners even on TTY — users piping structured
    // output don't expect progress noise on stderr.
    let show_progress = !config.quiet
        && std::io::IsTerminal::is_terminal(&std::io::stderr())
        && matches!(
            config.output,
            fallow_config::OutputFormat::Human
                | fallow_config::OutputFormat::Compact
                | fallow_config::OutputFormat::Markdown
        );
    let progress = progress::AnalysisProgress::new(show_progress);

    // Warn if node_modules is missing — resolution will be severely degraded
    if !config.root.join("node_modules").is_dir() {
        tracing::warn!(
            "node_modules directory not found. Run `npm install` / `pnpm install` first for accurate results."
        );
    }

    // Discover workspaces if in a monorepo
    let t = Instant::now();
    let workspaces_vec = discover_workspaces(&config.root);
    let workspaces_ms = t.elapsed().as_secs_f64() * 1000.0;
    if !workspaces_vec.is_empty() {
        tracing::info!(count = workspaces_vec.len(), "workspaces discovered");
    }

    // Stage 1: Discover all source files
    let t = Instant::now();
    let pb = progress.stage_spinner("Discovering files...");
    let discovered_files = discover::discover_files(config);
    let discover_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Build ProjectState: owns the file registry with stable FileIds and workspace metadata.
    // This is the foundation for cross-workspace resolution and future incremental analysis.
    let project = project::ProjectState::new(discovered_files, workspaces_vec);
    let files = project.files();
    let workspaces = project.workspaces();

    // Stage 1.5: Run plugin system — parse config files, discover dynamic entries
    let t = Instant::now();
    let pb = progress.stage_spinner("Detecting plugins...");
    let mut plugin_result = run_plugins(config, files, workspaces);
    let plugins_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Stage 1.6: Analyze package.json scripts for binary usage and config file refs
    let t = Instant::now();
    let pkg_path = config.root.join("package.json");
    if let Ok(pkg) = PackageJson::load(&pkg_path)
        && let Some(ref pkg_scripts) = pkg.scripts
    {
        // In production mode, only analyze start/build scripts
        let scripts_to_analyze = if config.production {
            scripts::filter_production_scripts(pkg_scripts)
        } else {
            pkg_scripts.clone()
        };
        let script_analysis = scripts::analyze_scripts(&scripts_to_analyze, &config.root);
        plugin_result.script_used_packages = script_analysis.used_packages;

        // Add config files from scripts as entry points (resolved later)
        for config_file in &script_analysis.config_files {
            plugin_result
                .entry_patterns
                .push((config_file.clone(), "scripts".to_string()));
        }
    }
    // Also analyze workspace package.json scripts
    for ws in workspaces {
        let ws_pkg_path = ws.root.join("package.json");
        if let Ok(ws_pkg) = PackageJson::load(&ws_pkg_path)
            && let Some(ref ws_scripts) = ws_pkg.scripts
        {
            let scripts_to_analyze = if config.production {
                scripts::filter_production_scripts(ws_scripts)
            } else {
                ws_scripts.clone()
            };
            let ws_analysis = scripts::analyze_scripts(&scripts_to_analyze, &ws.root);
            plugin_result
                .script_used_packages
                .extend(ws_analysis.used_packages);

            // Workspace config-file refs are workspace-relative; prefix them with the
            // workspace path so they are matchable from the monorepo root.
            let ws_prefix = ws
                .root
                .strip_prefix(&config.root)
                .unwrap_or(&ws.root)
                .to_string_lossy();
            for config_file in &ws_analysis.config_files {
                plugin_result
                    .entry_patterns
                    .push((format!("{ws_prefix}/{config_file}"), "scripts".to_string()));
            }
        }
    }
    let scripts_ms = t.elapsed().as_secs_f64() * 1000.0;

    // Stage 2: Parse all files in parallel and extract imports/exports
    let t = Instant::now();
    let pb = progress.stage_spinner(&format!("Parsing {} files...", files.len()));
    let mut cache_store = if config.no_cache {
        None
    } else {
        cache::CacheStore::load(&config.cache_dir)
    };

    let parse_result = extract::parse_all_files(files, cache_store.as_ref());
    let modules = parse_result.modules;
    let cache_hits = parse_result.cache_hits;
    let cache_misses = parse_result.cache_misses;
    let parse_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Update cache with freshly parsed modules and refresh stale mtime/size entries.
    let t = Instant::now();
    if !config.no_cache {
        // Cache may be absent (first run or failed load) — start a fresh store then.
        let store = cache_store.get_or_insert_with(cache::CacheStore::new);
        update_cache(store, &modules, files);
        // A failed save degrades to a cold cache next run; not fatal.
        if let Err(e) = store.save(&config.cache_dir) {
            tracing::warn!("Failed to save cache: {e}");
        }
    }
    let cache_ms = t.elapsed().as_secs_f64() * 1000.0;

    // Stage 3: Discover entry points (static patterns + plugin-discovered patterns)
    let t = Instant::now();
    let mut entry_points = discover::discover_entry_points(config, files);
    for ws in workspaces {
        let ws_entries = discover::discover_workspace_entry_points(&ws.root, config, files);
        entry_points.extend(ws_entries);
    }
    let plugin_entries = discover::discover_plugin_entry_points(&plugin_result, config, files);
    entry_points.extend(plugin_entries);
    let infra_entries = discover::discover_infrastructure_entry_points(&config.root);
    entry_points.extend(infra_entries);
    let entry_points_ms = t.elapsed().as_secs_f64() * 1000.0;

    // Stage 4: Resolve imports to file IDs
    let t = Instant::now();
    let pb = progress.stage_spinner("Resolving imports...");
    let resolved = resolve::resolve_all_imports(
        &modules,
        files,
        workspaces,
        &plugin_result.active_plugins,
        &plugin_result.path_aliases,
        &config.root,
    );
    let resolve_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Stage 5: Build module graph
    let t = Instant::now();
    let pb = progress.stage_spinner("Building module graph...");
    let graph = graph::ModuleGraph::build(&resolved, &entry_points, files);
    let graph_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();

    // Stage 6: Analyze for dead code (with plugin context and workspace info)
    let t = Instant::now();
    let pb = progress.stage_spinner("Analyzing...");
    let result = analyze::find_dead_code_full(
        &graph,
        config,
        &resolved,
        Some(&plugin_result),
        workspaces,
        &modules,
        collect_usages,
    );
    let analyze_ms = t.elapsed().as_secs_f64() * 1000.0;
    pb.finish_and_clear();
    progress.finish();

    let total_ms = pipeline_start.elapsed().as_secs_f64() * 1000.0;

    let cache_summary = if cache_hits > 0 {
        format!(" ({cache_hits} cached, {cache_misses} parsed)")
    } else {
        String::new()
    };

    // Debug-level stage profile; mirrors the PipelineTimings fields below.
    tracing::debug!(
        "\n┌─ Pipeline Profile ─────────────────────────────\n\
         │  discover files:   {:>8.1}ms  ({} files)\n\
         │  workspaces:       {:>8.1}ms\n\
         │  plugins:          {:>8.1}ms\n\
         │  script analysis:  {:>8.1}ms\n\
         │  parse/extract:    {:>8.1}ms  ({} modules{})\n\
         │  cache update:     {:>8.1}ms\n\
         │  entry points:     {:>8.1}ms  ({} entries)\n\
         │  resolve imports:  {:>8.1}ms\n\
         │  build graph:      {:>8.1}ms\n\
         │  analyze:          {:>8.1}ms\n\
         │  ────────────────────────────────────────────\n\
         │  TOTAL:            {:>8.1}ms\n\
         └─────────────────────────────────────────────────",
        discover_ms,
        files.len(),
        workspaces_ms,
        plugins_ms,
        scripts_ms,
        parse_ms,
        modules.len(),
        cache_summary,
        cache_ms,
        entry_points_ms,
        entry_points.len(),
        resolve_ms,
        graph_ms,
        analyze_ms,
        total_ms,
    );

    let timings = if retain {
        Some(PipelineTimings {
            discover_files_ms: discover_ms,
            file_count: files.len(),
            workspaces_ms,
            workspace_count: workspaces.len(),
            plugins_ms,
            script_analysis_ms: scripts_ms,
            parse_extract_ms: parse_ms,
            module_count: modules.len(),
            cache_hits,
            cache_misses,
            cache_update_ms: cache_ms,
            entry_points_ms,
            entry_point_count: entry_points.len(),
            resolve_imports_ms: resolve_ms,
            build_graph_ms: graph_ms,
            analyze_ms,
            total_ms,
        })
    } else {
        None
    };

    Ok(AnalysisOutput {
        results: result,
        timings,
        graph: if retain { Some(graph) } else { None },
    })
}
346
/// Run plugins for root project and all workspace packages.
///
/// The root project gets a full plugin run; each workspace package then gets a
/// fast-path run using pre-compiled config matchers, and its results are merged
/// into the root result (with path-like patterns re-prefixed so they resolve
/// from the monorepo root).
fn run_plugins(
    config: &ResolvedConfig,
    files: &[discover::DiscoveredFile],
    workspaces: &[fallow_config::WorkspaceInfo],
) -> plugins::AggregatedPluginResult {
    let registry = plugins::PluginRegistry::new(config.external_plugins.clone());
    let file_paths: Vec<std::path::PathBuf> = files.iter().map(|f| f.path.clone()).collect();

    // Run plugins for root project (full run with external plugins, inline config, etc.)
    // A missing/unreadable root package.json yields an empty result rather than an error.
    let pkg_path = config.root.join("package.json");
    let mut result = PackageJson::load(&pkg_path).map_or_else(
        |_| plugins::AggregatedPluginResult::default(),
        |pkg| registry.run(&pkg, &config.root, &file_paths),
    );

    if workspaces.is_empty() {
        return result;
    }

    // Pre-compile config matchers and relative files once for all workspace runs.
    // This avoids re-compiling glob patterns and re-computing relative paths per workspace
    // (previously O(workspaces × plugins × files) glob compilations).
    let precompiled_matchers = registry.precompile_config_matchers();
    let relative_files: Vec<(&std::path::PathBuf, String)> = file_paths
        .iter()
        .map(|f| {
            let rel = f
                .strip_prefix(&config.root)
                .unwrap_or(f)
                .to_string_lossy()
                .into_owned();
            (f, rel)
        })
        .collect();

    // Run plugins for each workspace package using the fast path
    for ws in workspaces {
        let ws_pkg_path = ws.root.join("package.json");
        if let Ok(ws_pkg) = PackageJson::load(&ws_pkg_path) {
            let ws_result = registry.run_workspace_fast(
                &ws_pkg,
                &ws.root,
                &config.root,
                &precompiled_matchers,
                &relative_files,
            );

            // Early skip if workspace produced no results (common for leaf packages)
            if ws_result.active_plugins.is_empty() {
                continue;
            }

            // Workspace plugin patterns are relative to the workspace root (e.g., `jest.setup.ts`),
            // but `discover_plugin_entry_points` matches against paths relative to the monorepo root
            // (e.g., `packages/foo/jest.setup.ts`). Prefix workspace patterns with the workspace
            // path to make them matchable from the monorepo root.
            let ws_prefix = ws
                .root
                .strip_prefix(&config.root)
                .unwrap_or(&ws.root)
                .to_string_lossy();

            // Prefix helper: workspace-relative patterns need the workspace prefix
            // to be matchable from the monorepo root. But patterns that are already
            // project-root-relative (e.g., from angular.json which uses absolute paths
            // like "apps/client/src/styles.css") should not be double-prefixed.
            let prefix_if_needed = |pat: &str| -> String {
                if pat.starts_with(ws_prefix.as_ref()) || pat.starts_with('/') {
                    pat.to_string()
                } else {
                    format!("{ws_prefix}/{pat}")
                }
            };

            for (pat, pname) in &ws_result.entry_patterns {
                result
                    .entry_patterns
                    .push((prefix_if_needed(pat), pname.clone()));
            }
            for (pat, pname) in &ws_result.always_used {
                result
                    .always_used
                    .push((prefix_if_needed(pat), pname.clone()));
            }
            for (pat, pname) in &ws_result.discovered_always_used {
                result
                    .discovered_always_used
                    .push((prefix_if_needed(pat), pname.clone()));
            }
            for (file_pat, exports) in &ws_result.used_exports {
                result
                    .used_exports
                    .push((prefix_if_needed(file_pat), exports.clone()));
            }
            // Merge active plugin names (deduplicated)
            for plugin_name in &ws_result.active_plugins {
                if !result.active_plugins.contains(plugin_name) {
                    result.active_plugins.push(plugin_name.clone());
                }
            }
            // These don't need prefixing (absolute paths / package names)
            result
                .referenced_dependencies
                .extend(ws_result.referenced_dependencies);
            result.setup_files.extend(ws_result.setup_files);
            result
                .tooling_dependencies
                .extend(ws_result.tooling_dependencies);
            // Virtual module prefixes (e.g., Docusaurus @theme/, @site/) are
            // package-name prefixes, not file paths — no workspace prefix needed.
            for prefix in &ws_result.virtual_module_prefixes {
                if !result.virtual_module_prefixes.contains(prefix) {
                    result.virtual_module_prefixes.push(prefix.clone());
                }
            }
        }
    }

    result
}
468
469/// Run analysis on a project directory (with export usages for LSP Code Lens).
470pub fn analyze_project(root: &Path) -> Result<AnalysisResults, FallowError> {
471    let config = default_config(root);
472    analyze_with_usages(&config)
473}
474
475/// Create a default config for a project root.
476pub(crate) fn default_config(root: &Path) -> ResolvedConfig {
477    let user_config = fallow_config::FallowConfig::find_and_load(root)
478        .ok()
479        .flatten();
480    match user_config {
481        Some((config, _path)) => config.resolve(
482            root.to_path_buf(),
483            fallow_config::OutputFormat::Human,
484            num_cpus(),
485            false,
486            true, // quiet: LSP/programmatic callers don't need progress bars
487        ),
488        None => fallow_config::FallowConfig {
489            schema: None,
490            extends: vec![],
491            entry: vec![],
492            ignore_patterns: vec![],
493            framework: vec![],
494            workspaces: None,
495            ignore_dependencies: vec![],
496            ignore_exports: vec![],
497            duplicates: fallow_config::DuplicatesConfig::default(),
498            rules: fallow_config::RulesConfig::default(),
499            production: false,
500            plugins: vec![],
501            overrides: vec![],
502        }
503        .resolve(
504            root.to_path_buf(),
505            fallow_config::OutputFormat::Human,
506            num_cpus(),
507            false,
508            true,
509        ),
510    }
511}
512
/// Number of worker threads to use: the system's available parallelism,
/// or 4 when that query fails (e.g. in restricted environments).
fn num_cpus() -> usize {
    match std::thread::available_parallelism() {
        Ok(n) => n.get(),
        Err(_) => 4,
    }
}