//! `fallow_core/analyze` — analysis passes that aggregate unused files,
//! exports, members, and dependency findings into `AnalysisResults`.
1mod predicates;
2mod unused_deps;
3mod unused_exports;
4mod unused_files;
5mod unused_members;
6
7use rustc_hash::FxHashMap;
8
9use fallow_config::{PackageJson, ResolvedConfig, Severity};
10
11use crate::discover::FileId;
12use crate::extract::ModuleInfo;
13use crate::graph::ModuleGraph;
14use crate::resolve::ResolvedModule;
15use crate::results::*;
16use crate::suppress::{self, IssueKind, Suppression};
17
18use unused_deps::{
19    find_type_only_dependencies, find_unlisted_dependencies, find_unresolved_imports,
20    find_unused_dependencies,
21};
22use unused_exports::{collect_export_usages, find_duplicate_exports, find_unused_exports};
23use unused_files::find_unused_files;
24use unused_members::find_unused_members;
25
/// Pre-computed line offset tables indexed by `FileId`, built during parse and
/// carried through the cache. Eliminates redundant file reads during analysis.
///
/// Values borrow the `line_offsets` slices owned by each `ModuleInfo`, so a
/// map of this type must not outlive the module list it was built from.
pub(crate) type LineOffsetsMap<'a> = FxHashMap<FileId, &'a [u32]>;
29
30/// Convert a byte offset to (line, col) using pre-computed line offsets.
31/// Falls back to `(1, byte_offset)` when no line table is available.
32pub(crate) fn byte_offset_to_line_col(
33    line_offsets_map: &LineOffsetsMap<'_>,
34    file_id: FileId,
35    byte_offset: u32,
36) -> (u32, u32) {
37    line_offsets_map
38        .get(&file_id)
39        .map_or((1, byte_offset), |offsets| {
40            fallow_types::extract::byte_offset_to_line_col(offsets, byte_offset)
41        })
42}
43
/// Read source content from disk, returning empty string on failure.
/// Only used for LSP Code Lens reference resolution where the referencing
/// file may not be in the line offsets map.
fn read_source(path: &std::path::Path) -> String {
    match std::fs::read_to_string(path) {
        Ok(contents) => contents,
        // Best-effort: a missing or unreadable file simply contributes no text.
        Err(_) => String::new(),
    }
}
50
51/// Find all dead code in the project.
52pub fn find_dead_code(graph: &ModuleGraph, config: &ResolvedConfig) -> AnalysisResults {
53    find_dead_code_with_resolved(graph, config, &[], None)
54}
55
56/// Find all dead code, with optional resolved module data and plugin context.
57pub fn find_dead_code_with_resolved(
58    graph: &ModuleGraph,
59    config: &ResolvedConfig,
60    resolved_modules: &[ResolvedModule],
61    plugin_result: Option<&crate::plugins::AggregatedPluginResult>,
62) -> AnalysisResults {
63    find_dead_code_full(
64        graph,
65        config,
66        resolved_modules,
67        plugin_result,
68        &[],
69        &[],
70        false,
71    )
72}
73
/// Find all dead code, with optional resolved module data, plugin context, and workspace info.
///
/// Full analysis entry point: runs every rule whose severity in `config` is
/// not `Severity::Off` and aggregates the findings into one `AnalysisResults`.
///
/// * `resolved_modules` — resolver output; unresolved-import detection is
///   skipped when this is empty.
/// * `plugin_result` — aggregated plugin data (e.g. virtual module prefixes)
///   consulted by the export and dependency rules.
/// * `workspaces` — workspace package info forwarded to the dependency rules.
/// * `modules` — per-file extraction output supplying suppression comments and
///   pre-computed line offset tables.
/// * `collect_usages` — when true, also gathers export usage counts for the
///   LSP Code Lens feature.
pub fn find_dead_code_full(
    graph: &ModuleGraph,
    config: &ResolvedConfig,
    resolved_modules: &[ResolvedModule],
    plugin_result: Option<&crate::plugins::AggregatedPluginResult>,
    workspaces: &[fallow_config::WorkspaceInfo],
    modules: &[ModuleInfo],
    collect_usages: bool,
) -> AnalysisResults {
    let _span = tracing::info_span!("find_dead_code").entered();

    // Build suppression index: FileId -> suppressions.
    // Files without suppressions are omitted so lookups miss cheaply.
    let suppressions_by_file: FxHashMap<FileId, &[Suppression]> = modules
        .iter()
        .filter(|m| !m.suppressions.is_empty())
        .map(|m| (m.file_id, m.suppressions.as_slice()))
        .collect();

    // Build line offset index: FileId -> pre-computed line start offsets.
    // Eliminates redundant file reads for byte-to-line/col conversion.
    let line_offsets_by_file: LineOffsetsMap<'_> = modules
        .iter()
        .filter(|m| !m.line_offsets.is_empty())
        .map(|m| (m.file_id, m.line_offsets.as_slice()))
        .collect();

    let mut results = AnalysisResults::default();

    if config.rules.unused_files != Severity::Off {
        results.unused_files = find_unused_files(graph, &suppressions_by_file);
    }

    // Exports and types are computed together in one pass, then each half is
    // kept only if its rule is enabled.
    if config.rules.unused_exports != Severity::Off || config.rules.unused_types != Severity::Off {
        let (exports, types) = find_unused_exports(
            graph,
            config,
            plugin_result,
            &suppressions_by_file,
            &line_offsets_by_file,
        );
        if config.rules.unused_exports != Severity::Off {
            results.unused_exports = exports;
        }
        if config.rules.unused_types != Severity::Off {
            results.unused_types = types;
        }
    }

    // Same shared-pass pattern for enum and class members.
    if config.rules.unused_enum_members != Severity::Off
        || config.rules.unused_class_members != Severity::Off
    {
        let (enum_members, class_members) = find_unused_members(
            graph,
            config,
            resolved_modules,
            &suppressions_by_file,
            &line_offsets_by_file,
        );
        if config.rules.unused_enum_members != Severity::Off {
            results.unused_enum_members = enum_members;
        }
        if config.rules.unused_class_members != Severity::Off {
            results.unused_class_members = class_members;
        }
    }

    // Build merged dependency set from root + all workspace package.json files.
    // If the root package.json is missing or unparseable, dependency rules are
    // skipped entirely (load errors are deliberately ignored).
    let pkg_path = config.root.join("package.json");
    let pkg = PackageJson::load(&pkg_path).ok();
    if let Some(ref pkg) = pkg {
        if config.rules.unused_dependencies != Severity::Off
            || config.rules.unused_dev_dependencies != Severity::Off
        {
            let (deps, dev_deps) =
                find_unused_dependencies(graph, pkg, config, plugin_result, workspaces);
            if config.rules.unused_dependencies != Severity::Off {
                results.unused_dependencies = deps;
            }
            if config.rules.unused_dev_dependencies != Severity::Off {
                results.unused_dev_dependencies = dev_deps;
            }
        }

        if config.rules.unlisted_dependencies != Severity::Off {
            results.unlisted_dependencies =
                find_unlisted_dependencies(graph, pkg, config, workspaces, plugin_result);
        }
    }

    // Imports into plugin-provided virtual modules are never "unresolved".
    if config.rules.unresolved_imports != Severity::Off && !resolved_modules.is_empty() {
        let virtual_prefixes: Vec<&str> = plugin_result
            .map(|pr| {
                pr.virtual_module_prefixes
                    .iter()
                    .map(|s| s.as_str())
                    .collect()
            })
            .unwrap_or_default();
        results.unresolved_imports = find_unresolved_imports(
            resolved_modules,
            config,
            &suppressions_by_file,
            &virtual_prefixes,
            &line_offsets_by_file,
        );
    }

    if config.rules.duplicate_exports != Severity::Off {
        results.duplicate_exports = find_duplicate_exports(graph, config, &suppressions_by_file);
    }

    // In production mode, detect dependencies that are only used via type-only imports.
    // (Uses a let-chain; requires an edition/toolchain where `if cond && let` is stable.)
    if config.production
        && let Some(ref pkg) = pkg
    {
        results.type_only_dependencies =
            find_type_only_dependencies(graph, pkg, config, workspaces);
    }

    // Detect circular dependencies. A cycle is dropped when ANY file in it
    // carries a file-level suppression for CircularDependency.
    if config.rules.circular_dependencies != Severity::Off {
        let cycles = graph.find_cycles();
        results.circular_dependencies = cycles
            .into_iter()
            .filter(|cycle| {
                // Skip cycles where any participating file has a file-level suppression
                !cycle.iter().any(|&id| {
                    suppressions_by_file.get(&id).is_some_and(|supps| {
                        suppress::is_file_suppressed(supps, IssueKind::CircularDependency)
                    })
                })
            })
            .map(|cycle| {
                let files: Vec<std::path::PathBuf> = cycle
                    .iter()
                    .map(|&id| graph.modules[id.0 as usize].path.clone())
                    .collect();
                let length = files.len();
                CircularDependency { files, length }
            })
            .collect();
    }

    // Collect export usage counts for Code Lens (LSP feature).
    // Skipped in CLI mode since the field is #[serde(skip)] in all output formats.
    if collect_usages {
        results.export_usages = collect_export_usages(graph, &line_offsets_by_file);
    }

    results
}
226
#[cfg(test)]
mod tests {
    use fallow_types::extract::{byte_offset_to_line_col, compute_line_offsets};

    // Helper: compute line offsets from source and convert byte offset.
    // Convention pinned by these tests: lines are 1-based, columns are
    // 0-based BYTE offsets within the line (not character counts).
    fn line_col(source: &str, byte_offset: u32) -> (u32, u32) {
        let offsets = compute_line_offsets(source);
        byte_offset_to_line_col(&offsets, byte_offset)
    }

    // ── compute_line_offsets ─────────────────────────────────────

    #[test]
    fn compute_offsets_empty() {
        // Even empty source has one line starting at offset 0.
        assert_eq!(compute_line_offsets(""), vec![0]);
    }

    #[test]
    fn compute_offsets_single_line() {
        assert_eq!(compute_line_offsets("hello"), vec![0]);
    }

    #[test]
    fn compute_offsets_multiline() {
        // Each entry is the byte offset just past the preceding '\n'.
        assert_eq!(compute_line_offsets("abc\ndef\nghi"), vec![0, 4, 8]);
    }

    #[test]
    fn compute_offsets_trailing_newline() {
        // A trailing '\n' starts a (possibly empty) final line.
        assert_eq!(compute_line_offsets("abc\n"), vec![0, 4]);
    }

    #[test]
    fn compute_offsets_crlf() {
        // Only '\n' terminates a line; the '\r' is just a line byte.
        assert_eq!(compute_line_offsets("ab\r\ncd"), vec![0, 4]);
    }

    #[test]
    fn compute_offsets_consecutive_newlines() {
        assert_eq!(compute_line_offsets("\n\n"), vec![0, 1, 2]);
    }

    // ── byte_offset_to_line_col ─────────────────────────────────

    #[test]
    fn byte_offset_empty_source() {
        assert_eq!(line_col("", 0), (1, 0));
    }

    #[test]
    fn byte_offset_single_line_start() {
        assert_eq!(line_col("hello", 0), (1, 0));
    }

    #[test]
    fn byte_offset_single_line_middle() {
        assert_eq!(line_col("hello", 4), (1, 4));
    }

    #[test]
    fn byte_offset_multiline_start_of_line2() {
        assert_eq!(line_col("line1\nline2\nline3", 6), (2, 0));
    }

    #[test]
    fn byte_offset_multiline_middle_of_line3() {
        assert_eq!(line_col("line1\nline2\nline3", 14), (3, 2));
    }

    #[test]
    fn byte_offset_at_newline_boundary() {
        // The '\n' byte itself belongs to the line it terminates.
        assert_eq!(line_col("line1\nline2", 5), (1, 5));
    }

    #[test]
    fn byte_offset_multibyte_utf8() {
        // Columns count bytes: the 4-byte emoji puts 'x' at col 4, not 1.
        let source = "hi\n\u{1F600}x";
        assert_eq!(line_col(source, 3), (2, 0));
        assert_eq!(line_col(source, 7), (2, 4));
    }

    #[test]
    fn byte_offset_multibyte_accented_chars() {
        // 'é' is 2 bytes, so "café" spans bytes 0..=5 including its '\n'.
        let source = "caf\u{00E9}\nbar";
        assert_eq!(line_col(source, 6), (2, 0));
        assert_eq!(line_col(source, 3), (1, 3));
    }

    #[test]
    fn byte_offset_via_map_fallback() {
        use super::*;
        // Unknown FileId: wrapper falls back to (1, raw_byte_offset).
        let map: LineOffsetsMap<'_> = FxHashMap::default();
        assert_eq!(
            super::byte_offset_to_line_col(&map, FileId(99), 42),
            (1, 42)
        );
    }

    #[test]
    fn byte_offset_via_map_lookup() {
        use super::*;
        // Known FileId: wrapper delegates to the real conversion.
        let offsets = compute_line_offsets("abc\ndef\nghi");
        let mut map: LineOffsetsMap<'_> = FxHashMap::default();
        map.insert(FileId(0), &offsets);
        assert_eq!(super::byte_offset_to_line_col(&map, FileId(0), 5), (2, 1));
    }
}