pub mod detect;
pub mod families;
pub mod normalize;
pub mod token_types;
mod token_visitor;
pub mod tokenize;
pub(crate) mod types;

use rustc_hash::FxHashMap;
use std::path::{Path, PathBuf};

use globset::{Glob, GlobSet, GlobSetBuilder};
use rayon::prelude::*;

use detect::CloneDetector;
use normalize::normalize_and_hash_resolved;
use tokenize::{tokenize_file, tokenize_file_cross_language};
pub use types::{
    CloneFamily, CloneGroup, CloneInstance, DetectionMode, DuplicatesConfig, DuplicationReport,
    DuplicationStats, RefactoringKind, RefactoringSuggestion,
};

use crate::discover::{self, DiscoveredFile};
use crate::suppress::{self, IssueKind, Suppression};

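/// Runs token-based clone detection over the given `files` and returns a
/// [`DuplicationReport`].
///
/// Per file (in parallel): apply the configured ignore globs, read the source,
/// honor file-wide suppressions, tokenize (optionally in cross-language mode),
/// then normalize and hash the token stream. Files below `min_tokens` are
/// skipped. The hashed streams are fed to [`CloneDetector`], line-level
/// suppressions are applied to its output, and the surviving clone groups are
/// grouped into families.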
pub fn find_duplicates(
    root: &Path,
    files: &[DiscoveredFile],
    config: &DuplicatesConfig,
) -> DuplicationReport {
    let _span = tracing::info_span!("find_duplicates").entered();

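    // Extra ignore globs from the config are matched against root-relative paths.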
    let extra_ignores = build_ignore_set(&config.ignore);

    let normalization =
        fallow_config::ResolvedNormalization::resolve(config.mode, &config.normalization);

    let strip_types = config.cross_language;

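    // Tokenize, normalize, and hash every file in parallel, dropping files that
    // are ignored, fully suppressed, empty after tokenization, or too small to
    // ever form a clone.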
    let file_data: Vec<(
        PathBuf,
        Vec<normalize::HashedToken>,
        tokenize::FileTokens,
        Vec<Suppression>,
    )> = files
        .par_iter()
        .filter_map(|file| {
            let relative = file.path.strip_prefix(root).unwrap_or(&file.path);
            if let Some(ref ignores) = extra_ignores
                && ignores.is_match(relative)
            {
                return None;
            }

            let source = std::fs::read_to_string(&file.path).ok()?;

            let suppressions = suppress::parse_suppressions_from_source(&source);

            if suppress::is_file_suppressed(&suppressions, IssueKind::CodeDuplication) {
                return None;
            }

            let file_tokens = if strip_types {
                tokenize_file_cross_language(&file.path, &source, true)
            } else {
                tokenize_file(&file.path, &source)
            };
            if file_tokens.tokens.is_empty() {
                return None;
            }

            let hashed = normalize_and_hash_resolved(&file_tokens.tokens, normalization);
            if hashed.len() < config.min_tokens {
                return None;
            }

            Some((file.path.clone(), hashed, file_tokens, suppressions))
        })
        .collect();

    tracing::info!(
        files = file_data.len(),
        "tokenized files for duplication analysis"
    );

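    // Keep per-file suppressions so line-level ones can be applied to the
    // detector output below.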
    let suppressions_by_file: FxHashMap<PathBuf, Vec<Suppression>> = file_data
        .iter()
        .filter(|(_, _, _, supps)| !supps.is_empty())
        .map(|(path, _, _, supps)| (path.clone(), supps.clone()))
        .collect();

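    // The detector itself only needs the path, hashed tokens, and raw tokens.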
    let detector_data: Vec<(PathBuf, Vec<normalize::HashedToken>, tokenize::FileTokens)> =
        file_data
            .into_iter()
            .map(|(path, hashed, tokens, _)| (path, hashed, tokens))
            .collect();

    let detector = CloneDetector::new(config.min_tokens, config.min_lines, config.skip_local);
    let mut report = detector.detect(detector_data);

    if !suppressions_by_file.is_empty() {
        apply_line_suppressions(&mut report, &suppressions_by_file);
    }

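    // Cluster the detected clone groups into higher-level families for reporting.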
    report.clone_families = families::group_into_families(&report.clone_groups, root);

    report
}

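/// Drops clone instances whose line range overlaps a line-level
/// `code-duplication` suppression, then removes any group left with fewer
/// than two instances.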
#[expect(
    clippy::cast_possible_truncation,
    reason = "line numbers are bounded by source size"
)]
fn apply_line_suppressions(
    report: &mut DuplicationReport,
    suppressions_by_file: &FxHashMap<PathBuf, Vec<Suppression>>,
) {
    report.clone_groups.retain_mut(|group| {
        group.instances.retain(|instance| {
            if let Some(supps) = suppressions_by_file.get(&instance.file) {
                for line in instance.start_line..=instance.end_line {
                    if suppress::is_suppressed(supps, line as u32, IssueKind::CodeDuplication) {
                        return false;
                    }
                }
            }
            true
        });
        group.instances.len() >= 2
    });
}

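/// Convenience wrapper: discovers files under `root` using the default
/// project configuration and runs [`find_duplicates`] over them.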
#[must_use]
pub fn find_duplicates_in_project(root: &Path, config: &DuplicatesConfig) -> DuplicationReport {
    let resolved = crate::default_config(root);
    let files = discover::discover_files(&resolved);
    find_duplicates(root, &files, config)
}

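/// Compiles user-supplied ignore patterns into a [`GlobSet`]. Returns `None`
/// when no patterns are configured; invalid patterns are logged and skipped.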
fn build_ignore_set(patterns: &[String]) -> Option<GlobSet> {
    if patterns.is_empty() {
        return None;
    }

    let mut builder = GlobSetBuilder::new();
    for pattern in patterns {
        match Glob::new(pattern) {
            Ok(glob) => {
                builder.add(glob);
            }
            Err(e) => {
                tracing::warn!("Invalid duplication ignore pattern '{pattern}': {e}");
            }
        }
    }

    builder.build().ok()
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::discover::FileId;

    #[test]
    fn find_duplicates_empty_files() {
        let config = DuplicatesConfig::default();
        let report = find_duplicates(Path::new("/tmp"), &[], &config);
        assert!(report.clone_groups.is_empty());
        assert!(report.clone_families.is_empty());
        assert_eq!(report.stats.total_files, 0);
    }

    #[test]
    fn build_ignore_set_empty() {
        assert!(build_ignore_set(&[]).is_none());
    }

    #[test]
    fn build_ignore_set_valid_patterns() {
        let set = build_ignore_set(&["**/*.test.ts".to_string(), "**/*.spec.ts".to_string()]);
        assert!(set.is_some());
        let set = set.unwrap();
        assert!(set.is_match("src/foo.test.ts"));
        assert!(set.is_match("src/bar.spec.ts"));
        assert!(!set.is_match("src/baz.ts"));
    }

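    // A hedged sketch, not part of the original suite: it exercises the
    // invalid-pattern branch of `build_ignore_set`, assuming `globset::Glob::new`
    // rejects an unclosed character class ("[") so the bad pattern is skipped
    // with a warning while the remaining valid glob still builds.
    #[test]
    fn build_ignore_set_skips_invalid_patterns() {
        let set = build_ignore_set(&["[".to_string(), "**/*.min.js".to_string()]);
        let set = set.expect("valid pattern should still produce a glob set");
        assert!(set.is_match("dist/app.min.js"));
        assert!(!set.is_match("src/app.js"));
    }
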
    #[test]
    fn find_duplicates_with_real_files() {
        let dir = tempfile::tempdir().expect("create temp dir");
        let src_dir = dir.path().join("src");
        std::fs::create_dir_all(&src_dir).expect("create src dir");

        let code = r#"
export function processData(input: string): string {
  const trimmed = input.trim();
  if (trimmed.length === 0) {
    return "";
  }
  const parts = trimmed.split(",");
  const filtered = parts.filter(p => p.length > 0);
  const mapped = filtered.map(p => p.toUpperCase());
  return mapped.join(", ");
}

export function validateInput(data: string): boolean {
  if (data === null || data === undefined) {
    return false;
  }
  const cleaned = data.trim();
  if (cleaned.length < 3) {
    return false;
  }
  return true;
}
"#;

        std::fs::write(src_dir.join("original.ts"), code).expect("write original");
        std::fs::write(src_dir.join("copy.ts"), code).expect("write copy");
        std::fs::write(dir.path().join("package.json"), r#"{"name": "test"}"#)
            .expect("write package.json");

        let files = vec![
            DiscoveredFile {
                id: FileId(0),
                path: src_dir.join("original.ts"),
                size_bytes: code.len() as u64,
            },
            DiscoveredFile {
                id: FileId(1),
                path: src_dir.join("copy.ts"),
                size_bytes: code.len() as u64,
            },
        ];

        let config = DuplicatesConfig {
            min_tokens: 10,
            min_lines: 2,
            ..DuplicatesConfig::default()
        };

        let report = find_duplicates(dir.path(), &files, &config);
        assert!(
            !report.clone_groups.is_empty(),
            "Should detect clones in identical files"
        );
        assert!(report.stats.files_with_clones >= 2);

        assert!(
            !report.clone_families.is_empty(),
            "Should group clones into families"
        );
    }

    #[test]
    fn file_wide_suppression_excludes_file() {
        let dir = tempfile::tempdir().expect("create temp dir");
        let src_dir = dir.path().join("src");
        std::fs::create_dir_all(&src_dir).expect("create src dir");

        let code = r#"
export function processData(input: string): string {
  const trimmed = input.trim();
  if (trimmed.length === 0) {
    return "";
  }
  const parts = trimmed.split(",");
  const filtered = parts.filter(p => p.length > 0);
  const mapped = filtered.map(p => p.toUpperCase());
  return mapped.join(", ");
}
"#;
        let suppressed_code = format!("// fallow-ignore-file code-duplication\n{code}");

        std::fs::write(src_dir.join("original.ts"), code).expect("write original");
        std::fs::write(src_dir.join("suppressed.ts"), &suppressed_code).expect("write suppressed");
        std::fs::write(dir.path().join("package.json"), r#"{"name": "test"}"#)
            .expect("write package.json");

        let files = vec![
            DiscoveredFile {
                id: FileId(0),
                path: src_dir.join("original.ts"),
                size_bytes: code.len() as u64,
            },
            DiscoveredFile {
                id: FileId(1),
                path: src_dir.join("suppressed.ts"),
                size_bytes: suppressed_code.len() as u64,
            },
        ];

        let config = DuplicatesConfig {
            min_tokens: 10,
            min_lines: 2,
            ..DuplicatesConfig::default()
        };

        let report = find_duplicates(dir.path(), &files, &config);
        assert!(
            report.clone_groups.is_empty(),
            "File-wide suppression should exclude file from duplication analysis"
        );
    }
}
345}