1use std::path::{Path, PathBuf};
17
18use rustc_hash::{FxHashMap, FxHashSet};
19
20use crate::duplicates::{DuplicationReport, DuplicationStats, families};
21use crate::results::AnalysisResults;
22
/// Validates that `s` is safe to pass to git as a revision argument.
///
/// Rejects empty input, anything option-shaped (leading `-`), and characters
/// outside a conservative allowlist. Inside a `{...}` group (reflog syntax
/// such as `HEAD@{1 week ago}`), `:` and spaces are additionally permitted.
/// Returns the input unchanged on success so callers can use it inline.
pub fn validate_git_ref(s: &str) -> Result<&str, String> {
    if s.is_empty() {
        return Err(String::from("git ref cannot be empty"));
    }
    if s.starts_with('-') {
        return Err(String::from("git ref cannot start with '-'"));
    }
    let mut inside_braces = false;
    for ch in s.chars() {
        if ch == '{' {
            inside_braces = true;
            continue;
        }
        if ch == '}' {
            inside_braces = false;
            continue;
        }
        // Reflog date syntax needs ':' and ' ' between the braces only.
        if inside_braces && (ch == ':' || ch == ' ') {
            continue;
        }
        let allowed = ch.is_ascii_alphanumeric()
            || matches!(ch, '.' | '_' | '-' | '/' | '~' | '^' | '@');
        if !allowed {
            return Err(format!("git ref contains disallowed character: '{ch}'"));
        }
    }
    if inside_braces {
        return Err(String::from("git ref has unclosed '{'"));
    }
    Ok(s)
}
58
/// Failure modes when computing the changed-file set via git.
#[derive(Debug)]
pub enum ChangedFilesError {
    /// Ref rejected by `validate_git_ref`; payload is the validation message.
    InvalidRef(String),
    /// Spawning the `git` process failed; payload is the OS error text.
    GitMissing(String),
    /// git reported "not a git repository" for the working directory.
    NotARepository,
    /// git exited non-zero for another reason; payload is the trimmed stderr.
    GitFailed(String),
}
72
73impl ChangedFilesError {
74 pub fn describe(&self) -> String {
78 match self {
79 Self::InvalidRef(e) => format!("invalid git ref: {e}"),
80 Self::GitMissing(e) => format!("failed to run git: {e}"),
81 Self::NotARepository => "not a git repository".to_owned(),
82 Self::GitFailed(stderr) => augment_git_failed(stderr),
83 }
84 }
85}
86
/// Passes git stderr through, appending a shallow-clone remediation hint
/// when the message matches symptoms of an unresolvable ref.
///
/// Matching is case-insensitive; the original stderr text is preserved
/// verbatim at the front of the returned string.
fn augment_git_failed(stderr: &str) -> String {
    const SHALLOW_CLONE_SYMPTOMS: [&str; 3] = [
        "not a valid object name",
        "unknown revision",
        "ambiguous argument",
    ];
    let lowered = stderr.to_ascii_lowercase();
    let looks_shallow = SHALLOW_CLONE_SYMPTOMS
        .iter()
        .any(|needle| lowered.contains(needle));
    if looks_shallow {
        format!(
            "{stderr} (shallow clone? try `git fetch --unshallow`, or set `fetch-depth: 0` on actions/checkout / `GIT_DEPTH: 0` in GitLab CI)"
        )
    } else {
        stderr.to_owned()
    }
}
105
106pub fn resolve_git_toplevel(cwd: &Path) -> Result<PathBuf, ChangedFilesError> {
117 let output = std::process::Command::new("git")
118 .args(["rev-parse", "--show-toplevel"])
119 .current_dir(cwd)
120 .output()
121 .map_err(|e| ChangedFilesError::GitMissing(e.to_string()))?;
122
123 if !output.status.success() {
124 let stderr = String::from_utf8_lossy(&output.stderr);
125 return Err(if stderr.contains("not a git repository") {
126 ChangedFilesError::NotARepository
127 } else {
128 ChangedFilesError::GitFailed(stderr.trim().to_owned())
129 });
130 }
131
132 let raw = String::from_utf8_lossy(&output.stdout);
133 let trimmed = raw.trim();
134 if trimmed.is_empty() {
135 return Err(ChangedFilesError::GitFailed(
136 "git rev-parse --show-toplevel returned empty output".to_owned(),
137 ));
138 }
139
140 let path = PathBuf::from(trimmed);
141 Ok(path.canonicalize().unwrap_or(path))
142}
143
144fn collect_git_paths(
145 cwd: &Path,
146 toplevel: &Path,
147 args: &[&str],
148) -> Result<FxHashSet<PathBuf>, ChangedFilesError> {
149 let output = std::process::Command::new("git")
150 .args(args)
151 .current_dir(cwd)
152 .output()
153 .map_err(|e| ChangedFilesError::GitMissing(e.to_string()))?;
154
155 if !output.status.success() {
156 let stderr = String::from_utf8_lossy(&output.stderr);
157 return Err(if stderr.contains("not a git repository") {
158 ChangedFilesError::NotARepository
159 } else {
160 ChangedFilesError::GitFailed(stderr.trim().to_owned())
161 });
162 }
163
164 let files: FxHashSet<PathBuf> = String::from_utf8_lossy(&output.stdout)
170 .lines()
171 .filter(|line| !line.is_empty())
172 .map(|line| toplevel.join(line))
173 .collect();
174
175 Ok(files)
176}
177
178pub fn try_get_changed_files(
196 root: &Path,
197 git_ref: &str,
198) -> Result<FxHashSet<PathBuf>, ChangedFilesError> {
199 validate_git_ref(git_ref).map_err(ChangedFilesError::InvalidRef)?;
205 let toplevel = resolve_git_toplevel(root)?;
206 try_get_changed_files_with_toplevel(root, &toplevel, git_ref)
207}
208
209pub fn try_get_changed_files_with_toplevel(
217 cwd: &Path,
218 toplevel: &Path,
219 git_ref: &str,
220) -> Result<FxHashSet<PathBuf>, ChangedFilesError> {
221 validate_git_ref(git_ref).map_err(ChangedFilesError::InvalidRef)?;
222
223 let mut files = collect_git_paths(
224 cwd,
225 toplevel,
226 &[
227 "diff",
228 "--name-only",
229 "--end-of-options",
230 &format!("{git_ref}...HEAD"),
231 ],
232 )?;
233 files.extend(collect_git_paths(
234 cwd,
235 toplevel,
236 &["diff", "--name-only", "HEAD"],
237 )?);
238 files.extend(collect_git_paths(
243 cwd,
244 toplevel,
245 &["ls-files", "--full-name", "--others", "--exclude-standard"],
246 )?);
247 Ok(files)
248}
249
250#[expect(
254 clippy::print_stderr,
255 reason = "intentional user-facing warning for the CLI's --changed-since fallback path; LSP callers use try_get_changed_files instead"
256)]
257pub fn get_changed_files(root: &Path, git_ref: &str) -> Option<FxHashSet<PathBuf>> {
258 match try_get_changed_files(root, git_ref) {
259 Ok(files) => Some(files),
260 Err(ChangedFilesError::InvalidRef(e)) => {
261 eprintln!("Warning: --changed-since ignored: invalid git ref: {e}");
262 None
263 }
264 Err(ChangedFilesError::GitMissing(e)) => {
265 eprintln!("Warning: --changed-since ignored: failed to run git: {e}");
266 None
267 }
268 Err(ChangedFilesError::NotARepository) => {
269 eprintln!("Warning: --changed-since ignored: not a git repository");
270 None
271 }
272 Err(ChangedFilesError::GitFailed(stderr)) => {
273 eprintln!("Warning: --changed-since failed for ref '{git_ref}': {stderr}");
274 None
275 }
276 }
277}
278
279#[expect(
287 clippy::implicit_hasher,
288 reason = "fallow standardizes on FxHashSet across the workspace"
289)]
290pub fn filter_results_by_changed_files(
291 results: &mut AnalysisResults,
292 changed_files: &FxHashSet<PathBuf>,
293) {
294 results
295 .unused_files
296 .retain(|f| changed_files.contains(&f.path));
297 results
298 .unused_exports
299 .retain(|e| changed_files.contains(&e.path));
300 results
301 .unused_types
302 .retain(|e| changed_files.contains(&e.path));
303 results
304 .unused_enum_members
305 .retain(|m| changed_files.contains(&m.path));
306 results
307 .unused_class_members
308 .retain(|m| changed_files.contains(&m.path));
309 results
310 .unresolved_imports
311 .retain(|i| changed_files.contains(&i.path));
312
313 results.unlisted_dependencies.retain(|d| {
315 d.imported_from
316 .iter()
317 .any(|s| changed_files.contains(&s.path))
318 });
319
320 for dup in &mut results.duplicate_exports {
322 dup.locations
323 .retain(|loc| changed_files.contains(&loc.path));
324 }
325 results.duplicate_exports.retain(|d| d.locations.len() >= 2);
326
327 results
329 .circular_dependencies
330 .retain(|c| c.files.iter().any(|f| changed_files.contains(f)));
331
332 results
334 .boundary_violations
335 .retain(|v| changed_files.contains(&v.from_path));
336
337 results
339 .stale_suppressions
340 .retain(|s| changed_files.contains(&s.path));
341}
342
343fn recompute_duplication_stats(report: &DuplicationReport) -> DuplicationStats {
349 let mut files_with_clones: FxHashSet<&Path> = FxHashSet::default();
350 let mut file_dup_lines: FxHashMap<&Path, FxHashSet<usize>> = FxHashMap::default();
351 let mut duplicated_tokens = 0_usize;
352 let mut clone_instances = 0_usize;
353
354 for group in &report.clone_groups {
355 for instance in &group.instances {
356 files_with_clones.insert(&instance.file);
357 clone_instances += 1;
358 let lines = file_dup_lines.entry(&instance.file).or_default();
359 for line in instance.start_line..=instance.end_line {
360 lines.insert(line);
361 }
362 }
363 duplicated_tokens += group.token_count * group.instances.len();
364 }
365
366 let duplicated_lines: usize = file_dup_lines.values().map(FxHashSet::len).sum();
367
368 DuplicationStats {
369 total_files: report.stats.total_files,
370 files_with_clones: files_with_clones.len(),
371 total_lines: report.stats.total_lines,
372 duplicated_lines,
373 total_tokens: report.stats.total_tokens,
374 duplicated_tokens,
375 clone_groups: report.clone_groups.len(),
376 clone_instances,
377 #[expect(
378 clippy::cast_precision_loss,
379 reason = "stat percentages are display-only; precision loss at usize::MAX line counts is acceptable"
380 )]
381 duplication_percentage: if report.stats.total_lines > 0 {
382 (duplicated_lines as f64 / report.stats.total_lines as f64) * 100.0
383 } else {
384 0.0
385 },
386 }
387}
388
389#[expect(
394 clippy::implicit_hasher,
395 reason = "fallow standardizes on FxHashSet across the workspace"
396)]
397pub fn filter_duplication_by_changed_files(
398 report: &mut DuplicationReport,
399 changed_files: &FxHashSet<PathBuf>,
400 root: &Path,
401) {
402 report
403 .clone_groups
404 .retain(|g| g.instances.iter().any(|i| changed_files.contains(&i.file)));
405 report.clone_families = families::group_into_families(&report.clone_groups, root);
406 report.mirrored_directories =
407 families::detect_mirrored_directories(&report.clone_families, root);
408 report.stats = recompute_duplication_stats(report);
409}
410
411#[cfg(test)]
412mod tests {
413 use super::*;
414 use crate::duplicates::{CloneGroup, CloneInstance};
415 use crate::results::{BoundaryViolation, CircularDependency, UnusedExport, UnusedFile};
416
    // `describe()` must surface each variant's payload (or fixed text).
    #[test]
    fn changed_files_error_describe_variants() {
        assert!(
            ChangedFilesError::InvalidRef("bad".to_owned())
                .describe()
                .contains("invalid git ref")
        );
        assert!(
            ChangedFilesError::GitMissing("oops".to_owned())
                .describe()
                .contains("oops")
        );
        assert_eq!(
            ChangedFilesError::NotARepository.describe(),
            "not a git repository"
        );
        assert!(
            ChangedFilesError::GitFailed("bad ref".to_owned())
                .describe()
                .contains("bad ref")
        );
    }
439
    // The shallow-clone hint fires on "unknown revision" stderr while keeping
    // git's original message intact at the front.
    #[test]
    fn augment_git_failed_appends_shallow_clone_hint_for_unknown_revision() {
        let stderr = "fatal: ambiguous argument 'fallow-baseline...HEAD': unknown revision or path not in the working tree.";
        let described = ChangedFilesError::GitFailed(stderr.to_owned()).describe();
        assert!(described.contains(stderr), "original stderr preserved");
        assert!(
            described.contains("shallow clone"),
            "hint surfaced: {described}"
        );
        assert!(
            described.contains("fetch-depth: 0") || described.contains("git fetch --unshallow"),
            "hint actionable: {described}"
        );
    }
454
    // Unrelated git failures pass through without the shallow-clone hint.
    #[test]
    fn augment_git_failed_passthrough_for_other_errors() {
        let stderr = "fatal: refusing to merge unrelated histories";
        let described = ChangedFilesError::GitFailed(stderr.to_owned()).describe();
        assert_eq!(described, stderr);
    }
462
    // Leading '-' refs could be parsed as git options; they must be rejected.
    #[test]
    fn validate_git_ref_rejects_leading_dash() {
        assert!(validate_git_ref("--upload-pack=evil").is_err());
        assert!(validate_git_ref("-flag").is_err());
    }
468
    // The baseline tag name round-trips through validation unchanged.
    #[test]
    fn validate_git_ref_accepts_baseline_tag() {
        assert_eq!(
            validate_git_ref("fallow-baseline").unwrap(),
            "fallow-baseline"
        );
    }
476
    // Validation happens before any git invocation, so a bad ref errors
    // even without a real repository at the given path.
    #[test]
    fn try_get_changed_files_rejects_invalid_ref() {
        let err = try_get_changed_files(Path::new("/"), "--evil")
            .expect_err("leading-dash ref must be rejected");
        assert!(matches!(err, ChangedFilesError::InvalidRef(_)));
        assert!(err.describe().contains("cannot start with"));
    }
485
    // Option-shaped refs (e.g. `--output=...`) must never reach git.
    #[test]
    fn validate_git_ref_rejects_option_like_ref() {
        assert!(validate_git_ref("--output=/tmp/fallow-proof").is_err());
    }
490
    // Reflog syntax with spaces and colons inside braces is legitimate.
    #[test]
    fn validate_git_ref_allows_reflog_relative_date() {
        assert!(validate_git_ref("HEAD@{1 week ago}").is_ok());
    }
495
    // Security check: an option-like ref is rejected before git runs, so git
    // can never be tricked into writing an attacker-chosen output file.
    #[test]
    fn try_get_changed_files_rejects_option_like_ref_before_git() {
        let root = tempfile::tempdir().expect("create temp dir");
        let proof_path = root.path().join("proof");

        let result = try_get_changed_files(
            root.path(),
            &format!("--output={}", proof_path.to_string_lossy()),
        );

        assert!(matches!(result, Err(ChangedFilesError::InvalidRef(_))));
        assert!(
            !proof_path.exists(),
            "invalid changedSince ref must not be passed through to git as an option"
        );
    }
512
    // Per-file findings are narrowed to paths present in the changed set.
    #[test]
    fn filter_results_keeps_only_changed_files() {
        let mut results = AnalysisResults::default();
        results.unused_files.push(UnusedFile {
            path: "/a.ts".into(),
        });
        results.unused_files.push(UnusedFile {
            path: "/b.ts".into(),
        });
        results.unused_exports.push(UnusedExport {
            path: "/a.ts".into(),
            export_name: "foo".into(),
            is_type_only: false,
            line: 1,
            col: 0,
            span_start: 0,
            is_re_export: false,
        });

        let mut changed: FxHashSet<PathBuf> = FxHashSet::default();
        changed.insert("/a.ts".into());

        filter_results_by_changed_files(&mut results, &changed);

        assert_eq!(results.unused_files.len(), 1);
        assert_eq!(results.unused_files[0].path, PathBuf::from("/a.ts"));
        assert_eq!(results.unused_exports.len(), 1);
    }
541
    // Dependency-level findings are not tied to a single changed file, so the
    // filter must leave them alone even when nothing changed.
    #[test]
    fn filter_results_preserves_dependency_level_issues() {
        let mut results = AnalysisResults::default();
        results
            .unused_dependencies
            .push(crate::results::UnusedDependency {
                package_name: "lodash".into(),
                location: crate::results::DependencyLocation::Dependencies,
                path: "/pkg.json".into(),
                line: 3,
            });

        let changed: FxHashSet<PathBuf> = FxHashSet::default();
        filter_results_by_changed_files(&mut results, &changed);

        assert_eq!(results.unused_dependencies.len(), 1);
    }
560
    // A cycle stays relevant when any participating file changed.
    #[test]
    fn filter_results_keeps_circular_dep_when_any_file_changed() {
        let mut results = AnalysisResults::default();
        results.circular_dependencies.push(CircularDependency {
            files: vec!["/a.ts".into(), "/b.ts".into()],
            length: 2,
            line: 1,
            col: 0,
            is_cross_package: false,
        });

        let mut changed: FxHashSet<PathBuf> = FxHashSet::default();
        changed.insert("/b.ts".into());

        filter_results_by_changed_files(&mut results, &changed);
        assert_eq!(results.circular_dependencies.len(), 1);
    }
578
    // A cycle with no changed participant is dropped.
    #[test]
    fn filter_results_drops_circular_dep_when_no_file_changed() {
        let mut results = AnalysisResults::default();
        results.circular_dependencies.push(CircularDependency {
            files: vec!["/a.ts".into(), "/b.ts".into()],
            length: 2,
            line: 1,
            col: 0,
            is_cross_package: false,
        });

        let changed: FxHashSet<PathBuf> = FxHashSet::default();
        filter_results_by_changed_files(&mut results, &changed);
        assert!(results.circular_dependencies.is_empty());
    }
594
    // Boundary violations key off the importing file: a change to the
    // imported file alone is not enough to keep the finding.
    #[test]
    fn filter_results_drops_boundary_violation_when_importer_unchanged() {
        let mut results = AnalysisResults::default();
        results.boundary_violations.push(BoundaryViolation {
            from_path: "/a.ts".into(),
            to_path: "/b.ts".into(),
            from_zone: "ui".into(),
            to_zone: "data".into(),
            import_specifier: "../data/db".into(),
            line: 1,
            col: 0,
        });

        let mut changed: FxHashSet<PathBuf> = FxHashSet::default();
        changed.insert("/b.ts".into());

        filter_results_by_changed_files(&mut results, &changed);
        assert!(results.boundary_violations.is_empty());
    }
615
    // One changed instance keeps the whole clone group, and the stats are
    // recomputed over the surviving groups.
    #[test]
    fn filter_duplication_keeps_groups_with_at_least_one_changed_instance() {
        let mut report = DuplicationReport {
            clone_groups: vec![CloneGroup {
                instances: vec![
                    CloneInstance {
                        file: "/a.ts".into(),
                        start_line: 1,
                        end_line: 5,
                        start_col: 0,
                        end_col: 10,
                        fragment: "code".into(),
                    },
                    CloneInstance {
                        file: "/b.ts".into(),
                        start_line: 1,
                        end_line: 5,
                        start_col: 0,
                        end_col: 10,
                        fragment: "code".into(),
                    },
                ],
                token_count: 20,
                line_count: 5,
            }],
            clone_families: vec![],
            mirrored_directories: vec![],
            stats: DuplicationStats {
                total_files: 2,
                files_with_clones: 2,
                total_lines: 100,
                duplicated_lines: 10,
                total_tokens: 200,
                duplicated_tokens: 40,
                clone_groups: 1,
                clone_instances: 2,
                duplication_percentage: 10.0,
            },
        };

        let mut changed: FxHashSet<PathBuf> = FxHashSet::default();
        changed.insert("/a.ts".into());

        filter_duplication_by_changed_files(&mut report, &changed, Path::new(""));
        assert_eq!(report.clone_groups.len(), 1);
        assert_eq!(report.stats.clone_groups, 1);
        assert_eq!(report.stats.clone_instances, 2);
    }
665
    // Creates a deterministic single-commit repo tagged `fallow-baseline` and
    // returns its canonicalized root (so comparisons are symlink-free;
    // tempdirs are often behind symlinks, e.g. /var on macOS).
    fn init_repo(repo: &Path) -> PathBuf {
        run_git(repo, &["init", "--quiet", "--initial-branch=main"]);
        run_git(repo, &["config", "user.email", "test@example.com"]);
        run_git(repo, &["config", "user.name", "test"]);
        run_git(repo, &["config", "commit.gpgsign", "false"]);
        std::fs::write(repo.join("seed.txt"), "seed\n").unwrap();
        run_git(repo, &["add", "seed.txt"]);
        run_git(repo, &["commit", "--quiet", "-m", "initial"]);
        run_git(repo, &["tag", "fallow-baseline"]);
        repo.canonicalize().unwrap()
    }
688
    // Runs a git command in `cwd`, failing the test with git's stderr on a
    // non-zero exit code.
    fn run_git(cwd: &Path, args: &[&str]) {
        let output = std::process::Command::new("git")
            .args(args)
            .current_dir(cwd)
            .output()
            .expect("git available");
        assert!(
            output.status.success(),
            "git {args:?} failed: {}",
            String::from_utf8_lossy(&output.stderr)
        );
    }
701
    // A new untracked file at the repo root shows up in the changed set.
    #[test]
    fn try_get_changed_files_workspace_at_repo_root() {
        let tmp = tempfile::tempdir().unwrap();
        let repo = init_repo(tmp.path());
        std::fs::create_dir_all(repo.join("src")).unwrap();
        std::fs::write(repo.join("src/new.ts"), "export const x = 1;\n").unwrap();

        let changed = try_get_changed_files(&repo, "fallow-baseline").unwrap();

        let expected = repo.join("src/new.ts");
        assert!(
            changed.contains(&expected),
            "changed set should contain {expected:?}; actual: {changed:?}"
        );
    }
719
    // When cwd is a subdirectory, output paths must be joined onto the repo
    // toplevel, not onto cwd (which would double the subdirectory segment).
    #[test]
    fn try_get_changed_files_workspace_in_subdirectory() {
        let tmp = tempfile::tempdir().unwrap();
        let repo = init_repo(tmp.path());
        let frontend = repo.join("frontend");
        std::fs::create_dir_all(frontend.join("src")).unwrap();
        std::fs::write(frontend.join("src/new.ts"), "export const x = 1;\n").unwrap();

        let changed = try_get_changed_files(&frontend, "fallow-baseline").unwrap();

        let expected = repo.join("frontend/src/new.ts");
        assert!(
            changed.contains(&expected),
            "changed set should contain canonical {expected:?}; actual: {changed:?}"
        );
        let bogus = frontend.join("frontend/src/new.ts");
        assert!(
            !changed.contains(&bogus),
            "changed set must not contain double-frontend path {bogus:?}"
        );
    }
749
    // Committed changes outside the cwd subtree are still reported: the diff
    // against the baseline covers the whole repository.
    #[test]
    fn try_get_changed_files_includes_committed_sibling_changes() {
        let tmp = tempfile::tempdir().unwrap();
        let repo = init_repo(tmp.path());
        let backend = repo.join("backend");
        std::fs::create_dir_all(&backend).unwrap();
        std::fs::write(backend.join("server.py"), "print('hi')\n").unwrap();
        run_git(&repo, &["add", "."]);
        run_git(&repo, &["commit", "--quiet", "-m", "add backend"]);

        let frontend = repo.join("frontend");
        std::fs::create_dir_all(&frontend).unwrap();

        let changed = try_get_changed_files(&frontend, "fallow-baseline").unwrap();

        let expected = repo.join("backend/server.py");
        assert!(
            changed.contains(&expected),
            "committed sibling backend/server.py should be in the set: {changed:?}"
        );
    }
785
    // An unstaged edit to a tracked file is picked up (via the `diff HEAD`
    // leg of the collection).
    #[test]
    fn try_get_changed_files_includes_modified_tracked_file() {
        let tmp = tempfile::tempdir().unwrap();
        let repo = init_repo(tmp.path());
        let frontend = repo.join("frontend");
        std::fs::create_dir_all(frontend.join("src")).unwrap();
        std::fs::write(frontend.join("src/old.ts"), "export const x = 1;\n").unwrap();
        run_git(&repo, &["add", "."]);
        run_git(&repo, &["commit", "--quiet", "-m", "add old"]);
        run_git(&repo, &["tag", "fallow-baseline-v2"]);
        std::fs::write(frontend.join("src/old.ts"), "export const x = 2;\n").unwrap();

        let changed = try_get_changed_files(&frontend, "fallow-baseline-v2").unwrap();

        let expected = repo.join("frontend/src/old.ts");
        assert!(
            changed.contains(&expected),
            "modified tracked file {expected:?} missing from set: {changed:?}"
        );
    }
810
    // The resolved toplevel should already be canonical, and should match the
    // canonicalized repo root.
    #[test]
    fn resolve_git_toplevel_returns_canonical_path() {
        let tmp = tempfile::tempdir().unwrap();
        let repo = init_repo(tmp.path());
        let frontend = repo.join("frontend");
        std::fs::create_dir_all(&frontend).unwrap();

        let toplevel = resolve_git_toplevel(&frontend).unwrap();
        assert_eq!(toplevel, repo, "toplevel should equal canonical repo root");
        assert_eq!(
            toplevel,
            toplevel.canonicalize().unwrap(),
            "resolved toplevel should already be canonical"
        );
    }
831
    // Outside any repo, git's stderr is mapped to NotARepository.
    #[test]
    fn resolve_git_toplevel_not_a_repository() {
        let tmp = tempfile::tempdir().unwrap();
        let result = resolve_git_toplevel(tmp.path());
        assert!(
            matches!(result, Err(ChangedFilesError::NotARepository)),
            "expected NotARepository, got {result:?}"
        );
    }
844
    // The wrapper surfaces NotARepository from toplevel resolution.
    #[test]
    fn try_get_changed_files_not_a_repository() {
        let tmp = tempfile::tempdir().unwrap();
        let result = try_get_changed_files(tmp.path(), "main");
        assert!(matches!(result, Err(ChangedFilesError::NotARepository)));
    }
853
    // With no changed instances, every group is dropped and the recomputed
    // stats (groups, instances, percentage) zero out.
    #[test]
    fn filter_duplication_drops_groups_with_no_changed_instance() {
        let mut report = DuplicationReport {
            clone_groups: vec![CloneGroup {
                instances: vec![CloneInstance {
                    file: "/a.ts".into(),
                    start_line: 1,
                    end_line: 5,
                    start_col: 0,
                    end_col: 10,
                    fragment: "code".into(),
                }],
                token_count: 20,
                line_count: 5,
            }],
            clone_families: vec![],
            mirrored_directories: vec![],
            stats: DuplicationStats {
                total_files: 1,
                files_with_clones: 1,
                total_lines: 100,
                duplicated_lines: 5,
                total_tokens: 100,
                duplicated_tokens: 20,
                clone_groups: 1,
                clone_instances: 1,
                duplication_percentage: 5.0,
            },
        };

        let changed: FxHashSet<PathBuf> = FxHashSet::default();
        filter_duplication_by_changed_files(&mut report, &changed, Path::new(""));
        assert!(report.clone_groups.is_empty());
        assert_eq!(report.stats.clone_groups, 0);
        assert_eq!(report.stats.clone_instances, 0);
        assert!((report.stats.duplication_percentage - 0.0).abs() < f64::EPSILON);
    }
891}