1use std::path::{Path, PathBuf};
17
18use rustc_hash::{FxHashMap, FxHashSet};
19
20use crate::duplicates::{DuplicationReport, DuplicationStats, families};
21use crate::results::AnalysisResults;
22
/// Validate a user-supplied git ref before it is ever passed to `git`.
///
/// Rejects empty refs and anything starting with `-` (which git would parse
/// as an option), then allows only a conservative character set: ASCII
/// alphanumerics plus `. _ - / ~ ^ @ { }`, with `:` and spaces permitted
/// only inside a `{…}` group (e.g. reflog specs like `HEAD@{1 week ago}`).
///
/// Returns the ref unchanged on success, or a human-readable reason on
/// failure. An unclosed `{` is also an error.
pub fn validate_git_ref(s: &str) -> Result<&str, String> {
    if s.is_empty() {
        return Err(String::from("git ref cannot be empty"));
    }
    if s.starts_with('-') {
        return Err(String::from("git ref cannot start with '-'"));
    }

    let mut brace_open = false;
    for ch in s.chars() {
        if ch == '{' {
            brace_open = true;
            continue;
        }
        if ch == '}' {
            brace_open = false;
            continue;
        }
        let allowed = ch.is_ascii_alphanumeric()
            || matches!(ch, '.' | '_' | '-' | '/' | '~' | '^' | '@')
            // `:` and spaces are only meaningful (and safe) inside `{…}`.
            || (brace_open && matches!(ch, ':' | ' '));
        if !allowed {
            return Err(format!("git ref contains disallowed character: '{ch}'"));
        }
    }

    if brace_open {
        return Err(String::from("git ref has unclosed '{'"));
    }
    Ok(s)
}
58
/// Failure modes when computing the changed-file set via git.
#[derive(Debug)]
pub enum ChangedFilesError {
    /// The user-supplied ref failed `validate_git_ref`; payload is the reason.
    InvalidRef(String),
    /// The `git` binary could not be spawned at all (spawn error message).
    GitMissing(String),
    /// git reported the directory is not inside a repository.
    NotARepository,
    /// git exited non-zero for any other reason; payload is trimmed stderr.
    GitFailed(String),
}
72
73impl ChangedFilesError {
74 pub fn describe(&self) -> String {
78 match self {
79 Self::InvalidRef(e) => format!("invalid git ref: {e}"),
80 Self::GitMissing(e) => format!("failed to run git: {e}"),
81 Self::NotARepository => "not a git repository".to_owned(),
82 Self::GitFailed(stderr) => augment_git_failed(stderr),
83 }
84 }
85}
86
/// Append an actionable shallow-clone hint to git's stderr when the failure
/// looks like an unresolvable ref (the usual symptom of a shallow CI checkout
/// that doesn't contain the baseline commit); otherwise pass stderr through
/// unchanged. Matching is case-insensitive.
fn augment_git_failed(stderr: &str) -> String {
    const MISSING_REF_MARKERS: [&str; 3] = [
        "not a valid object name",
        "unknown revision",
        "ambiguous argument",
    ];

    let lowered = stderr.to_ascii_lowercase();
    let looks_like_missing_ref = MISSING_REF_MARKERS
        .iter()
        .any(|marker| lowered.contains(marker));

    if looks_like_missing_ref {
        format!(
            "{stderr} (shallow clone? try `git fetch --unshallow`, or set `fetch-depth: 0` on actions/checkout / `GIT_DEPTH: 0` in GitLab CI)"
        )
    } else {
        stderr.to_owned()
    }
}
105
106pub fn resolve_git_toplevel(cwd: &Path) -> Result<PathBuf, ChangedFilesError> {
117 let output = std::process::Command::new("git")
118 .args(["rev-parse", "--show-toplevel"])
119 .current_dir(cwd)
120 .output()
121 .map_err(|e| ChangedFilesError::GitMissing(e.to_string()))?;
122
123 if !output.status.success() {
124 let stderr = String::from_utf8_lossy(&output.stderr);
125 return Err(if stderr.contains("not a git repository") {
126 ChangedFilesError::NotARepository
127 } else {
128 ChangedFilesError::GitFailed(stderr.trim().to_owned())
129 });
130 }
131
132 let raw = String::from_utf8_lossy(&output.stdout);
133 let trimmed = raw.trim();
134 if trimmed.is_empty() {
135 return Err(ChangedFilesError::GitFailed(
136 "git rev-parse --show-toplevel returned empty output".to_owned(),
137 ));
138 }
139
140 let path = PathBuf::from(trimmed);
141 Ok(path.canonicalize().unwrap_or(path))
142}
143
144fn collect_git_paths(
145 cwd: &Path,
146 toplevel: &Path,
147 args: &[&str],
148) -> Result<FxHashSet<PathBuf>, ChangedFilesError> {
149 let output = std::process::Command::new("git")
150 .args(args)
151 .current_dir(cwd)
152 .output()
153 .map_err(|e| ChangedFilesError::GitMissing(e.to_string()))?;
154
155 if !output.status.success() {
156 let stderr = String::from_utf8_lossy(&output.stderr);
157 return Err(if stderr.contains("not a git repository") {
158 ChangedFilesError::NotARepository
159 } else {
160 ChangedFilesError::GitFailed(stderr.trim().to_owned())
161 });
162 }
163
164 let files: FxHashSet<PathBuf> = String::from_utf8_lossy(&output.stdout)
170 .lines()
171 .filter(|line| !line.is_empty())
172 .map(|line| toplevel.join(line))
173 .collect();
174
175 Ok(files)
176}
177
178pub fn try_get_changed_files(
196 root: &Path,
197 git_ref: &str,
198) -> Result<FxHashSet<PathBuf>, ChangedFilesError> {
199 validate_git_ref(git_ref).map_err(ChangedFilesError::InvalidRef)?;
205 let toplevel = resolve_git_toplevel(root)?;
206 try_get_changed_files_with_toplevel(root, &toplevel, git_ref)
207}
208
209pub fn try_get_changed_files_with_toplevel(
217 cwd: &Path,
218 toplevel: &Path,
219 git_ref: &str,
220) -> Result<FxHashSet<PathBuf>, ChangedFilesError> {
221 validate_git_ref(git_ref).map_err(ChangedFilesError::InvalidRef)?;
222
223 let mut files = collect_git_paths(
224 cwd,
225 toplevel,
226 &[
227 "diff",
228 "--name-only",
229 "--end-of-options",
230 &format!("{git_ref}...HEAD"),
231 ],
232 )?;
233 files.extend(collect_git_paths(
234 cwd,
235 toplevel,
236 &["diff", "--name-only", "HEAD"],
237 )?);
238 files.extend(collect_git_paths(
243 cwd,
244 toplevel,
245 &["ls-files", "--full-name", "--others", "--exclude-standard"],
246 )?);
247 Ok(files)
248}
249
250#[expect(
254 clippy::print_stderr,
255 reason = "intentional user-facing warning for the CLI's --changed-since fallback path; LSP callers use try_get_changed_files instead"
256)]
257pub fn get_changed_files(root: &Path, git_ref: &str) -> Option<FxHashSet<PathBuf>> {
258 match try_get_changed_files(root, git_ref) {
259 Ok(files) => Some(files),
260 Err(ChangedFilesError::InvalidRef(e)) => {
261 eprintln!("Warning: --changed-since ignored: invalid git ref: {e}");
262 None
263 }
264 Err(ChangedFilesError::GitMissing(e)) => {
265 eprintln!("Warning: --changed-since ignored: failed to run git: {e}");
266 None
267 }
268 Err(ChangedFilesError::NotARepository) => {
269 eprintln!("Warning: --changed-since ignored: not a git repository");
270 None
271 }
272 Err(ChangedFilesError::GitFailed(stderr)) => {
273 eprintln!("Warning: --changed-since failed for ref '{git_ref}': {stderr}");
274 None
275 }
276 }
277}
278
279#[expect(
287 clippy::implicit_hasher,
288 reason = "fallow standardizes on FxHashSet across the workspace"
289)]
290pub fn filter_results_by_changed_files(
291 results: &mut AnalysisResults,
292 changed_files: &FxHashSet<PathBuf>,
293) {
294 results
295 .unused_files
296 .retain(|f| changed_files.contains(&f.path));
297 results
298 .unused_exports
299 .retain(|e| changed_files.contains(&e.path));
300 results
301 .unused_types
302 .retain(|e| changed_files.contains(&e.path));
303 results
304 .unused_enum_members
305 .retain(|m| changed_files.contains(&m.path));
306 results
307 .unused_class_members
308 .retain(|m| changed_files.contains(&m.path));
309 results
310 .unresolved_imports
311 .retain(|i| changed_files.contains(&i.path));
312
313 results.unlisted_dependencies.retain(|d| {
315 d.imported_from
316 .iter()
317 .any(|s| changed_files.contains(&s.path))
318 });
319
320 for dup in &mut results.duplicate_exports {
322 dup.locations
323 .retain(|loc| changed_files.contains(&loc.path));
324 }
325 results.duplicate_exports.retain(|d| d.locations.len() >= 2);
326
327 results
329 .circular_dependencies
330 .retain(|c| c.files.iter().any(|f| changed_files.contains(f)));
331
332 results
334 .boundary_violations
335 .retain(|v| changed_files.contains(&v.from_path));
336
337 results
339 .stale_suppressions
340 .retain(|s| changed_files.contains(&s.path));
341}
342
343fn recompute_duplication_stats(report: &DuplicationReport) -> DuplicationStats {
349 let mut files_with_clones: FxHashSet<&Path> = FxHashSet::default();
350 let mut file_dup_lines: FxHashMap<&Path, FxHashSet<usize>> = FxHashMap::default();
351 let mut duplicated_tokens = 0_usize;
352 let mut clone_instances = 0_usize;
353
354 for group in &report.clone_groups {
355 for instance in &group.instances {
356 files_with_clones.insert(&instance.file);
357 clone_instances += 1;
358 let lines = file_dup_lines.entry(&instance.file).or_default();
359 for line in instance.start_line..=instance.end_line {
360 lines.insert(line);
361 }
362 }
363 duplicated_tokens += group.token_count * group.instances.len();
364 }
365
366 let duplicated_lines: usize = file_dup_lines.values().map(FxHashSet::len).sum();
367
368 DuplicationStats {
369 total_files: report.stats.total_files,
370 files_with_clones: files_with_clones.len(),
371 total_lines: report.stats.total_lines,
372 duplicated_lines,
373 total_tokens: report.stats.total_tokens,
374 duplicated_tokens,
375 clone_groups: report.clone_groups.len(),
376 clone_instances,
377 #[expect(
378 clippy::cast_precision_loss,
379 reason = "stat percentages are display-only; precision loss at usize::MAX line counts is acceptable"
380 )]
381 duplication_percentage: if report.stats.total_lines > 0 {
382 (duplicated_lines as f64 / report.stats.total_lines as f64) * 100.0
383 } else {
384 0.0
385 },
386 }
387}
388
389#[expect(
394 clippy::implicit_hasher,
395 reason = "fallow standardizes on FxHashSet across the workspace"
396)]
397pub fn filter_duplication_by_changed_files(
398 report: &mut DuplicationReport,
399 changed_files: &FxHashSet<PathBuf>,
400 root: &Path,
401) {
402 report
403 .clone_groups
404 .retain(|g| g.instances.iter().any(|i| changed_files.contains(&i.file)));
405 report.clone_families = families::group_into_families(&report.clone_groups, root);
406 report.mirrored_directories =
407 families::detect_mirrored_directories(&report.clone_families, root);
408 report.stats = recompute_duplication_stats(report);
409}
410
#[cfg(test)]
mod tests {
    use super::*;
    use crate::duplicates::{CloneGroup, CloneInstance};
    use crate::results::{BoundaryViolation, CircularDependency, UnusedExport, UnusedFile};

    #[test]
    fn changed_files_error_describe_variants() {
        assert!(
            ChangedFilesError::InvalidRef("bad".to_owned())
                .describe()
                .contains("invalid git ref")
        );
        assert!(
            ChangedFilesError::GitMissing("oops".to_owned())
                .describe()
                .contains("oops")
        );
        assert_eq!(
            ChangedFilesError::NotARepository.describe(),
            "not a git repository"
        );
        assert!(
            ChangedFilesError::GitFailed("bad ref".to_owned())
                .describe()
                .contains("bad ref")
        );
    }

    #[test]
    fn augment_git_failed_appends_shallow_clone_hint_for_unknown_revision() {
        let stderr = "fatal: ambiguous argument 'fallow-baseline...HEAD': unknown revision or path not in the working tree.";
        let described = ChangedFilesError::GitFailed(stderr.to_owned()).describe();
        assert!(described.contains(stderr), "original stderr preserved");
        assert!(
            described.contains("shallow clone"),
            "hint surfaced: {described}"
        );
        assert!(
            described.contains("fetch-depth: 0") || described.contains("git fetch --unshallow"),
            "hint actionable: {described}"
        );
    }

    #[test]
    fn augment_git_failed_passthrough_for_other_errors() {
        // Unrelated git failures must not grow the shallow-clone hint.
        let stderr = "fatal: refusing to merge unrelated histories";
        let described = ChangedFilesError::GitFailed(stderr.to_owned()).describe();
        assert_eq!(described, stderr);
    }

    #[test]
    fn validate_git_ref_rejects_leading_dash() {
        assert!(validate_git_ref("--upload-pack=evil").is_err());
        assert!(validate_git_ref("-flag").is_err());
    }

    #[test]
    fn validate_git_ref_accepts_baseline_tag() {
        assert_eq!(
            validate_git_ref("fallow-baseline").unwrap(),
            "fallow-baseline"
        );
    }

    #[test]
    fn try_get_changed_files_rejects_invalid_ref() {
        let err = try_get_changed_files(Path::new("/"), "--evil")
            .expect_err("leading-dash ref must be rejected");
        assert!(matches!(err, ChangedFilesError::InvalidRef(_)));
        assert!(err.describe().contains("cannot start with"));
    }

    #[test]
    fn validate_git_ref_rejects_option_like_ref() {
        assert!(validate_git_ref("--output=/tmp/fallow-proof").is_err());
    }

    #[test]
    fn validate_git_ref_allows_reflog_relative_date() {
        // `:` and spaces are legal inside the `{…}` of a reflog spec.
        assert!(validate_git_ref("HEAD@{1 week ago}").is_ok());
    }

    #[test]
    fn try_get_changed_files_rejects_option_like_ref_before_git() {
        // If the option-like ref ever reached git, `--output=<proof>` would
        // create the proof file; its absence shows validation runs first.
        let root = tempfile::tempdir().expect("create temp dir");
        let proof_path = root.path().join("proof");

        let result = try_get_changed_files(
            root.path(),
            &format!("--output={}", proof_path.to_string_lossy()),
        );

        assert!(matches!(result, Err(ChangedFilesError::InvalidRef(_))));
        assert!(
            !proof_path.exists(),
            "invalid changedSince ref must not be passed through to git as an option"
        );
    }

    #[test]
    fn filter_results_keeps_only_changed_files() {
        let mut results = AnalysisResults::default();
        results.unused_files.push(UnusedFile {
            path: "/a.ts".into(),
        });
        results.unused_files.push(UnusedFile {
            path: "/b.ts".into(),
        });
        results.unused_exports.push(UnusedExport {
            path: "/a.ts".into(),
            export_name: "foo".into(),
            is_type_only: false,
            line: 1,
            col: 0,
            span_start: 0,
            is_re_export: false,
        });

        let mut changed: FxHashSet<PathBuf> = FxHashSet::default();
        changed.insert("/a.ts".into());

        filter_results_by_changed_files(&mut results, &changed);

        assert_eq!(results.unused_files.len(), 1);
        assert_eq!(results.unused_files[0].path, PathBuf::from("/a.ts"));
        assert_eq!(results.unused_exports.len(), 1);
    }

    #[test]
    fn filter_results_preserves_dependency_level_issues() {
        // unused_dependencies is not path-filtered: it survives an empty
        // changed set.
        let mut results = AnalysisResults::default();
        results
            .unused_dependencies
            .push(crate::results::UnusedDependency {
                package_name: "lodash".into(),
                location: crate::results::DependencyLocation::Dependencies,
                path: "/pkg.json".into(),
                line: 3,
                used_in_workspaces: Vec::new(),
            });

        let changed: FxHashSet<PathBuf> = FxHashSet::default();
        filter_results_by_changed_files(&mut results, &changed);

        assert_eq!(results.unused_dependencies.len(), 1);
    }

    #[test]
    fn filter_results_keeps_circular_dep_when_any_file_changed() {
        let mut results = AnalysisResults::default();
        results.circular_dependencies.push(CircularDependency {
            files: vec!["/a.ts".into(), "/b.ts".into()],
            length: 2,
            line: 1,
            col: 0,
            is_cross_package: false,
        });

        let mut changed: FxHashSet<PathBuf> = FxHashSet::default();
        changed.insert("/b.ts".into());

        filter_results_by_changed_files(&mut results, &changed);
        assert_eq!(results.circular_dependencies.len(), 1);
    }

    #[test]
    fn filter_results_drops_circular_dep_when_no_file_changed() {
        let mut results = AnalysisResults::default();
        results.circular_dependencies.push(CircularDependency {
            files: vec!["/a.ts".into(), "/b.ts".into()],
            length: 2,
            line: 1,
            col: 0,
            is_cross_package: false,
        });

        let changed: FxHashSet<PathBuf> = FxHashSet::default();
        filter_results_by_changed_files(&mut results, &changed);
        assert!(results.circular_dependencies.is_empty());
    }

    #[test]
    fn filter_results_drops_boundary_violation_when_importer_unchanged() {
        // Boundary violations key on from_path; changing only the imported
        // file must not keep the violation.
        let mut results = AnalysisResults::default();
        results.boundary_violations.push(BoundaryViolation {
            from_path: "/a.ts".into(),
            to_path: "/b.ts".into(),
            from_zone: "ui".into(),
            to_zone: "data".into(),
            import_specifier: "../data/db".into(),
            line: 1,
            col: 0,
        });

        let mut changed: FxHashSet<PathBuf> = FxHashSet::default();
        changed.insert("/b.ts".into());

        filter_results_by_changed_files(&mut results, &changed);
        assert!(results.boundary_violations.is_empty());
    }

    #[test]
    fn filter_duplication_keeps_groups_with_at_least_one_changed_instance() {
        let mut report = DuplicationReport {
            clone_groups: vec![CloneGroup {
                instances: vec![
                    CloneInstance {
                        file: "/a.ts".into(),
                        start_line: 1,
                        end_line: 5,
                        start_col: 0,
                        end_col: 10,
                        fragment: "code".into(),
                    },
                    CloneInstance {
                        file: "/b.ts".into(),
                        start_line: 1,
                        end_line: 5,
                        start_col: 0,
                        end_col: 10,
                        fragment: "code".into(),
                    },
                ],
                token_count: 20,
                line_count: 5,
            }],
            clone_families: vec![],
            mirrored_directories: vec![],
            stats: DuplicationStats {
                total_files: 2,
                files_with_clones: 2,
                total_lines: 100,
                duplicated_lines: 10,
                total_tokens: 200,
                duplicated_tokens: 40,
                clone_groups: 1,
                clone_instances: 2,
                duplication_percentage: 10.0,
            },
        };

        let mut changed: FxHashSet<PathBuf> = FxHashSet::default();
        changed.insert("/a.ts".into());

        filter_duplication_by_changed_files(&mut report, &changed, Path::new(""));
        assert_eq!(report.clone_groups.len(), 1);
        assert_eq!(report.stats.clone_groups, 1);
        assert_eq!(report.stats.clone_instances, 2);
    }

    /// Initialize a throwaway repo with one commit tagged `fallow-baseline`;
    /// returns the canonicalized repo root so comparisons line up with
    /// `resolve_git_toplevel`'s canonicalized output.
    fn init_repo(repo: &Path) -> PathBuf {
        run_git(repo, &["init", "--quiet", "--initial-branch=main"]);
        run_git(repo, &["config", "user.email", "test@example.com"]);
        run_git(repo, &["config", "user.name", "test"]);
        run_git(repo, &["config", "commit.gpgsign", "false"]);
        std::fs::write(repo.join("seed.txt"), "seed\n").unwrap();
        run_git(repo, &["add", "seed.txt"]);
        run_git(repo, &["commit", "--quiet", "-m", "initial"]);
        run_git(repo, &["tag", "fallow-baseline"]);
        repo.canonicalize().unwrap()
    }

    /// Run a git command in `cwd`, panicking with git's stderr on failure.
    fn run_git(cwd: &Path, args: &[&str]) {
        let output = std::process::Command::new("git")
            .args(args)
            .current_dir(cwd)
            .output()
            .expect("git available");
        assert!(
            output.status.success(),
            "git {args:?} failed: {}",
            String::from_utf8_lossy(&output.stderr)
        );
    }

    #[test]
    fn try_get_changed_files_workspace_at_repo_root() {
        let tmp = tempfile::tempdir().unwrap();
        let repo = init_repo(tmp.path());
        std::fs::create_dir_all(repo.join("src")).unwrap();
        std::fs::write(repo.join("src/new.ts"), "export const x = 1;\n").unwrap();

        let changed = try_get_changed_files(&repo, "fallow-baseline").unwrap();

        let expected = repo.join("src/new.ts");
        assert!(
            changed.contains(&expected),
            "changed set should contain {expected:?}; actual: {changed:?}"
        );
    }

    #[test]
    fn try_get_changed_files_workspace_in_subdirectory() {
        // Paths must be joined onto the repo toplevel, not onto the cwd
        // (which here is the `frontend/` subdirectory).
        let tmp = tempfile::tempdir().unwrap();
        let repo = init_repo(tmp.path());
        let frontend = repo.join("frontend");
        std::fs::create_dir_all(frontend.join("src")).unwrap();
        std::fs::write(frontend.join("src/new.ts"), "export const x = 1;\n").unwrap();

        let changed = try_get_changed_files(&frontend, "fallow-baseline").unwrap();

        let expected = repo.join("frontend/src/new.ts");
        assert!(
            changed.contains(&expected),
            "changed set should contain canonical {expected:?}; actual: {changed:?}"
        );
        let bogus = frontend.join("frontend/src/new.ts");
        assert!(
            !changed.contains(&bogus),
            "changed set must not contain double-frontend path {bogus:?}"
        );
    }

    #[test]
    fn try_get_changed_files_includes_committed_sibling_changes() {
        // Changes committed outside the workspace directory (a sibling
        // package) are still part of the <ref>...HEAD diff.
        let tmp = tempfile::tempdir().unwrap();
        let repo = init_repo(tmp.path());
        let backend = repo.join("backend");
        std::fs::create_dir_all(&backend).unwrap();
        std::fs::write(backend.join("server.py"), "print('hi')\n").unwrap();
        run_git(&repo, &["add", "."]);
        run_git(&repo, &["commit", "--quiet", "-m", "add backend"]);

        let frontend = repo.join("frontend");
        std::fs::create_dir_all(&frontend).unwrap();

        let changed = try_get_changed_files(&frontend, "fallow-baseline").unwrap();

        let expected = repo.join("backend/server.py");
        assert!(
            changed.contains(&expected),
            "committed sibling backend/server.py should be in the set: {changed:?}"
        );
    }

    #[test]
    fn try_get_changed_files_includes_modified_tracked_file() {
        // Unstaged edits to a tracked file come from the `git diff HEAD` leg.
        let tmp = tempfile::tempdir().unwrap();
        let repo = init_repo(tmp.path());
        let frontend = repo.join("frontend");
        std::fs::create_dir_all(frontend.join("src")).unwrap();
        std::fs::write(frontend.join("src/old.ts"), "export const x = 1;\n").unwrap();
        run_git(&repo, &["add", "."]);
        run_git(&repo, &["commit", "--quiet", "-m", "add old"]);
        run_git(&repo, &["tag", "fallow-baseline-v2"]);
        std::fs::write(frontend.join("src/old.ts"), "export const x = 2;\n").unwrap();

        let changed = try_get_changed_files(&frontend, "fallow-baseline-v2").unwrap();

        let expected = repo.join("frontend/src/old.ts");
        assert!(
            changed.contains(&expected),
            "modified tracked file {expected:?} missing from set: {changed:?}"
        );
    }

    #[test]
    fn resolve_git_toplevel_returns_canonical_path() {
        let tmp = tempfile::tempdir().unwrap();
        let repo = init_repo(tmp.path());
        let frontend = repo.join("frontend");
        std::fs::create_dir_all(&frontend).unwrap();

        let toplevel = resolve_git_toplevel(&frontend).unwrap();
        assert_eq!(toplevel, repo, "toplevel should equal canonical repo root");
        assert_eq!(
            toplevel,
            toplevel.canonicalize().unwrap(),
            "resolved toplevel should already be canonical"
        );
    }

    #[test]
    fn resolve_git_toplevel_not_a_repository() {
        let tmp = tempfile::tempdir().unwrap();
        let result = resolve_git_toplevel(tmp.path());
        assert!(
            matches!(result, Err(ChangedFilesError::NotARepository)),
            "expected NotARepository, got {result:?}"
        );
    }

    #[test]
    fn try_get_changed_files_not_a_repository() {
        let tmp = tempfile::tempdir().unwrap();
        let result = try_get_changed_files(tmp.path(), "main");
        assert!(matches!(result, Err(ChangedFilesError::NotARepository)));
    }

    #[test]
    fn filter_duplication_drops_groups_with_no_changed_instance() {
        let mut report = DuplicationReport {
            clone_groups: vec![CloneGroup {
                instances: vec![CloneInstance {
                    file: "/a.ts".into(),
                    start_line: 1,
                    end_line: 5,
                    start_col: 0,
                    end_col: 10,
                    fragment: "code".into(),
                }],
                token_count: 20,
                line_count: 5,
            }],
            clone_families: vec![],
            mirrored_directories: vec![],
            stats: DuplicationStats {
                total_files: 1,
                files_with_clones: 1,
                total_lines: 100,
                duplicated_lines: 5,
                total_tokens: 100,
                duplicated_tokens: 20,
                clone_groups: 1,
                clone_instances: 1,
                duplication_percentage: 5.0,
            },
        };

        let changed: FxHashSet<PathBuf> = FxHashSet::default();
        filter_duplication_by_changed_files(&mut report, &changed, Path::new(""));
        assert!(report.clone_groups.is_empty());
        assert_eq!(report.stats.clone_groups, 0);
        assert_eq!(report.stats.clone_instances, 0);
        assert!((report.stats.duplication_percentage - 0.0).abs() < f64::EPSILON);
    }
}