1use std::path::{Path, PathBuf};
17
18use rustc_hash::{FxHashMap, FxHashSet};
19
20use crate::duplicates::{DuplicationReport, DuplicationStats, families};
21use crate::results::AnalysisResults;
22
/// Validate a user-supplied git ref before it is ever handed to a `git`
/// subprocess.
///
/// Rejects empty refs, option-like refs (leading `-`), and any character
/// outside a conservative allow-list. `:` and spaces are tolerated only
/// inside an `@{...}` selector, so reflog specs such as
/// `HEAD@{1 week ago}` remain valid.
///
/// # Errors
/// Returns a human-readable message describing the first violation found.
pub fn validate_git_ref(s: &str) -> Result<&str, String> {
    if s.is_empty() {
        return Err("git ref cannot be empty".to_string());
    }
    // A leading dash could be interpreted as a command-line option by git.
    if s.starts_with('-') {
        return Err("git ref cannot start with '-'".to_string());
    }
    let mut in_braces = false;
    for c in s.chars() {
        if c == '{' {
            in_braces = true;
            continue;
        }
        if c == '}' {
            in_braces = false;
            continue;
        }
        let allowed_anywhere = c.is_ascii_alphanumeric()
            || matches!(c, '.' | '_' | '-' | '/' | '~' | '^' | '@');
        let allowed_in_braces = in_braces && matches!(c, ':' | ' ');
        if !(allowed_anywhere || allowed_in_braces) {
            return Err(format!("git ref contains disallowed character: '{c}'"));
        }
    }
    if in_braces {
        return Err("git ref has unclosed '{'".to_string());
    }
    Ok(s)
}
58
/// Failure modes of the changed-files detection pipeline.
#[derive(Debug)]
pub enum ChangedFilesError {
    /// The ref failed `validate_git_ref`; payload is the validation message.
    InvalidRef(String),
    /// Spawning the `git` binary itself failed; payload is the OS error text.
    GitMissing(String),
    /// `git` reported that the working directory is not inside a repository.
    NotARepository,
    /// `git` ran but exited unsuccessfully; payload is its trimmed stderr.
    GitFailed(String),
}
72
73impl ChangedFilesError {
74 pub fn describe(&self) -> String {
78 match self {
79 Self::InvalidRef(e) => format!("invalid git ref: {e}"),
80 Self::GitMissing(e) => format!("failed to run git: {e}"),
81 Self::NotARepository => "not a git repository".to_owned(),
82 Self::GitFailed(stderr) => augment_git_failed(stderr),
83 }
84 }
85}
86
/// Append a shallow-clone remediation hint to git stderr when it looks like
/// the ref could not be resolved (the dominant failure mode on CI checkouts
/// with limited fetch depth); otherwise pass the stderr through untouched.
fn augment_git_failed(stderr: &str) -> String {
    const MISSING_REF_MARKERS: [&str; 3] = [
        "not a valid object name",
        "unknown revision",
        "ambiguous argument",
    ];
    let lower = stderr.to_ascii_lowercase();
    if !MISSING_REF_MARKERS.iter().any(|m| lower.contains(m)) {
        return stderr.to_owned();
    }
    format!(
        "{stderr} (shallow clone? try `git fetch --unshallow`, or set `fetch-depth: 0` on actions/checkout / `GIT_DEPTH: 0` in GitLab CI)"
    )
}
105
106pub fn resolve_git_toplevel(cwd: &Path) -> Result<PathBuf, ChangedFilesError> {
117 let output = git_command(cwd, &["rev-parse", "--show-toplevel"])
118 .output()
119 .map_err(|e| ChangedFilesError::GitMissing(e.to_string()))?;
120
121 if !output.status.success() {
122 let stderr = String::from_utf8_lossy(&output.stderr);
123 return Err(if stderr.contains("not a git repository") {
124 ChangedFilesError::NotARepository
125 } else {
126 ChangedFilesError::GitFailed(stderr.trim().to_owned())
127 });
128 }
129
130 let raw = String::from_utf8_lossy(&output.stdout);
131 let trimmed = raw.trim();
132 if trimmed.is_empty() {
133 return Err(ChangedFilesError::GitFailed(
134 "git rev-parse --show-toplevel returned empty output".to_owned(),
135 ));
136 }
137
138 let path = PathBuf::from(trimmed);
139 Ok(path.canonicalize().unwrap_or(path))
140}
141
142fn collect_git_paths(
143 cwd: &Path,
144 toplevel: &Path,
145 args: &[&str],
146) -> Result<FxHashSet<PathBuf>, ChangedFilesError> {
147 let output = git_command(cwd, args)
148 .output()
149 .map_err(|e| ChangedFilesError::GitMissing(e.to_string()))?;
150
151 if !output.status.success() {
152 let stderr = String::from_utf8_lossy(&output.stderr);
153 return Err(if stderr.contains("not a git repository") {
154 ChangedFilesError::NotARepository
155 } else {
156 ChangedFilesError::GitFailed(stderr.trim().to_owned())
157 });
158 }
159
160 let files: FxHashSet<PathBuf> = String::from_utf8_lossy(&output.stdout)
166 .lines()
167 .filter(|line| !line.is_empty())
168 .map(|line| toplevel.join(line))
169 .collect();
170
171 Ok(files)
172}
173
/// Build a `git` command that runs in `cwd` with the given arguments.
///
/// `GIT_DIR` / `GIT_WORK_TREE` inherited from the parent process are removed
/// so the invocation always targets the repository containing `cwd`, not
/// whatever repository the caller's environment happens to point at.
fn git_command(cwd: &Path, args: &[&str]) -> std::process::Command {
    let mut cmd = std::process::Command::new("git");
    cmd.args(args);
    cmd.current_dir(cwd);
    cmd.env_remove("GIT_DIR");
    cmd.env_remove("GIT_WORK_TREE");
    cmd
}
183
184pub fn try_get_changed_files(
202 root: &Path,
203 git_ref: &str,
204) -> Result<FxHashSet<PathBuf>, ChangedFilesError> {
205 validate_git_ref(git_ref).map_err(ChangedFilesError::InvalidRef)?;
211 let toplevel = resolve_git_toplevel(root)?;
212 try_get_changed_files_with_toplevel(root, &toplevel, git_ref)
213}
214
215pub fn try_get_changed_files_with_toplevel(
223 cwd: &Path,
224 toplevel: &Path,
225 git_ref: &str,
226) -> Result<FxHashSet<PathBuf>, ChangedFilesError> {
227 validate_git_ref(git_ref).map_err(ChangedFilesError::InvalidRef)?;
228
229 let mut files = collect_git_paths(
230 cwd,
231 toplevel,
232 &[
233 "diff",
234 "--name-only",
235 "--end-of-options",
236 &format!("{git_ref}...HEAD"),
237 ],
238 )?;
239 files.extend(collect_git_paths(
240 cwd,
241 toplevel,
242 &["diff", "--name-only", "HEAD"],
243 )?);
244 files.extend(collect_git_paths(
249 cwd,
250 toplevel,
251 &["ls-files", "--full-name", "--others", "--exclude-standard"],
252 )?);
253 Ok(files)
254}
255
256#[expect(
260 clippy::print_stderr,
261 reason = "intentional user-facing warning for the CLI's --changed-since fallback path; LSP callers use try_get_changed_files instead"
262)]
263pub fn get_changed_files(root: &Path, git_ref: &str) -> Option<FxHashSet<PathBuf>> {
264 match try_get_changed_files(root, git_ref) {
265 Ok(files) => Some(files),
266 Err(ChangedFilesError::InvalidRef(e)) => {
267 eprintln!("Warning: --changed-since ignored: invalid git ref: {e}");
268 None
269 }
270 Err(ChangedFilesError::GitMissing(e)) => {
271 eprintln!("Warning: --changed-since ignored: failed to run git: {e}");
272 None
273 }
274 Err(ChangedFilesError::NotARepository) => {
275 eprintln!("Warning: --changed-since ignored: not a git repository");
276 None
277 }
278 Err(ChangedFilesError::GitFailed(stderr)) => {
279 eprintln!("Warning: --changed-since failed for ref '{git_ref}': {stderr}");
280 None
281 }
282 }
283}
284
285#[expect(
293 clippy::implicit_hasher,
294 reason = "fallow standardizes on FxHashSet across the workspace"
295)]
296pub fn filter_results_by_changed_files(
297 results: &mut AnalysisResults,
298 changed_files: &FxHashSet<PathBuf>,
299) {
300 results
301 .unused_files
302 .retain(|f| changed_files.contains(&f.path));
303 results
304 .unused_exports
305 .retain(|e| changed_files.contains(&e.path));
306 results
307 .unused_types
308 .retain(|e| changed_files.contains(&e.path));
309 results
310 .private_type_leaks
311 .retain(|e| changed_files.contains(&e.path));
312 results
313 .unused_enum_members
314 .retain(|m| changed_files.contains(&m.path));
315 results
316 .unused_class_members
317 .retain(|m| changed_files.contains(&m.path));
318 results
319 .unresolved_imports
320 .retain(|i| changed_files.contains(&i.path));
321
322 results.unlisted_dependencies.retain(|d| {
324 d.imported_from
325 .iter()
326 .any(|s| changed_files.contains(&s.path))
327 });
328
329 for dup in &mut results.duplicate_exports {
331 dup.locations
332 .retain(|loc| changed_files.contains(&loc.path));
333 }
334 results.duplicate_exports.retain(|d| d.locations.len() >= 2);
335
336 results
338 .circular_dependencies
339 .retain(|c| c.files.iter().any(|f| changed_files.contains(f)));
340
341 results
343 .boundary_violations
344 .retain(|v| changed_files.contains(&v.from_path));
345
346 results
348 .stale_suppressions
349 .retain(|s| changed_files.contains(&s.path));
350}
351
352fn recompute_duplication_stats(report: &DuplicationReport) -> DuplicationStats {
358 let mut files_with_clones: FxHashSet<&Path> = FxHashSet::default();
359 let mut file_dup_lines: FxHashMap<&Path, FxHashSet<usize>> = FxHashMap::default();
360 let mut duplicated_tokens = 0_usize;
361 let mut clone_instances = 0_usize;
362
363 for group in &report.clone_groups {
364 for instance in &group.instances {
365 files_with_clones.insert(&instance.file);
366 clone_instances += 1;
367 let lines = file_dup_lines.entry(&instance.file).or_default();
368 for line in instance.start_line..=instance.end_line {
369 lines.insert(line);
370 }
371 }
372 duplicated_tokens += group.token_count * group.instances.len();
373 }
374
375 let duplicated_lines: usize = file_dup_lines.values().map(FxHashSet::len).sum();
376
377 DuplicationStats {
378 total_files: report.stats.total_files,
379 files_with_clones: files_with_clones.len(),
380 total_lines: report.stats.total_lines,
381 duplicated_lines,
382 total_tokens: report.stats.total_tokens,
383 duplicated_tokens,
384 clone_groups: report.clone_groups.len(),
385 clone_instances,
386 #[expect(
387 clippy::cast_precision_loss,
388 reason = "stat percentages are display-only; precision loss at usize::MAX line counts is acceptable"
389 )]
390 duplication_percentage: if report.stats.total_lines > 0 {
391 (duplicated_lines as f64 / report.stats.total_lines as f64) * 100.0
392 } else {
393 0.0
394 },
395 }
396}
397
398#[expect(
403 clippy::implicit_hasher,
404 reason = "fallow standardizes on FxHashSet across the workspace"
405)]
406pub fn filter_duplication_by_changed_files(
407 report: &mut DuplicationReport,
408 changed_files: &FxHashSet<PathBuf>,
409 root: &Path,
410) {
411 report
412 .clone_groups
413 .retain(|g| g.instances.iter().any(|i| changed_files.contains(&i.file)));
414 report.clone_families = families::group_into_families(&report.clone_groups, root);
415 report.mirrored_directories =
416 families::detect_mirrored_directories(&report.clone_families, root);
417 report.stats = recompute_duplication_stats(report);
418}
419
#[cfg(test)]
mod tests {
    // Covers ref validation, error rendering, result/report filtering, and
    // end-to-end behavior against throwaway git repositories (these tests
    // spawn a real `git` binary).
    use super::*;
    use crate::duplicates::{CloneGroup, CloneInstance};
    use crate::results::{BoundaryViolation, CircularDependency, UnusedExport, UnusedFile};

    #[test]
    fn changed_files_error_describe_variants() {
        assert!(
            ChangedFilesError::InvalidRef("bad".to_owned())
                .describe()
                .contains("invalid git ref")
        );
        assert!(
            ChangedFilesError::GitMissing("oops".to_owned())
                .describe()
                .contains("oops")
        );
        assert_eq!(
            ChangedFilesError::NotARepository.describe(),
            "not a git repository"
        );
        assert!(
            ChangedFilesError::GitFailed("bad ref".to_owned())
                .describe()
                .contains("bad ref")
        );
    }

    #[test]
    fn augment_git_failed_appends_shallow_clone_hint_for_unknown_revision() {
        let stderr = "fatal: ambiguous argument 'fallow-baseline...HEAD': unknown revision or path not in the working tree.";
        let described = ChangedFilesError::GitFailed(stderr.to_owned()).describe();
        assert!(described.contains(stderr), "original stderr preserved");
        assert!(
            described.contains("shallow clone"),
            "hint surfaced: {described}"
        );
        assert!(
            described.contains("fetch-depth: 0") || described.contains("git fetch --unshallow"),
            "hint actionable: {described}"
        );
    }

    #[test]
    fn augment_git_failed_passthrough_for_other_errors() {
        // Errors unrelated to ref resolution must not get the shallow hint.
        let stderr = "fatal: refusing to merge unrelated histories";
        let described = ChangedFilesError::GitFailed(stderr.to_owned()).describe();
        assert_eq!(described, stderr);
    }

    #[test]
    fn validate_git_ref_rejects_leading_dash() {
        assert!(validate_git_ref("--upload-pack=evil").is_err());
        assert!(validate_git_ref("-flag").is_err());
    }

    #[test]
    fn validate_git_ref_accepts_baseline_tag() {
        assert_eq!(
            validate_git_ref("fallow-baseline").unwrap(),
            "fallow-baseline"
        );
    }

    #[test]
    fn try_get_changed_files_rejects_invalid_ref() {
        // Validation must fire before any git process is spawned.
        let err = try_get_changed_files(Path::new("/"), "--evil")
            .expect_err("leading-dash ref must be rejected");
        assert!(matches!(err, ChangedFilesError::InvalidRef(_)));
        assert!(err.describe().contains("cannot start with"));
    }

    #[test]
    fn validate_git_ref_rejects_option_like_ref() {
        assert!(validate_git_ref("--output=/tmp/fallow-proof").is_err());
    }

    #[test]
    fn validate_git_ref_allows_reflog_relative_date() {
        // Spaces are only legal inside an @{...} selector.
        assert!(validate_git_ref("HEAD@{1 week ago}").is_ok());
    }

    #[test]
    fn try_get_changed_files_rejects_option_like_ref_before_git() {
        let root = tempfile::tempdir().expect("create temp dir");
        let proof_path = root.path().join("proof");

        let result = try_get_changed_files(
            root.path(),
            &format!("--output={}", proof_path.to_string_lossy()),
        );

        assert!(matches!(result, Err(ChangedFilesError::InvalidRef(_))));
        // If the ref had reached git as --output=..., the proof file would exist.
        assert!(
            !proof_path.exists(),
            "invalid changedSince ref must not be passed through to git as an option"
        );
    }

    #[test]
    fn git_command_clears_parent_git_environment() {
        let command = git_command(Path::new("."), &["status", "--short"]);
        let overrides: Vec<_> = command.get_envs().collect();

        // env_remove shows up as a (key, None) override in get_envs().
        assert!(
            overrides
                .iter()
                .any(|(key, value)| *key == "GIT_DIR" && value.is_none()),
            "git helper must clear inherited GIT_DIR"
        );
        assert!(
            overrides
                .iter()
                .any(|(key, value)| *key == "GIT_WORK_TREE" && value.is_none()),
            "git helper must clear inherited GIT_WORK_TREE"
        );
    }

    #[test]
    fn filter_results_keeps_only_changed_files() {
        let mut results = AnalysisResults::default();
        results.unused_files.push(UnusedFile {
            path: "/a.ts".into(),
        });
        results.unused_files.push(UnusedFile {
            path: "/b.ts".into(),
        });
        results.unused_exports.push(UnusedExport {
            path: "/a.ts".into(),
            export_name: "foo".into(),
            is_type_only: false,
            line: 1,
            col: 0,
            span_start: 0,
            is_re_export: false,
        });

        let mut changed: FxHashSet<PathBuf> = FxHashSet::default();
        changed.insert("/a.ts".into());

        filter_results_by_changed_files(&mut results, &changed);

        assert_eq!(results.unused_files.len(), 1);
        assert_eq!(results.unused_files[0].path, PathBuf::from("/a.ts"));
        assert_eq!(results.unused_exports.len(), 1);
    }

    #[test]
    fn filter_results_preserves_dependency_level_issues() {
        // unused_dependencies has no per-file anchor and must survive filtering.
        let mut results = AnalysisResults::default();
        results
            .unused_dependencies
            .push(crate::results::UnusedDependency {
                package_name: "lodash".into(),
                location: crate::results::DependencyLocation::Dependencies,
                path: "/pkg.json".into(),
                line: 3,
                used_in_workspaces: Vec::new(),
            });

        let changed: FxHashSet<PathBuf> = FxHashSet::default();
        filter_results_by_changed_files(&mut results, &changed);

        assert_eq!(results.unused_dependencies.len(), 1);
    }

    #[test]
    fn filter_results_keeps_circular_dep_when_any_file_changed() {
        let mut results = AnalysisResults::default();
        results.circular_dependencies.push(CircularDependency {
            files: vec!["/a.ts".into(), "/b.ts".into()],
            length: 2,
            line: 1,
            col: 0,
            is_cross_package: false,
        });

        let mut changed: FxHashSet<PathBuf> = FxHashSet::default();
        changed.insert("/b.ts".into());

        filter_results_by_changed_files(&mut results, &changed);
        assert_eq!(results.circular_dependencies.len(), 1);
    }

    #[test]
    fn filter_results_drops_circular_dep_when_no_file_changed() {
        let mut results = AnalysisResults::default();
        results.circular_dependencies.push(CircularDependency {
            files: vec!["/a.ts".into(), "/b.ts".into()],
            length: 2,
            line: 1,
            col: 0,
            is_cross_package: false,
        });

        let changed: FxHashSet<PathBuf> = FxHashSet::default();
        filter_results_by_changed_files(&mut results, &changed);
        assert!(results.circular_dependencies.is_empty());
    }

    #[test]
    fn filter_results_drops_boundary_violation_when_importer_unchanged() {
        // Violations are attributed to the importing side (from_path) only;
        // changing the imported file (/b.ts) must not keep the finding.
        let mut results = AnalysisResults::default();
        results.boundary_violations.push(BoundaryViolation {
            from_path: "/a.ts".into(),
            to_path: "/b.ts".into(),
            from_zone: "ui".into(),
            to_zone: "data".into(),
            import_specifier: "../data/db".into(),
            line: 1,
            col: 0,
        });

        let mut changed: FxHashSet<PathBuf> = FxHashSet::default();
        changed.insert("/b.ts".into());

        filter_results_by_changed_files(&mut results, &changed);
        assert!(results.boundary_violations.is_empty());
    }

    #[test]
    fn filter_duplication_keeps_groups_with_at_least_one_changed_instance() {
        let mut report = DuplicationReport {
            clone_groups: vec![CloneGroup {
                instances: vec![
                    CloneInstance {
                        file: "/a.ts".into(),
                        start_line: 1,
                        end_line: 5,
                        start_col: 0,
                        end_col: 10,
                        fragment: "code".into(),
                    },
                    CloneInstance {
                        file: "/b.ts".into(),
                        start_line: 1,
                        end_line: 5,
                        start_col: 0,
                        end_col: 10,
                        fragment: "code".into(),
                    },
                ],
                token_count: 20,
                line_count: 5,
            }],
            clone_families: vec![],
            mirrored_directories: vec![],
            stats: DuplicationStats {
                total_files: 2,
                files_with_clones: 2,
                total_lines: 100,
                duplicated_lines: 10,
                total_tokens: 200,
                duplicated_tokens: 40,
                clone_groups: 1,
                clone_instances: 2,
                duplication_percentage: 10.0,
            },
        };

        let mut changed: FxHashSet<PathBuf> = FxHashSet::default();
        changed.insert("/a.ts".into());

        filter_duplication_by_changed_files(&mut report, &changed, Path::new(""));
        assert_eq!(report.clone_groups.len(), 1);
        assert_eq!(report.stats.clone_groups, 1);
        assert_eq!(report.stats.clone_instances, 2);
    }

    /// Initializes a git repo in `repo` with one commit and a
    /// `fallow-baseline` tag; returns the repo's canonical path.
    fn init_repo(repo: &Path) -> PathBuf {
        run_git(repo, &["init", "--quiet", "--initial-branch=main"]);
        run_git(repo, &["config", "user.email", "test@example.com"]);
        run_git(repo, &["config", "user.name", "test"]);
        run_git(repo, &["config", "commit.gpgsign", "false"]);
        std::fs::write(repo.join("seed.txt"), "seed\n").unwrap();
        run_git(repo, &["add", "seed.txt"]);
        run_git(repo, &["commit", "--quiet", "-m", "initial"]);
        run_git(repo, &["tag", "fallow-baseline"]);
        repo.canonicalize().unwrap()
    }

    /// Runs `git` in `cwd`, panicking with git's stderr on failure.
    fn run_git(cwd: &Path, args: &[&str]) {
        let output = std::process::Command::new("git")
            .args(args)
            .current_dir(cwd)
            .output()
            .expect("git available");
        assert!(
            output.status.success(),
            "git {args:?} failed: {}",
            String::from_utf8_lossy(&output.stderr)
        );
    }

    #[test]
    fn try_get_changed_files_workspace_at_repo_root() {
        let tmp = tempfile::tempdir().unwrap();
        let repo = init_repo(tmp.path());
        std::fs::create_dir_all(repo.join("src")).unwrap();
        std::fs::write(repo.join("src/new.ts"), "export const x = 1;\n").unwrap();

        let changed = try_get_changed_files(&repo, "fallow-baseline").unwrap();

        let expected = repo.join("src/new.ts");
        assert!(
            changed.contains(&expected),
            "changed set should contain {expected:?}; actual: {changed:?}"
        );
    }

    #[test]
    fn try_get_changed_files_workspace_in_subdirectory() {
        // Paths must be joined to the repo toplevel, not to the cwd, or a
        // subdirectory workspace would produce frontend/frontend/... paths.
        let tmp = tempfile::tempdir().unwrap();
        let repo = init_repo(tmp.path());
        let frontend = repo.join("frontend");
        std::fs::create_dir_all(frontend.join("src")).unwrap();
        std::fs::write(frontend.join("src/new.ts"), "export const x = 1;\n").unwrap();

        let changed = try_get_changed_files(&frontend, "fallow-baseline").unwrap();

        let expected = repo.join("frontend/src/new.ts");
        assert!(
            changed.contains(&expected),
            "changed set should contain canonical {expected:?}; actual: {changed:?}"
        );
        let bogus = frontend.join("frontend/src/new.ts");
        assert!(
            !changed.contains(&bogus),
            "changed set must not contain double-frontend path {bogus:?}"
        );
    }

    #[test]
    fn try_get_changed_files_includes_committed_sibling_changes() {
        // The three-dot diff against the baseline covers committed changes
        // anywhere in the repo, even outside the cwd used for the query.
        let tmp = tempfile::tempdir().unwrap();
        let repo = init_repo(tmp.path());
        let backend = repo.join("backend");
        std::fs::create_dir_all(&backend).unwrap();
        std::fs::write(backend.join("server.py"), "print('hi')\n").unwrap();
        run_git(&repo, &["add", "."]);
        run_git(&repo, &["commit", "--quiet", "-m", "add backend"]);

        let frontend = repo.join("frontend");
        std::fs::create_dir_all(&frontend).unwrap();

        let changed = try_get_changed_files(&frontend, "fallow-baseline").unwrap();

        let expected = repo.join("backend/server.py");
        assert!(
            changed.contains(&expected),
            "committed sibling backend/server.py should be in the set: {changed:?}"
        );
    }

    #[test]
    fn try_get_changed_files_includes_modified_tracked_file() {
        // Uncommitted edits to tracked files come from the `diff HEAD` pass.
        let tmp = tempfile::tempdir().unwrap();
        let repo = init_repo(tmp.path());
        let frontend = repo.join("frontend");
        std::fs::create_dir_all(frontend.join("src")).unwrap();
        std::fs::write(frontend.join("src/old.ts"), "export const x = 1;\n").unwrap();
        run_git(&repo, &["add", "."]);
        run_git(&repo, &["commit", "--quiet", "-m", "add old"]);
        run_git(&repo, &["tag", "fallow-baseline-v2"]);
        std::fs::write(frontend.join("src/old.ts"), "export const x = 2;\n").unwrap();

        let changed = try_get_changed_files(&frontend, "fallow-baseline-v2").unwrap();

        let expected = repo.join("frontend/src/old.ts");
        assert!(
            changed.contains(&expected),
            "modified tracked file {expected:?} missing from set: {changed:?}"
        );
    }

    #[test]
    fn resolve_git_toplevel_returns_canonical_path() {
        let tmp = tempfile::tempdir().unwrap();
        let repo = init_repo(tmp.path());
        let frontend = repo.join("frontend");
        std::fs::create_dir_all(&frontend).unwrap();

        let toplevel = resolve_git_toplevel(&frontend).unwrap();
        assert_eq!(toplevel, repo, "toplevel should equal canonical repo root");
        assert_eq!(
            toplevel,
            toplevel.canonicalize().unwrap(),
            "resolved toplevel should already be canonical"
        );
    }

    #[test]
    fn resolve_git_toplevel_not_a_repository() {
        let tmp = tempfile::tempdir().unwrap();
        let result = resolve_git_toplevel(tmp.path());
        assert!(
            matches!(result, Err(ChangedFilesError::NotARepository)),
            "expected NotARepository, got {result:?}"
        );
    }

    #[test]
    fn try_get_changed_files_not_a_repository() {
        let tmp = tempfile::tempdir().unwrap();
        let result = try_get_changed_files(tmp.path(), "main");
        assert!(matches!(result, Err(ChangedFilesError::NotARepository)));
    }

    #[test]
    fn filter_duplication_drops_groups_with_no_changed_instance() {
        let mut report = DuplicationReport {
            clone_groups: vec![CloneGroup {
                instances: vec![CloneInstance {
                    file: "/a.ts".into(),
                    start_line: 1,
                    end_line: 5,
                    start_col: 0,
                    end_col: 10,
                    fragment: "code".into(),
                }],
                token_count: 20,
                line_count: 5,
            }],
            clone_families: vec![],
            mirrored_directories: vec![],
            stats: DuplicationStats {
                total_files: 1,
                files_with_clones: 1,
                total_lines: 100,
                duplicated_lines: 5,
                total_tokens: 100,
                duplicated_tokens: 20,
                clone_groups: 1,
                clone_instances: 1,
                duplication_percentage: 5.0,
            },
        };

        let changed: FxHashSet<PathBuf> = FxHashSet::default();
        filter_duplication_by_changed_files(&mut report, &changed, Path::new(""));
        assert!(report.clone_groups.is_empty());
        // Stats must be recomputed from the now-empty group list.
        assert_eq!(report.stats.clone_groups, 0);
        assert_eq!(report.stats.clone_instances, 0);
        assert!((report.stats.duplication_percentage - 0.0).abs() < f64::EPSILON);
    }
}