1use std::path::Path;
2use std::process::ExitCode;
3
4use fallow_core::results::AnalysisResults;
5
/// How far the current issue count may grow past the baseline before the
/// regression check fails.
#[derive(Debug, Clone, Copy)]
pub enum Tolerance {
    /// Allowed growth as a percentage of the baseline total (e.g. `2%`).
    Percentage(f64),
    /// Allowed growth as an absolute number of issues.
    Absolute(usize),
}

impl Tolerance {
    /// Parses a tolerance from CLI text: `"N%"` is a percentage, a bare
    /// integer is an absolute count, and an empty (or whitespace-only)
    /// string means zero tolerance.
    ///
    /// # Errors
    ///
    /// Returns a human-readable message when the value is not a number,
    /// is negative, or (for percentages) is not finite.
    pub fn parse(s: &str) -> Result<Self, String> {
        let s = s.trim();
        if s.is_empty() {
            return Ok(Self::Absolute(0));
        }
        if let Some(pct_str) = s.strip_suffix('%') {
            let pct: f64 = pct_str
                .trim()
                .parse()
                .map_err(|_| format!("invalid tolerance percentage: {s}"))?;
            // Reject NaN/infinity as well as negatives: `f64::parse`
            // happily accepts "NaN" and "inf", and a non-finite percentage
            // would make `exceeded` behave nonsensically.
            if !pct.is_finite() || pct < 0.0 {
                return Err(format!("tolerance percentage must be non-negative: {s}"));
            }
            Ok(Self::Percentage(pct))
        } else {
            let abs: usize = s
                .parse()
                .map_err(|_| format!("invalid tolerance value: {s} (use a number or N%)"))?;
            Ok(Self::Absolute(abs))
        }
    }

    /// Returns `true` when `current_total` grew past `baseline_total` by
    /// more than this tolerance allows. Decreases never exceed.
    fn exceeded(&self, baseline_total: usize, current_total: usize) -> bool {
        if current_total <= baseline_total {
            return false;
        }
        let delta = current_total - baseline_total;
        match *self {
            Self::Percentage(pct) => {
                if baseline_total == 0 {
                    // Any growth from a clean baseline is a regression.
                    return delta > 0;
                }
                // For an integer `delta` and finite `allowed >= 0`,
                // `delta > allowed.floor()` is exactly equivalent to
                // `delta as f64 > allowed`, so no lossy float-to-int cast
                // (and no clippy suppression) is needed.
                let allowed = baseline_total as f64 * pct / 100.0;
                delta as f64 > allowed
            }
            Self::Absolute(abs) => delta > abs,
        }
    }
}
70
/// On-disk regression baseline: a snapshot of issue counts that later runs
/// are compared against when `--fail-on-regression` is active.
#[derive(Debug, serde::Serialize, serde::Deserialize)]
pub struct RegressionBaseline {
    /// Version of the baseline file format (see `REGRESSION_SCHEMA_VERSION`).
    pub schema_version: u32,
    /// Version of the binary that wrote this baseline (`CARGO_PKG_VERSION`).
    pub fallow_version: String,
    /// UTC timestamp (`YYYY-MM-DDTHH:MM:SSZ`, second precision) of the save.
    pub timestamp: String,
    /// Commit SHA of `HEAD` at save time, when the root was a git checkout.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub git_sha: Option<String>,
    /// Issue counts captured from the `check` analysis, when included.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub check: Option<CheckCounts>,
    /// Duplication counts captured from the `dupes` analysis, when included.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub dupes: Option<DupesCounts>,
}
95
/// Current file-format version written into `RegressionBaseline::schema_version`.
const REGRESSION_SCHEMA_VERSION: u32 = 1;
97
/// Per-category issue counts from a `check` run.
///
/// Every field carries `#[serde(default)]` so baselines written by older
/// versions (with fewer categories) still deserialize; missing counts
/// default to zero.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct CheckCounts {
    /// Sum across all categories, as reported by the analysis itself.
    #[serde(default)]
    pub total_issues: usize,
    #[serde(default)]
    pub unused_files: usize,
    #[serde(default)]
    pub unused_exports: usize,
    #[serde(default)]
    pub unused_types: usize,
    #[serde(default)]
    pub unused_dependencies: usize,
    #[serde(default)]
    pub unused_dev_dependencies: usize,
    #[serde(default)]
    pub unused_optional_dependencies: usize,
    #[serde(default)]
    pub unused_enum_members: usize,
    #[serde(default)]
    pub unused_class_members: usize,
    #[serde(default)]
    pub unresolved_imports: usize,
    #[serde(default)]
    pub unlisted_dependencies: usize,
    #[serde(default)]
    pub duplicate_exports: usize,
    #[serde(default)]
    pub circular_dependencies: usize,
    #[serde(default)]
    pub type_only_dependencies: usize,
    #[serde(default)]
    pub test_only_dependencies: usize,
}
136
137impl CheckCounts {
138 #[must_use]
139 pub const fn from_results(results: &AnalysisResults) -> Self {
140 Self {
141 total_issues: results.total_issues(),
142 unused_files: results.unused_files.len(),
143 unused_exports: results.unused_exports.len(),
144 unused_types: results.unused_types.len(),
145 unused_dependencies: results.unused_dependencies.len(),
146 unused_dev_dependencies: results.unused_dev_dependencies.len(),
147 unused_optional_dependencies: results.unused_optional_dependencies.len(),
148 unused_enum_members: results.unused_enum_members.len(),
149 unused_class_members: results.unused_class_members.len(),
150 unresolved_imports: results.unresolved_imports.len(),
151 unlisted_dependencies: results.unlisted_dependencies.len(),
152 duplicate_exports: results.duplicate_exports.len(),
153 circular_dependencies: results.circular_dependencies.len(),
154 type_only_dependencies: results.type_only_dependencies.len(),
155 test_only_dependencies: results.test_only_dependencies.len(),
156 }
157 }
158
159 #[must_use]
161 pub const fn from_config_baseline(b: &fallow_config::RegressionBaseline) -> Self {
162 Self {
163 total_issues: b.total_issues,
164 unused_files: b.unused_files,
165 unused_exports: b.unused_exports,
166 unused_types: b.unused_types,
167 unused_dependencies: b.unused_dependencies,
168 unused_dev_dependencies: b.unused_dev_dependencies,
169 unused_optional_dependencies: b.unused_optional_dependencies,
170 unused_enum_members: b.unused_enum_members,
171 unused_class_members: b.unused_class_members,
172 unresolved_imports: b.unresolved_imports,
173 unlisted_dependencies: b.unlisted_dependencies,
174 duplicate_exports: b.duplicate_exports,
175 circular_dependencies: b.circular_dependencies,
176 type_only_dependencies: b.type_only_dependencies,
177 test_only_dependencies: b.test_only_dependencies,
178 }
179 }
180
181 #[must_use]
183 pub const fn to_config_baseline(&self) -> fallow_config::RegressionBaseline {
184 fallow_config::RegressionBaseline {
185 total_issues: self.total_issues,
186 unused_files: self.unused_files,
187 unused_exports: self.unused_exports,
188 unused_types: self.unused_types,
189 unused_dependencies: self.unused_dependencies,
190 unused_dev_dependencies: self.unused_dev_dependencies,
191 unused_optional_dependencies: self.unused_optional_dependencies,
192 unused_enum_members: self.unused_enum_members,
193 unused_class_members: self.unused_class_members,
194 unresolved_imports: self.unresolved_imports,
195 unlisted_dependencies: self.unlisted_dependencies,
196 duplicate_exports: self.duplicate_exports,
197 circular_dependencies: self.circular_dependencies,
198 type_only_dependencies: self.type_only_dependencies,
199 test_only_dependencies: self.test_only_dependencies,
200 }
201 }
202
203 fn deltas(&self, current: &Self) -> Vec<(&'static str, isize)> {
205 let pairs: Vec<(&str, usize, usize)> = vec![
206 ("unused_files", self.unused_files, current.unused_files),
207 (
208 "unused_exports",
209 self.unused_exports,
210 current.unused_exports,
211 ),
212 ("unused_types", self.unused_types, current.unused_types),
213 (
214 "unused_dependencies",
215 self.unused_dependencies,
216 current.unused_dependencies,
217 ),
218 (
219 "unused_dev_dependencies",
220 self.unused_dev_dependencies,
221 current.unused_dev_dependencies,
222 ),
223 (
224 "unused_optional_dependencies",
225 self.unused_optional_dependencies,
226 current.unused_optional_dependencies,
227 ),
228 (
229 "unused_enum_members",
230 self.unused_enum_members,
231 current.unused_enum_members,
232 ),
233 (
234 "unused_class_members",
235 self.unused_class_members,
236 current.unused_class_members,
237 ),
238 (
239 "unresolved_imports",
240 self.unresolved_imports,
241 current.unresolved_imports,
242 ),
243 (
244 "unlisted_dependencies",
245 self.unlisted_dependencies,
246 current.unlisted_dependencies,
247 ),
248 (
249 "duplicate_exports",
250 self.duplicate_exports,
251 current.duplicate_exports,
252 ),
253 (
254 "circular_dependencies",
255 self.circular_dependencies,
256 current.circular_dependencies,
257 ),
258 (
259 "type_only_dependencies",
260 self.type_only_dependencies,
261 current.type_only_dependencies,
262 ),
263 (
264 "test_only_dependencies",
265 self.test_only_dependencies,
266 current.test_only_dependencies,
267 ),
268 ];
269 pairs
270 .into_iter()
271 .filter_map(|(name, baseline, current)| {
272 let delta = current as isize - baseline as isize;
273 if delta != 0 {
274 Some((name, delta))
275 } else {
276 None
277 }
278 })
279 .collect()
280 }
281}
282
/// Duplication metrics captured from a `dupes` run.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct DupesCounts {
    /// Number of clone groups detected.
    #[serde(default)]
    pub clone_groups: usize,
    /// Overall duplication percentage reported by the analysis.
    #[serde(default)]
    pub duplication_percentage: f64,
}
291
/// Result of comparing the current issue counts against a baseline.
#[derive(Debug)]
pub enum RegressionOutcome {
    /// Current total is within tolerance of the baseline.
    Pass {
        baseline_total: usize,
        current_total: usize,
    },
    /// Current total grew past the baseline by more than the tolerance.
    Exceeded {
        baseline_total: usize,
        current_total: usize,
        /// The tolerance that was applied in the comparison.
        tolerance: Tolerance,
        /// Per-category `(name, current - baseline)` changes (non-zero only).
        type_deltas: Vec<(&'static str, isize)>,
    },
    /// The comparison was not performed (e.g. a scoped run); carries why.
    Skipped { reason: &'static str },
}
313
314impl RegressionOutcome {
315 #[must_use]
317 pub const fn is_failure(&self) -> bool {
318 matches!(self, Self::Exceeded { .. })
319 }
320
321 #[must_use]
323 pub fn to_json(&self) -> serde_json::Value {
324 match self {
325 Self::Pass {
326 baseline_total,
327 current_total,
328 } => serde_json::json!({
329 "status": "pass",
330 "baseline_total": baseline_total,
331 "current_total": current_total,
332 "delta": *current_total as isize - *baseline_total as isize,
333 "exceeded": false,
334 }),
335 Self::Exceeded {
336 baseline_total,
337 current_total,
338 tolerance,
339 ..
340 } => {
341 let (tolerance_value, tolerance_kind) = match tolerance {
342 Tolerance::Percentage(pct) => (*pct, "percentage"),
343 Tolerance::Absolute(abs) => (*abs as f64, "absolute"),
344 };
345 serde_json::json!({
346 "status": "exceeded",
347 "baseline_total": baseline_total,
348 "current_total": current_total,
349 "delta": *current_total as isize - *baseline_total as isize,
350 "tolerance": tolerance_value,
351 "tolerance_kind": tolerance_kind,
352 "exceeded": true,
353 })
354 }
355 Self::Skipped { reason } => serde_json::json!({
356 "status": "skipped",
357 "reason": reason,
358 "exceeded": false,
359 }),
360 }
361 }
362}
363
/// Where to persist a freshly captured regression baseline.
#[derive(Clone, Copy)]
pub enum SaveRegressionTarget<'a> {
    /// Do not save a baseline.
    None,
    /// Save into the config file's `regression.baseline` section.
    Config,
    /// Save as a standalone JSON file at the given path.
    File(&'a Path),
}
376
/// Options controlling the regression comparison for a run.
#[derive(Clone, Copy)]
pub struct RegressionOpts<'a> {
    /// Whether `--fail-on-regression` was requested.
    pub fail_on_regression: bool,
    /// Allowed growth before the check fails.
    pub tolerance: Tolerance,
    /// Explicit baseline file (`--regression-baseline`), which takes
    /// precedence over the config's `regression.baseline` section.
    pub regression_baseline_file: Option<&'a Path>,
    /// Where (if anywhere) to save a new baseline.
    pub save_target: SaveRegressionTarget<'a>,
    /// True when the run is scoped (`--changed-since` / `--workspace`);
    /// scoped counts are not comparable to a full-project baseline.
    pub scoped: bool,
    /// Suppresses the warning printed when the check is skipped.
    pub quiet: bool,
}
390
/// Best-effort check for whether `path` is gitignored relative to `root`.
///
/// Shells out to `git check-ignore -q`; any failure to run git (not
/// installed, `root` missing, not a repository) is treated as "not ignored".
fn is_likely_gitignored(path: &Path, root: &Path) -> bool {
    let probe = std::process::Command::new("git")
        .arg("check-ignore")
        .arg("-q")
        .arg(path)
        .current_dir(root)
        .output();
    match probe {
        // `check-ignore -q` exits successfully exactly when the path is ignored.
        Ok(output) => output.status.success(),
        Err(_) => false,
    }
}
402
/// Returns the commit SHA of `HEAD` in `root`, or `None` when git cannot be
/// run there (not installed, directory missing, not a repository).
fn current_git_sha(root: &Path) -> Option<String> {
    let output = std::process::Command::new("git")
        .arg("rev-parse")
        .arg("HEAD")
        .current_dir(root)
        .output()
        .ok()?;
    if !output.status.success() {
        return None;
    }
    // `rev-parse HEAD` prints the SHA followed by a newline.
    let sha = String::from_utf8_lossy(&output.stdout).trim().to_string();
    Some(sha)
}
413
/// Writes a standalone regression-baseline JSON file to `path`.
///
/// The snapshot records the tool version, a UTC timestamp, the current git
/// `HEAD` (when `root` is a repository), and whichever of the check/dupes
/// counts were supplied.
///
/// # Errors
///
/// Returns `ExitCode` 2 (after printing a diagnostic) when serialization or
/// the file write fails.
pub fn save_regression_baseline(
    path: &Path,
    root: &Path,
    check_counts: Option<&CheckCounts>,
    dupes_counts: Option<&DupesCounts>,
) -> Result<(), ExitCode> {
    let baseline = RegressionBaseline {
        schema_version: REGRESSION_SCHEMA_VERSION,
        fallow_version: env!("CARGO_PKG_VERSION").to_string(),
        timestamp: chrono_now(),
        git_sha: current_git_sha(root),
        check: check_counts.cloned(),
        dupes: dupes_counts.cloned(),
    };
    let json = serde_json::to_string_pretty(&baseline).map_err(|e| {
        eprintln!("Error: failed to serialize regression baseline: {e}");
        ExitCode::from(2)
    })?;
    if let Some(parent) = path.parent() {
        // Best-effort: if this fails, the write below reports the real error.
        let _ = std::fs::create_dir_all(parent);
    }
    std::fs::write(path, json).map_err(|e| {
        eprintln!("Error: failed to save regression baseline: {e}");
        ExitCode::from(2)
    })?;
    eprintln!("Regression baseline saved to {}", path.display());
    // A gitignored baseline silently defeats CI comparison — warn about it.
    if is_likely_gitignored(path, root) {
        eprintln!(
            "Warning: '{}' may be gitignored. Commit this file so CI can compare against it.",
            path.display()
        );
    }
    Ok(())
}
457
458pub fn save_baseline_to_config(config_path: &Path, counts: &CheckCounts) -> Result<(), ExitCode> {
468 let content = match std::fs::read_to_string(config_path) {
470 Ok(c) => c,
471 Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
472 let is_toml = config_path.extension().is_some_and(|ext| ext == "toml");
473 if is_toml {
474 String::new()
475 } else {
476 "{}".to_string()
477 }
478 }
479 Err(e) => {
480 eprintln!(
481 "Error: failed to read config file '{}': {e}",
482 config_path.display()
483 );
484 return Err(ExitCode::from(2));
485 }
486 };
487
488 let baseline = counts.to_config_baseline();
489 let is_toml = config_path.extension().is_some_and(|ext| ext == "toml");
490
491 let updated = if is_toml {
492 Ok(update_toml_regression(&content, &baseline))
493 } else {
494 update_json_regression(&content, &baseline)
495 }
496 .map_err(|e| {
497 eprintln!(
498 "Error: failed to update config file '{}': {e}",
499 config_path.display()
500 );
501 ExitCode::from(2)
502 })?;
503
504 std::fs::write(config_path, updated).map_err(|e| {
505 eprintln!(
506 "Error: failed to write config file '{}': {e}",
507 config_path.display()
508 );
509 ExitCode::from(2)
510 })?;
511
512 eprintln!(
513 "Regression baseline saved to {} (regression.baseline section)",
514 config_path.display()
515 );
516 Ok(())
517}
518
/// Finds the byte offset of the first occurrence of `"key"` in `content`
/// that is not inside a `//` line comment or a `/* ... */` block comment
/// (JSONC configs may contain both). Returns `None` when no such
/// occurrence exists.
fn find_json_key(content: &str, key: &str) -> Option<usize> {
    let needle = format!("\"{key}\"");
    let mut from = 0;
    while let Some(rel) = content[from..].find(&needle) {
        let at = from + rel;

        // Inside a line comment? Check whether the text between the start
        // of this line and the match begins with `//`.
        let line_start = content[..at].rfind('\n').map_or(0, |i| i + 1);
        let in_line_comment = content[line_start..at].trim_start().starts_with("//");

        // Inside a block comment? True when the most recent `/*` before the
        // match has no later `*/` closing it.
        let head = &content[..at];
        let in_block_comment = match (head.rfind("/*"), head.rfind("*/")) {
            (Some(open), Some(close)) => close < open,
            (Some(_), None) => true,
            _ => false,
        };

        if in_line_comment || in_block_comment {
            from = at + needle.len();
        } else {
            return Some(at);
        }
    }
    None
}
548
549fn update_json_regression(
550 content: &str,
551 baseline: &fallow_config::RegressionBaseline,
552) -> Result<String, String> {
553 let baseline_json =
554 serde_json::to_string_pretty(baseline).map_err(|e| format!("serialization error: {e}"))?;
555
556 let indented: String = baseline_json
558 .lines()
559 .enumerate()
560 .map(|(i, line)| {
561 if i == 0 {
562 format!(" {line}")
563 } else {
564 format!("\n {line}")
565 }
566 })
567 .collect();
568
569 let regression_block = format!(" \"regression\": {{\n \"baseline\": {indented}\n }}");
570
571 if let Some(start) = find_json_key(content, "regression") {
575 let after_key = &content[start..];
576 if let Some(brace_start) = after_key.find('{') {
577 let abs_brace = start + brace_start;
578 let mut depth = 0;
579 let mut end = abs_brace;
580 let mut found_close = false;
581 for (i, ch) in content[abs_brace..].char_indices() {
582 match ch {
583 '{' => depth += 1,
584 '}' => {
585 depth -= 1;
586 if depth == 0 {
587 end = abs_brace + i + 1;
588 found_close = true;
589 break;
590 }
591 }
592 _ => {}
593 }
594 }
595 if !found_close {
596 return Err("malformed JSON: unmatched brace in regression object".to_string());
597 }
598 let mut result = String::new();
599 result.push_str(&content[..start]);
600 result.push_str(®ression_block[2..]); result.push_str(&content[end..]);
602 return Ok(result);
603 }
604 }
605
606 if let Some(last_brace) = content.rfind('}') {
608 let before_brace = content[..last_brace].trim_end();
610 let needs_comma = !before_brace.ends_with('{') && !before_brace.ends_with(',');
611
612 let mut result = String::new();
613 result.push_str(before_brace);
614 if needs_comma {
615 result.push(',');
616 }
617 result.push('\n');
618 result.push_str(®ression_block);
619 result.push('\n');
620 result.push_str(&content[last_brace..]);
621 Ok(result)
622 } else {
623 Err("config file has no closing brace".to_string())
624 }
625}
626
627fn update_toml_regression(content: &str, baseline: &fallow_config::RegressionBaseline) -> String {
629 use std::fmt::Write;
630 let mut section = String::from("[regression.baseline]\n");
632 let _ = writeln!(section, "totalIssues = {}", baseline.total_issues);
633 let _ = writeln!(section, "unusedFiles = {}", baseline.unused_files);
634 let _ = writeln!(section, "unusedExports = {}", baseline.unused_exports);
635 let _ = writeln!(section, "unusedTypes = {}", baseline.unused_types);
636 let _ = writeln!(
637 section,
638 "unusedDependencies = {}",
639 baseline.unused_dependencies
640 );
641 let _ = writeln!(
642 section,
643 "unusedDevDependencies = {}",
644 baseline.unused_dev_dependencies
645 );
646 let _ = writeln!(
647 section,
648 "unusedOptionalDependencies = {}",
649 baseline.unused_optional_dependencies
650 );
651 let _ = writeln!(
652 section,
653 "unusedEnumMembers = {}",
654 baseline.unused_enum_members
655 );
656 let _ = writeln!(
657 section,
658 "unusedClassMembers = {}",
659 baseline.unused_class_members
660 );
661 let _ = writeln!(
662 section,
663 "unresolvedImports = {}",
664 baseline.unresolved_imports
665 );
666 let _ = writeln!(
667 section,
668 "unlistedDependencies = {}",
669 baseline.unlisted_dependencies
670 );
671 let _ = writeln!(section, "duplicateExports = {}", baseline.duplicate_exports);
672 let _ = writeln!(
673 section,
674 "circularDependencies = {}",
675 baseline.circular_dependencies
676 );
677 let _ = writeln!(
678 section,
679 "typeOnlyDependencies = {}",
680 baseline.type_only_dependencies
681 );
682 let _ = writeln!(
683 section,
684 "testOnlyDependencies = {}",
685 baseline.test_only_dependencies
686 );
687
688 if let Some(start) = content.find("[regression.baseline]") {
690 let after = &content[start + "[regression.baseline]".len()..];
692 let end_offset = after.find("\n[").map_or(content.len(), |i| {
693 start + "[regression.baseline]".len() + i + 1
694 });
695
696 let mut result = String::new();
697 result.push_str(&content[..start]);
698 result.push_str(§ion);
699 if end_offset < content.len() {
700 result.push_str(&content[end_offset..]);
701 }
702 result
703 } else {
704 let mut result = content.to_string();
706 if !result.ends_with('\n') {
707 result.push('\n');
708 }
709 result.push('\n');
710 result.push_str(§ion);
711 result
712 }
713}
714
715pub fn load_regression_baseline(path: &Path) -> Result<RegressionBaseline, ExitCode> {
721 let content = std::fs::read_to_string(path).map_err(|e| {
722 if e.kind() == std::io::ErrorKind::NotFound {
723 eprintln!(
724 "Error: no regression baseline found at '{}'.\n\
725 Run with --save-regression-baseline on your main branch to create one.",
726 path.display()
727 );
728 } else {
729 eprintln!(
730 "Error: failed to read regression baseline '{}': {e}",
731 path.display()
732 );
733 }
734 ExitCode::from(2)
735 })?;
736 serde_json::from_str(&content).map_err(|e| {
737 eprintln!(
738 "Error: failed to parse regression baseline '{}': {e}",
739 path.display()
740 );
741 ExitCode::from(2)
742 })
743}
744
/// Compares `results` against the configured regression baseline.
///
/// Returns `Ok(None)` when `--fail-on-regression` is off. Scoped runs
/// (`--changed-since` / `--workspace`) are skipped, because scoped counts
/// are not comparable to a full-project baseline.
///
/// Baseline resolution order: an explicit `--regression-baseline` file
/// first, then the config's `regression.baseline` section.
///
/// # Errors
///
/// Returns `ExitCode` 2 (after printing a diagnostic) when no usable
/// baseline exists or the baseline file has no `check` data.
pub fn compare_check_regression(
    results: &AnalysisResults,
    opts: &RegressionOpts<'_>,
    config_baseline: Option<&fallow_config::RegressionBaseline>,
) -> Result<Option<RegressionOutcome>, ExitCode> {
    if !opts.fail_on_regression {
        return Ok(None);
    }

    if opts.scoped {
        let reason = "--changed-since or --workspace is active; regression check skipped \
            (counts not comparable to full-project baseline)";
        // Warn unless quiet; the Skipped outcome still records the reason.
        if !opts.quiet {
            eprintln!("Warning: {reason}");
        }
        return Ok(Some(RegressionOutcome::Skipped { reason }));
    }

    // Explicit file wins over the config-embedded baseline.
    let baseline_counts: CheckCounts = if let Some(baseline_path) = opts.regression_baseline_file {
        let baseline = load_regression_baseline(baseline_path)?;
        let Some(counts) = baseline.check else {
            eprintln!(
                "Error: regression baseline '{}' has no check data",
                baseline_path.display()
            );
            return Err(ExitCode::from(2));
        };
        counts
    } else if let Some(config_baseline) = config_baseline {
        CheckCounts::from_config_baseline(config_baseline)
    } else {
        eprintln!(
            "Error: no regression baseline found.\n\
            Either add a `regression.baseline` section to your config file\n\
            (run with --save-regression-baseline to generate it),\n\
            or provide an explicit file via --regression-baseline <PATH>."
        );
        return Err(ExitCode::from(2));
    };

    let current_total = results.total_issues();
    let baseline_total = baseline_counts.total_issues;

    if opts.tolerance.exceeded(baseline_total, current_total) {
        // Only compute the per-category breakdown on failure.
        let current_counts = CheckCounts::from_results(results);
        let type_deltas = baseline_counts.deltas(&current_counts);
        Ok(Some(RegressionOutcome::Exceeded {
            baseline_total,
            current_total,
            tolerance: opts.tolerance,
            type_deltas,
        }))
    } else {
        Ok(Some(RegressionOutcome::Pass {
            baseline_total,
            current_total,
        }))
    }
}
819
820pub fn print_regression_outcome(outcome: &RegressionOutcome) {
822 match outcome {
823 RegressionOutcome::Pass {
824 baseline_total,
825 current_total,
826 } => {
827 let delta = *current_total as isize - *baseline_total as isize;
828 let sign = if delta >= 0 { "+" } else { "" };
829 eprintln!(
830 "Regression check passed: {current_total} issues (baseline: {baseline_total}, \
831 delta: {sign}{delta})"
832 );
833 }
834 RegressionOutcome::Exceeded {
835 baseline_total,
836 current_total,
837 tolerance,
838 type_deltas,
839 } => {
840 let delta = *current_total as isize - *baseline_total as isize;
841 let tol_str = match tolerance {
842 Tolerance::Percentage(pct) => format!("{pct}%"),
843 Tolerance::Absolute(abs) => format!("{abs}"),
844 };
845 eprintln!(
846 "Regression detected: {current_total} issues (baseline: {baseline_total}, \
847 delta: +{delta}, tolerance: {tol_str})"
848 );
849 for (name, d) in type_deltas {
850 let sign = if *d > 0 { "+" } else { "" };
851 eprintln!(" {name}: {sign}{d}");
852 }
853 }
854 RegressionOutcome::Skipped { .. } => {
855 }
857 }
858}
859
/// Formats the current UTC time as `YYYY-MM-DDTHH:MM:SSZ` without pulling in
/// a date/time crate.
///
/// The date part uses the standard "civil from days" algorithm (Howard
/// Hinnant's `civil_from_days`) on days since the Unix epoch. Leap seconds
/// are not modeled, matching `SystemTime`'s timeline.
fn chrono_now() -> String {
    // A clock set before the epoch degrades to the epoch itself.
    let duration = std::time::SystemTime::now()
        .duration_since(std::time::UNIX_EPOCH)
        .unwrap_or_default();
    let secs = duration.as_secs();
    let days = secs / 86400;
    // Split the in-day remainder into H:M:S.
    let time_secs = secs % 86400;
    let hours = time_secs / 3600;
    let minutes = (time_secs % 3600) / 60;
    let seconds = time_secs % 60;
    // Shift the epoch so each 400-year "era" (146097 days) starts on
    // March 1st of year 0; this puts leap days at the end of the cycle.
    let z = days + 719_468;
    let era = z / 146_097;
    // Day-of-era and year-of-era within the 400-year cycle.
    let doe = z - era * 146_097;
    let yoe = (doe - doe / 1460 + doe / 36524 - doe / 146_096) / 365;
    let y = yoe + era * 400;
    // Day-of-year in the March-based year, then month index (March = 0).
    let doy = doe - (365 * yoe + yoe / 4 - yoe / 100);
    let mp = (5 * doy + 2) / 153;
    let d = doy - (153 * mp + 2) / 5 + 1;
    // Map the March-based month back to the January-based calendar; Jan/Feb
    // belong to the following civil year.
    let m = if mp < 10 { mp + 3 } else { mp - 9 };
    let y = if m <= 2 { y + 1 } else { y };
    format!("{y:04}-{m:02}-{d:02}T{hours:02}:{minutes:02}:{seconds:02}Z")
}
885
#[cfg(test)]
mod tests {
    use super::*;
    use fallow_core::results::*;
    use std::path::PathBuf;

    // --- Tolerance::parse ---

    #[test]
    fn parse_percentage_tolerance() {
        let t = Tolerance::parse("2%").unwrap();
        assert!(matches!(t, Tolerance::Percentage(p) if (p - 2.0).abs() < f64::EPSILON));
    }

    #[test]
    fn parse_absolute_tolerance() {
        let t = Tolerance::parse("5").unwrap();
        assert!(matches!(t, Tolerance::Absolute(5)));
    }

    #[test]
    fn parse_zero_tolerance() {
        let t = Tolerance::parse("0").unwrap();
        assert!(matches!(t, Tolerance::Absolute(0)));
    }

    #[test]
    fn parse_empty_defaults_to_zero() {
        let t = Tolerance::parse("").unwrap();
        assert!(matches!(t, Tolerance::Absolute(0)));
    }

    #[test]
    fn parse_invalid_percentage() {
        assert!(Tolerance::parse("abc%").is_err());
    }

    #[test]
    fn parse_negative_percentage() {
        assert!(Tolerance::parse("-1%").is_err());
    }

    #[test]
    fn parse_invalid_absolute() {
        assert!(Tolerance::parse("abc").is_err());
    }

    // --- Tolerance::exceeded ---

    #[test]
    fn zero_tolerance_detects_any_increase() {
        let t = Tolerance::Absolute(0);
        assert!(t.exceeded(10, 11));
        assert!(!t.exceeded(10, 10));
        assert!(!t.exceeded(10, 9));
    }

    #[test]
    fn absolute_tolerance_allows_within_range() {
        let t = Tolerance::Absolute(3);
        assert!(!t.exceeded(10, 12)); // delta 2: within tolerance
        assert!(!t.exceeded(10, 13)); // delta 3: exactly at the limit
        assert!(t.exceeded(10, 14)); // delta 4: over the limit
    }

    #[test]
    fn percentage_tolerance_allows_within_range() {
        let t = Tolerance::Percentage(10.0);
        assert!(!t.exceeded(100, 109)); // +9%: within tolerance
        assert!(!t.exceeded(100, 110)); // +10%: exactly at the limit
        assert!(t.exceeded(100, 111)); // +11%: over the limit
    }

    #[test]
    fn percentage_tolerance_from_zero_baseline() {
        let t = Tolerance::Percentage(10.0);
        // Any growth from a clean (zero-issue) baseline is a regression.
        assert!(t.exceeded(0, 1));
        assert!(!t.exceeded(0, 0));
    }

    #[test]
    fn decrease_never_exceeds() {
        let t = Tolerance::Absolute(0);
        assert!(!t.exceeded(10, 5));
        let t = Tolerance::Percentage(0.0);
        assert!(!t.exceeded(10, 5));
    }

    // --- CheckCounts ---

    #[test]
    fn check_counts_from_results() {
        let mut results = AnalysisResults::default();
        results.unused_files.push(UnusedFile {
            path: PathBuf::from("a.ts"),
        });
        results.unused_exports.push(UnusedExport {
            path: PathBuf::from("b.ts"),
            export_name: "foo".into(),
            is_type_only: false,
            line: 1,
            col: 0,
            span_start: 0,
            is_re_export: false,
        });
        let counts = CheckCounts::from_results(&results);
        assert_eq!(counts.total_issues, 2);
        assert_eq!(counts.unused_files, 1);
        assert_eq!(counts.unused_exports, 1);
        assert_eq!(counts.unused_types, 0);
    }

    #[test]
    fn deltas_reports_changes_only() {
        let baseline = CheckCounts {
            total_issues: 10,
            unused_files: 5,
            unused_exports: 3,
            unused_types: 2,
            unused_dependencies: 0,
            unused_dev_dependencies: 0,
            unused_optional_dependencies: 0,
            unused_enum_members: 0,
            unused_class_members: 0,
            unresolved_imports: 0,
            unlisted_dependencies: 0,
            duplicate_exports: 0,
            circular_dependencies: 0,
            type_only_dependencies: 0,
            test_only_dependencies: 0,
        };
        let current = CheckCounts {
            unused_files: 7,   // +2
            unused_exports: 1, // -2
            unused_types: 2,   // unchanged: must not appear in deltas
            ..baseline
        };
        let deltas = baseline.deltas(&current);
        assert_eq!(deltas.len(), 2);
        assert!(deltas.contains(&("unused_files", 2)));
        assert!(deltas.contains(&("unused_exports", -2)));
    }

    // --- RegressionOutcome JSON rendering ---

    #[test]
    fn pass_outcome_json() {
        let outcome = RegressionOutcome::Pass {
            baseline_total: 10,
            current_total: 10,
        };
        let json = outcome.to_json();
        assert_eq!(json["status"], "pass");
        assert_eq!(json["exceeded"], false);
        assert_eq!(json["delta"], 0);
    }

    #[test]
    fn exceeded_outcome_json() {
        let outcome = RegressionOutcome::Exceeded {
            baseline_total: 10,
            current_total: 15,
            tolerance: Tolerance::Percentage(2.0),
            type_deltas: vec![("unused_files", 5)],
        };
        let json = outcome.to_json();
        assert_eq!(json["status"], "exceeded");
        assert_eq!(json["exceeded"], true);
        assert_eq!(json["delta"], 5);
        assert_eq!(json["tolerance_kind"], "percentage");
    }

    #[test]
    fn skipped_outcome_json() {
        let outcome = RegressionOutcome::Skipped {
            reason: "test reason",
        };
        let json = outcome.to_json();
        assert_eq!(json["status"], "skipped");
        assert_eq!(json["exceeded"], false);
    }

    // --- Baseline (de)serialization ---

    #[test]
    fn regression_baseline_roundtrip() {
        let baseline = RegressionBaseline {
            schema_version: 1,
            fallow_version: "2.4.0".into(),
            timestamp: "2026-03-27T10:00:00Z".into(),
            git_sha: Some("abc123".into()),
            check: Some(CheckCounts {
                total_issues: 42,
                unused_files: 5,
                unused_exports: 20,
                unused_types: 8,
                unused_dependencies: 3,
                unused_dev_dependencies: 2,
                unused_optional_dependencies: 0,
                unused_enum_members: 1,
                unused_class_members: 1,
                unresolved_imports: 0,
                unlisted_dependencies: 1,
                duplicate_exports: 0,
                circular_dependencies: 1,
                type_only_dependencies: 0,
                test_only_dependencies: 0,
            }),
            dupes: Some(DupesCounts {
                clone_groups: 12,
                duplication_percentage: 4.2,
            }),
        };
        let json = serde_json::to_string_pretty(&baseline).unwrap();
        let loaded: RegressionBaseline = serde_json::from_str(&json).unwrap();
        assert_eq!(loaded.schema_version, 1);
        assert_eq!(loaded.check.as_ref().unwrap().total_issues, 42);
        assert_eq!(loaded.dupes.as_ref().unwrap().clone_groups, 12);
    }

    #[test]
    fn regression_outcome_is_failure() {
        let pass = RegressionOutcome::Pass {
            baseline_total: 10,
            current_total: 10,
        };
        assert!(!pass.is_failure());

        let exceeded = RegressionOutcome::Exceeded {
            baseline_total: 10,
            current_total: 15,
            tolerance: Tolerance::Absolute(2),
            type_deltas: vec![],
        };
        assert!(exceeded.is_failure());

        let skipped = RegressionOutcome::Skipped { reason: "test" };
        assert!(!skipped.is_failure());
    }

    // --- Config-file editing helpers ---

    // Minimal baseline used by the JSON/TOML rewrite tests below.
    fn sample_baseline() -> fallow_config::RegressionBaseline {
        fallow_config::RegressionBaseline {
            total_issues: 5,
            unused_files: 2,
            ..Default::default()
        }
    }

    #[test]
    fn json_insert_into_empty_object() {
        let result = update_json_regression("{}", &sample_baseline()).unwrap();
        assert!(result.contains("\"regression\""));
        assert!(result.contains("\"totalIssues\": 5"));
        // The rewritten config must still be valid JSON.
        serde_json::from_str::<serde_json::Value>(&result).unwrap();
    }

    #[test]
    fn json_insert_into_existing_config() {
        let config = r#"{
    "entry": ["src/main.ts"],
    "production": true
}"#;
        let result = update_json_regression(config, &sample_baseline()).unwrap();
        assert!(result.contains("\"regression\""));
        assert!(result.contains("\"entry\""));
        serde_json::from_str::<serde_json::Value>(&result).unwrap();
    }

    #[test]
    fn json_replace_existing_regression() {
        let config = r#"{
    "entry": ["src/main.ts"],
    "regression": {
        "baseline": {
            "totalIssues": 99
        }
    }
}"#;
        let result = update_json_regression(config, &sample_baseline()).unwrap();
        // The old baseline value must be fully replaced.
        assert!(!result.contains("99"));
        assert!(result.contains("\"totalIssues\": 5"));
        serde_json::from_str::<serde_json::Value>(&result).unwrap();
    }

    #[test]
    fn json_skips_regression_in_comment() {
        let config = "{\n // See \"regression\" docs\n \"entry\": []\n}";
        let result = update_json_regression(config, &sample_baseline()).unwrap();
        // A commented-out mention must not be mistaken for the real key.
        assert!(result.contains("\"regression\":"));
        assert!(result.contains("\"entry\""));
    }

    #[test]
    fn json_malformed_brace_returns_error() {
        let config = r#"{ "regression": { "baseline": { "totalIssues": 1 }"#;
        let result = update_json_regression(config, &sample_baseline());
        assert!(result.is_err());
    }

    #[test]
    fn toml_insert_into_empty() {
        let result = update_toml_regression("", &sample_baseline());
        assert!(result.contains("[regression.baseline]"));
        assert!(result.contains("totalIssues = 5"));
    }

    #[test]
    fn toml_insert_after_existing_content() {
        let config = "[rules]\nunused-files = \"warn\"\n";
        let result = update_toml_regression(config, &sample_baseline());
        assert!(result.contains("[rules]"));
        assert!(result.contains("[regression.baseline]"));
        assert!(result.contains("totalIssues = 5"));
    }

    #[test]
    fn toml_replace_existing_section() {
        let config =
            "[regression.baseline]\ntotalIssues = 99\n\n[rules]\nunused-files = \"warn\"\n";
        let result = update_toml_regression(config, &sample_baseline());
        assert!(!result.contains("99"));
        assert!(result.contains("totalIssues = 5"));
        assert!(result.contains("[rules]"));
    }

    // --- find_json_key ---

    #[test]
    fn find_json_key_basic() {
        assert_eq!(find_json_key(r#"{"foo": 1}"#, "foo"), Some(1));
    }

    #[test]
    fn find_json_key_skips_comment() {
        let content = "{\n // \"foo\" is important\n \"bar\": 1\n}";
        assert_eq!(find_json_key(content, "foo"), None);
        assert!(find_json_key(content, "bar").is_some());
    }

    #[test]
    fn find_json_key_not_found() {
        assert_eq!(find_json_key("{}", "missing"), None);
    }

    #[test]
    fn find_json_key_skips_block_comment() {
        let content = "{\n /* \"foo\": old value */\n \"foo\": 1\n}";
        // The occurrence inside the block comment must be skipped.
        let pos = find_json_key(content, "foo").unwrap();
        assert!(content[pos..].starts_with("\"foo\": 1"));
    }
}