1use std::path::Path;
2use std::process::ExitCode;
3
4use fallow_core::results::AnalysisResults;
5
/// Allowed growth of the issue count before a regression is reported.
#[derive(Debug, Clone, Copy)]
pub enum Tolerance {
    /// Allowed growth as a percentage of the baseline total (e.g. `2%`).
    Percentage(f64),
    /// Allowed growth as an absolute number of issues.
    Absolute(usize),
}

impl Tolerance {
    /// Parses a tolerance spec: `N%` is a percentage, a bare integer is an
    /// absolute count, and empty/whitespace input defaults to `Absolute(0)`.
    ///
    /// # Errors
    /// Returns a human-readable message for malformed, negative, or
    /// non-finite values.
    pub fn parse(s: &str) -> Result<Self, String> {
        let s = s.trim();
        if s.is_empty() {
            return Ok(Self::Absolute(0));
        }
        if let Some(pct_str) = s.strip_suffix('%') {
            let pct: f64 = pct_str
                .trim()
                .parse()
                .map_err(|_| format!("invalid tolerance percentage: {s}"))?;
            // Reject NaN/inf explicitly: `NaN < 0.0` is false, so without
            // this check "nan%" would slip through as a bogus percentage.
            if !pct.is_finite() {
                return Err(format!("invalid tolerance percentage: {s}"));
            }
            if pct < 0.0 {
                return Err(format!("tolerance percentage must be non-negative: {s}"));
            }
            Ok(Self::Percentage(pct))
        } else {
            let abs: usize = s
                .parse()
                .map_err(|_| format!("invalid tolerance value: {s} (use a number or N%)"))?;
            Ok(Self::Absolute(abs))
        }
    }

    /// Returns true when `current_total` grew past the allowance over
    /// `baseline_total`. Equal or decreased counts never exceed.
    fn exceeded(&self, baseline_total: usize, current_total: usize) -> bool {
        if current_total <= baseline_total {
            return false;
        }
        let delta = current_total - baseline_total;
        match *self {
            Self::Percentage(pct) => {
                // Any percentage of a zero baseline allows nothing.
                if baseline_total == 0 {
                    return delta > 0;
                }
                // Round the allowance down, so e.g. 10% of 105 allows 10.
                let allowed = (baseline_total as f64 * pct / 100.0).floor() as usize;
                delta > allowed
            }
            Self::Absolute(abs) => delta > abs,
        }
    }
}
61
/// On-disk regression baseline (JSON), written by `save_regression_baseline`
/// and read back by `load_regression_baseline`.
#[derive(Debug, serde::Serialize, serde::Deserialize)]
pub struct RegressionBaseline {
    /// File format version; see `REGRESSION_SCHEMA_VERSION`.
    pub schema_version: u32,
    /// Version of the tool that wrote this baseline (CARGO_PKG_VERSION).
    pub fallow_version: String,
    /// UTC timestamp string recorded at save time.
    pub timestamp: String,
    /// HEAD commit SHA at save time, when available from git.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub git_sha: Option<String>,
    /// Issue counts from the `check` analysis, if captured.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub check: Option<CheckCounts>,
    /// Duplication metrics from the `dupes` analysis, if captured.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub dupes: Option<DupesCounts>,
}
86
/// Bump when the serialized `RegressionBaseline` layout changes incompatibly.
const REGRESSION_SCHEMA_VERSION: u32 = 1;
88
/// Per-category issue counts from a `check` run.
///
/// Every field carries `#[serde(default)]` so baselines written by older
/// versions (missing newer categories) still deserialize, treating absent
/// categories as zero.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct CheckCounts {
    /// Sum of all issue categories; compared against the tolerance.
    #[serde(default)]
    pub total_issues: usize,
    #[serde(default)]
    pub unused_files: usize,
    #[serde(default)]
    pub unused_exports: usize,
    #[serde(default)]
    pub unused_types: usize,
    #[serde(default)]
    pub unused_dependencies: usize,
    #[serde(default)]
    pub unused_dev_dependencies: usize,
    #[serde(default)]
    pub unused_optional_dependencies: usize,
    #[serde(default)]
    pub unused_enum_members: usize,
    #[serde(default)]
    pub unused_class_members: usize,
    #[serde(default)]
    pub unresolved_imports: usize,
    #[serde(default)]
    pub unlisted_dependencies: usize,
    #[serde(default)]
    pub duplicate_exports: usize,
    #[serde(default)]
    pub circular_dependencies: usize,
    #[serde(default)]
    pub type_only_dependencies: usize,
    #[serde(default)]
    pub test_only_dependencies: usize,
}
127
128impl CheckCounts {
129 pub fn from_results(results: &AnalysisResults) -> Self {
130 Self {
131 total_issues: results.total_issues(),
132 unused_files: results.unused_files.len(),
133 unused_exports: results.unused_exports.len(),
134 unused_types: results.unused_types.len(),
135 unused_dependencies: results.unused_dependencies.len(),
136 unused_dev_dependencies: results.unused_dev_dependencies.len(),
137 unused_optional_dependencies: results.unused_optional_dependencies.len(),
138 unused_enum_members: results.unused_enum_members.len(),
139 unused_class_members: results.unused_class_members.len(),
140 unresolved_imports: results.unresolved_imports.len(),
141 unlisted_dependencies: results.unlisted_dependencies.len(),
142 duplicate_exports: results.duplicate_exports.len(),
143 circular_dependencies: results.circular_dependencies.len(),
144 type_only_dependencies: results.type_only_dependencies.len(),
145 test_only_dependencies: results.test_only_dependencies.len(),
146 }
147 }
148
149 pub fn from_config_baseline(b: &fallow_config::RegressionBaseline) -> Self {
151 Self {
152 total_issues: b.total_issues,
153 unused_files: b.unused_files,
154 unused_exports: b.unused_exports,
155 unused_types: b.unused_types,
156 unused_dependencies: b.unused_dependencies,
157 unused_dev_dependencies: b.unused_dev_dependencies,
158 unused_optional_dependencies: b.unused_optional_dependencies,
159 unused_enum_members: b.unused_enum_members,
160 unused_class_members: b.unused_class_members,
161 unresolved_imports: b.unresolved_imports,
162 unlisted_dependencies: b.unlisted_dependencies,
163 duplicate_exports: b.duplicate_exports,
164 circular_dependencies: b.circular_dependencies,
165 type_only_dependencies: b.type_only_dependencies,
166 test_only_dependencies: b.test_only_dependencies,
167 }
168 }
169
170 pub fn to_config_baseline(&self) -> fallow_config::RegressionBaseline {
172 fallow_config::RegressionBaseline {
173 total_issues: self.total_issues,
174 unused_files: self.unused_files,
175 unused_exports: self.unused_exports,
176 unused_types: self.unused_types,
177 unused_dependencies: self.unused_dependencies,
178 unused_dev_dependencies: self.unused_dev_dependencies,
179 unused_optional_dependencies: self.unused_optional_dependencies,
180 unused_enum_members: self.unused_enum_members,
181 unused_class_members: self.unused_class_members,
182 unresolved_imports: self.unresolved_imports,
183 unlisted_dependencies: self.unlisted_dependencies,
184 duplicate_exports: self.duplicate_exports,
185 circular_dependencies: self.circular_dependencies,
186 type_only_dependencies: self.type_only_dependencies,
187 test_only_dependencies: self.test_only_dependencies,
188 }
189 }
190
191 fn deltas(&self, current: &Self) -> Vec<(&'static str, isize)> {
193 let pairs: Vec<(&str, usize, usize)> = vec![
194 ("unused_files", self.unused_files, current.unused_files),
195 (
196 "unused_exports",
197 self.unused_exports,
198 current.unused_exports,
199 ),
200 ("unused_types", self.unused_types, current.unused_types),
201 (
202 "unused_dependencies",
203 self.unused_dependencies,
204 current.unused_dependencies,
205 ),
206 (
207 "unused_dev_dependencies",
208 self.unused_dev_dependencies,
209 current.unused_dev_dependencies,
210 ),
211 (
212 "unused_optional_dependencies",
213 self.unused_optional_dependencies,
214 current.unused_optional_dependencies,
215 ),
216 (
217 "unused_enum_members",
218 self.unused_enum_members,
219 current.unused_enum_members,
220 ),
221 (
222 "unused_class_members",
223 self.unused_class_members,
224 current.unused_class_members,
225 ),
226 (
227 "unresolved_imports",
228 self.unresolved_imports,
229 current.unresolved_imports,
230 ),
231 (
232 "unlisted_dependencies",
233 self.unlisted_dependencies,
234 current.unlisted_dependencies,
235 ),
236 (
237 "duplicate_exports",
238 self.duplicate_exports,
239 current.duplicate_exports,
240 ),
241 (
242 "circular_dependencies",
243 self.circular_dependencies,
244 current.circular_dependencies,
245 ),
246 (
247 "type_only_dependencies",
248 self.type_only_dependencies,
249 current.type_only_dependencies,
250 ),
251 (
252 "test_only_dependencies",
253 self.test_only_dependencies,
254 current.test_only_dependencies,
255 ),
256 ];
257 pairs
258 .into_iter()
259 .filter_map(|(name, baseline, current)| {
260 let delta = current as isize - baseline as isize;
261 if delta != 0 {
262 Some((name, delta))
263 } else {
264 None
265 }
266 })
267 .collect()
268 }
269}
270
/// Duplication metrics captured from a `dupes` run.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct DupesCounts {
    /// Count of clone groups reported.
    #[serde(default)]
    pub clone_groups: usize,
    /// Duplication percentage reported (presumably project-wide — confirm
    /// against the dupes command that produces it).
    #[serde(default)]
    pub duplication_percentage: f64,
}
279
/// Result of comparing the current issue total against a baseline.
#[derive(Debug)]
pub enum RegressionOutcome {
    /// Current total is within tolerance of the baseline.
    Pass {
        baseline_total: usize,
        current_total: usize,
    },
    /// Current total grew past the allowed tolerance.
    Exceeded {
        baseline_total: usize,
        current_total: usize,
        tolerance: Tolerance,
        /// Per-category `(name, current - baseline)` for changed categories.
        type_deltas: Vec<(&'static str, isize)>,
    },
    /// Comparison was not performed (e.g. scoped analysis).
    Skipped { reason: &'static str },
}
301
302impl RegressionOutcome {
303 pub const fn is_failure(&self) -> bool {
305 matches!(self, Self::Exceeded { .. })
306 }
307
308 pub fn to_json(&self) -> serde_json::Value {
310 match self {
311 Self::Pass {
312 baseline_total,
313 current_total,
314 } => serde_json::json!({
315 "status": "pass",
316 "baseline_total": baseline_total,
317 "current_total": current_total,
318 "delta": *current_total as isize - *baseline_total as isize,
319 "exceeded": false,
320 }),
321 Self::Exceeded {
322 baseline_total,
323 current_total,
324 tolerance,
325 ..
326 } => {
327 let (tolerance_value, tolerance_kind) = match tolerance {
328 Tolerance::Percentage(pct) => (*pct, "percentage"),
329 Tolerance::Absolute(abs) => (*abs as f64, "absolute"),
330 };
331 serde_json::json!({
332 "status": "exceeded",
333 "baseline_total": baseline_total,
334 "current_total": current_total,
335 "delta": *current_total as isize - *baseline_total as isize,
336 "tolerance": tolerance_value,
337 "tolerance_kind": tolerance_kind,
338 "exceeded": true,
339 })
340 }
341 Self::Skipped { reason } => serde_json::json!({
342 "status": "skipped",
343 "reason": reason,
344 "exceeded": false,
345 }),
346 }
347 }
348}
349
/// Destination for a freshly captured regression baseline.
#[derive(Clone, Copy)]
pub enum SaveRegressionTarget<'a> {
    /// Do not save a baseline.
    None,
    /// Write into the config file's `regression.baseline` section.
    Config,
    /// Write a standalone JSON baseline file at the given path.
    File(&'a Path),
}
362
/// Options controlling regression comparison and baseline saving.
#[derive(Clone, Copy)]
pub struct RegressionOpts<'a> {
    /// When false, the regression comparison is skipped entirely.
    pub fail_on_regression: bool,
    /// Allowed growth over the baseline before failing.
    pub tolerance: Tolerance,
    /// Explicit baseline file; takes precedence over the config's
    /// `regression.baseline` section.
    pub regression_baseline_file: Option<&'a Path>,
    /// Where to save a fresh baseline, if requested.
    pub save_target: SaveRegressionTarget<'a>,
    /// True when analysis was scoped (subset of files); totals are then not
    /// comparable to a full-project baseline.
    pub scoped: bool,
    /// Suppresses warning output.
    pub quiet: bool,
}
376
/// Best-effort check for whether `path` is ignored by git under `root`.
///
/// Shells out to `git check-ignore -q`; any failure to run git at all
/// (not installed, not a repository) is treated as "not ignored".
fn is_likely_gitignored(path: &Path, root: &Path) -> bool {
    let outcome = std::process::Command::new("git")
        .arg("check-ignore")
        .arg("-q")
        .arg(path)
        .current_dir(root)
        .output();
    match outcome {
        Ok(out) => out.status.success(),
        Err(_) => false,
    }
}
388
/// Returns the current HEAD commit SHA for `root`, or `None` when git is
/// unavailable, the directory is not a repository, or the command fails.
fn current_git_sha(root: &Path) -> Option<String> {
    let out = std::process::Command::new("git")
        .arg("rev-parse")
        .arg("HEAD")
        .current_dir(root)
        .output()
        .ok()?;
    if !out.status.success() {
        return None;
    }
    Some(String::from_utf8_lossy(&out.stdout).trim().to_string())
}
399
400pub fn save_regression_baseline(
402 path: &Path,
403 root: &Path,
404 check_counts: Option<&CheckCounts>,
405 dupes_counts: Option<&DupesCounts>,
406) -> Result<(), ExitCode> {
407 let baseline = RegressionBaseline {
408 schema_version: REGRESSION_SCHEMA_VERSION,
409 fallow_version: env!("CARGO_PKG_VERSION").to_string(),
410 timestamp: chrono_now(),
411 git_sha: current_git_sha(root),
412 check: check_counts.cloned(),
413 dupes: dupes_counts.cloned(),
414 };
415 let json = serde_json::to_string_pretty(&baseline).map_err(|e| {
416 eprintln!("Error: failed to serialize regression baseline: {e}");
417 ExitCode::from(2)
418 })?;
419 if let Some(parent) = path.parent() {
421 let _ = std::fs::create_dir_all(parent);
422 }
423 std::fs::write(path, json).map_err(|e| {
424 eprintln!("Error: failed to save regression baseline: {e}");
425 ExitCode::from(2)
426 })?;
427 eprintln!("Regression baseline saved to {}", path.display());
430 if is_likely_gitignored(path, root) {
432 eprintln!(
433 "Warning: '{}' may be gitignored. Commit this file so CI can compare against it.",
434 path.display()
435 );
436 }
437 Ok(())
438}
439
440pub fn save_baseline_to_config(config_path: &Path, counts: &CheckCounts) -> Result<(), ExitCode> {
446 let content = match std::fs::read_to_string(config_path) {
448 Ok(c) => c,
449 Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
450 let is_toml = config_path.extension().is_some_and(|ext| ext == "toml");
451 if is_toml {
452 String::new()
453 } else {
454 "{}".to_string()
455 }
456 }
457 Err(e) => {
458 eprintln!(
459 "Error: failed to read config file '{}': {e}",
460 config_path.display()
461 );
462 return Err(ExitCode::from(2));
463 }
464 };
465
466 let baseline = counts.to_config_baseline();
467 let is_toml = config_path.extension().is_some_and(|ext| ext == "toml");
468
469 let updated = if is_toml {
470 Ok(update_toml_regression(&content, &baseline))
471 } else {
472 update_json_regression(&content, &baseline)
473 }
474 .map_err(|e| {
475 eprintln!(
476 "Error: failed to update config file '{}': {e}",
477 config_path.display()
478 );
479 ExitCode::from(2)
480 })?;
481
482 std::fs::write(config_path, updated).map_err(|e| {
483 eprintln!(
484 "Error: failed to write config file '{}': {e}",
485 config_path.display()
486 );
487 ExitCode::from(2)
488 })?;
489
490 eprintln!(
491 "Regression baseline saved to {} (regression.baseline section)",
492 config_path.display()
493 );
494 Ok(())
495}
496
/// Finds the byte offset of the first `"key"` occurrence in a JSON(C)
/// document that is not inside a `//` line comment or an open `/* */`
/// block comment. Returns `None` when no such occurrence exists.
///
/// This is a heuristic text scan, not a JSON parse: an occurrence inside a
/// string *value* would also match.
fn find_json_key(content: &str, key: &str) -> Option<usize> {
    let quoted = format!("\"{key}\"");
    let mut cursor = 0;
    while let Some(rel) = content[cursor..].find(&quoted) {
        let hit = cursor + rel;
        // Advance past this candidate up front in case it gets rejected.
        cursor = hit + quoted.len();

        // Reject candidates preceded by `//` on the same line.
        let line_start = content[..hit].rfind('\n').map_or(0, |nl| nl + 1);
        if content[line_start..hit].trim_start().starts_with("//") {
            continue;
        }

        // Reject candidates inside a still-open block comment: the last
        // `/*` before the hit has no matching `*/` after it.
        let prefix = &content[..hit];
        let inside_block = match (prefix.rfind("/*"), prefix.rfind("*/")) {
            (Some(open), Some(close)) => close < open,
            (Some(_), None) => true,
            _ => false,
        };
        if inside_block {
            continue;
        }

        return Some(hit);
    }
    None
}
526
527fn update_json_regression(
528 content: &str,
529 baseline: &fallow_config::RegressionBaseline,
530) -> Result<String, String> {
531 let baseline_json =
532 serde_json::to_string_pretty(baseline).map_err(|e| format!("serialization error: {e}"))?;
533
534 let indented: String = baseline_json
536 .lines()
537 .enumerate()
538 .map(|(i, line)| {
539 if i == 0 {
540 format!(" {line}")
541 } else {
542 format!("\n {line}")
543 }
544 })
545 .collect();
546
547 let regression_block = format!(" \"regression\": {{\n \"baseline\": {indented}\n }}");
548
549 if let Some(start) = find_json_key(content, "regression") {
553 let after_key = &content[start..];
554 if let Some(brace_start) = after_key.find('{') {
555 let abs_brace = start + brace_start;
556 let mut depth = 0;
557 let mut end = abs_brace;
558 let mut found_close = false;
559 for (i, ch) in content[abs_brace..].char_indices() {
560 match ch {
561 '{' => depth += 1,
562 '}' => {
563 depth -= 1;
564 if depth == 0 {
565 end = abs_brace + i + 1;
566 found_close = true;
567 break;
568 }
569 }
570 _ => {}
571 }
572 }
573 if !found_close {
574 return Err("malformed JSON: unmatched brace in regression object".to_string());
575 }
576 let mut result = String::new();
577 result.push_str(&content[..start]);
578 result.push_str(®ression_block[2..]); result.push_str(&content[end..]);
580 return Ok(result);
581 }
582 }
583
584 if let Some(last_brace) = content.rfind('}') {
586 let before_brace = content[..last_brace].trim_end();
588 let needs_comma = !before_brace.ends_with('{') && !before_brace.ends_with(',');
589
590 let mut result = String::new();
591 result.push_str(before_brace);
592 if needs_comma {
593 result.push(',');
594 }
595 result.push('\n');
596 result.push_str(®ression_block);
597 result.push('\n');
598 result.push_str(&content[last_brace..]);
599 Ok(result)
600 } else {
601 Err("config file has no closing brace".to_string())
602 }
603}
604
605fn update_toml_regression(content: &str, baseline: &fallow_config::RegressionBaseline) -> String {
607 use std::fmt::Write;
608 let mut section = String::from("[regression.baseline]\n");
610 let _ = writeln!(section, "totalIssues = {}", baseline.total_issues);
611 let _ = writeln!(section, "unusedFiles = {}", baseline.unused_files);
612 let _ = writeln!(section, "unusedExports = {}", baseline.unused_exports);
613 let _ = writeln!(section, "unusedTypes = {}", baseline.unused_types);
614 let _ = writeln!(
615 section,
616 "unusedDependencies = {}",
617 baseline.unused_dependencies
618 );
619 let _ = writeln!(
620 section,
621 "unusedDevDependencies = {}",
622 baseline.unused_dev_dependencies
623 );
624 let _ = writeln!(
625 section,
626 "unusedOptionalDependencies = {}",
627 baseline.unused_optional_dependencies
628 );
629 let _ = writeln!(
630 section,
631 "unusedEnumMembers = {}",
632 baseline.unused_enum_members
633 );
634 let _ = writeln!(
635 section,
636 "unusedClassMembers = {}",
637 baseline.unused_class_members
638 );
639 let _ = writeln!(
640 section,
641 "unresolvedImports = {}",
642 baseline.unresolved_imports
643 );
644 let _ = writeln!(
645 section,
646 "unlistedDependencies = {}",
647 baseline.unlisted_dependencies
648 );
649 let _ = writeln!(section, "duplicateExports = {}", baseline.duplicate_exports);
650 let _ = writeln!(
651 section,
652 "circularDependencies = {}",
653 baseline.circular_dependencies
654 );
655 let _ = writeln!(
656 section,
657 "typeOnlyDependencies = {}",
658 baseline.type_only_dependencies
659 );
660 let _ = writeln!(
661 section,
662 "testOnlyDependencies = {}",
663 baseline.test_only_dependencies
664 );
665
666 if let Some(start) = content.find("[regression.baseline]") {
668 let after = &content[start + "[regression.baseline]".len()..];
670 let end_offset = after.find("\n[").map_or(content.len(), |i| {
671 start + "[regression.baseline]".len() + i + 1
672 });
673
674 let mut result = String::new();
675 result.push_str(&content[..start]);
676 result.push_str(§ion);
677 if end_offset < content.len() {
678 result.push_str(&content[end_offset..]);
679 }
680 result
681 } else {
682 let mut result = content.to_string();
684 if !result.ends_with('\n') {
685 result.push('\n');
686 }
687 result.push('\n');
688 result.push_str(§ion);
689 result
690 }
691}
692
693pub fn load_regression_baseline(path: &Path) -> Result<RegressionBaseline, ExitCode> {
695 let content = std::fs::read_to_string(path).map_err(|e| {
696 if e.kind() == std::io::ErrorKind::NotFound {
697 eprintln!(
698 "Error: no regression baseline found at '{}'.\n\
699 Run with --save-regression-baseline on your main branch to create one.",
700 path.display()
701 );
702 } else {
703 eprintln!(
704 "Error: failed to read regression baseline '{}': {e}",
705 path.display()
706 );
707 }
708 ExitCode::from(2)
709 })?;
710 serde_json::from_str(&content).map_err(|e| {
711 eprintln!(
712 "Error: failed to parse regression baseline '{}': {e}",
713 path.display()
714 );
715 ExitCode::from(2)
716 })
717}
718
719pub fn compare_check_regression(
726 results: &AnalysisResults,
727 opts: &RegressionOpts<'_>,
728 config_baseline: Option<&fallow_config::RegressionBaseline>,
729) -> Result<Option<RegressionOutcome>, ExitCode> {
730 if !opts.fail_on_regression {
731 return Ok(None);
732 }
733
734 if opts.scoped {
736 let reason = "--changed-since or --workspace is active; regression check skipped \
737 (counts not comparable to full-project baseline)";
738 if !opts.quiet {
739 eprintln!("Warning: {reason}");
740 }
741 return Ok(Some(RegressionOutcome::Skipped { reason }));
742 }
743
744 let baseline_counts: CheckCounts = if let Some(baseline_path) = opts.regression_baseline_file {
746 let baseline = load_regression_baseline(baseline_path)?;
748 let Some(counts) = baseline.check else {
749 eprintln!(
750 "Error: regression baseline '{}' has no check data",
751 baseline_path.display()
752 );
753 return Err(ExitCode::from(2));
754 };
755 counts
756 } else if let Some(config_baseline) = config_baseline {
757 CheckCounts::from_config_baseline(config_baseline)
759 } else {
760 eprintln!(
761 "Error: no regression baseline found.\n\
762 Either add a `regression.baseline` section to your config file\n\
763 (run with --save-regression-baseline to generate it),\n\
764 or provide an explicit file via --regression-baseline <PATH>."
765 );
766 return Err(ExitCode::from(2));
767 };
768
769 let current_total = results.total_issues();
770 let baseline_total = baseline_counts.total_issues;
771
772 if opts.tolerance.exceeded(baseline_total, current_total) {
773 let current_counts = CheckCounts::from_results(results);
774 let type_deltas = baseline_counts.deltas(¤t_counts);
775 Ok(Some(RegressionOutcome::Exceeded {
776 baseline_total,
777 current_total,
778 tolerance: opts.tolerance,
779 type_deltas,
780 }))
781 } else {
782 Ok(Some(RegressionOutcome::Pass {
783 baseline_total,
784 current_total,
785 }))
786 }
787}
788
789pub fn print_regression_outcome(outcome: &RegressionOutcome) {
791 match outcome {
792 RegressionOutcome::Pass {
793 baseline_total,
794 current_total,
795 } => {
796 let delta = *current_total as isize - *baseline_total as isize;
797 let sign = if delta >= 0 { "+" } else { "" };
798 eprintln!(
799 "Regression check passed: {current_total} issues (baseline: {baseline_total}, \
800 delta: {sign}{delta})"
801 );
802 }
803 RegressionOutcome::Exceeded {
804 baseline_total,
805 current_total,
806 tolerance,
807 type_deltas,
808 } => {
809 let delta = *current_total as isize - *baseline_total as isize;
810 let tol_str = match tolerance {
811 Tolerance::Percentage(pct) => format!("{pct}%"),
812 Tolerance::Absolute(abs) => format!("{abs}"),
813 };
814 eprintln!(
815 "Regression detected: {current_total} issues (baseline: {baseline_total}, \
816 delta: +{delta}, tolerance: {tol_str})"
817 );
818 for (name, d) in type_deltas {
819 let sign = if *d > 0 { "+" } else { "" };
820 eprintln!(" {name}: {sign}{d}");
821 }
822 }
823 RegressionOutcome::Skipped { .. } => {
824 }
826 }
827}
828
/// Formats the current system time as a UTC `YYYY-MM-DDTHH:MM:SSZ` string
/// without pulling in a date-time crate.
///
/// The date conversion is the "civil_from_days" algorithm (as popularized by
/// Howard Hinnant's date algorithms); leap seconds are ignored, which is fine
/// for a human-readable baseline timestamp.
fn chrono_now() -> String {
    // Seconds since the Unix epoch; a clock before the epoch degrades to 0.
    let duration = std::time::SystemTime::now()
        .duration_since(std::time::UNIX_EPOCH)
        .unwrap_or_default();
    let secs = duration.as_secs();
    let days = secs / 86400;
    let time_secs = secs % 86400;
    let hours = time_secs / 3600;
    let minutes = (time_secs % 3600) / 60;
    let seconds = time_secs % 60;
    // Shift the epoch from 1970-01-01 to 0000-03-01 (719,468 days earlier)
    // so that leap days fall at the end of each March-based year.
    let z = days + 719_468;
    // 146,097 days per 400-year era; 36,524 per century; 1,460 per 4 years.
    let era = z / 146_097;
    let doe = z - era * 146_097;
    let yoe = (doe - doe / 1460 + doe / 36524 - doe / 146_096) / 365;
    let y = yoe + era * 400;
    let doy = doe - (365 * yoe + yoe / 4 - yoe / 100);
    // Month index (0 = March) and day-of-month via the 153-day/5-month cycle.
    let mp = (5 * doy + 2) / 153;
    let d = doy - (153 * mp + 2) / 5 + 1;
    let m = if mp < 10 { mp + 3 } else { mp - 9 };
    // January and February belong to the following civil year.
    let y = if m <= 2 { y + 1 } else { y };
    format!("{y:04}-{m:02}-{d:02}T{hours:02}:{minutes:02}:{seconds:02}Z")
}
854
#[cfg(test)]
mod tests {
    use super::*;
    use fallow_core::results::*;
    use std::path::PathBuf;

    // --- Tolerance::parse ---

    #[test]
    fn parse_percentage_tolerance() {
        let t = Tolerance::parse("2%").unwrap();
        assert!(matches!(t, Tolerance::Percentage(p) if (p - 2.0).abs() < f64::EPSILON));
    }

    #[test]
    fn parse_absolute_tolerance() {
        let t = Tolerance::parse("5").unwrap();
        assert!(matches!(t, Tolerance::Absolute(5)));
    }

    #[test]
    fn parse_zero_tolerance() {
        let t = Tolerance::parse("0").unwrap();
        assert!(matches!(t, Tolerance::Absolute(0)));
    }

    #[test]
    fn parse_empty_defaults_to_zero() {
        let t = Tolerance::parse("").unwrap();
        assert!(matches!(t, Tolerance::Absolute(0)));
    }

    #[test]
    fn parse_invalid_percentage() {
        assert!(Tolerance::parse("abc%").is_err());
    }

    #[test]
    fn parse_negative_percentage() {
        assert!(Tolerance::parse("-1%").is_err());
    }

    #[test]
    fn parse_invalid_absolute() {
        assert!(Tolerance::parse("abc").is_err());
    }

    // --- Tolerance::exceeded ---

    #[test]
    fn zero_tolerance_detects_any_increase() {
        let t = Tolerance::Absolute(0);
        assert!(t.exceeded(10, 11));
        assert!(!t.exceeded(10, 10));
        assert!(!t.exceeded(10, 9));
    }

    #[test]
    fn absolute_tolerance_allows_within_range() {
        let t = Tolerance::Absolute(3);
        assert!(!t.exceeded(10, 12));
        assert!(!t.exceeded(10, 13));
        assert!(t.exceeded(10, 14));
    }

    #[test]
    fn percentage_tolerance_allows_within_range() {
        let t = Tolerance::Percentage(10.0);
        assert!(!t.exceeded(100, 109));
        assert!(!t.exceeded(100, 110));
        assert!(t.exceeded(100, 111));
    }

    #[test]
    fn percentage_tolerance_from_zero_baseline() {
        let t = Tolerance::Percentage(10.0);
        assert!(t.exceeded(0, 1));
        assert!(!t.exceeded(0, 0));
    }

    #[test]
    fn decrease_never_exceeds() {
        let t = Tolerance::Absolute(0);
        assert!(!t.exceeded(10, 5));
        let t = Tolerance::Percentage(0.0);
        assert!(!t.exceeded(10, 5));
    }

    // --- CheckCounts ---

    #[test]
    fn check_counts_from_results() {
        let mut results = AnalysisResults::default();
        results.unused_files.push(UnusedFile {
            path: PathBuf::from("a.ts"),
        });
        results.unused_exports.push(UnusedExport {
            path: PathBuf::from("b.ts"),
            export_name: "foo".into(),
            is_type_only: false,
            line: 1,
            col: 0,
            span_start: 0,
            is_re_export: false,
        });
        let counts = CheckCounts::from_results(&results);
        assert_eq!(counts.total_issues, 2);
        assert_eq!(counts.unused_files, 1);
        assert_eq!(counts.unused_exports, 1);
        assert_eq!(counts.unused_types, 0);
    }

    #[test]
    fn deltas_reports_changes_only() {
        let baseline = CheckCounts {
            total_issues: 10,
            unused_files: 5,
            unused_exports: 3,
            unused_types: 2,
            unused_dependencies: 0,
            unused_dev_dependencies: 0,
            unused_optional_dependencies: 0,
            unused_enum_members: 0,
            unused_class_members: 0,
            unresolved_imports: 0,
            unlisted_dependencies: 0,
            duplicate_exports: 0,
            circular_dependencies: 0,
            type_only_dependencies: 0,
            test_only_dependencies: 0,
        };
        let current = CheckCounts {
            unused_files: 7,
            unused_exports: 1,
            unused_types: 2,
            ..baseline
        };
        let deltas = baseline.deltas(&current);
        assert_eq!(deltas.len(), 2);
        assert!(deltas.contains(&("unused_files", 2)));
        assert!(deltas.contains(&("unused_exports", -2)));
    }

    // --- RegressionOutcome JSON rendering ---

    #[test]
    fn pass_outcome_json() {
        let outcome = RegressionOutcome::Pass {
            baseline_total: 10,
            current_total: 10,
        };
        let json = outcome.to_json();
        assert_eq!(json["status"], "pass");
        assert_eq!(json["exceeded"], false);
        assert_eq!(json["delta"], 0);
    }

    #[test]
    fn exceeded_outcome_json() {
        let outcome = RegressionOutcome::Exceeded {
            baseline_total: 10,
            current_total: 15,
            tolerance: Tolerance::Percentage(2.0),
            type_deltas: vec![("unused_files", 5)],
        };
        let json = outcome.to_json();
        assert_eq!(json["status"], "exceeded");
        assert_eq!(json["exceeded"], true);
        assert_eq!(json["delta"], 5);
        assert_eq!(json["tolerance_kind"], "percentage");
    }

    #[test]
    fn skipped_outcome_json() {
        let outcome = RegressionOutcome::Skipped {
            reason: "test reason",
        };
        let json = outcome.to_json();
        assert_eq!(json["status"], "skipped");
        assert_eq!(json["exceeded"], false);
    }

    // --- Baseline file serialization ---

    #[test]
    fn regression_baseline_roundtrip() {
        let baseline = RegressionBaseline {
            schema_version: 1,
            fallow_version: "2.4.0".into(),
            timestamp: "2026-03-27T10:00:00Z".into(),
            git_sha: Some("abc123".into()),
            check: Some(CheckCounts {
                total_issues: 42,
                unused_files: 5,
                unused_exports: 20,
                unused_types: 8,
                unused_dependencies: 3,
                unused_dev_dependencies: 2,
                unused_optional_dependencies: 0,
                unused_enum_members: 1,
                unused_class_members: 1,
                unresolved_imports: 0,
                unlisted_dependencies: 1,
                duplicate_exports: 0,
                circular_dependencies: 1,
                type_only_dependencies: 0,
                test_only_dependencies: 0,
            }),
            dupes: Some(DupesCounts {
                clone_groups: 12,
                duplication_percentage: 4.2,
            }),
        };
        let json = serde_json::to_string_pretty(&baseline).unwrap();
        let loaded: RegressionBaseline = serde_json::from_str(&json).unwrap();
        assert_eq!(loaded.schema_version, 1);
        assert_eq!(loaded.check.as_ref().unwrap().total_issues, 42);
        assert_eq!(loaded.dupes.as_ref().unwrap().clone_groups, 12);
    }

    #[test]
    fn regression_outcome_is_failure() {
        let pass = RegressionOutcome::Pass {
            baseline_total: 10,
            current_total: 10,
        };
        assert!(!pass.is_failure());

        let exceeded = RegressionOutcome::Exceeded {
            baseline_total: 10,
            current_total: 15,
            tolerance: Tolerance::Absolute(2),
            type_deltas: vec![],
        };
        assert!(exceeded.is_failure());

        let skipped = RegressionOutcome::Skipped { reason: "test" };
        assert!(!skipped.is_failure());
    }

    // --- Config-file editing ---

    fn sample_baseline() -> fallow_config::RegressionBaseline {
        fallow_config::RegressionBaseline {
            total_issues: 5,
            unused_files: 2,
            ..Default::default()
        }
    }

    #[test]
    fn json_insert_into_empty_object() {
        let result = update_json_regression("{}", &sample_baseline()).unwrap();
        assert!(result.contains("\"regression\""));
        assert!(result.contains("\"totalIssues\": 5"));
        serde_json::from_str::<serde_json::Value>(&result).unwrap();
    }

    #[test]
    fn json_insert_into_existing_config() {
        let config = r#"{
  "entry": ["src/main.ts"],
  "production": true
}"#;
        let result = update_json_regression(config, &sample_baseline()).unwrap();
        assert!(result.contains("\"regression\""));
        assert!(result.contains("\"entry\""));
        serde_json::from_str::<serde_json::Value>(&result).unwrap();
    }

    #[test]
    fn json_replace_existing_regression() {
        let config = r#"{
  "entry": ["src/main.ts"],
  "regression": {
    "baseline": {
      "totalIssues": 99
    }
  }
}"#;
        let result = update_json_regression(config, &sample_baseline()).unwrap();
        assert!(!result.contains("99"));
        assert!(result.contains("\"totalIssues\": 5"));
        serde_json::from_str::<serde_json::Value>(&result).unwrap();
    }

    #[test]
    fn json_skips_regression_in_comment() {
        let config = "{\n  // See \"regression\" docs\n  \"entry\": []\n}";
        let result = update_json_regression(config, &sample_baseline()).unwrap();
        assert!(result.contains("\"regression\":"));
        assert!(result.contains("\"entry\""));
    }

    #[test]
    fn json_malformed_brace_returns_error() {
        let config = r#"{ "regression": { "baseline": { "totalIssues": 1 }"#;
        let result = update_json_regression(config, &sample_baseline());
        assert!(result.is_err());
    }

    #[test]
    fn toml_insert_into_empty() {
        let result = update_toml_regression("", &sample_baseline());
        assert!(result.contains("[regression.baseline]"));
        assert!(result.contains("totalIssues = 5"));
    }

    #[test]
    fn toml_insert_after_existing_content() {
        let config = "[rules]\nunused-files = \"warn\"\n";
        let result = update_toml_regression(config, &sample_baseline());
        assert!(result.contains("[rules]"));
        assert!(result.contains("[regression.baseline]"));
        assert!(result.contains("totalIssues = 5"));
    }

    #[test]
    fn toml_replace_existing_section() {
        let config =
            "[regression.baseline]\ntotalIssues = 99\n\n[rules]\nunused-files = \"warn\"\n";
        let result = update_toml_regression(config, &sample_baseline());
        assert!(!result.contains("99"));
        assert!(result.contains("totalIssues = 5"));
        assert!(result.contains("[rules]"));
    }

    // --- find_json_key ---

    #[test]
    fn find_json_key_basic() {
        assert_eq!(find_json_key(r#"{"foo": 1}"#, "foo"), Some(1));
    }

    #[test]
    fn find_json_key_skips_comment() {
        let content = "{\n  // \"foo\" is important\n  \"bar\": 1\n}";
        assert_eq!(find_json_key(content, "foo"), None);
        assert!(find_json_key(content, "bar").is_some());
    }

    #[test]
    fn find_json_key_not_found() {
        assert_eq!(find_json_key("{}", "missing"), None);
    }

    #[test]
    fn find_json_key_skips_block_comment() {
        let content = "{\n  /* \"foo\": old value */\n  \"foo\": 1\n}";
        let pos = find_json_key(content, "foo").unwrap();
        assert!(content[pos..].starts_with("\"foo\": 1"));
    }
}