#[cfg_attr(coverage_nightly, coverage(off))]
#[cfg(test)]
mod tests {
    use super::*;

    /// Builds a `HealthCheck` with the given name/status/message and no
    /// details — collapses the repeated struct-literal boilerplate below.
    fn check(name: &str, status: CheckStatus, message: &str) -> HealthCheck {
        HealthCheck {
            name: name.to_string(),
            status,
            message: message.to_string(),
            details: None,
        }
    }

    #[test]
    fn test_calculate_summary_all_pass() {
        let checks = vec![
            check("Test1", CheckStatus::Pass, "OK"),
            check("Test2", CheckStatus::Pass, "OK"),
        ];
        let summary = calculate_summary(&checks);
        assert_eq!(summary.total_checks, 2);
        assert_eq!(summary.passed, 2);
        assert_eq!(summary.failed, 0);
    }

    #[test]
    fn test_calculate_summary_mixed() {
        let checks = vec![
            check("Test1", CheckStatus::Pass, "OK"),
            check("Test2", CheckStatus::Warn, "Warning"),
            check("Test3", CheckStatus::Fail, "Failed"),
        ];
        let summary = calculate_summary(&checks);
        assert_eq!(summary.total_checks, 3);
        assert_eq!(summary.passed, 1);
        assert_eq!(summary.warned, 1);
        assert_eq!(summary.failed, 1);
    }

    #[test]
    fn test_parse_coverage_valid() {
        // Trailing `\` skips the newline and the next line's leading
        // whitespace, so this is the same string as the original.
        let output = "Filename Regions Missed Regions Cover Functions Missed Functions Executed\n\
                      TOTAL 1234 234 81.0%";
        let coverage = parse_coverage_percentage(output);
        assert_eq!(coverage, 81.0);
    }

    #[test]
    fn test_parse_coverage_invalid() {
        // No TOTAL line present — parser must fall back to 0.0, not panic.
        let output = "No coverage data";
        let coverage = parse_coverage_percentage(output);
        assert_eq!(coverage, 0.0);
    }

    #[test]
    fn test_determine_checks_quick_mode() {
        // quick=true: only the build check runs.
        let checks = determine_checks_to_run(true, false, false, false, false, false, false);
        assert!(checks.build);
        assert!(!checks.tests);
        assert!(!checks.coverage);
        assert!(!checks.complexity);
        assert!(!checks.satd);
    }

    #[test]
    fn test_determine_checks_all_mode() {
        // all=true: every check runs.
        let checks = determine_checks_to_run(false, true, false, false, false, false, false);
        assert!(checks.build);
        assert!(checks.tests);
        assert!(checks.coverage);
        assert!(checks.complexity);
        assert!(checks.satd);
    }

    #[test]
    fn test_determine_checks_default_no_flags() {
        // No flags at all: build is still the default check.
        let checks = determine_checks_to_run(false, false, false, false, false, false, false);
        assert!(checks.build);
        assert!(!checks.tests);
        assert!(!checks.coverage);
        assert!(!checks.complexity);
        assert!(!checks.satd);
    }

    #[test]
    fn test_determine_checks_specific_flags() {
        // Individual flags enable only their own check (plus build).
        let checks = determine_checks_to_run(false, false, true, true, false, false, false);
        assert!(checks.build);
        assert!(checks.tests);
        assert!(!checks.coverage);
        assert!(!checks.complexity);
        assert!(!checks.satd);
    }

    #[test]
    fn test_determine_checks_quick_overrides_all() {
        // quick takes precedence over all when both are set.
        let checks = determine_checks_to_run(true, true, false, false, false, false, false);
        assert!(checks.build);
        assert!(!checks.tests);
    }
}
#[cfg_attr(coverage_nightly, coverage(off))]
#[cfg(test)]
mod property_tests {
    use super::*;
    use proptest::prelude::*;

    /// Pushes `count` checks produced by `make` onto `checks`.
    fn push_n(checks: &mut Vec<HealthCheck>, count: u32, make: impl Fn() -> HealthCheck) {
        for _ in 0..count {
            checks.push(make());
        }
    }

    proptest! {
        /// For any mix of statuses, the summary counters must agree with
        /// exactly how many checks of each status were supplied.
        #[test]
        fn summary_totals_match(passed in 0u32..100, warned in 0u32..100, failed in 0u32..100, skipped in 0u32..100) {
            let mut checks = Vec::new();
            push_n(&mut checks, passed, || HealthCheck {
                name: "Pass".to_string(),
                status: CheckStatus::Pass,
                message: "OK".to_string(),
                details: None,
            });
            push_n(&mut checks, warned, || HealthCheck {
                name: "Warn".to_string(),
                status: CheckStatus::Warn,
                message: "Warning".to_string(),
                details: None,
            });
            push_n(&mut checks, failed, || HealthCheck {
                name: "Fail".to_string(),
                status: CheckStatus::Fail,
                message: "Failed".to_string(),
                details: None,
            });
            push_n(&mut checks, skipped, || HealthCheck {
                name: "Skip".to_string(),
                status: CheckStatus::Skip,
                message: "Skipped".to_string(),
                details: None,
            });

            let summary = calculate_summary(&checks);
            prop_assert_eq!(summary.total_checks, checks.len());
            prop_assert_eq!(summary.passed, passed as usize);
            prop_assert_eq!(summary.warned, warned as usize);
            prop_assert_eq!(summary.failed, failed as usize);
            prop_assert_eq!(summary.skipped, skipped as usize);
        }
    }
}
#[cfg_attr(coverage_nightly, coverage(off))]
#[cfg(test)]
mod parallel_tests {
    use super::*;

    // Ignored by default for the same reason as the single-check test
    // below: running the real checks needs a working project setup.
    #[tokio::test]
    #[ignore = "requires health check setup"]
    async fn test_run_checks_parallel_returns_all_results() {
        let project_dir = PathBuf::from(".");
        let check_types = vec![CheckType::Build, CheckType::Complexity, CheckType::Satd];
        let results = run_checks_parallel(&project_dir, check_types).await;
        assert!(results.is_ok());
        let checks = results.unwrap();
        assert_eq!(checks.len(), 3);
        let names: Vec<_> = checks.iter().map(|c| c.name.as_str()).collect();
        assert!(names.contains(&"Build"));
        assert!(names.contains(&"Complexity"));
        assert!(names.contains(&"SATD"));
    }

    #[tokio::test]
    async fn test_run_checks_parallel_empty_list() {
        // An empty request must succeed with an empty result, not error.
        let project_dir = PathBuf::from(".");
        let check_types = vec![];
        let results = run_checks_parallel(&project_dir, check_types).await;
        assert!(results.is_ok());
        let checks = results.unwrap();
        assert_eq!(checks.len(), 0);
    }

    #[tokio::test]
    #[ignore = "requires health check setup"]
    async fn test_run_checks_parallel_single_check() {
        let project_dir = PathBuf::from(".");
        let check_types = vec![CheckType::Build];
        let results = run_checks_parallel(&project_dir, check_types).await;
        assert!(results.is_ok());
        let checks = results.unwrap();
        assert_eq!(checks.len(), 1);
        assert_eq!(checks[0].name, "Build");
    }

    #[test]
    fn test_check_type_coverage() {
        // Guard: keep this list in sync with the CheckType variants.
        let types = [
            CheckType::Build,
            CheckType::Tests,
            CheckType::Coverage,
            CheckType::Complexity,
            CheckType::Satd,
        ];
        assert_eq!(types.len(), 5);
    }

    /// Builds a `HealthCheck` for report-printing tests; `with_details`
    /// controls whether the optional `details` field is populated.
    fn make_check(name: &str, status: CheckStatus, with_details: bool) -> HealthCheck {
        HealthCheck {
            name: name.to_string(),
            status,
            message: format!("{name} message"),
            details: with_details.then(|| "some/path".to_string()),
        }
    }

    /// Wraps `checks` into a `HealthReport` with a computed summary.
    fn make_report(checks: Vec<HealthCheck>, healthy: bool) -> HealthReport {
        let summary = calculate_summary(&checks);
        HealthReport {
            healthy,
            checks,
            summary,
        }
    }

    // The print tests below only assert "does not panic / returns Ok";
    // they exist to cover every output-format match arm.

    #[test]
    fn test_print_health_report_json_arm() {
        let report = make_report(vec![make_check("Build", CheckStatus::Pass, true)], true);
        print_health_report(&report, &OutputFormat::Json).unwrap();
    }

    #[test]
    fn test_print_health_report_yaml_arm() {
        let report = make_report(vec![make_check("Build", CheckStatus::Pass, false)], true);
        print_health_report(&report, &OutputFormat::Yaml).unwrap();
    }

    #[test]
    fn test_print_health_report_table_arm() {
        let report = make_report(vec![make_check("Build", CheckStatus::Pass, false)], true);
        print_health_report(&report, &OutputFormat::Table).unwrap();
    }

    #[test]
    fn test_print_health_report_default_arm_for_unknown_format() {
        let report = make_report(vec![make_check("Build", CheckStatus::Pass, false)], true);
        print_health_report(&report, &OutputFormat::Csv).unwrap();
    }

    #[test]
    fn test_print_health_table_with_all_4_status_arms_and_details() {
        let checks = vec![
            make_check("P", CheckStatus::Pass, true),
            make_check("W", CheckStatus::Warn, true),
            make_check("F", CheckStatus::Fail, true),
            make_check("S", CheckStatus::Skip, true),
        ];
        let report = make_report(checks, false);
        print_health_table(&report);
    }

    #[test]
    fn test_print_health_table_healthy_arm() {
        let report = make_report(vec![make_check("P", CheckStatus::Pass, false)], true);
        print_health_table(&report);
    }

    #[test]
    fn test_print_health_table_unhealthy_arm() {
        let report = make_report(vec![make_check("F", CheckStatus::Fail, false)], false);
        print_health_table(&report);
    }

    #[test]
    fn test_print_health_yaml_with_details_some_and_none() {
        let checks = vec![
            make_check("with-details", CheckStatus::Pass, true),
            make_check("no-details", CheckStatus::Skip, false),
        ];
        let report = make_report(checks, true);
        print_health_yaml(&report);
    }

    #[test]
    fn test_print_health_yaml_empty_checks() {
        let report = make_report(vec![], true);
        print_health_yaml(&report);
    }
}
#[cfg_attr(coverage_nightly, coverage(off))]
#[cfg(test)]
mod r5_classifier_tests {
    use super::*;
    // Hoisted from mid-module: imports belong at the top of the module.
    use crate::services::complexity::{
        ComplexityMetrics, FileComplexityMetrics, FunctionComplexity,
    };

    // Coverage classifier thresholds: >= 80 Pass, >= 60 Warn, else Fail.

    #[test]
    fn test_classify_coverage_at_80_passes() {
        assert_eq!(classify_coverage_status(80.0), CheckStatus::Pass);
    }

    #[test]
    fn test_classify_coverage_above_80_passes() {
        assert_eq!(classify_coverage_status(95.5), CheckStatus::Pass);
        assert_eq!(classify_coverage_status(100.0), CheckStatus::Pass);
    }

    #[test]
    fn test_classify_coverage_at_60_warns() {
        assert_eq!(classify_coverage_status(60.0), CheckStatus::Warn);
    }

    #[test]
    fn test_classify_coverage_between_60_and_80_warns() {
        assert_eq!(classify_coverage_status(65.0), CheckStatus::Warn);
        assert_eq!(classify_coverage_status(79.9), CheckStatus::Warn);
    }

    #[test]
    fn test_classify_coverage_below_60_fails() {
        assert_eq!(classify_coverage_status(0.0), CheckStatus::Fail);
        assert_eq!(classify_coverage_status(59.9), CheckStatus::Fail);
    }

    // Complexity classifier: 0 violations Pass, 1-5 Warn, 6+ Fail.

    #[test]
    fn test_classify_complexity_zero_violations_passes() {
        assert_eq!(classify_complexity_status(0), CheckStatus::Pass);
    }

    #[test]
    fn test_classify_complexity_one_to_five_warns() {
        for v in 1..=5 {
            assert_eq!(classify_complexity_status(v), CheckStatus::Warn);
        }
    }

    #[test]
    fn test_classify_complexity_six_or_more_fails() {
        assert_eq!(classify_complexity_status(6), CheckStatus::Fail);
        assert_eq!(classify_complexity_status(100), CheckStatus::Fail);
    }

    // SATD classifier: no items Pass, items without high-severity Warn,
    // any high-severity item Fail.

    #[test]
    fn test_classify_satd_zero_total_passes() {
        assert_eq!(classify_satd_status(0, 0), CheckStatus::Pass);
    }

    #[test]
    fn test_classify_satd_with_items_no_high_severity_warns() {
        assert_eq!(classify_satd_status(5, 0), CheckStatus::Warn);
        assert_eq!(classify_satd_status(100, 0), CheckStatus::Warn);
    }

    #[test]
    fn test_classify_satd_with_high_severity_fails() {
        assert_eq!(classify_satd_status(1, 1), CheckStatus::Fail);
        assert_eq!(classify_satd_status(10, 3), CheckStatus::Fail);
    }

    /// Builds a `FunctionComplexity` fixture with the given cyclomatic
    /// complexity; other metrics are zeroed/minimal.
    fn make_function(name: &str, cyclomatic: u16) -> FunctionComplexity {
        FunctionComplexity {
            name: name.to_string(),
            line_start: 1,
            line_end: 10,
            metrics: ComplexityMetrics {
                cyclomatic,
                cognitive: 0,
                nesting_max: 0,
                lines: 10,
                halstead: None,
            },
        }
    }

    /// Builds a `FileComplexityMetrics` fixture whose file-level
    /// cyclomatic total is the sum over `functions` (floored at 1).
    fn make_file_metrics(path: &str, functions: Vec<FunctionComplexity>) -> FileComplexityMetrics {
        FileComplexityMetrics {
            path: path.to_string(),
            total_complexity: ComplexityMetrics {
                cyclomatic: functions
                    .iter()
                    .map(|f| f.metrics.cyclomatic)
                    .sum::<u16>()
                    .max(1),
                cognitive: 0,
                nesting_max: 0,
                lines: 10,
                halstead: None,
            },
            functions,
            classes: vec![],
        }
    }

    #[test]
    fn test_count_complexity_violations_empty_returns_zero() {
        let (total, violations, max) = count_complexity_violations(&[]);
        assert_eq!(total, 0);
        assert_eq!(violations, 0);
        assert_eq!(max, 0);
    }

    #[test]
    fn test_count_complexity_violations_no_functions_in_file() {
        let files = vec![make_file_metrics("a.rs", vec![])];
        let (total, violations, max) = count_complexity_violations(&files);
        assert_eq!(total, 0);
        assert_eq!(violations, 0);
        assert_eq!(max, 0);
    }

    #[test]
    fn test_count_complexity_violations_threshold_at_20_inclusive_no_violation() {
        // Cyclomatic == 20 is allowed; the violation threshold is exclusive.
        let files = vec![make_file_metrics("a.rs", vec![make_function("f", 20)])];
        let (total, violations, max) = count_complexity_violations(&files);
        assert_eq!(total, 1);
        assert_eq!(violations, 0);
        assert_eq!(max, 20);
    }

    #[test]
    fn test_count_complexity_violations_threshold_above_20_is_violation() {
        let files = vec![make_file_metrics("a.rs", vec![make_function("f", 21)])];
        let (total, violations, max) = count_complexity_violations(&files);
        assert_eq!(total, 1);
        assert_eq!(violations, 1);
        assert_eq!(max, 21);
    }

    #[test]
    fn test_count_complexity_violations_max_tracks_highest_across_files() {
        let files = vec![
            make_file_metrics(
                "a.rs",
                vec![make_function("a1", 5), make_function("a2", 30)],
            ),
            make_file_metrics("b.rs", vec![make_function("b1", 25)]),
        ];
        let (total, violations, max) = count_complexity_violations(&files);
        assert_eq!(total, 3);
        assert_eq!(violations, 2);
        assert_eq!(max, 30);
    }

    #[test]
    fn test_count_complexity_violations_mixed_violators_and_clean() {
        let files = vec![make_file_metrics(
            "a.rs",
            vec![
                make_function("clean1", 1),
                make_function("clean2", 10),
                make_function("clean3", 19),
                make_function("violator1", 100),
                make_function("violator2", 500),
            ],
        )];
        let (total, violations, max) = count_complexity_violations(&files);
        assert_eq!(total, 5);
        assert_eq!(violations, 2);
        assert_eq!(max, 500);
    }
}