use crate::cli::analysis_utilities::{
check_complexity, check_dead_code, check_duplicates, check_satd, QualityViolation,
};
use proptest::prelude::*;
use std::path::Path;
use tempfile::TempDir;
use tokio::runtime::Runtime;
/// Create a file named `name` under `dir` with the given `content` and
/// return its full path. Panics on write failure, which is acceptable for a
/// test-fixture helper.
fn create_test_file(dir: &Path, name: &str, content: &str) -> std::path::PathBuf {
    let target = dir.join(name);
    std::fs::write(&target, content).unwrap();
    target
}
proptest! {
#[test]
fn prop_complexity_threshold_respected(
threshold in 1u32..100u32,
file_count in 1usize..10usize,
) {
let rt = Runtime::new().unwrap();
let temp_dir = TempDir::new().unwrap();
for i in 0..file_count {
let complexity = i as u32 * 10; let content = generate_code_with_complexity(complexity);
create_test_file(temp_dir.path(), &format!("test{}.rs", i), &content);
}
let _ = rt.block_on(async {
let violations = check_complexity(temp_dir.path(), threshold).await.unwrap();
for violation in violations {
assert_eq!(violation.check_type, "complexity");
if let Some(complexity_str) = violation.message
.split("complexity ")
.nth(1)
.and_then(|s| s.split(' ').next())
.and_then(|s| s.parse::<u32>().ok()) {
if violation.severity == "warning" {
let warn_threshold = threshold.saturating_sub(2).max(1);
prop_assert!(complexity_str > warn_threshold);
prop_assert!(complexity_str <= threshold);
} else if violation.severity == "error" {
prop_assert!(complexity_str > threshold);
} else {
panic!("Unexpected severity: {}", violation.severity);
}
}
}
Ok(())
});
}
#[test]
fn prop_dead_code_percentage_accurate(
max_percentage in 0.0..50.0,
total_functions in 10usize..100usize,
dead_ratio in 0.0..0.5f64,
) {
let rt = Runtime::new().unwrap();
let temp_dir = TempDir::new().unwrap();
let dead_count = (total_functions as f64 * dead_ratio) as usize;
let mut content = String::new();
for i in 0..(total_functions - dead_count) {
content.push_str(&format!("pub fn live_func_{}() {{ println!(\"live\"); }}\n", i));
}
for i in 0..dead_count {
content.push_str(&format!("fn _dead_func_{}() {{ println!(\"dead\"); }}\n", i));
}
create_test_file(temp_dir.path(), "lib.rs", &content);
let _ = rt.block_on(async {
let violations = check_dead_code(temp_dir.path(), max_percentage).await.unwrap();
let expected_percentage = (dead_count as f64 / total_functions as f64) * 100.0;
if expected_percentage > max_percentage {
prop_assert!(!violations.is_empty(),
"Expected violations when dead code {}% > max {}%",
expected_percentage, max_percentage);
}
Ok(())
});
}
#[test]
fn prop_satd_detection_complete_and_sound(
satd_types in prop::collection::vec(
prop_oneof![
Just("TODO"),
Just("FIXME"),
Just("HACK"),
Just("XXX"),
Just("BUG"),
Just("REFACTOR"),
],
0..10
),
descriptions in prop::collection::vec("[a-zA-Z0-9 ]{1,50}", 0..10),
) {
let rt = Runtime::new().unwrap();
let temp_dir = TempDir::new().unwrap();
let mut content = String::new();
let expected_count = satd_types.len().min(descriptions.len());
for i in 0..expected_count {
content.push_str(&format!("// {}: {}\n", satd_types[i], descriptions[i]));
content.push_str("fn some_function() {}\n\n");
}
content.push_str("// This is a regular comment\n");
content.push_str("// Another normal comment without debt markers\n");
create_test_file(temp_dir.path(), "test.rs", &content);
let _ = rt.block_on(async {
let violations = check_satd(temp_dir.path()).await.unwrap();
prop_assert_eq!(violations.len(), expected_count);
for violation in &violations {
prop_assert_eq!(&violation.check_type, "satd");
prop_assert!(violation.line.is_some());
prop_assert!(violation.severity == "warning" || violation.severity == "error");
}
Ok(())
});
}
#[test]
fn prop_duplicate_detection_properties(
file_contents in prop::collection::vec(
"[a-zA-Z0-9\n ]{50,200}", // Generate code-like content
2..10
),
duplicate_indices in prop::collection::vec(0usize..10usize, 0..5),
) {
let rt = Runtime::new().unwrap();
let temp_dir = TempDir::new().unwrap();
let mut duplicate_groups = std::collections::HashMap::new();
for (i, content) in file_contents.iter().enumerate() {
let file_content = if duplicate_indices.contains(&i) && i > 0 {
duplicate_groups.entry(0).or_insert_with(Vec::new).push(i);
&file_contents[0]
} else {
duplicate_groups.entry(i).or_insert_with(Vec::new);
content
};
create_test_file(temp_dir.path(), &format!("file{}.rs", i), file_content);
}
let _ = rt.block_on(async {
let violations = check_duplicates(temp_dir.path()).await.unwrap();
let mut violation_pairs = std::collections::HashSet::new();
for violation in &violations {
violation_pairs.insert(violation.file.clone());
}
for violation in &violations {
if let Some(other_files) = extract_files_from_duplicate_message(&violation.message) {
for other_file in other_files {
prop_assert!(
violation_pairs.contains(&other_file),
"Symmetry violated: {} marked as duplicate but {} is not",
violation.file, other_file
);
}
}
}
Ok(())
});
}
}
/// Produce the source text of one function whose cyclomatic complexity is
/// roughly `target_complexity`: a single `if` branch per unit of complexity.
fn generate_code_with_complexity(target_complexity: u32) -> String {
    let branches: String = (0..target_complexity)
        .map(|i| format!(" if condition_{i} {{\n println!(\"branch {i}\");\n }}\n"))
        .collect();
    format!("fn complex_function() {{\n{branches}}}\n")
}
/// Pull the comma-separated file list out of a duplicate-violation message of
/// the form "... found in: a.rs, b.rs". Returns `None` when the marker is
/// absent. With multiple markers, only the segment between the first and
/// second marker is used (same as the original `split(..).nth(1)` behavior).
fn extract_files_from_duplicate_message(message: &str) -> Option<Vec<String>> {
    let mut pieces = message.split("found in: ");
    pieces.next(); // text before the marker (always present, possibly empty)
    let listed = pieces.next()?;
    Some(listed.split(", ").map(String::from).collect())
}
#[cfg_attr(coverage_nightly, coverage(off))]
#[cfg(test)]
mod additional_property_tests {
    use super::*;
    proptest! {
        // Property: a QualityViolation built from non-empty generated parts
        // round-trips them into non-empty fields with a recognized severity.
        #[test]
        fn prop_violation_structure_valid(
            check_type in "[a-z_]+",
            severity in prop_oneof![Just("error"), Just("warning"), Just("info")],
            file_path in "[a-zA-Z0-9/_]+\\.rs",
            line in prop::option::of(1usize..10000usize),
            message in "[a-zA-Z0-9 :,.-]+",
        ) {
            let built = QualityViolation::new(
                check_type.clone(),
                severity.to_string(),
                file_path.clone(),
                line,
                message.clone(),
            );
            // Every textual field must survive construction non-empty.
            for field in [&built.check_type, &built.severity, &built.file, &built.message] {
                prop_assert!(!field.is_empty());
            }
            prop_assert!(
                ["error", "warning", "info"].contains(&built.severity.as_str()),
                "Invalid severity: {}", built.severity
            );
        }
    }
}
#[cfg_attr(coverage_nightly, coverage(off))]
#[cfg(test)]
mod unit_tests {
    use super::*;
    use crate::cli::QualityCheckType;

    // Enumerates every check variant; the length assertion trips when a
    // variant is added or removed without updating this test.
    #[test]
    fn test_quality_check_types_comprehensive() {
        let every_check = vec![
            QualityCheckType::All,
            QualityCheckType::Complexity,
            QualityCheckType::DeadCode,
            QualityCheckType::Satd,
            QualityCheckType::Security,
            QualityCheckType::Entropy,
            QualityCheckType::Duplicates,
            QualityCheckType::Coverage,
            QualityCheckType::Sections,
            QualityCheckType::Provability,
        ];
        assert_eq!(
            every_check.len(),
            10,
            "Should have exactly 10 check types defined"
        );
        for original in &every_check {
            let duplicate = original.clone();
            assert_eq!(
                original, &duplicate,
                "Check type should be cloneable and comparable"
            );
        }
    }

    // Debug output must be non-empty and name the variant.
    #[test]
    fn test_check_type_debug_format() {
        let rendered = format!("{:?}", QualityCheckType::Complexity);
        assert!(
            rendered.contains("Complexity"),
            "Debug format should contain type name"
        );
        assert!(!rendered.is_empty(), "Debug format should not be empty");
    }

    // Sanity-checks the timing math used by the perf-metrics feature.
    #[test]
    fn test_performance_metrics_calculation() {
        use std::time::Duration;
        let started = std::time::Instant::now();
        std::thread::sleep(Duration::from_millis(10));
        let elapsed = started.elapsed();
        assert!(
            elapsed.as_millis() >= 10,
            "Should have elapsed at least 10ms"
        );
        assert!(
            elapsed.as_millis() < 1000,
            "Should have elapsed less than 1 second"
        );
        // A per-check average derived from a real elapsed duration must be a
        // finite positive value.
        let num_checks = 5;
        let avg_time = elapsed.as_secs_f64() / num_checks as f64;
        assert!(avg_time > 0.0, "Average time should be positive");
        assert!(avg_time.is_finite(), "Average time should be finite");
    }

    // The rendered metrics block must contain each formatted figure.
    #[test]
    fn test_performance_metrics_display_format() {
        use std::time::Duration;
        let total_time = Duration::from_millis(1500);
        let num_checks = 3;
        let avg_time = total_time.as_secs_f64() / num_checks as f64;
        let perf_output = format!(
            "⏱️ Performance Metrics:\n Total execution time: {:.2}s\n Checks performed: {}\n Average time per check: {:.2}s",
            total_time.as_secs_f64(),
            num_checks,
            avg_time
        );
        for expected in [
            "Performance Metrics",
            "Total execution time: 1.50s",
            "Checks performed: 3",
            "Average time per check: 0.50s",
        ] {
            assert!(perf_output.contains(expected));
        }
    }

    // The perf flag directly controls whether metrics are displayed.
    #[test]
    fn test_performance_flag_integration() {
        let show_metrics_when_enabled = true;
        assert!(
            show_metrics_when_enabled,
            "Performance metrics should be shown when perf flag is enabled"
        );
        let show_metrics_when_disabled = false;
        assert!(
            !show_metrics_when_disabled,
            "Performance metrics should not be shown when perf flag is disabled"
        );
    }

    // Equal variants compare equal and render identically via Debug.
    #[test]
    fn test_check_type_consistency() {
        let first = QualityCheckType::Complexity;
        let second = QualityCheckType::Complexity;
        assert_eq!(first, second, "Same check types should be equal");
        assert_eq!(
            format!("{:?}", first),
            format!("{:?}", second),
            "Same check types should have identical debug representation"
        );
    }

    // End-to-end: a function with 35 branches must trip a threshold of 5.
    #[test]
    fn test_complexity_violation_detection() {
        let rt = Runtime::new().unwrap();
        let temp_dir = TempDir::new().unwrap();
        let branches: String = (0..35)
            .map(|i| format!(" if a == {i} {{ return {i}; }}\n"))
            .collect();
        let code = format!("fn f(a: i32) -> i32 {{\n{branches} 0\n}}");
        let _test_file = create_test_file(temp_dir.path(), "test.rs", &code);
        rt.block_on(async {
            let found = check_complexity(temp_dir.path(), 5).await.unwrap();
            assert!(
                !found.is_empty(),
                "Should detect complexity violations when cyclomatic > 30"
            );
            for violation in &found {
                assert_eq!(violation.check_type, "complexity");
                assert!(!violation.message.is_empty());
                assert!(!violation.file.is_empty());
            }
        });
    }
}