use pmat::cli::analysis_utilities::*;
use pmat::cli::{QualityGateOutputFormat, SatdOutputFormat, TdgOutputFormat};
use std::fs;
use std::path::PathBuf;
use tempfile::tempdir;
// --- is_excluded_filename ---------------------------------------------------
// File names marking tests, benchmarks, mocks/stubs, and examples/demos are
// excluded from analysis; ordinary source files are not.

#[test]
fn test_is_excluded_filename_test_files() {
    for name in ["test_something.rs", "something_test.rs", "tests.rs"] {
        assert!(is_excluded_filename(name));
    }
}

#[test]
fn test_is_excluded_filename_benchmark_files() {
    for name in ["bench_something.rs", "something_bench.rs", "benchmark_test.rs"] {
        assert!(is_excluded_filename(name));
    }
}

#[test]
fn test_is_excluded_filename_mock_stub_files() {
    for name in ["mock_server.rs", "stub_implementation.rs", "server_mock.rs"] {
        assert!(is_excluded_filename(name));
    }
}

#[test]
fn test_is_excluded_filename_example_demo_files() {
    for name in ["example_usage.rs", "demo_application.rs", "code_example.rs"] {
        assert!(is_excluded_filename(name));
    }
}

#[test]
fn test_is_excluded_filename_regular_files() {
    // Regular production sources must not be filtered out.
    for name in ["main.rs", "lib.rs", "parser.rs", "analyzer.rs"] {
        assert!(!is_excluded_filename(name));
    }
}
// --- calculate_string_similarity --------------------------------------------

#[test]
fn test_calculate_string_similarity_identical() {
    // Identical inputs must score (essentially) 1.0.
    let score = calculate_string_similarity("hello", "hello");
    assert!(score >= 0.99);
}

#[test]
fn test_calculate_string_similarity_completely_different() {
    // Disjoint inputs should land in the lower half of the scale.
    let score = calculate_string_similarity("abc", "xyz");
    assert!(score < 0.5);
}

#[test]
fn test_calculate_string_similarity_partial_match() {
    // A prefix overlap is similar but not identical: strictly between 0 and 1.
    let score = calculate_string_similarity("testing", "test");
    assert!(score > 0.0);
    assert!(score < 1.0);
}
// --- calculate_edit_distance ------------------------------------------------

#[test]
fn test_calculate_edit_distance_identical() {
    // Zero edits between equal strings.
    assert_eq!(calculate_edit_distance("hello", "hello"), 0);
}

#[test]
fn test_calculate_edit_distance_single_char_diff() {
    // One substitution (e -> a).
    assert_eq!(calculate_edit_distance("hello", "hallo"), 1);
}

#[test]
fn test_calculate_edit_distance_completely_different() {
    // Every position differs: three substitutions.
    assert_eq!(calculate_edit_distance("abc", "xyz"), 3);
}
// --- calculate_soundex ------------------------------------------------------

#[test]
fn test_calculate_soundex_similar_sounding() {
    // Classic Soundex example: Smith and Smyth encode identically.
    assert_eq!(calculate_soundex("Smith"), calculate_soundex("Smyth"));
}

#[test]
fn test_calculate_soundex_different_sounding() {
    // Phonetically unrelated names must not collide.
    assert_ne!(calculate_soundex("Smith"), calculate_soundex("Johnson"));
}
#[test]
fn test_detect_toolchain_rust() {
    // A Cargo.toml marks the directory as a Rust project.
    let dir = tempdir().unwrap();
    fs::write(dir.path().join("Cargo.toml"), "[package]\nname = \"test\"").unwrap();
    assert_eq!(detect_toolchain(dir.path()), Some("rust".to_string()));
}

#[test]
fn test_detect_toolchain_python_variants() {
    // A setup.py may be detected under more than one name; whatever is
    // reported must at least look like a Python toolchain.
    let dir = tempdir().unwrap();
    fs::write(dir.path().join("setup.py"), "# Python setup").unwrap();
    if let Some(toolchain) = detect_toolchain(dir.path()) {
        assert!(toolchain.contains("python") || toolchain.contains("py"));
    }
}
#[test]
fn test_detect_toolchain_nodejs_empty_project() {
    // A minimal package.json may or may not be enough for detection, but any
    // toolchain that IS reported must carry a real (non-empty) name.
    // NOTE: the previous assertion, `result.is_none() || result.is_some()`,
    // was a tautology that could never fail and therefore tested nothing.
    let temp_dir = tempdir().unwrap();
    let package_json = temp_dir.path().join("package.json");
    fs::write(&package_json, r#"{"name": "test"}"#).unwrap();
    let result = detect_toolchain(temp_dir.path());
    if let Some(toolchain) = result {
        assert!(!toolchain.is_empty());
    }
}
#[test]
fn test_detect_toolchain_none() {
    // An empty directory has no marker files, so no toolchain is detected.
    let dir = tempdir().unwrap();
    assert_eq!(detect_toolchain(dir.path()), None);
}
// --- get_file_extensions ----------------------------------------------------

#[test]
fn test_get_file_extensions_rust() {
    assert!(get_file_extensions(Some("rust")).contains(&"rs"));
}

#[test]
fn test_get_file_extensions_python() {
    assert!(get_file_extensions(Some("python")).contains(&"py"));
}

#[test]
fn test_get_file_extensions_javascript() {
    // JavaScript covers .js and .jsx; TypeScript's .ts is a separate toolchain.
    let extensions = get_file_extensions(Some("javascript"));
    assert!(extensions.contains(&"js"));
    assert!(extensions.contains(&"jsx"));
    assert!(!extensions.contains(&"ts"));
}

#[test]
fn test_get_file_extensions_none() {
    // No toolchain specified still yields a non-empty default set.
    assert!(!get_file_extensions(None).is_empty());
}
// --- should_analyze_file ----------------------------------------------------

#[test]
fn test_should_analyze_file_included_extension() {
    // A file with a matching extension and no include patterns is analyzed.
    let root = PathBuf::from(".");
    assert!(should_analyze_file(&PathBuf::from("test.rs"), &root, &["rs"], &[]));
}

#[test]
fn test_should_analyze_file_excluded_directory() {
    // Build output under target/ is skipped even when the extension matches.
    let root = PathBuf::from(".");
    let artifact = PathBuf::from("target/debug/test.rs");
    assert!(!should_analyze_file(&artifact, &root, &["rs"], &[]));
}

#[test]
fn test_should_analyze_file_include_pattern_override() {
    // An explicit include pattern selects files under src/.
    let root = PathBuf::from(".");
    let source = PathBuf::from("src/test.rs");
    assert!(should_analyze_file(&source, &root, &["rs"], &[String::from("src/**")]));
}
#[tokio::test]
async fn test_handle_analyze_tdg_nonexistent_path() {
    // TDG analysis of a missing path is expected to degrade gracefully
    // rather than return an error.
    let missing = PathBuf::from("/nonexistent/path");
    let outcome = handle_analyze_tdg(
        missing,
        None,
        Vec::new(),
        1.0,
        10,
        TdgOutputFormat::Table,
        false,
        None,
        false,
        false,
        Vec::new(),
        false,
    )
    .await;
    assert!(outcome.is_ok());
}
#[tokio::test]
async fn test_handle_analyze_satd_empty_directory() {
    // Smoke test: SATD analysis over an empty directory must not panic.
    // Either Ok or Err is an acceptable outcome, so the result is discarded.
    let temp = tempdir().unwrap();
    let outcome = handle_analyze_satd(
        temp.path().to_path_buf(),
        SatdOutputFormat::Summary,
        None,
        false,
        false,
        false,
        30,
        false,
        None,
    )
    .await;
    let _ = outcome;
}
#[tokio::test]
async fn test_handle_quality_gate_nonexistent_path() {
    // Unlike TDG, the quality gate must report an error for a missing path.
    let outcome = handle_quality_gate(
        PathBuf::from("/nonexistent/path"),
        None,
        QualityGateOutputFormat::Summary,
        false,
        Vec::new(),
        0.15,
        0.5,
        20,
        false,
        None,
        false,
    )
    .await;
    assert!(outcome.is_err());
}
#[tokio::test]
async fn test_handle_analyze_tdg_with_simple_file() {
    // Smoke test: run TDG (JSON output) over a directory holding one trivial
    // Rust file; must not panic. Either Ok or Err is acceptable.
    let temp = tempdir().unwrap();
    fs::write(
        temp.path().join("simple.rs"),
        r#"
fn simple_function() {
println!("Hello, world!");
}
"#,
    )
    .unwrap();
    let outcome = handle_analyze_tdg(
        temp.path().to_path_buf(),
        None,
        Vec::new(),
        1.0,
        10,
        TdgOutputFormat::Json,
        false,
        None,
        false,
        false,
        Vec::new(),
        false,
    )
    .await;
    let _ = outcome;
}
#[tokio::test]
async fn test_handle_analyze_satd_with_todo_comments() {
    // Smoke test: SATD (JSON output) over a file containing TODO/FIXME
    // markers must not panic; the result itself is not inspected.
    let temp = tempdir().unwrap();
    fs::write(
        temp.path().join("test.rs"),
        r#"
// TODO: Implement this feature
fn placeholder() {}
// FIXME: This is broken
fn broken() {}
"#,
    )
    .unwrap();
    let outcome = handle_analyze_satd(
        temp.path().to_path_buf(),
        SatdOutputFormat::Json,
        None,
        false,
        false,
        false,
        30,
        false,
        None,
    )
    .await;
    let _ = outcome;
}
// Tests for the churn formatters (summary / markdown / CSV). Assertions are
// coupled to exact substrings of the formatter output, so fixtures are built
// with fully deterministic values (except timestamps, which are not asserted).
mod churn_formatter_comprehensive {
use super::*;
use pmat::models::churn::{ChurnSummary, CodeChurnAnalysis, FileChurnMetrics};
use chrono::Utc;
use std::collections::HashMap;
use std::path::PathBuf;
// Builds one file's churn metrics. Additions/deletions scale with the commit
// count; the author list is truncated to min(commits, 2) entries.
fn create_sample_file_churn(path: &str, commits: usize, score: f32) -> FileChurnMetrics {
FileChurnMetrics {
path: PathBuf::from(path),
relative_path: path.to_string(),
commit_count: commits,
unique_authors: vec![
"alice@example.com".to_string(),
"bob@example.com".to_string(),
]
.into_iter()
.take(commits.min(2))
.collect(),
churn_score: score,
additions: commits * 50,
deletions: commits * 30,
last_modified: Utc::now(),
first_seen: Utc::now(),
}
}
// Builds a whole analysis over `file_count` synthetic files: file i has
// (i+1)*2 commits and churn score (i+1)*1.5, plus two fixed contributors
// and one hotspot/stable file each.
fn create_sample_analysis(file_count: usize) -> CodeChurnAnalysis {
let files: Vec<FileChurnMetrics> = (0..file_count)
.map(|i| {
create_sample_file_churn(
&format!("src/file_{}.rs", i),
(i + 1) * 2,
(i + 1) as f32 * 1.5,
)
})
.collect();
let mut author_contributions = HashMap::new();
author_contributions.insert("alice@example.com".to_string(), file_count);
author_contributions.insert("bob@example.com".to_string(), file_count.saturating_sub(1));
CodeChurnAnalysis {
generated_at: Utc::now(),
period_days: 30,
repository_root: PathBuf::from("/test/repo"),
files,
summary: ChurnSummary {
total_commits: file_count * 3,
total_files_changed: file_count,
hotspot_files: vec![PathBuf::from("src/hotspot.rs")],
stable_files: vec![PathBuf::from("src/stable.rs")],
author_contributions,
mean_churn_score: 0.0,
variance_churn_score: 0.0,
stddev_churn_score: 0.0,
},
}
}
// The summary output carries all major sections and the contributor list.
#[test]
fn test_format_churn_summary_with_multiple_files() {
let analysis = create_sample_analysis(5);
let result = format_churn_as_summary(&analysis);
assert!(result.is_ok());
let summary = result.unwrap();
assert!(summary.contains("Code Churn Analysis Summary"));
assert!(summary.contains("Last 30 days"));
assert!(summary.contains("Total commits"));
assert!(summary.contains("Files changed"));
assert!(summary.contains("Top Files by Churn"));
assert!(summary.contains("Top Contributors"));
assert!(summary.contains("alice@example.com"));
}
// Files are ranked by commit count: file_2 (15 commits) must be listed first.
#[test]
fn test_format_churn_summary_sorting() {
let mut analysis = create_sample_analysis(3);
analysis.files[0].commit_count = 10;
analysis.files[1].commit_count = 5;
analysis.files[2].commit_count = 15;
let result = format_churn_as_summary(&analysis);
assert!(result.is_ok());
let summary = result.unwrap();
assert!(summary.contains("1. `file_2.rs` - 15 commits"));
}
// Markdown output has headings and the analysis title/period.
#[test]
fn test_format_churn_markdown_structure() {
let analysis = create_sample_analysis(3);
let result = format_churn_as_markdown(&analysis);
assert!(result.is_ok());
let markdown = result.unwrap();
assert!(markdown.contains("#"));
assert!(markdown.contains("Code Churn Analysis"));
assert!(markdown.contains("Period"));
}
// The first CSV line is a header mentioning file/path and commit columns.
#[test]
fn test_format_churn_csv_headers() {
let analysis = create_sample_analysis(2);
let result = format_churn_as_csv(&analysis);
assert!(result.is_ok());
let csv = result.unwrap();
let lines: Vec<&str> = csv.lines().collect();
assert!(!lines.is_empty());
let headers = lines[0];
assert!(headers.contains("file") || headers.contains("File") || headers.contains("path"));
assert!(headers.contains("commit") || headers.contains("Commit"));
}
// CSV carries at least header + one data row referencing a fixture file.
#[test]
fn test_format_churn_csv_data_rows() {
let analysis = create_sample_analysis(2);
let result = format_churn_as_csv(&analysis);
assert!(result.is_ok());
let csv = result.unwrap();
let lines: Vec<&str> = csv.lines().collect();
assert!(lines.len() >= 2);
assert!(csv.contains("file_0.rs") || csv.contains("file_1.rs"));
}
// The formatter tolerates an empty hotspot list.
#[test]
fn test_format_churn_empty_hotspots() {
let mut analysis = create_sample_analysis(1);
analysis.summary.hotspot_files = vec![];
let result = format_churn_as_summary(&analysis);
assert!(result.is_ok());
}
// The formatter tolerates an empty stable-files list.
#[test]
fn test_format_churn_empty_stable_files() {
let mut analysis = create_sample_analysis(1);
analysis.summary.stable_files = vec![];
let result = format_churn_as_summary(&analysis);
assert!(result.is_ok());
}
// The formatter tolerates an empty contributor map.
#[test]
fn test_format_churn_empty_contributors() {
let mut analysis = create_sample_analysis(1);
analysis.summary.author_contributions = HashMap::new();
let result = format_churn_as_summary(&analysis);
assert!(result.is_ok());
}
// A single-file analysis still mentions that file.
#[test]
fn test_format_churn_single_file() {
let analysis = create_sample_analysis(1);
let result = format_churn_as_summary(&analysis);
assert!(result.is_ok());
let summary = result.unwrap();
assert!(summary.contains("file_0.rs"));
}
// The "Top Files" section is capped at 10 entries: with 20 files the list
// reaches rank 10 (and, by implication of the cap, no further).
#[test]
fn test_format_churn_many_files_truncates_to_10() {
let analysis = create_sample_analysis(20);
let result = format_churn_as_summary(&analysis);
assert!(result.is_ok());
let summary = result.unwrap();
assert!(summary.contains("1. "));
assert!(summary.contains("10. "));
}
}
// Tests for extract_identifiers. The raw-string fixtures are the actual input
// bytes fed to the extractor, so their exact content (including the lack of
// indentation) is intentional and must not be reformatted.
mod identifier_extraction_comprehensive {
use super::*;
// Rust fn declarations (pub / async / plain) are all reported as "function".
#[test]
fn test_extract_rust_functions() {
let content = r#"
pub fn public_function() {}
async fn async_function() {}
fn simple_function() {}
"#;
let identifiers = extract_identifiers(content);
let functions: Vec<_> = identifiers
.iter()
.filter(|i| i.kind == "function")
.collect();
assert_eq!(functions.len(), 3);
let names: Vec<_> = functions.iter().map(|f| f.name.as_str()).collect();
assert!(names.contains(&"public_function"));
assert!(names.contains(&"async_function"));
assert!(names.contains(&"simple_function"));
}
// Python def statements are recognized as functions.
#[test]
fn test_extract_python_functions() {
let content = r#"
def calculate_sum(a, b):
return a + b
def process_data():
pass
"#;
let identifiers = extract_identifiers(content);
let functions: Vec<_> = identifiers
.iter()
.filter(|i| i.kind == "function")
.collect();
assert!(functions.len() >= 2);
let names: Vec<_> = functions.iter().map(|f| f.name.as_str()).collect();
assert!(names.contains(&"calculate_sum"));
assert!(names.contains(&"process_data"));
}
// JavaScript function declarations are recognized as functions.
#[test]
fn test_extract_javascript_functions() {
let content = r#"
function handleClick() {
console.log("clicked");
}
function processData() {
return true;
}
"#;
let identifiers = extract_identifiers(content);
let functions: Vec<_> = identifiers
.iter()
.filter(|i| i.kind == "function")
.collect();
assert!(functions.len() >= 2);
let names: Vec<_> = functions.iter().map(|f| f.name.as_str()).collect();
assert!(names.contains(&"handleClick"));
assert!(names.contains(&"processData"));
}
// Rust struct / enum / trait declarations are reported under distinct kinds.
#[test]
fn test_extract_rust_structs_enums_traits() {
let content = r#"
pub struct User {
name: String,
}
enum Status {
Active,
Inactive,
}
pub trait Processable {
fn process(&self);
}
"#;
let identifiers = extract_identifiers(content);
let structs: Vec<_> = identifiers.iter().filter(|i| i.kind == "struct").collect();
assert_eq!(structs.len(), 1);
assert_eq!(structs[0].name, "User");
let enums: Vec<_> = identifiers.iter().filter(|i| i.kind == "enum").collect();
assert_eq!(enums.len(), 1);
assert_eq!(enums[0].name, "Status");
let traits: Vec<_> = identifiers.iter().filter(|i| i.kind == "trait").collect();
assert_eq!(traits.len(), 1);
assert_eq!(traits[0].name, "Processable");
}
// class declarations map to kind "class".
#[test]
fn test_extract_classes() {
let content = r#"
class UserManager {
constructor() {}
}
class DataProcessor {
process() {}
}
"#;
let identifiers = extract_identifiers(content);
let classes: Vec<_> = identifiers.iter().filter(|i| i.kind == "class").collect();
assert_eq!(classes.len(), 2);
let names: Vec<_> = classes.iter().map(|c| c.name.as_str()).collect();
assert!(names.contains(&"UserManager"));
assert!(names.contains(&"DataProcessor"));
}
// const declarations yield "constant"; let/var declarations yield "variable".
#[test]
fn test_extract_constants_and_variables() {
let content = r#"
const MAX_SIZE = 100;
let counter = 0;
var result = null;
pub const THRESHOLD: f32 = 0.5;
"#;
let identifiers = extract_identifiers(content);
let constants: Vec<_> = identifiers
.iter()
.filter(|i| i.kind == "constant")
.collect();
assert!(!constants.is_empty());
let variables: Vec<_> = identifiers
.iter()
.filter(|i| i.kind == "variable")
.collect();
assert!(!variables.is_empty());
}
// Empty input yields no identifiers.
#[test]
fn test_extract_empty_content() {
let content = "";
let identifiers = extract_identifiers(content);
assert_eq!(identifiers.len(), 0);
}
// Comment-only content should produce few (ideally zero) identifiers; the
// loose bound tolerates extractor false positives on comment text.
#[test]
fn test_extract_no_identifiers() {
let content = r#"
// Just comments
/* Multi-line
comments */
"string literals"
"#;
let identifiers = extract_identifiers(content);
assert!(identifiers.len() < 10);
}
// Repeated declarations of the same name collapse to one entry.
#[test]
fn test_extract_deduplicates_identifiers() {
let content = r#"
fn process_data() {}
fn process_data() {}
fn process_data() {}
"#;
let identifiers = extract_identifiers(content);
let functions: Vec<_> = identifiers
.iter()
.filter(|i| i.name == "process_data")
.collect();
assert_eq!(functions.len(), 1);
}
// Each identifier records its source line; three declarations on distinct
// lines must produce at least two distinct line numbers.
#[test]
fn test_extract_tracks_line_numbers() {
let content = r#"
fn first() {}
fn second() {}
fn third() {}
"#;
let identifiers = extract_identifiers(content);
let functions: Vec<_> = identifiers
.iter()
.filter(|i| i.kind == "function")
.collect();
assert_eq!(functions.len(), 3);
let line_numbers: Vec<_> = functions.iter().map(|f| f.line).collect();
let unique_lines: std::collections::HashSet<_> = line_numbers.iter().collect();
assert!(unique_lines.len() >= 2); }
// Extraction is language-agnostic: mixed Rust/Python/JS content still yields
// both function and class kinds.
#[test]
fn test_extract_mixed_languages() {
let content = r#"
fn rust_function() {}
def python_function():
pass
function jsFunction() {}
class MyClass {}
"#;
let identifiers = extract_identifiers(content);
assert!(identifiers.len() >= 4);
let kinds: std::collections::HashSet<_> =
identifiers.iter().map(|i| i.kind.as_str()).collect();
assert!(kinds.contains("function"));
assert!(kinds.contains("class"));
}
// TypeScript interface and type-alias declarations get their own kinds.
#[test]
fn test_extract_interface_and_type() {
let content = r#"
interface UserInterface {
name: string;
}
type UserId = string;
"#;
let identifiers = extract_identifiers(content);
let interfaces: Vec<_> = identifiers
.iter()
.filter(|i| i.kind == "interface")
.collect();
assert_eq!(interfaces.len(), 1);
assert_eq!(interfaces[0].name, "UserInterface");
let types: Vec<_> = identifiers.iter().filter(|i| i.kind == "type").collect();
assert_eq!(types.len(), 1);
assert_eq!(types[0].name, "UserId");
}
// Free-form prose between declarations must not produce function entries.
#[test]
fn test_extract_ignores_non_identifier_lines() {
let content = r#"
fn valid_function() {}
this is not valid rust code
function validFunction() {}
more random text
"#;
let identifiers = extract_identifiers(content);
let functions: Vec<_> = identifiers
.iter()
.filter(|i| i.kind == "function")
.collect();
assert_eq!(functions.len(), 2);
}
}
// Tests for the dead-code formatters (summary / JSON / markdown).
mod dead_code_formatter_comprehensive {
    use pmat::cli::dead_code_formatter::{
        DeadCodeFormatter, JsonFormatter, MarkdownFormatter, SummaryFormatter,
    };
    use pmat::models::dead_code::{
        ConfidenceLevel, DeadCodeResult, DeadCodeSummary, FileDeadCodeMetrics,
    };

    /// One file's metrics: `dead_lines` of `total_lines` dead, high confidence.
    fn create_sample_file_metrics(
        path: &str,
        dead_lines: usize,
        total_lines: usize,
    ) -> FileDeadCodeMetrics {
        let mut metrics = FileDeadCodeMetrics::new(path.to_string());
        metrics.dead_lines = dead_lines;
        metrics.total_lines = total_lines;
        // Guard against division by zero for empty files.
        metrics.dead_percentage = if total_lines == 0 {
            0.0
        } else {
            (dead_lines as f32 / total_lines as f32) * 100.0
        };
        metrics.confidence = ConfidenceLevel::High;
        metrics
    }

    /// A complete result over `file_count` synthetic files; file `i` has
    /// (i+1)*10 dead lines out of (i+1)*100 total.
    fn create_sample_result(file_count: usize) -> DeadCodeResult {
        let files: Vec<FileDeadCodeMetrics> = (0..file_count)
            .map(|i| {
                let scale = i + 1;
                create_sample_file_metrics(&format!("src/file_{}.rs", i), scale * 10, scale * 100)
            })
            .collect();
        let total_dead_lines: usize = files.iter().map(|f| f.dead_lines).sum();
        let total_analyzed_lines: usize = files.iter().map(|f| f.total_lines).sum();
        let dead_percentage = if total_analyzed_lines == 0 {
            0.0
        } else {
            (total_dead_lines as f32 / total_analyzed_lines as f32) * 100.0
        };
        DeadCodeResult {
            summary: DeadCodeSummary {
                total_files_analyzed: file_count,
                files_with_dead_code: file_count,
                total_dead_lines,
                dead_percentage,
                dead_functions: file_count * 2,
                dead_classes: file_count,
                dead_modules: file_count,
                unreachable_blocks: file_count,
            },
            files,
            total_files: file_count,
            analyzed_files: file_count,
        }
    }

    #[test]
    fn test_format_summary_with_multiple_files() {
        let text = SummaryFormatter.format(&create_sample_result(5)).unwrap();
        assert!(text.contains("Dead Code Analysis Summary"));
        assert!(text.contains("Files analyzed"));
        assert!(text.contains("5"));
    }

    #[test]
    fn test_format_summary_empty_result() {
        // Zero files must still render the header without panicking.
        let text = SummaryFormatter.format(&create_sample_result(0)).unwrap();
        assert!(text.contains("Dead Code Analysis Summary"));
    }

    #[test]
    fn test_format_json_structure() {
        let json = JsonFormatter.format(&create_sample_result(3)).unwrap();
        for key in ["\"summary\"", "\"files\"", "\"total_files\""] {
            assert!(json.contains(key));
        }
    }

    #[test]
    fn test_format_json_empty_files() {
        // An empty file list serializes as an empty JSON array.
        let json = JsonFormatter.format(&create_sample_result(0)).unwrap();
        assert!(json.contains("\"files\""));
        assert!(json.contains("[]"));
    }

    #[test]
    fn test_format_markdown_structure() {
        let markdown = MarkdownFormatter.format(&create_sample_result(4)).unwrap();
        assert!(markdown.contains("#"));
        assert!(markdown.len() > 50);
    }

    #[test]
    fn test_format_markdown_with_tables() {
        // Output carries either a markdown table or at least the file names.
        let markdown = MarkdownFormatter.format(&create_sample_result(2)).unwrap();
        assert!(markdown.contains("|") || markdown.contains("file_0.rs"));
    }

    #[test]
    fn test_summary_shows_top_files() {
        // With many files the summary lists a "Top Files" section.
        let text = SummaryFormatter.format(&create_sample_result(15)).unwrap();
        assert!(text.contains("Top Files"));
    }

    #[test]
    fn test_json_serializes_all_files() {
        let json = JsonFormatter.format(&create_sample_result(3)).unwrap();
        for path in ["file_0.rs", "file_1.rs", "file_2.rs"] {
            assert!(json.contains(path));
        }
    }

    #[test]
    fn test_formatters_handle_high_dead_percentage() {
        let mut result = create_sample_result(1);
        result.summary.dead_percentage = 95.5;
        assert!(SummaryFormatter.format(&result).is_ok());
        assert!(JsonFormatter.format(&result).is_ok());
    }

    #[test]
    fn test_formatters_handle_zero_dead_code() {
        let mut result = create_sample_result(1);
        result.summary.total_dead_lines = 0;
        result.summary.dead_percentage = 0.0;
        result.files[0].dead_lines = 0;
        result.files[0].dead_percentage = 0.0;
        assert!(SummaryFormatter.format(&result).is_ok());
        assert!(MarkdownFormatter.format(&result).is_ok());
    }
}
// Tests for format_defect_summary.
mod defect_prediction_formatter_comprehensive {
    use pmat::cli::analysis_utilities::{
        format_defect_summary, DefectPredictionReport, FilePrediction,
    };

    /// One synthetic prediction carrying two canned risk factors.
    fn create_sample_file_prediction(
        path: &str,
        risk_score: f32,
        risk_level: &str,
    ) -> FilePrediction {
        FilePrediction {
            file_path: path.to_string(),
            risk_score,
            risk_level: risk_level.to_string(),
            factors: vec!["High complexity".to_string(), "Recent churn".to_string()],
        }
    }

    /// A report with `high`/`medium`/`low` files per bucket. Paths follow
    /// "src/<level>_risk_<i>.rs"; scores start at 0.85 / 0.50 / 0.20 and
    /// step by 0.01 within each bucket.
    fn create_sample_report(high: usize, medium: usize, low: usize) -> DefectPredictionReport {
        let bucket = |count: usize, level: &'static str, base: f32| {
            (0..count)
                .map(move |i| {
                    create_sample_file_prediction(
                        &format!("src/{}_risk_{}.rs", level, i),
                        base + (i as f32 * 0.01),
                        level,
                    )
                })
                .collect::<Vec<_>>()
        };
        let mut file_predictions = bucket(high, "high", 0.85);
        file_predictions.extend(bucket(medium, "medium", 0.50));
        file_predictions.extend(bucket(low, "low", 0.20));
        DefectPredictionReport {
            total_files: high + medium + low,
            high_risk_files: high,
            medium_risk_files: medium,
            low_risk_files: low,
            file_predictions,
        }
    }

    #[test]
    fn test_format_defect_summary_with_high_risk_files() {
        let summary = format_defect_summary(&create_sample_report(5, 3, 2), 10).unwrap();
        assert!(summary.contains("Defect Prediction Analysis"));
        assert!(summary.contains("Total files analyzed: 10"));
        assert!(summary.contains("High risk files: 5"));
    }

    #[test]
    fn test_format_defect_summary_empty_report() {
        // An empty report still renders headers with zero counts.
        let summary = format_defect_summary(&create_sample_report(0, 0, 0), 10).unwrap();
        assert!(summary.contains("Defect Prediction Analysis"));
        assert!(summary.contains("Total files analyzed: 0"));
    }

    #[test]
    fn test_format_defect_summary_top_files_truncation() {
        // 15 high-risk files with a limit of 5: ranks 1..=5 are listed.
        let summary = format_defect_summary(&create_sample_report(15, 0, 0), 5).unwrap();
        assert!(summary.contains("high_risk_0.rs"));
        assert!(summary.contains("1. "));
        assert!(summary.contains("5. "));
    }

    #[test]
    fn test_format_defect_summary_all_risk_levels() {
        let summary = format_defect_summary(&create_sample_report(2, 3, 5), 20).unwrap();
        assert!(summary.contains("Total files analyzed: 10"));
        assert!(summary.contains("High risk files: 2"));
        assert!(summary.contains("Medium risk files: 3"));
        assert!(summary.contains("Low risk files: 5"));
    }

    #[test]
    fn test_format_defect_summary_single_file() {
        let summary = format_defect_summary(&create_sample_report(1, 0, 0), 10).unwrap();
        assert!(summary.contains("high_risk_0.rs"));
    }

    #[test]
    fn test_format_defect_summary_shows_risk_scores() {
        // Scores appear either as percentages or labeled risk values.
        let summary = format_defect_summary(&create_sample_report(2, 0, 0), 10).unwrap();
        assert!(summary.contains("%") || summary.contains("risk"));
    }

    #[test]
    fn test_format_defect_summary_shows_factors() {
        // Loose check: a report with factors produces a non-trivial body.
        let summary = format_defect_summary(&create_sample_report(1, 0, 0), 10).unwrap();
        assert!(summary.len() > 100);
    }

    #[test]
    fn test_format_defect_summary_no_high_risk() {
        let summary = format_defect_summary(&create_sample_report(0, 5, 10), 10).unwrap();
        assert!(summary.contains("High risk files: 0"));
        assert!(summary.contains("Medium risk files: 5"));
    }
}
// Tests for format_incremental_coverage_summary. Assertions depend on exact
// output substrings ("Files Improved: N", branch names, ...), so fixtures are
// built field-by-field with deterministic values.
mod coverage_formatter_comprehensive {
use pmat::cli::analysis_utilities::{
format_incremental_coverage_summary, CoverageSummary, FileCoverageMetrics,
IncrementalCoverageReport,
};
use std::path::PathBuf;
// Per-file fixture; delta and uncovered-line count are derived from the
// other arguments (saturating_sub avoids underflow when covered > added).
fn create_sample_file_metrics(
path: &str,
base_coverage: f64,
target_coverage: f64,
lines_added: usize,
lines_covered: usize,
) -> FileCoverageMetrics {
FileCoverageMetrics {
path: PathBuf::from(path),
base_coverage,
target_coverage,
coverage_delta: target_coverage - base_coverage,
lines_added,
lines_covered,
lines_uncovered: lines_added.saturating_sub(lines_covered),
}
}
// Aggregate summary fixture; all fields supplied by the caller.
fn create_sample_summary(
total_files: usize,
files_improved: usize,
files_degraded: usize,
overall_delta: f64,
meets_threshold: bool,
) -> CoverageSummary {
CoverageSummary {
total_files_changed: total_files,
files_improved,
files_degraded,
overall_delta,
meets_threshold,
}
}
// Report fixture comparing "main" against "feature-branch" at an 80% bar.
fn create_sample_report(
files: Vec<FileCoverageMetrics>,
summary: CoverageSummary,
) -> IncrementalCoverageReport {
IncrementalCoverageReport {
base_branch: "main".to_string(),
target_branch: "feature-branch".to_string(),
coverage_threshold: 80.0,
files,
summary,
}
}
// Header, both branch names, and the improved-files count all appear.
#[test]
fn test_format_coverage_summary_with_improvements() {
let files = vec![
create_sample_file_metrics("src/lib.rs", 70.0, 85.0, 100, 85),
create_sample_file_metrics("src/main.rs", 60.0, 75.0, 80, 60),
];
let summary = create_sample_summary(2, 2, 0, 10.0, true);
let report = create_sample_report(files, summary);
let result = format_incremental_coverage_summary(&report, 10);
assert!(result.is_ok());
let output = result.unwrap();
assert!(output.contains("Incremental Coverage Analysis"));
assert!(output.contains("main"));
assert!(output.contains("feature-branch"));
assert!(output.contains("Files Improved: 2"));
}
// A coverage drop is reported as a degraded file and names the file.
#[test]
fn test_format_coverage_summary_with_degradations() {
let files = vec![create_sample_file_metrics(
"src/bad.rs",
80.0,
70.0,
100,
70,
)];
let summary = create_sample_summary(1, 0, 1, -10.0, false);
let report = create_sample_report(files, summary);
let result = format_incremental_coverage_summary(&report, 10);
assert!(result.is_ok());
let output = result.unwrap();
assert!(output.contains("Files Degraded: 1"));
assert!(output.contains("bad.rs"));
}
// An empty change set renders a zero count without panicking.
#[test]
fn test_format_coverage_summary_empty_files() {
let files = vec![];
let summary = create_sample_summary(0, 0, 0, 0.0, true);
let report = create_sample_report(files, summary);
let result = format_incremental_coverage_summary(&report, 10);
assert!(result.is_ok());
let output = result.unwrap();
assert!(output.contains("Files Changed: 0"));
}
// meets_threshold = true renders an affirmative marker.
#[test]
fn test_format_coverage_meets_threshold() {
let files = vec![create_sample_file_metrics(
"src/good.rs",
70.0,
90.0,
100,
90,
)];
let summary = create_sample_summary(1, 1, 0, 20.0, true);
let report = create_sample_report(files, summary);
let result = format_incremental_coverage_summary(&report, 10);
assert!(result.is_ok());
let output = result.unwrap();
assert!(output.contains("✅ Yes") || output.contains("true"));
}
// meets_threshold = false renders a negative marker.
#[test]
fn test_format_coverage_fails_threshold() {
let files = vec![create_sample_file_metrics(
"src/low.rs",
50.0,
55.0,
100,
55,
)];
let summary = create_sample_summary(1, 1, 0, 5.0, false);
let report = create_sample_report(files, summary);
let result = format_incremental_coverage_summary(&report, 10);
assert!(result.is_ok());
let output = result.unwrap();
assert!(output.contains("❌ No") || output.contains("false"));
}
// All changed files are listed along with the total count.
#[test]
fn test_format_coverage_multiple_files() {
let files = vec![
create_sample_file_metrics("src/a.rs", 60.0, 80.0, 100, 80),
create_sample_file_metrics("src/b.rs", 70.0, 75.0, 50, 38),
create_sample_file_metrics("src/c.rs", 90.0, 85.0, 200, 170),
];
let summary = create_sample_summary(3, 2, 1, 5.0, true);
let report = create_sample_report(files, summary);
let result = format_incremental_coverage_summary(&report, 10);
assert!(result.is_ok());
let output = result.unwrap();
assert!(output.contains("a.rs"));
assert!(output.contains("b.rs"));
assert!(output.contains("c.rs"));
assert!(output.contains("Files Changed: 3"));
}
// With 20 changed files and a top-N of 10, the count is still 20; the
// loose second assertion tolerates either listing or truncation behavior.
#[test]
fn test_format_coverage_top_files_truncation() {
let files: Vec<FileCoverageMetrics> = (0..20)
.map(|i| create_sample_file_metrics(&format!("src/file_{}.rs", i), 70.0, 75.0, 100, 75))
.collect();
let summary = create_sample_summary(20, 20, 0, 5.0, true);
let report = create_sample_report(files, summary);
let result = format_incremental_coverage_summary(&report, 10);
assert!(result.is_ok());
let output = result.unwrap();
assert!(output.contains("Files Changed: 20"));
assert!(output.contains("file_0.rs") || output.len() > 200);
}
// The overall delta (here +25) shows up in some recognizable form.
#[test]
fn test_format_coverage_shows_delta() {
let files = vec![create_sample_file_metrics(
"src/delta.rs",
50.0,
75.0,
100,
75,
)];
let summary = create_sample_summary(1, 1, 0, 25.0, true);
let report = create_sample_report(files, summary);
let result = format_incremental_coverage_summary(&report, 10);
assert!(result.is_ok());
let output = result.unwrap();
assert!(output.contains("25") || output.contains("delta") || output.contains("+"));
}
// Improved, degraded, and unchanged files are counted independently.
#[test]
fn test_format_coverage_mixed_results() {
let files = vec![
create_sample_file_metrics("src/improved.rs", 50.0, 80.0, 100, 80),
create_sample_file_metrics("src/degraded.rs", 80.0, 70.0, 100, 70),
create_sample_file_metrics("src/unchanged.rs", 75.0, 75.0, 100, 75),
];
let summary = create_sample_summary(3, 1, 1, 3.3, true);
let report = create_sample_report(files, summary);
let result = format_incremental_coverage_summary(&report, 10);
assert!(result.is_ok());
let output = result.unwrap();
assert!(output.contains("Files Improved: 1"));
assert!(output.contains("Files Degraded: 1"));
assert!(output.contains("Files Changed: 3"));
}
// The output is markdown-ish: headings, a summary section, non-trivial size.
#[test]
fn test_format_coverage_structure_validation() {
let files = vec![create_sample_file_metrics(
"src/test.rs",
60.0,
70.0,
100,
70,
)];
let summary = create_sample_summary(1, 1, 0, 10.0, true);
let report = create_sample_report(files, summary);
let result = format_incremental_coverage_summary(&report, 10);
assert!(result.is_ok());
let output = result.unwrap();
assert!(output.contains("#"));
assert!(output.contains("Summary") || output.contains("summary"));
assert!(output.len() > 50); }
}
mod satd_helpers {
use pmat::cli::analysis_utilities::{apply_satd_filters, determine_satd_severity};
use pmat::cli::enums::SatdSeverity;
use pmat::services::satd_detector::{DebtCategory, Severity, TechnicalDebt};
use std::path::PathBuf;
/// Builds a TechnicalDebt item pinned at test.rs:1:1 with a zeroed context
/// hash; only severity, category, and text vary per test.
fn create_debt(severity: Severity, category: DebtCategory, text: &str) -> TechnicalDebt {
    TechnicalDebt {
        text: text.to_string(),
        file: PathBuf::from("test.rs"),
        line: 1,
        column: 1,
        context_hash: [0u8; 16],
        category,
        severity,
    }
}
// determine_satd_severity buckets SATD markers: HACK/XXX -> "high",
// FIXME/REFACTOR -> "medium", anything else -> "low".

#[test]
fn test_determine_severity_hack_high() {
    let severity = determine_satd_severity("HACK");
    assert_eq!(severity, "high");
}

#[test]
fn test_determine_severity_xxx_high() {
    let severity = determine_satd_severity("XXX");
    assert_eq!(severity, "high");
}

#[test]
fn test_determine_severity_fixme_medium() {
    let severity = determine_satd_severity("FIXME");
    assert_eq!(severity, "medium");
}

#[test]
fn test_determine_severity_refactor_medium() {
    let severity = determine_satd_severity("REFACTOR");
    assert_eq!(severity, "medium");
}

#[test]
fn test_determine_severity_todo_low() {
    let severity = determine_satd_severity("TODO");
    assert_eq!(severity, "low");
}

#[test]
fn test_determine_severity_unknown_defaults_low() {
    // Unrecognized (or empty) markers fall back to the lowest severity.
    for marker in ["UNKNOWN", "CUSTOM", ""] {
        assert_eq!(determine_satd_severity(marker), "low");
    }
}
#[test]
fn test_filter_no_filters_returns_all() {
let items = vec![
create_debt(Severity::Critical, DebtCategory::Security, "Security issue"),
create_debt(Severity::High, DebtCategory::Defect, "Bug"),
create_debt(Severity::Medium, DebtCategory::Design, "Design debt"),
create_debt(Severity::Low, DebtCategory::Requirement, "TODO"),
];
let result = apply_satd_filters(items.clone(), None, false);
assert_eq!(result.len(), 4);
}
#[test]
fn test_filter_by_critical_severity() {
let items = vec![
create_debt(Severity::Critical, DebtCategory::Security, "Critical"),
create_debt(Severity::High, DebtCategory::Defect, "High"),
create_debt(Severity::Medium, DebtCategory::Design, "Medium"),
create_debt(Severity::Low, DebtCategory::Requirement, "Low"),
];
let result = apply_satd_filters(items, Some(SatdSeverity::Critical), false);
assert_eq!(result.len(), 1);
assert_eq!(result[0].severity, Severity::Critical);
}
#[test]
fn test_filter_by_high_severity() {
let items = vec![
create_debt(Severity::Critical, DebtCategory::Security, "Critical"),
create_debt(Severity::High, DebtCategory::Defect, "High"),
create_debt(Severity::Medium, DebtCategory::Design, "Medium"),
create_debt(Severity::Low, DebtCategory::Requirement, "Low"),
];
let result = apply_satd_filters(items, Some(SatdSeverity::High), false);
assert_eq!(result.len(), 2); assert!(result.iter().any(|d| d.severity == Severity::Critical));
assert!(result.iter().any(|d| d.severity == Severity::High));
}
#[test]
fn test_filter_by_medium_severity() {
let items = vec![
create_debt(Severity::Critical, DebtCategory::Security, "Critical"),
create_debt(Severity::High, DebtCategory::Defect, "High"),
create_debt(Severity::Medium, DebtCategory::Design, "Medium"),
create_debt(Severity::Low, DebtCategory::Requirement, "Low"),
];
let result = apply_satd_filters(items, Some(SatdSeverity::Medium), false);
assert_eq!(result.len(), 3); }
#[test]
fn test_filter_by_low_severity() {
let items = vec![
create_debt(Severity::Critical, DebtCategory::Security, "Critical"),
create_debt(Severity::High, DebtCategory::Defect, "High"),
create_debt(Severity::Medium, DebtCategory::Design, "Medium"),
create_debt(Severity::Low, DebtCategory::Requirement, "Low"),
];
let result = apply_satd_filters(items, Some(SatdSeverity::Low), false);
assert_eq!(result.len(), 4); }
#[test]
fn test_filter_critical_only_flag() {
let items = vec![
create_debt(Severity::Critical, DebtCategory::Security, "Critical"),
create_debt(Severity::High, DebtCategory::Defect, "High"),
create_debt(Severity::Medium, DebtCategory::Design, "Medium"),
create_debt(Severity::Low, DebtCategory::Requirement, "Low"),
];
let result = apply_satd_filters(items, None, true);
assert_eq!(result.len(), 2); assert!(result
.iter()
.all(|d| matches!(d.severity, Severity::Critical | Severity::High)));
}
#[test]
fn test_filter_empty_input() {
let items: Vec<TechnicalDebt> = vec![];
let result = apply_satd_filters(items, Some(SatdSeverity::Critical), true);
assert_eq!(result.len(), 0);
}
#[test]
fn test_filter_combined_severity_and_critical() {
let items = vec![
create_debt(Severity::Critical, DebtCategory::Security, "Critical"),
create_debt(Severity::High, DebtCategory::Defect, "High"),
create_debt(Severity::Medium, DebtCategory::Design, "Medium"),
create_debt(Severity::Low, DebtCategory::Requirement, "Low"),
];
let result = apply_satd_filters(items, Some(SatdSeverity::Medium), true);
assert_eq!(result.len(), 2); }
#[test]
fn test_filter_preserves_debt_data() {
let items = vec![create_debt(
Severity::Critical,
DebtCategory::Security,
"Security vulnerability",
)];
let result = apply_satd_filters(items, None, false);
assert_eq!(result.len(), 1);
assert_eq!(result[0].text, "Security vulnerability");
assert_eq!(result[0].category, DebtCategory::Security);
assert_eq!(result[0].severity, Severity::Critical);
}
}
mod tdg_utility_helpers {
    //! Tests for TDG percentile math, primary-factor identification,
    //! refactoring-effort estimation, and makefile-lint severity rendering.
    use pmat::cli::analysis_utilities::{
        estimate_refactoring_hours, get_gcc_level, get_sarif_level, get_severity_display,
        identify_primary_factor, percentile,
    };
    use pmat::models::tdg::TDGComponents;
    use pmat::services::makefile_linter;

    /// Builds a `TDGComponents` fixture; `dead_code` is always zero here.
    fn components(
        complexity: f64,
        churn: f64,
        coupling: f64,
        domain_risk: f64,
        duplication: f64,
    ) -> TDGComponents {
        TDGComponents {
            complexity,
            churn,
            coupling,
            domain_risk,
            duplication,
            dead_code: 0.0,
        }
    }

    #[test]
    fn test_percentile_empty_array() {
        // An empty sample set yields 0.0 rather than panicking.
        let empty: Vec<f64> = Vec::new();
        assert_eq!(percentile(&empty, 0.5), 0.0);
    }

    #[test]
    fn test_percentile_single_value() {
        // With one sample, every percentile collapses to that value.
        let data = vec![42.0];
        for p in &[0.0, 0.5, 1.0] {
            assert_eq!(percentile(&data, *p), 42.0);
        }
    }

    #[test]
    fn test_percentile_median() {
        let data = vec![1.0, 2.0, 3.0, 4.0, 5.0];
        assert_eq!(percentile(&data, 0.5), 3.0);
    }

    #[test]
    fn test_percentile_p99() {
        let data: Vec<f64> = (1..=10).map(f64::from).collect();
        assert!(percentile(&data, 0.99) >= 9.0);
    }

    #[test]
    fn test_percentile_boundaries() {
        // p=0 is the minimum; p=1 must not exceed the maximum.
        let data = vec![10.0, 20.0, 30.0, 40.0, 50.0];
        assert_eq!(percentile(&data, 0.0), 10.0);
        assert!(percentile(&data, 1.0) <= 50.0);
    }

    #[test]
    fn test_primary_factor_complexity() {
        let c = components(100.0, 10.0, 5.0, 2.0, 1.0);
        assert_eq!(identify_primary_factor(&c), "High Complexity");
    }

    #[test]
    fn test_primary_factor_churn() {
        let c = components(10.0, 100.0, 5.0, 2.0, 1.0);
        assert_eq!(identify_primary_factor(&c), "Frequent Changes");
    }

    #[test]
    fn test_primary_factor_coupling() {
        let c = components(0.0, 0.0, 100.0, 0.0, 0.0);
        assert_eq!(identify_primary_factor(&c), "High Coupling");
    }

    #[test]
    fn test_primary_factor_all_equal() {
        // Ties resolve to "Frequent Changes" in the current implementation.
        let c = components(10.0, 10.0, 10.0, 10.0, 10.0);
        assert_eq!(identify_primary_factor(&c), "Frequent Changes");
    }

    #[test]
    fn test_refactoring_hours_zero_tdg() {
        // Baseline effort: a TDG of zero still costs roughly 2 hours.
        assert!((estimate_refactoring_hours(0.0) - 2.0).abs() < 0.01);
    }

    #[test]
    fn test_refactoring_hours_low_tdg() {
        assert!((estimate_refactoring_hours(1.0) - 3.6).abs() < 0.1);
    }

    #[test]
    fn test_refactoring_hours_high_tdg() {
        assert!(estimate_refactoring_hours(5.0) > 30.0);
    }

    #[test]
    fn test_refactoring_hours_increases_with_tdg() {
        // The estimate is strictly increasing over these sample points.
        let hours: Vec<f64> = [1.0, 2.0, 3.0]
            .iter()
            .map(|&tdg| estimate_refactoring_hours(tdg))
            .collect();
        assert!(hours[1] > hours[0]);
        assert!(hours[2] > hours[1]);
    }

    #[test]
    fn test_severity_display_error() {
        assert_eq!(
            get_severity_display(&makefile_linter::Severity::Error),
            "❌ Error"
        );
    }

    #[test]
    fn test_severity_display_warning() {
        assert_eq!(
            get_severity_display(&makefile_linter::Severity::Warning),
            "⚠️ Warning"
        );
    }

    #[test]
    fn test_severity_display_performance() {
        assert_eq!(
            get_severity_display(&makefile_linter::Severity::Performance),
            "⚡ Performance"
        );
    }

    #[test]
    fn test_severity_display_info() {
        assert_eq!(
            get_severity_display(&makefile_linter::Severity::Info),
            "ℹ️ Info"
        );
    }

    #[test]
    fn test_sarif_level_error() {
        assert_eq!(get_sarif_level(&makefile_linter::Severity::Error), "error");
    }

    #[test]
    fn test_sarif_level_warning() {
        assert_eq!(
            get_sarif_level(&makefile_linter::Severity::Warning),
            "warning"
        );
    }

    #[test]
    fn test_sarif_level_performance() {
        // SARIF folds Performance down to "note".
        assert_eq!(
            get_sarif_level(&makefile_linter::Severity::Performance),
            "note"
        );
    }

    #[test]
    fn test_sarif_level_info() {
        assert_eq!(get_sarif_level(&makefile_linter::Severity::Info), "note");
    }

    #[test]
    fn test_gcc_level_error() {
        assert_eq!(get_gcc_level(&makefile_linter::Severity::Error), "error");
    }

    #[test]
    fn test_gcc_level_warning() {
        assert_eq!(
            get_gcc_level(&makefile_linter::Severity::Warning),
            "warning"
        );
    }

    #[test]
    fn test_gcc_level_performance() {
        // GCC-style output also folds Performance down to "note".
        assert_eq!(
            get_gcc_level(&makefile_linter::Severity::Performance),
            "note"
        );
    }

    #[test]
    fn test_gcc_level_info() {
        assert_eq!(get_gcc_level(&makefile_linter::Severity::Info), "note");
    }
}
mod path_display_utilities {
    //! Tests for build-artifact path detection, content hashing, filename
    //! extraction, and coverage-delta emoji rendering.
    use pmat::cli::analysis_utilities::{
        calculate_content_hash, extract_filename, get_coverage_emoji, is_build_artifact,
    };
    use std::path::Path;

    /// How many files to display given a user limit: a limit of zero means
    /// "show everything", otherwise cap at the list length.
    /// NOTE(review): this duplicates the production rule inline instead of
    /// calling it — the original tests embedded the same literal `if 0 == 0`
    /// expressions (constant conditions). If the real helper becomes public,
    /// switch these tests to call it directly.
    fn files_to_show(limit: usize, total: usize) -> usize {
        if limit == 0 {
            total
        } else {
            limit.min(total)
        }
    }

    #[test]
    fn test_is_build_artifact_target_dir() {
        assert!(is_build_artifact(Path::new("/project/target/debug/bin")));
        assert!(is_build_artifact(Path::new("/target/release/lib.so")));
    }

    #[test]
    fn test_is_build_artifact_node_modules() {
        assert!(is_build_artifact(Path::new(
            "/project/node_modules/package/index.js"
        )));
        assert!(is_build_artifact(Path::new("/node_modules/lib/file.js")));
    }

    #[test]
    fn test_is_build_artifact_git_dir() {
        assert!(is_build_artifact(Path::new("/project/.git/objects/abc")));
        assert!(is_build_artifact(Path::new("/.git/HEAD")));
    }

    #[test]
    fn test_is_build_artifact_build_dirs() {
        // Generic build/output/cache directories are all treated as artifacts.
        assert!(is_build_artifact(Path::new("/project/build/output.o")));
        assert!(is_build_artifact(Path::new("/out/binary")));
        assert!(is_build_artifact(Path::new("/dist/bundle.js")));
        assert!(is_build_artifact(Path::new("/.cargo/registry/cache")));
        assert!(is_build_artifact(Path::new("/generated/proto.rs")));
    }

    #[test]
    fn test_is_build_artifact_source_files() {
        // Ordinary source and doc paths must not be flagged.
        assert!(!is_build_artifact(Path::new("/project/src/main.rs")));
        assert!(!is_build_artifact(Path::new("/project/lib.rs")));
        assert!(!is_build_artifact(Path::new("README.md")));
    }

    #[test]
    fn test_calculate_content_hash_same_content() {
        // The hash is a pure function of the input: equal inputs, equal hashes.
        let content1 = "fn main() { println!(\"Hello\"); }";
        let content2 = "fn main() { println!(\"Hello\"); }";
        assert_eq!(
            calculate_content_hash(content1),
            calculate_content_hash(content2)
        );
    }

    #[test]
    fn test_calculate_content_hash_different_content() {
        let content1 = "fn main() { println!(\"Hello\"); }";
        let content2 = "fn main() { println!(\"World\"); }";
        assert_ne!(
            calculate_content_hash(content1),
            calculate_content_hash(content2)
        );
    }

    #[test]
    fn test_calculate_content_hash_empty_string() {
        // Even the empty string hashes to a nonzero value.
        let hash = calculate_content_hash("");
        assert!(hash > 0);
    }

    #[test]
    fn test_calculate_content_hash_deterministic() {
        let content = "Some test content for hashing";
        assert_eq!(calculate_content_hash(content), calculate_content_hash(content));
    }

    #[test]
    fn test_calculate_files_to_show_zero_limit() {
        // Zero limit means "no cap": an empty list shows zero files.
        assert_eq!(files_to_show(0, 0), 0);
    }

    #[test]
    fn test_calculate_files_to_show_under_limit() {
        assert_eq!(files_to_show(10, 3), 3);
    }

    #[test]
    fn test_calculate_files_to_show_over_limit() {
        assert_eq!(files_to_show(10, 12), 10);
    }

    #[test]
    fn test_calculate_files_to_show_exact_limit() {
        assert_eq!(files_to_show(5, 5), 5);
    }

    #[test]
    fn test_extract_filename_simple_path() {
        let path = Path::new("/project/src/main.rs");
        assert_eq!(extract_filename(path), "main.rs");
    }

    #[test]
    fn test_extract_filename_no_directory() {
        let path = Path::new("file.txt");
        assert_eq!(extract_filename(path), "file.txt");
    }

    #[test]
    fn test_extract_filename_nested_path() {
        let path = Path::new("/home/user/projects/rust/server/src/lib.rs");
        assert_eq!(extract_filename(path), "lib.rs");
    }

    #[test]
    fn test_extract_filename_directory_path() {
        // A trailing slash yields the last component, or a fallback marker —
        // the test accepts either so it is robust to both implementations.
        let path = Path::new("/project/src/");
        let filename = extract_filename(path);
        assert!(filename == "src" || filename == "unknown");
    }

    #[test]
    fn test_get_coverage_emoji_positive_delta() {
        assert_eq!(get_coverage_emoji(5.0), "📈");
        assert_eq!(get_coverage_emoji(0.1), "📈");
        assert_eq!(get_coverage_emoji(100.0), "📈");
    }

    #[test]
    fn test_get_coverage_emoji_negative_delta() {
        assert_eq!(get_coverage_emoji(-5.0), "📉");
        assert_eq!(get_coverage_emoji(-0.1), "📉");
        assert_eq!(get_coverage_emoji(-100.0), "📉");
    }

    #[test]
    fn test_get_coverage_emoji_zero_delta() {
        // Documents current behavior: 📈 only for strictly positive deltas,
        // so an unchanged coverage (0.0) renders as 📉.
        assert_eq!(get_coverage_emoji(0.0), "📉");
    }
}
mod file_type_and_content_validators {
    //! Tests for filename-based classifiers (test/example/benchmark/mock),
    //! code-content normalization, directory exclusion, and severity icons.
    use pmat::cli::analysis_utilities::{
        get_severity_icon, is_benchmark_file, is_example_or_demo_file, is_excluded_directory,
        is_mock_or_stub_file, is_test_file, normalize_code_content,
    };

    #[test]
    fn test_is_test_file_suffix_patterns() {
        for name in &["module_test.rs", "utils_tests.rs", "tests.rs"] {
            assert!(is_test_file(name), "{} should be a test file", name);
        }
    }

    #[test]
    fn test_is_test_file_prefix_patterns() {
        for name in &["test_utils.rs", "tests_integration.rs"] {
            assert!(is_test_file(name), "{} should be a test file", name);
        }
    }

    #[test]
    fn test_is_test_file_contains_patterns() {
        for name in &[
            "integration_test_helpers.rs",
            "utils_tests_extended.rs",
            "test_harness.rs",
            "property_tests.rs",
            "module_property_test.rs",
        ] {
            assert!(is_test_file(name), "{} should be a test file", name);
        }
    }

    #[test]
    fn test_is_test_file_non_test_files() {
        for name in &["main.rs", "lib.rs", "utils.rs", "module.rs"] {
            assert!(!is_test_file(name), "{} should not be a test file", name);
        }
    }

    #[test]
    fn test_is_test_file_edge_cases() {
        // "testing" is not a test marker, but a bare test_/_test affix is.
        assert!(!is_test_file("testing.rs"));
        assert!(is_test_file("test_.rs"));
        assert!(is_test_file("_test.rs"));
    }

    #[test]
    fn test_is_example_or_demo_file_prefix_patterns() {
        for name in &["example_usage.rs", "demo_application.rs"] {
            assert!(is_example_or_demo_file(name), "{} should match", name);
        }
    }

    #[test]
    fn test_is_example_or_demo_file_contains_patterns() {
        for name in &["api_example.rs", "user_demo.rs"] {
            assert!(is_example_or_demo_file(name), "{} should match", name);
        }
    }

    #[test]
    fn test_is_example_or_demo_file_non_example_files() {
        for name in &["main.rs", "lib.rs", "utils.rs"] {
            assert!(!is_example_or_demo_file(name), "{} should not match", name);
        }
    }

    #[test]
    fn test_is_example_or_demo_file_edge_cases() {
        // The plural "examples" alone does not match; an example_ prefix does.
        assert!(!is_example_or_demo_file("examples.rs"));
        assert!(is_example_or_demo_file("example_basic.rs"));
    }

    #[test]
    fn test_is_benchmark_file_suffix_patterns() {
        for name in &["performance_bench.rs", "sort_benchmark.rs"] {
            assert!(is_benchmark_file(name), "{} should match", name);
        }
    }

    #[test]
    fn test_is_benchmark_file_contains_patterns() {
        for name in &["bench_sorting.rs", "benchmark_performance.rs"] {
            assert!(is_benchmark_file(name), "{} should match", name);
        }
    }

    #[test]
    fn test_is_benchmark_file_non_benchmark_files() {
        for name in &["main.rs", "lib.rs", "utils.rs"] {
            assert!(!is_benchmark_file(name), "{} should not match", name);
        }
    }

    #[test]
    fn test_is_benchmark_file_edge_cases() {
        // "benches" alone does not match; a bench_ prefix does.
        assert!(!is_benchmark_file("benches.rs"));
        assert!(is_benchmark_file("bench_basic.rs"));
    }

    #[test]
    fn test_is_mock_or_stub_file_prefix_patterns() {
        for name in &["mock_database.rs", "stub_api.rs", "stubs_integration.rs"] {
            assert!(is_mock_or_stub_file(name), "{} should match", name);
        }
    }

    #[test]
    fn test_is_mock_or_stub_file_contains_patterns() {
        for name in &["database_mock.rs", "api_stub.rs", "integration_stubs.rs"] {
            assert!(is_mock_or_stub_file(name), "{} should match", name);
        }
    }

    #[test]
    fn test_is_mock_or_stub_file_non_mock_files() {
        for name in &["main.rs", "lib.rs", "utils.rs"] {
            assert!(!is_mock_or_stub_file(name), "{} should not match", name);
        }
    }

    #[test]
    fn test_is_mock_or_stub_file_edge_cases() {
        // "mocking" alone does not match; a mock_ prefix does.
        assert!(!is_mock_or_stub_file("mocking.rs"));
        assert!(is_mock_or_stub_file("mock_basic.rs"));
    }

    #[test]
    fn test_normalize_code_content_removes_empty_lines() {
        let source = "fn main() {\n\n println!(\"Hello\");\n\n}";
        let result = normalize_code_content(source);
        assert!(!result.contains("\n\n"));
        assert_eq!(result.lines().count(), 3);
    }

    #[test]
    fn test_normalize_code_content_removes_comments() {
        let source = "fn main() {\n // This is a comment\n println!(\"Hello\");\n}";
        let result = normalize_code_content(source);
        assert!(!result.contains("// This is a comment"));
        assert_eq!(result.lines().count(), 3);
    }

    #[test]
    fn test_normalize_code_content_trims_whitespace() {
        let source = " fn main() { \n println!(\"Hello\"); \n } ";
        let result = normalize_code_content(source);
        assert!(result.starts_with("fn main()"));
        assert!(!result.contains(" fn"));
    }

    #[test]
    fn test_normalize_code_content_empty_input() {
        assert_eq!(normalize_code_content(""), "");
    }

    #[test]
    fn test_normalize_code_content_only_comments() {
        let source = "// Comment 1\n// Comment 2\n/* Block comment */";
        let result = normalize_code_content(source);
        assert!(
            result.is_empty() || result == "\n",
            "Should be empty or single newline"
        );
    }

    #[test]
    fn test_is_excluded_directory_common_build_dirs() {
        for dir in &[
            "/project/target/debug",
            "/project/build/output",
            "/project/out/artifacts",
        ] {
            assert!(is_excluded_directory(dir), "{} should be excluded", dir);
        }
    }

    #[test]
    fn test_is_excluded_directory_node_and_js() {
        for dir in &["/project/node_modules/package", "/project/dist/bundle"] {
            assert!(is_excluded_directory(dir), "{} should be excluded", dir);
        }
    }

    #[test]
    fn test_is_excluded_directory_vcs_and_cache() {
        for dir in &[
            "/project/.git/objects",
            "/project/.cache/data",
            "/project/__pycache__/module",
        ] {
            assert!(is_excluded_directory(dir), "{} should be excluded", dir);
        }
    }

    #[test]
    fn test_is_excluded_directory_python_envs() {
        for dir in &["/project/.venv/lib", "/project/venv/bin", "/project/ENV/python"] {
            assert!(is_excluded_directory(dir), "{} should be excluded", dir);
        }
    }

    #[test]
    fn test_is_excluded_directory_source_dirs_allowed() {
        for dir in &["/project/src/main", "/project/lib/utils", "/project/core/logic"] {
            assert!(!is_excluded_directory(dir), "{} should be allowed", dir);
        }
    }

    #[test]
    fn test_is_excluded_directory_windows_paths() {
        // Backslash-separated Windows paths are recognized too.
        assert!(is_excluded_directory("C:\\project\\target\\debug"));
        assert!(is_excluded_directory("C:\\project\\.git\\objects"));
    }

    #[test]
    fn test_get_severity_icon_error() {
        assert_eq!(get_severity_icon("error"), "🔴");
    }

    #[test]
    fn test_get_severity_icon_warning() {
        assert_eq!(get_severity_icon("warning"), "🟡");
    }

    #[test]
    fn test_get_severity_icon_default() {
        // Anything other than error/warning falls back to the green icon.
        for label in &["info", "success", "unknown", ""] {
            assert_eq!(get_severity_icon(label), "🟢");
        }
    }
}