use crate::quality::scoring::{Grade, QualityScore};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::path::{Path, PathBuf};
/// Top-level quality-gate policy, persisted as `.ruchy/score.toml`
/// (written/read by `QualityGateEnforcer::load_config`).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct QualityGateConfig {
/// Minimum acceptable overall score in `[0.0, 1.0]`; below this is a Critical violation.
pub min_score: f64,
/// Minimum acceptable letter grade; below this is a Critical violation.
pub min_grade: Grade,
/// Per-component score floors (correctness, safety, etc.).
pub component_thresholds: ComponentThresholds,
/// Heuristics for detecting attempts to game the scorer.
pub anti_gaming: AntiGamingRules,
/// CI output and merge-blocking behavior.
pub ci_integration: CiIntegration,
// Keyed overrides (e.g. "performance" -> 0.3); stored but not consumed by the
// enforcement code in this module — presumably read by callers. TODO confirm.
pub project_overrides: HashMap<String, f64>,
}
/// Minimum acceptable value for each score component, all in `[0.0, 1.0]`.
/// Correctness/safety shortfalls are Critical; performance/maintainability are
/// High; idiomaticity is Medium (see `check_component_thresholds`).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ComponentThresholds {
pub correctness: f64,
pub performance: f64,
pub maintainability: f64,
pub safety: f64,
pub idiomaticity: f64,
}
/// Heuristics that flag score gaming (see `check_anti_gaming_rules`).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AntiGamingRules {
/// Minimum analysis confidence; below this is a High-severity violation.
pub min_confidence: f64,
/// Cache hit rates above this produce a "stale analysis" warning.
pub max_cache_hit_rate: f64,
/// Path substrings of critical files that must be scored with >= 90% confidence.
pub require_deep_analysis: Vec<String>,
/// Files smaller than this trigger a "gaming by splitting" warning.
pub min_file_size_bytes: usize,
// Not consumed by the enforcement code in this module — presumably checked
// elsewhere against the test/source line ratio. TODO confirm.
pub max_test_ratio: f64,
}
/// CI behavior flags. Only `junit_xml` and `json_output` are consumed here
/// (by `export_ci_results`); the rest are configuration surface for callers.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CiIntegration {
// NOTE(review): not read in this module — presumably the CI driver uses it. TODO confirm.
pub fail_on_violation: bool,
/// When true, `export_ci_results` writes `quality-gates.xml` (JUnit format).
pub junit_xml: bool,
/// When true, `export_ci_results` writes `quality-gates.json`.
pub json_output: bool,
pub notifications: NotificationConfig,
// NOTE(review): not read in this module — presumably the CI driver uses it. TODO confirm.
pub block_merge: bool,
}
/// Notification channels. None of these fields are consumed in this module;
/// presumably a CI/notification layer reads them. TODO confirm against callers.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct NotificationConfig {
pub slack: bool,
pub email: bool,
/// Optional webhook URL to notify.
pub webhook: Option<String>,
}
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct GateResult {
pub passed: bool,
pub score: f64,
pub grade: Grade,
pub violations: Vec<Violation>,
pub confidence: f64,
pub gaming_warnings: Vec<String>,
}
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct Violation {
pub violation_type: ViolationType,
pub actual: f64,
pub required: f64,
pub severity: Severity,
pub message: String,
}
#[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
pub enum ViolationType {
OverallScore,
Grade,
Correctness,
Performance,
Maintainability,
Safety,
Idiomaticity,
Confidence,
Gaming,
}
#[derive(Debug, Clone, PartialEq, serde::Serialize, serde::Deserialize)]
pub enum Severity {
Critical, High, Medium, Low, }
/// Applies a `QualityGateConfig` to quality scores and produces `GateResult`s.
pub struct QualityGateEnforcer {
// The policy this enforcer applies; immutable after construction.
config: QualityGateConfig,
}
impl Default for QualityGateConfig {
    /// Baseline policy: 70% / B- overall minimum, strict correctness and
    /// safety floors, moderate anti-gaming rules, CI output enabled, no
    /// notifications, and no per-project overrides.
    fn default() -> Self {
        let component_thresholds = ComponentThresholds {
            correctness: 0.8,
            performance: 0.6,
            maintainability: 0.7,
            safety: 0.8,
            idiomaticity: 0.5,
        };
        let anti_gaming = AntiGamingRules {
            min_confidence: 0.6,
            max_cache_hit_rate: 0.8,
            require_deep_analysis: vec!["src/main.rs".to_string(), "src/lib.rs".to_string()],
            min_file_size_bytes: 100,
            max_test_ratio: 2.0,
        };
        let notifications = NotificationConfig {
            slack: false,
            email: false,
            webhook: None,
        };
        let ci_integration = CiIntegration {
            fail_on_violation: true,
            junit_xml: true,
            json_output: true,
            notifications,
            block_merge: true,
        };
        Self {
            min_score: 0.7,
            min_grade: Grade::BMinus,
            component_thresholds,
            anti_gaming,
            ci_integration,
            project_overrides: HashMap::new(),
        }
    }
}
impl QualityGateEnforcer {
pub fn new(config: QualityGateConfig) -> Self {
Self { config }
}
pub fn load_config(project_root: &Path) -> anyhow::Result<QualityGateConfig> {
let config_path = project_root.join(".ruchy").join("score.toml");
if config_path.exists() {
let content = std::fs::read_to_string(&config_path)?;
let config: QualityGateConfig = toml::from_str(&content)?;
Ok(config)
} else {
let default_config = QualityGateConfig::default();
std::fs::create_dir_all(project_root.join(".ruchy"))?;
let toml_content = toml::to_string_pretty(&default_config)?;
std::fs::write(&config_path, toml_content)?;
Ok(default_config)
}
}
pub fn enforce_gates(&self, score: &QualityScore, file_path: Option<&PathBuf>) -> GateResult {
let mut violations = Vec::new();
let mut gaming_warnings = Vec::new();
if score.value < self.config.min_score {
violations.push(Violation {
violation_type: ViolationType::OverallScore,
actual: score.value,
required: self.config.min_score,
severity: Severity::Critical,
message: format!(
"Overall score {:.1}% below minimum {:.1}%",
score.value * 100.0,
self.config.min_score * 100.0
),
});
}
if score.grade < self.config.min_grade {
violations.push(Violation {
violation_type: ViolationType::Grade,
actual: score.value,
required: self.config.min_score,
severity: Severity::Critical,
message: format!(
"Grade {} below minimum {}",
score.grade, self.config.min_grade
),
});
}
self.check_component_thresholds(score, &mut violations);
self.check_anti_gaming_rules(score, file_path, &mut gaming_warnings, &mut violations);
if score.confidence < self.config.anti_gaming.min_confidence {
violations.push(Violation {
violation_type: ViolationType::Confidence,
actual: score.confidence,
required: self.config.anti_gaming.min_confidence,
severity: Severity::High,
message: format!(
"Confidence {:.1}% below minimum {:.1}%",
score.confidence * 100.0,
self.config.anti_gaming.min_confidence * 100.0
),
});
}
let passed = violations.iter().all(|v| v.severity != Severity::Critical);
GateResult {
passed,
score: score.value,
grade: score.grade,
violations,
confidence: score.confidence,
gaming_warnings,
}
}
fn check_component_thresholds(&self, score: &QualityScore, violations: &mut Vec<Violation>) {
let thresholds = &self.config.component_thresholds;
if score.components.correctness < thresholds.correctness {
violations.push(Violation {
violation_type: ViolationType::Correctness,
actual: score.components.correctness,
required: thresholds.correctness,
severity: Severity::Critical,
message: format!(
"Correctness {:.1}% below minimum {:.1}%",
score.components.correctness * 100.0,
thresholds.correctness * 100.0
),
});
}
if score.components.performance < thresholds.performance {
violations.push(Violation {
violation_type: ViolationType::Performance,
actual: score.components.performance,
required: thresholds.performance,
severity: Severity::High,
message: format!(
"Performance {:.1}% below minimum {:.1}%",
score.components.performance * 100.0,
thresholds.performance * 100.0
),
});
}
if score.components.maintainability < thresholds.maintainability {
violations.push(Violation {
violation_type: ViolationType::Maintainability,
actual: score.components.maintainability,
required: thresholds.maintainability,
severity: Severity::High,
message: format!(
"Maintainability {:.1}% below minimum {:.1}%",
score.components.maintainability * 100.0,
thresholds.maintainability * 100.0
),
});
}
if score.components.safety < thresholds.safety {
violations.push(Violation {
violation_type: ViolationType::Safety,
actual: score.components.safety,
required: thresholds.safety,
severity: Severity::Critical,
message: format!(
"Safety {:.1}% below minimum {:.1}%",
score.components.safety * 100.0,
thresholds.safety * 100.0
),
});
}
if score.components.idiomaticity < thresholds.idiomaticity {
violations.push(Violation {
violation_type: ViolationType::Idiomaticity,
actual: score.components.idiomaticity,
required: thresholds.idiomaticity,
severity: Severity::Medium,
message: format!(
"Idiomaticity {:.1}% below minimum {:.1}%",
score.components.idiomaticity * 100.0,
thresholds.idiomaticity * 100.0
),
});
}
}
fn check_anti_gaming_rules(
&self,
score: &QualityScore,
file_path: Option<&PathBuf>,
gaming_warnings: &mut Vec<String>,
violations: &mut Vec<Violation>,
) {
if score.cache_hit_rate > self.config.anti_gaming.max_cache_hit_rate {
gaming_warnings.push(format!(
"High cache hit rate {:.1}% may indicate stale analysis",
score.cache_hit_rate * 100.0
));
}
if let Some(path) = file_path {
if let Ok(metadata) = std::fs::metadata(path) {
if metadata.len() < self.config.anti_gaming.min_file_size_bytes as u64 {
gaming_warnings.push(format!(
"File {} is very small ({} bytes) - may indicate gaming by splitting",
path.display(),
metadata.len()
));
}
}
let path_str = path.to_string_lossy();
if self
.config
.anti_gaming
.require_deep_analysis
.iter()
.any(|p| path_str.contains(p))
&& score.confidence < 0.9
{
violations.push(Violation {
violation_type: ViolationType::Gaming,
actual: score.confidence,
required: 0.9,
severity: Severity::Critical,
message: format!(
"Critical file {} requires deep analysis (confidence < 90%)",
path.display()
),
});
}
}
}
pub fn export_ci_results(
&self,
results: &[GateResult],
output_dir: &Path,
) -> anyhow::Result<()> {
if self.config.ci_integration.json_output {
self.export_json_results(results, output_dir)?;
}
if self.config.ci_integration.junit_xml {
self.export_junit_results(results, output_dir)?;
}
Ok(())
}
fn export_json_results(&self, results: &[GateResult], output_dir: &Path) -> anyhow::Result<()> {
let output_path = output_dir.join("quality-gates.json");
let json_content = serde_json::to_string_pretty(results)?;
std::fs::write(output_path, json_content)?;
Ok(())
}
fn export_junit_results(
&self,
results: &[GateResult],
output_dir: &Path,
) -> anyhow::Result<()> {
let output_path = output_dir.join("quality-gates.xml");
let total = results.len();
let failures = results.iter().filter(|r| !r.passed).count();
let mut xml = format!(
r#"<?xml version="1.0" encoding="UTF-8"?>
<testsuite name="Quality Gates" tests="{total}" failures="{failures}" time="0.0">
"#
);
for (i, result) in results.iter().enumerate() {
let test_name = format!("quality-gate-{i}");
if result.passed {
xml.push_str(&format!(
r#" <testcase name="{test_name}" classname="QualityGate" time="0.0"/>
"#
));
} else {
xml.push_str(&format!(
r#" <testcase name="{}" classname="QualityGate" time="0.0">
<failure message="Quality gate violation">Score: {:.1}%, Grade: {}</failure>
</testcase>
"#,
test_name,
result.score * 100.0,
result.grade
));
}
}
xml.push_str("</testsuite>\n");
std::fs::write(output_path, xml)?;
Ok(())
}
}
// Canonical delegation: `PartialOrd` defers to the total order defined by
// `Ord` so the two impls can never disagree.
impl PartialOrd for Grade {
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
Some(self.cmp(other))
}
}
// Total order over grades via their numeric rank. `to_rank` is defined on
// `Grade` elsewhere (scoring module); the tests below pin the resulting
// order as F < D < C- < C < C+ < B- < B < B+ < A- < A < A+.
impl Ord for Grade {
fn cmp(&self, other: &Self) -> std::cmp::Ordering {
self.to_rank().cmp(&other.to_rank())
}
}
#[cfg(test)]
mod tests {
// Unit tests covering config defaults and I/O, gate enforcement (including
// exact-boundary behavior), anti-gaming heuristics, Grade ordering, CI
// export formats, and serde round-trips.
use super::*;
use crate::quality::scoring::{Grade, QualityScore};
use tempfile::TempDir;
// Fixture: a weak score (D grade, low confidence) that trips several default gates.
fn create_minimal_score() -> QualityScore {
use crate::quality::scoring::ScoreComponents;
QualityScore {
value: 0.5,
components: ScoreComponents {
correctness: 0.5,
performance: 0.5,
maintainability: 0.5,
safety: 0.5,
idiomaticity: 0.5,
},
grade: Grade::D,
confidence: 0.4,
cache_hit_rate: 0.3,
}
}
// Fixture: a strong score that clears every default gate.
fn create_passing_score() -> QualityScore {
use crate::quality::scoring::ScoreComponents;
QualityScore {
value: 0.85,
components: ScoreComponents {
correctness: 0.9,
performance: 0.8,
maintainability: 0.8,
safety: 0.9,
idiomaticity: 0.7,
},
grade: Grade::APlus,
confidence: 0.9,
cache_hit_rate: 0.2,
}
}
#[test]
fn test_default_quality_gate_config() {
let config = QualityGateConfig::default();
assert_eq!(config.min_score, 0.7);
assert_eq!(config.min_grade, Grade::BMinus);
assert_eq!(config.component_thresholds.correctness, 0.8);
assert_eq!(config.component_thresholds.safety, 0.8);
assert_eq!(config.anti_gaming.min_confidence, 0.6);
assert!(config.ci_integration.fail_on_violation);
assert!(config.project_overrides.is_empty());
}
#[test]
fn test_quality_gate_enforcer_creation() {
let config = QualityGateConfig::default();
let enforcer = QualityGateEnforcer::new(config);
let score = create_minimal_score();
let result = enforcer.enforce_gates(&score, None);
assert!(!result.passed);
assert!(!result.violations.is_empty());
}
#[test]
fn test_quality_gate_passes_with_high_score() {
let config = QualityGateConfig::default();
let enforcer = QualityGateEnforcer::new(config);
let score = create_passing_score();
let result = enforcer.enforce_gates(&score, None);
assert!(result.passed, "High quality score should pass all gates");
assert_eq!(result.score, 0.85);
assert_eq!(result.grade, Grade::APlus);
assert!(result.violations.is_empty());
assert_eq!(result.confidence, 0.9);
assert!(result.gaming_warnings.is_empty());
}
#[test]
fn test_quality_gate_fails_overall_score() {
let config = QualityGateConfig::default(); let enforcer = QualityGateEnforcer::new(config);
let mut score = create_minimal_score();
score.value = 0.6; let result = enforcer.enforce_gates(&score, None);
assert!(!result.passed, "Score below threshold should fail");
let overall_violations: Vec<_> = result
.violations
.iter()
.filter(|v| v.violation_type == ViolationType::OverallScore)
.collect();
assert_eq!(overall_violations.len(), 1);
let violation = &overall_violations[0];
assert_eq!(violation.actual, 0.6);
assert_eq!(violation.required, 0.7);
assert_eq!(violation.severity, Severity::Critical);
assert!(violation.message.contains("60.0%"));
assert!(violation.message.contains("70.0%"));
}
#[test]
fn test_confidence_threshold_violation() {
let config = QualityGateConfig::default(); let enforcer = QualityGateEnforcer::new(config);
let mut score = create_passing_score();
score.confidence = 0.4; let result = enforcer.enforce_gates(&score, None);
let confidence_violations: Vec<_> = result
.violations
.iter()
.filter(|v| v.violation_type == ViolationType::Confidence)
.collect();
assert_eq!(confidence_violations.len(), 1);
let violation = &confidence_violations[0];
assert_eq!(violation.severity, Severity::High);
assert_eq!(violation.actual, 0.4);
assert_eq!(violation.required, 0.6);
}
#[test]
fn test_load_config_creates_default() {
let temp_dir = TempDir::new().expect("operation should succeed in test");
let project_root = temp_dir.path();
let config = QualityGateEnforcer::load_config(project_root)
.expect("operation should succeed in test");
assert_eq!(config.min_score, 0.7);
assert_eq!(config.min_grade, Grade::BMinus);
let config_path = project_root.join(".ruchy").join("score.toml");
assert!(config_path.exists(), "Config file should be created");
let content =
std::fs::read_to_string(config_path).expect("operation should succeed in test");
assert!(content.contains("min_score"));
assert!(content.contains("0.7"));
}
#[test]
fn test_config_serialization() {
let original_config = QualityGateConfig::default();
let toml_content =
toml::to_string(&original_config).expect("operation should succeed in test");
assert!(toml_content.contains("min_score"));
let deserialized_config: QualityGateConfig =
toml::from_str(&toml_content).expect("operation should succeed in test");
assert_eq!(deserialized_config.min_score, original_config.min_score);
assert_eq!(deserialized_config.min_grade, original_config.min_grade);
}
#[test]
fn test_grade_ordering() {
assert!(Grade::F < Grade::D);
assert!(Grade::D < Grade::CMinus);
assert!(Grade::CMinus < Grade::C);
assert!(Grade::C < Grade::CPlus);
assert!(Grade::CPlus < Grade::BMinus);
assert!(Grade::BMinus < Grade::B);
assert!(Grade::B < Grade::BPlus);
assert!(Grade::BPlus < Grade::AMinus);
assert!(Grade::AMinus < Grade::A);
assert!(Grade::A < Grade::APlus);
assert!(Grade::C < Grade::BMinus); assert!(Grade::BMinus < Grade::A);
}
#[test]
fn test_grade_threshold_violation() {
let config = QualityGateConfig::default(); let enforcer = QualityGateEnforcer::new(config);
let mut score = create_passing_score();
score.grade = Grade::C;
let result = enforcer.enforce_gates(&score, None);
let grade_violations: Vec<_> = result
.violations
.iter()
.filter(|v| v.violation_type == ViolationType::Grade)
.collect();
assert_eq!(grade_violations.len(), 1);
let violation = &grade_violations[0];
assert_eq!(violation.severity, Severity::Critical);
assert!(violation.message.contains("Grade C below minimum B-"));
}
#[test]
fn test_correctness_threshold_violation() {
let config = QualityGateConfig::default(); let enforcer = QualityGateEnforcer::new(config);
let mut score = create_passing_score();
score.components.correctness = 0.7;
let result = enforcer.enforce_gates(&score, None);
let correctness_violations: Vec<_> = result
.violations
.iter()
.filter(|v| v.violation_type == ViolationType::Correctness)
.collect();
assert_eq!(correctness_violations.len(), 1);
let violation = &correctness_violations[0];
assert_eq!(violation.actual, 0.7);
assert_eq!(violation.required, 0.8);
assert_eq!(violation.severity, Severity::Critical);
assert!(violation.message.contains("70.0%"));
assert!(violation.message.contains("80.0%"));
}
#[test]
fn test_performance_threshold_violation() {
let config = QualityGateConfig::default(); let enforcer = QualityGateEnforcer::new(config);
let mut score = create_passing_score();
score.components.performance = 0.5;
let result = enforcer.enforce_gates(&score, None);
let performance_violations: Vec<_> = result
.violations
.iter()
.filter(|v| v.violation_type == ViolationType::Performance)
.collect();
assert_eq!(performance_violations.len(), 1);
let violation = &performance_violations[0];
assert_eq!(violation.actual, 0.5);
assert_eq!(violation.required, 0.6);
assert_eq!(violation.severity, Severity::High);
}
#[test]
fn test_safety_threshold_violation() {
let config = QualityGateConfig::default(); let enforcer = QualityGateEnforcer::new(config);
let mut score = create_passing_score();
score.components.safety = 0.75;
let result = enforcer.enforce_gates(&score, None);
let safety_violations: Vec<_> = result
.violations
.iter()
.filter(|v| v.violation_type == ViolationType::Safety)
.collect();
assert_eq!(safety_violations.len(), 1);
let violation = &safety_violations[0];
assert_eq!(violation.severity, Severity::Critical);
assert!(violation.message.contains("75.0%"));
assert!(violation.message.contains("80.0%"));
}
#[test]
fn test_maintainability_threshold_violation() {
let config = QualityGateConfig::default(); let enforcer = QualityGateEnforcer::new(config);
let mut score = create_passing_score();
score.components.maintainability = 0.65;
let result = enforcer.enforce_gates(&score, None);
let maintainability_violations: Vec<_> = result
.violations
.iter()
.filter(|v| v.violation_type == ViolationType::Maintainability)
.collect();
assert_eq!(maintainability_violations.len(), 1);
let violation = &maintainability_violations[0];
assert_eq!(violation.severity, Severity::High);
assert_eq!(violation.actual, 0.65);
assert_eq!(violation.required, 0.7);
}
#[test]
fn test_idiomaticity_threshold_violation() {
let config = QualityGateConfig::default(); let enforcer = QualityGateEnforcer::new(config);
let mut score = create_passing_score();
score.components.idiomaticity = 0.4;
let result = enforcer.enforce_gates(&score, None);
let idiomaticity_violations: Vec<_> = result
.violations
.iter()
.filter(|v| v.violation_type == ViolationType::Idiomaticity)
.collect();
assert_eq!(idiomaticity_violations.len(), 1);
let violation = &idiomaticity_violations[0];
assert_eq!(violation.severity, Severity::Medium);
assert_eq!(violation.actual, 0.4);
assert_eq!(violation.required, 0.5);
}
#[test]
fn test_high_cache_hit_rate_warning() {
let config = QualityGateConfig::default(); let enforcer = QualityGateEnforcer::new(config);
let mut score = create_passing_score();
score.cache_hit_rate = 0.9;
let result = enforcer.enforce_gates(&score, None);
assert!(!result.gaming_warnings.is_empty());
let warning = &result.gaming_warnings[0];
assert!(warning.contains("High cache hit rate 90.0%"));
assert!(warning.contains("stale analysis"));
}
#[test]
fn test_small_file_size_warning() -> anyhow::Result<()> {
let temp_dir = TempDir::new().expect("operation should succeed in test");
let small_file = temp_dir.path().join("small.rs");
std::fs::write(&small_file, "// Small file")?;
let config = QualityGateConfig::default(); let enforcer = QualityGateEnforcer::new(config);
let score = create_passing_score();
let result = enforcer.enforce_gates(&score, Some(&small_file));
assert!(!result.gaming_warnings.is_empty());
let warning = &result.gaming_warnings[0];
assert!(warning.contains("very small"));
assert!(warning.contains("gaming by splitting"));
Ok(())
}
#[test]
fn test_critical_files_deep_analysis() {
let temp_dir = TempDir::new().expect("operation should succeed in test");
let critical_file = temp_dir.path().join("src").join("main.rs");
std::fs::create_dir_all(
critical_file
.parent()
.expect("operation should succeed in test"),
)
.expect("operation should succeed in test");
std::fs::write(&critical_file, "fn main() {}").expect("operation should succeed in test");
let config = QualityGateConfig::default(); let enforcer = QualityGateEnforcer::new(config);
let mut score = create_passing_score();
score.confidence = 0.8;
let result = enforcer.enforce_gates(&score, Some(&critical_file));
let gaming_violations: Vec<_> = result
.violations
.iter()
.filter(|v| v.violation_type == ViolationType::Gaming)
.collect();
assert_eq!(gaming_violations.len(), 1);
let violation = &gaming_violations[0];
assert_eq!(violation.severity, Severity::Critical);
assert_eq!(violation.actual, 0.8);
assert_eq!(violation.required, 0.9);
assert!(violation.message.contains("deep analysis"));
}
#[test]
fn test_multiple_violations() {
let config = QualityGateConfig::default();
let enforcer = QualityGateEnforcer::new(config);
let score = create_minimal_score(); let result = enforcer.enforce_gates(&score, None);
assert!(!result.passed);
assert!(result.violations.len() >= 3);
let violation_types: std::collections::HashSet<_> = result
.violations
.iter()
.map(|v| &v.violation_type)
.collect();
assert!(violation_types.contains(&ViolationType::OverallScore));
assert!(violation_types.contains(&ViolationType::Grade));
assert!(violation_types.contains(&ViolationType::Confidence));
}
#[test]
fn test_export_json_results() -> anyhow::Result<()> {
let temp_dir = TempDir::new().expect("operation should succeed in test");
let output_dir = temp_dir.path();
let mut config = QualityGateConfig::default();
config.ci_integration.json_output = true;
config.ci_integration.junit_xml = false;
let enforcer = QualityGateEnforcer::new(config);
let results = vec![create_gate_result_passed(), create_gate_result_failed()];
enforcer.export_ci_results(&results, output_dir)?;
let json_file = output_dir.join("quality-gates.json");
assert!(json_file.exists());
let content = std::fs::read_to_string(json_file)?;
let parsed: Vec<GateResult> = serde_json::from_str(&content)?;
assert_eq!(parsed.len(), 2);
assert!(parsed[0].passed);
assert!(!parsed[1].passed);
Ok(())
}
#[test]
fn test_export_junit_xml_results() -> anyhow::Result<()> {
let temp_dir = TempDir::new().expect("operation should succeed in test");
let output_dir = temp_dir.path();
let mut config = QualityGateConfig::default();
config.ci_integration.json_output = false;
config.ci_integration.junit_xml = true;
let enforcer = QualityGateEnforcer::new(config);
let results = vec![create_gate_result_passed(), create_gate_result_failed()];
enforcer.export_ci_results(&results, output_dir)?;
let xml_file = output_dir.join("quality-gates.xml");
assert!(xml_file.exists());
let content = std::fs::read_to_string(xml_file)?;
assert!(content.contains("<?xml version="));
assert!(content.contains("<testsuite name=\"Quality Gates\" tests=\"2\" failures=\"1\""));
assert!(content.contains("<testcase name=\"quality-gate-0\" classname=\"QualityGate\""));
assert!(content.contains("<failure message=\"Quality gate violation\""));
assert!(content.contains("</testsuite>"));
Ok(())
}
#[test]
fn test_violation_enums_coverage() {
let types = [
ViolationType::OverallScore,
ViolationType::Grade,
ViolationType::Correctness,
ViolationType::Performance,
ViolationType::Maintainability,
ViolationType::Safety,
ViolationType::Idiomaticity,
ViolationType::Confidence,
ViolationType::Gaming,
];
for (i, vtype) in types.iter().enumerate() {
for (j, other) in types.iter().enumerate() {
if i == j {
assert_eq!(vtype, other);
} else {
assert_ne!(vtype, other);
}
}
}
let severities = [
Severity::Critical,
Severity::High,
Severity::Medium,
Severity::Low,
];
for (i, severity) in severities.iter().enumerate() {
for (j, other) in severities.iter().enumerate() {
if i == j {
assert_eq!(severity, other);
} else {
assert_ne!(severity, other);
}
}
}
}
#[test]
fn test_notification_config_serialization() {
let config = NotificationConfig {
slack: true,
email: false,
webhook: Some("https://test.example.com/webhook".to_string()),
};
let serialized = serde_json::to_string(&config).expect("operation should succeed in test");
let deserialized: NotificationConfig =
serde_json::from_str(&serialized).expect("operation should succeed in test");
assert!(deserialized.slack);
assert!(!deserialized.email);
assert_eq!(
deserialized.webhook,
Some("https://test.example.com/webhook".to_string())
);
}
#[test]
fn test_gate_passes_at_exact_boundary_r162() {
let config = QualityGateConfig::default(); let enforcer = QualityGateEnforcer::new(config);
let mut score = create_passing_score();
score.value = 0.7; score.grade = Grade::BMinus;
let result = enforcer.enforce_gates(&score, None);
let score_violations: Vec<_> = result
.violations
.iter()
.filter(|v| v.violation_type == ViolationType::OverallScore)
.collect();
assert!(score_violations.is_empty());
}
#[test]
fn test_gate_fails_just_below_boundary_r162() {
let config = QualityGateConfig::default(); let enforcer = QualityGateEnforcer::new(config);
let mut score = create_passing_score();
score.value = 0.699;
let result = enforcer.enforce_gates(&score, None);
let score_violations: Vec<_> = result
.violations
.iter()
.filter(|v| v.violation_type == ViolationType::OverallScore)
.collect();
assert_eq!(score_violations.len(), 1);
}
#[test]
fn test_strict_config_a_minus_minimum_r162() {
let mut config = QualityGateConfig::default();
config.min_score = 0.85;
config.min_grade = Grade::AMinus;
let enforcer = QualityGateEnforcer::new(config);
let mut score = create_passing_score();
score.value = 0.84; score.grade = Grade::BPlus;
let result = enforcer.enforce_gates(&score, None);
assert!(!result.passed);
}
#[test]
fn test_lenient_config_c_minimum_r162() {
let mut config = QualityGateConfig::default();
config.min_score = 0.5;
config.min_grade = Grade::C;
config.component_thresholds.correctness = 0.5;
config.component_thresholds.safety = 0.5;
config.anti_gaming.min_confidence = 0.3;
let enforcer = QualityGateEnforcer::new(config);
let mut score = create_minimal_score();
score.value = 0.55;
score.grade = Grade::C;
score.components.correctness = 0.55;
score.components.safety = 0.55;
score.confidence = 0.4;
let result = enforcer.enforce_gates(&score, None);
let critical_violations: Vec<_> = result
.violations
.iter()
.filter(|v| v.severity == Severity::Critical)
.collect();
assert!(critical_violations.is_empty());
}
#[test]
fn test_project_overrides_r162() {
let mut config = QualityGateConfig::default();
config
.project_overrides
.insert("performance".to_string(), 0.3);
assert_eq!(config.project_overrides.get("performance"), Some(&0.3));
}
#[test]
fn test_grade_ordering_a_plus_greater_than_a_r162() {
assert!(Grade::APlus > Grade::A);
}
#[test]
fn test_grade_ordering_a_greater_than_a_minus_r162() {
assert!(Grade::A > Grade::AMinus);
}
#[test]
fn test_grade_ordering_a_minus_greater_than_b_plus_r162() {
assert!(Grade::AMinus > Grade::BPlus);
}
#[test]
fn test_grade_ordering_f_less_than_d_r162() {
assert!(Grade::F < Grade::D);
}
#[test]
fn test_grade_equality_r162() {
assert!(Grade::A == Grade::A);
assert!(Grade::BMinus == Grade::BMinus);
}
#[test]
fn test_gate_result_serialization_roundtrip_r162() {
let result = create_gate_result_passed();
let serialized = serde_json::to_string(&result).expect("serialize should succeed");
let deserialized: GateResult =
serde_json::from_str(&serialized).expect("deserialize should succeed");
assert_eq!(result.passed, deserialized.passed);
assert!((result.score - deserialized.score).abs() < f64::EPSILON);
assert_eq!(result.grade, deserialized.grade);
}
#[test]
fn test_violation_serialization_roundtrip_r162() {
let violation = Violation {
violation_type: ViolationType::Correctness,
actual: 0.65,
required: 0.8,
severity: Severity::Critical,
message: "Test violation message".to_string(),
};
let serialized = serde_json::to_string(&violation).expect("serialize should succeed");
let deserialized: Violation =
serde_json::from_str(&serialized).expect("deserialize should succeed");
assert_eq!(violation.violation_type, deserialized.violation_type);
assert_eq!(violation.severity, deserialized.severity);
assert_eq!(violation.message, deserialized.message);
}
#[test]
fn test_component_thresholds_serialization_r162() {
let thresholds = ComponentThresholds {
correctness: 0.9,
performance: 0.7,
maintainability: 0.8,
safety: 0.95,
idiomaticity: 0.6,
};
let serialized = serde_json::to_string(&thresholds).expect("serialize should succeed");
let deserialized: ComponentThresholds =
serde_json::from_str(&serialized).expect("deserialize should succeed");
assert!((thresholds.correctness - deserialized.correctness).abs() < f64::EPSILON);
assert!((thresholds.safety - deserialized.safety).abs() < f64::EPSILON);
}
#[test]
fn test_anti_gaming_rules_serialization_r162() {
let rules = AntiGamingRules {
min_confidence: 0.7,
max_cache_hit_rate: 0.75,
require_deep_analysis: vec!["src/lib.rs".to_string()],
min_file_size_bytes: 200,
max_test_ratio: 1.5,
};
let serialized = serde_json::to_string(&rules).expect("serialize should succeed");
let deserialized: AntiGamingRules =
serde_json::from_str(&serialized).expect("deserialize should succeed");
assert_eq!(rules.min_file_size_bytes, deserialized.min_file_size_bytes);
assert_eq!(
rules.require_deep_analysis.len(),
deserialized.require_deep_analysis.len()
);
}
#[test]
fn test_ci_integration_serialization_r162() {
let ci = CiIntegration {
fail_on_violation: true,
junit_xml: true,
json_output: false,
notifications: NotificationConfig {
slack: true,
email: true,
webhook: Some("https://hook.example.com".to_string()),
},
block_merge: false,
};
let serialized = serde_json::to_string(&ci).expect("serialize should succeed");
let deserialized: CiIntegration =
serde_json::from_str(&serialized).expect("deserialize should succeed");
assert_eq!(ci.fail_on_violation, deserialized.fail_on_violation);
assert_eq!(ci.block_merge, deserialized.block_merge);
assert!(deserialized.notifications.slack);
}
#[test]
fn test_empty_violations_means_passed_r162() {
let result = GateResult {
passed: true,
score: 0.95,
grade: Grade::APlus,
violations: vec![],
confidence: 0.99,
gaming_warnings: vec![],
};
assert!(result.passed);
assert!(result.violations.is_empty());
}
#[test]
fn test_performance_violation_severity_is_high_r162() {
let config = QualityGateConfig::default();
let enforcer = QualityGateEnforcer::new(config);
let mut score = create_passing_score();
score.components.performance = 0.5;
let result = enforcer.enforce_gates(&score, None);
let perf_violations: Vec<_> = result
.violations
.iter()
.filter(|v| v.violation_type == ViolationType::Performance)
.collect();
assert_eq!(perf_violations.len(), 1);
assert_eq!(perf_violations[0].severity, Severity::High);
}
#[test]
fn test_full_config_serialization_roundtrip_r162() {
let config = QualityGateConfig::default();
let serialized = serde_json::to_string(&config).expect("serialize should succeed");
let deserialized: QualityGateConfig =
serde_json::from_str(&serialized).expect("deserialize should succeed");
assert!((config.min_score - deserialized.min_score).abs() < f64::EPSILON);
assert_eq!(config.min_grade, deserialized.min_grade);
}
#[test]
fn test_cache_hit_rate_at_exact_max_r162() {
let config = QualityGateConfig::default(); let enforcer = QualityGateEnforcer::new(config);
let mut score = create_passing_score();
score.cache_hit_rate = 0.8;
let result = enforcer.enforce_gates(&score, None);
assert!(result.gaming_warnings.is_empty());
}
#[test]
fn test_confidence_at_exact_min_passes_r162() {
let config = QualityGateConfig::default(); let enforcer = QualityGateEnforcer::new(config);
let mut score = create_passing_score();
score.confidence = 0.6;
let result = enforcer.enforce_gates(&score, None);
let confidence_violations: Vec<_> = result
.violations
.iter()
.filter(|v| v.violation_type == ViolationType::Confidence)
.collect();
assert!(confidence_violations.is_empty());
}
#[test]
fn test_all_components_at_exact_thresholds_pass_r162() {
let config = QualityGateConfig::default();
let enforcer = QualityGateEnforcer::new(config);
let mut score = create_passing_score();
score.components.correctness = 0.8;
score.components.performance = 0.6;
score.components.maintainability = 0.7;
score.components.safety = 0.8;
score.components.idiomaticity = 0.5;
let result = enforcer.enforce_gates(&score, None);
let component_violations: Vec<_> = result
.violations
.iter()
.filter(|v| {
matches!(
v.violation_type,
ViolationType::Correctness
| ViolationType::Performance
| ViolationType::Maintainability
| ViolationType::Safety
| ViolationType::Idiomaticity
)
})
.collect();
assert!(component_violations.is_empty());
}
#[test]
fn test_notification_config_none_webhook_r162() {
let config = NotificationConfig {
slack: false,
email: false,
webhook: None,
};
let serialized = serde_json::to_string(&config).expect("serialize should succeed");
let deserialized: NotificationConfig =
serde_json::from_str(&serialized).expect("deserialize should succeed");
assert!(!deserialized.slack);
assert!(!deserialized.email);
assert!(deserialized.webhook.is_none());
}
#[test]
fn test_violation_message_percentage_format_r162() {
let config = QualityGateConfig::default();
let enforcer = QualityGateEnforcer::new(config);
let mut score = create_passing_score();
score.value = 0.55;
let result = enforcer.enforce_gates(&score, None);
let score_violation = result
.violations
.iter()
.find(|v| v.violation_type == ViolationType::OverallScore)
.expect("should have score violation");
assert!(score_violation.message.contains("55.0%"));
assert!(score_violation.message.contains("70.0%"));
}
// Fixture: a passing GateResult used by the CI export tests.
fn create_gate_result_passed() -> GateResult {
GateResult {
passed: true,
score: 0.85,
grade: Grade::APlus,
violations: vec![],
confidence: 0.9,
gaming_warnings: vec![],
}
}
// Fixture: a failing GateResult with one Critical violation, for the CI export tests.
fn create_gate_result_failed() -> GateResult {
GateResult {
passed: false,
score: 0.6,
grade: Grade::D,
violations: vec![Violation {
violation_type: ViolationType::OverallScore,
actual: 0.6,
required: 0.7,
severity: Severity::Critical,
message: "Overall score 60.0% below minimum 70.0%".to_string(),
}],
confidence: 0.5,
gaming_warnings: vec!["Low confidence warning".to_string()],
}
}
}
#[cfg(test)]
mod property_tests_gates {
    // Property tests for enforcer construction.
    //
    // The previous version sliced `&input[..100]`, which itself panics when
    // byte 100 is not a UTF-8 char boundary — exactly the kind of panic the
    // test was meant to rule out — and the `catch_unwind` closure was empty,
    // so nothing was actually exercised. Build a real config instead.
    use super::*;
    use proptest::proptest;

    proptest! {
        // `QualityGateEnforcer::new` must never panic, whatever override
        // entries (including arbitrary keys and non-finite values) the
        // configuration carries.
        #[test]
        fn test_new_never_panics(key: String, value: f64) {
            let mut config = QualityGateConfig::default();
            config.project_overrides.insert(key, value);
            let _enforcer = QualityGateEnforcer::new(config);
        }
    }
}