use pmat::red_team::{ClaimExtractor, EvidenceGatherer, IntentClassifier, RepositoryContext};
/// Red-team check: an absolute claim ("all tests passing") in a commit
/// message should be refuted by test-execution evidence from the repo.
#[ignore = "red team test - run manually"]
#[test]
fn test_analyze_commit_message_detect_hallucination() {
    let message = "feat: All tests passing";

    // Extract claims from the commit message; expect exactly one,
    // normalized to lowercase and flagged as absolute.
    let extracted = ClaimExtractor::new().extract(message);
    assert_eq!(extracted.len(), 1);
    assert_eq!(extracted[0].text, "all tests passing");
    assert!(extracted[0].is_absolute);

    // Mock repository context — presumably (tests_passed=true, 5 ignored/
    // failing); confirm against the RepositoryContext builder's signature.
    let repo = RepositoryContext::new_mock().with_test_results(true, 5);
    let gathered = EvidenceGatherer::new().gather_evidence(&extracted[0], &repo);

    // The evidence sourced from test execution must contradict the claim.
    let from_tests = gathered
        .iter()
        .find(|item| matches!(item.source, pmat::red_team::EvidenceSource::TestExecution))
        .expect("Test execution evidence should exist");
    assert!(!from_tests.supports_claim);
}
/// Red-team check: a commit whose claim carries an explicit scope
/// qualifier ("MVP - Sprint 42") is recorded with that scope rather
/// than being treated as an unbounded claim.
#[ignore = "red team test - run manually"]
#[test]
fn test_analyze_commit_message_no_hallucination() {
    let message = "feat: Implement user authentication (MVP - Sprint 42)";

    let extracted = ClaimExtractor::new().extract(message);
    assert_eq!(extracted.len(), 1);

    // The parenthesized qualifier should be captured verbatim as the scope.
    assert!(extracted[0].has_scope_qualifier);
    assert_eq!(extracted[0].scope, Some("MVP - Sprint 42".to_string()));
}
/// End-to-end pipeline check: an overconfident commit ("all tests
/// passing" with zero test changes) followed much later by a commit
/// that fixes failing tests should classify as a hallucination fix.
#[test]
fn test_full_pipeline_hallucination_detection() {
    let first_message = "feat: Complete feature X - all tests passing";
    let second_message = "fix: Fix failing tests in feature X";

    // The original message must yield at least one extractable claim.
    let extractor = ClaimExtractor::new();
    assert!(!extractor.extract(first_message).is_empty());

    // Original commit: claims success, touches only source files, and
    // records no test additions or fixes.
    let first_commit = pmat::red_team::CommitInfo {
        message: first_message.to_string(),
        timestamp_seconds: 1000,
        modified_files: vec!["src/feature_x.rs".to_string()],
        issue_number: None,
        issue_created_timestamp: None,
        branch: "feature/x".to_string(),
        test_changes: pmat::red_team::TestChanges {
            added_tests: 0,
            fixed_tests: 0,
            modified_test_files: vec![],
        },
    };

    // Follow-up 100 hours later on a hotfix branch, tied to an issue,
    // touching the test file and fixing 5 tests.
    let second_commit = pmat::red_team::CommitInfo {
        message: second_message.to_string(),
        timestamp_seconds: 1000 + (100 * 3600),
        modified_files: vec![
            "src/feature_x.rs".to_string(),
            "tests/feature_x_tests.rs".to_string(),
        ],
        issue_number: Some(42),
        issue_created_timestamp: Some(5000),
        branch: "hotfix/feature-x-tests".to_string(),
        test_changes: pmat::red_team::TestChanges {
            added_tests: 0,
            fixed_tests: 5,
            modified_test_files: vec!["tests/feature_x_tests.rs".to_string()],
        },
    };

    // The classifier should flag the follow-up as fixing a hallucinated
    // claim, with reasonably high confidence.
    let verdict = IntentClassifier::new().classify(&first_commit, &second_commit);
    assert_eq!(
        verdict.intent,
        pmat::red_team::CommitIntent::HallucinationFix
    );
    assert!(verdict.confidence > 0.7);
}
/// Sanity-check the evidence structures that back CLI output: two
/// refuting evidence entries, one from test execution (no timestamp)
/// and one from git history (timestamped), with the expected details.
///
/// Fix: removed the dead local `_claim_text`, which was never read and
/// contributed nothing to the test.
#[test]
fn test_cli_output_formatting() {
    use pmat::red_team::{EvidenceResult, EvidenceSource};

    let evidence = [
        EvidenceResult {
            source: EvidenceSource::TestExecution,
            supports_claim: false,
            confidence: 0.9,
            details: "5 tests ignored".to_string(),
            timestamp: None,
        },
        EvidenceResult {
            source: EvidenceSource::GitHistory,
            supports_claim: false,
            confidence: 0.85,
            details: "2 subsequent test fixes found".to_string(),
            timestamp: Some(2000),
        },
    ];

    // Both entries refute the claim and carry human-readable details
    // that the CLI layer is expected to render.
    assert_eq!(evidence.len(), 2);
    assert!(!evidence[0].supports_claim);
    assert!(evidence[0].details.contains("5 tests"));
    assert!(!evidence[1].supports_claim);
    assert!(evidence[1].details.contains("2 subsequent"));
}
/// Negative case: a commit that merely adds tests makes no absolute
/// claim, so the extractor should find nothing to scrutinize.
#[test]
fn test_no_hallucination_detected() {
    let message = "test: Add 5 new integration tests";
    let found = ClaimExtractor::new().extract(message);
    assert_eq!(found.len(), 0);
}