use std::collections::HashSet;
use quorum_core::memory::identity::finding_identity_hash;
use quorum_core::memory::{DismissalReason, LocalSqliteMemoryStore, MemoryStore};
use quorum_core::review::{Finding, FindingSource, Severity};
use tempfile::TempDir;
/// Create a temporary directory and initialise an empty git repository in it.
///
/// The returned `TempDir` guard keeps the directory alive for the duration of
/// the test (it is deleted when the guard is dropped), which is why the guard
/// itself — not just a path — is handed back to the caller.
fn init_repo() -> TempDir {
    let td = TempDir::new().unwrap();
    // The repository handle is not needed afterwards; init's side effect
    // (creating `.git` under the temp dir) is all the tests rely on.
    // The redundant `let _ =` binding is dropped: `.unwrap()` already
    // asserts success, and the returned handle can be discarded directly.
    git2::Repository::init(td.path()).unwrap();
    td
}
/// Build a `Finding` fixture from a title, a source, and the supporting model
/// names. Severity and confidence are held constant so tests vary only the
/// identity-relevant fields.
fn f(title: &str, src: FindingSource, models: &[&str]) -> Finding {
    let supported_by: Vec<String> = models.iter().map(|m| (*m).to_string()).collect();
    Finding {
        severity: Severity::High,
        title: title.to_string(),
        body: format!("Confidence: 0.90. Supported by: {}.", supported_by.join(", ")),
        source: src,
        supported_by,
        confidence: Some(0.90),
    }
}
/// Remove every finding in `review` that matches an active dismissal and
/// record that those findings were seen again in the given session.
///
/// Returns the identity hashes of the findings that were filtered out.
fn filter_and_record(
    review: &mut quorum_core::Review,
    store: &LocalSqliteMemoryStore,
    session_id: &str,
) -> Vec<quorum_core::memory::FindingIdentityHash> {
    let active = store.load_active_dismissals().unwrap();
    let mut matched = Vec::new();
    // `retain` keeps surviving findings in their original order while the
    // closure side-channels the hashes of everything that gets suppressed.
    review.findings.retain(|finding| {
        let hash = finding_identity_hash(finding);
        let suppressed = active.contains_key(&hash);
        if suppressed {
            matched.push(hash);
        }
        !suppressed
    });
    // Only touch recurrence bookkeeping when something was actually filtered.
    if !matched.is_empty() {
        store
            .record_seen(&matched, session_id, time::OffsetDateTime::now_utc())
            .unwrap();
    }
    matched
}
#[test]
fn dismissed_finding_is_filtered_out_on_subsequent_review() {
    let repo = init_repo();
    let store = LocalSqliteMemoryStore::new(repo.path()).unwrap();

    // One finding is dismissed for a year; the other stays active.
    let dismissed = f(
        "Race condition in cache",
        FindingSource::Divergence,
        &["m1", "m2"],
    );
    let surviving = f("Spurious log noise", FindingSource::Agreement, &["m1"]);
    store
        .dismiss(
            &dismissed,
            "head",
            "main",
            DismissalReason::WontFix,
            None,
            Some(time::Duration::days(365)),
        )
        .unwrap();

    let mut review = quorum_core::Review {
        session_id: "S1".into(),
        findings: vec![dismissed.clone(), surviving.clone()],
        model_names: vec!["m1".into(), "m2".into()],
        elapsed: std::time::Duration::ZERO,
        project_id: None,
        base_url: "https://x".into(),
        summary_text: None,
        final_agreement_score: None,
    };
    let matched = filter_and_record(&mut review, &store, "S1");

    // Exactly the dismissed finding is suppressed; the other one survives.
    assert_eq!(matched.len(), 1);
    assert_eq!(review.findings.len(), 1);
    assert_eq!(review.findings[0].title, "Spurious log noise");
}
#[test]
fn record_seen_bumps_once_per_session_then_again_on_new_session() {
    let repo = init_repo();
    let store = LocalSqliteMemoryStore::new(repo.path()).unwrap();

    let finding = f("X", FindingSource::Divergence, &["m"]);
    store
        .dismiss(
            &finding,
            "h",
            "main",
            DismissalReason::FalsePositive,
            None,
            Some(time::Duration::days(365)),
        )
        .unwrap();
    let hash = finding_identity_hash(&finding);

    let mut review = quorum_core::Review {
        session_id: "S1".into(),
        findings: vec![finding.clone()],
        model_names: vec!["m".into()],
        elapsed: std::time::Duration::ZERO,
        project_id: None,
        base_url: "https://x".into(),
        summary_text: None,
        final_agreement_score: None,
    };

    // Two sightings inside the same session must count as a single recurrence.
    filter_and_record(&mut review, &store, "S1");
    review.findings = vec![finding.clone()];
    filter_and_record(&mut review, &store, "S1");
    let entry = store.load_active_dismissals().unwrap()[&hash].clone();
    assert_eq!(entry.recurrence_count, 2, "same session must bump only once");

    // A sighting in a fresh session bumps the counter once more.
    review.findings = vec![finding.clone()];
    filter_and_record(&mut review, &store, "S2");
    let entry = store.load_active_dismissals().unwrap()[&hash].clone();
    assert_eq!(entry.recurrence_count, 3);
}
#[test]
fn permanent_dismissal_suppresses_indefinitely() {
    let repo = init_repo();
    let store = LocalSqliteMemoryStore::new(repo.path()).unwrap();

    // A dismissal with no expiry (`None` duration) never lapses.
    let finding = f("permanent issue", FindingSource::Divergence, &["m"]);
    store
        .dismiss(
            &finding,
            "h",
            "main",
            DismissalReason::Intentional,
            None,
            None,
        )
        .unwrap();

    let mut review = quorum_core::Review {
        session_id: "S".into(),
        findings: vec![finding.clone()],
        model_names: vec!["m".into()],
        elapsed: std::time::Duration::ZERO,
        project_id: None,
        base_url: "https://x".into(),
        summary_text: None,
        final_agreement_score: None,
    };
    let matched = filter_and_record(&mut review, &store, "S");

    // The permanently dismissed finding is always filtered out.
    assert_eq!(matched.len(), 1);
    assert!(review.findings.is_empty());
}
#[test]
fn hash_stable_under_body_and_confidence_drift() {
    // Two findings identical except for body text and confidence score.
    let variant = |body: &str, confidence: f64| Finding {
        body: body.to_string(),
        confidence: Some(confidence),
        ..f(
            "Same finding",
            FindingSource::Divergence,
            &["alpha", "beta"],
        )
    };
    let a = variant("Confidence: 0.95. Supported by: alpha, beta.", 0.95);
    let b = variant("Confidence: 0.71. Supported by: alpha, beta.", 0.71);
    // Identity must ignore body/confidence drift between review runs.
    assert_eq!(finding_identity_hash(&a), finding_identity_hash(&b));
}
#[test]
fn hash_changes_when_model_set_changes() {
    // Adding a supporting model must alter the identity hash.
    let two_models = f("T", FindingSource::Agreement, &["alpha", "beta"]);
    let three_models = f("T", FindingSource::Agreement, &["alpha", "beta", "gamma"]);
    assert_ne!(
        finding_identity_hash(&two_models),
        finding_identity_hash(&three_models)
    );
}
#[test]
fn cross_finding_collision_rate() {
    // Fixed 20-entry corpus as (title, source, supporting models) rows;
    // the table form keeps the data auditable at a glance.
    let corpus: [(&str, FindingSource, &[&str]); 20] = [
        ("Race condition in cache", FindingSource::Divergence, &["m1", "m2"]),
        ("Race condition in cache", FindingSource::Divergence, &["m1", "m3"]),
        ("Race condition in queue", FindingSource::Divergence, &["m1", "m2"]),
        ("Unbounded recursion in parser", FindingSource::Divergence, &["m1", "m2", "m3"]),
        ("Missing input validation on endpoint", FindingSource::Divergence, &["m1"]),
        ("Authentication bypass via admin flag", FindingSource::Divergence, &["m1", "m2", "m3"]),
        ("SQL injection in search filter", FindingSource::Divergence, &["m2"]),
        ("Token leak in error response", FindingSource::Agreement, &["m1", "m3"]),
        ("Stale cache eviction policy", FindingSource::Agreement, &["m1", "m2"]),
        ("Inconsistent timezone handling", FindingSource::Agreement, &["m1", "m2"]),
        ("Hard-coded retry count", FindingSource::Agreement, &["m1", "m2"]),
        ("Off-by-one in pagination", FindingSource::Agreement, &["m1", "m3"]),
        ("Magic number 42 in scheduler", FindingSource::Assumption, &[]),
        ("Caller is expected to validate UTF-8", FindingSource::Assumption, &[]),
        ("File system is case-insensitive", FindingSource::Assumption, &[]),
        ("HTTPS terminated at the proxy", FindingSource::Assumption, &[]),
        ("Stack frames are bounded by recursion depth", FindingSource::Assumption, &[]),
        ("Logging level is INFO by default", FindingSource::Assumption, &[]),
        ("Process restart is acceptable on OOM", FindingSource::Assumption, &[]),
        ("Network is not partitioned during boot", FindingSource::Assumption, &[]),
    ];
    let mut findings = Vec::with_capacity(corpus.len());
    for (title, source, models) in corpus {
        findings.push(f(title, source, models));
    }
    assert_eq!(findings.len(), 20, "test corpus stays at 20 findings");

    let hashes: Vec<_> = findings.iter().map(finding_identity_hash).collect();

    // Count colliding unordered pairs: each element against the tail after it.
    let n = hashes.len();
    let mut collisions = 0;
    for (i, a) in hashes.iter().enumerate() {
        collisions += hashes[i + 1..].iter().filter(|b| *b == a).count();
    }
    let total_pairs = n * (n - 1) / 2;
    let rate = collisions as f64 / total_pairs as f64;
    assert!(
        rate <= 0.02,
        "cross-finding collision rate exceeds 2% guard: {} / {} = {:.2}%",
        collisions,
        total_pairs,
        rate * 100.0,
    );

    // Near-distinct sanity check on the raw hash set.
    let distinct: HashSet<_> = hashes.iter().collect();
    assert!(
        distinct.len() >= 18,
        "expected close-to-distinct hashes; got {} distinct of 20",
        distinct.len()
    );
}