use std::collections::{BTreeMap, BTreeSet};
use std::fs;
use std::path::{Path, PathBuf};
fn repo_root() -> PathBuf {
let manifest = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
manifest
.parent()
.expect("sqry-core must live in a workspace")
.to_path_buf()
}
fn parse_assert_impl_all_types(source: &str) -> BTreeSet<String> {
source
.lines()
.filter_map(|raw| {
let line = raw.trim_start();
if line.starts_with("//") {
return None;
}
let open = line.find("assert_impl_all!(")?;
let rest = &line[open + "assert_impl_all!(".len()..];
let close = rest.find(')')?;
let inner = &rest[..close];
let (ty, trait_part) = inner.split_once(':')?;
if trait_part.trim() != "NodeIdBearing" {
return None;
}
Some(ty.trim().to_string())
})
.collect()
}
#[test]
// Gate 0b drift check: the set of `assert_impl_all!(<Type>: NodeIdBearing)`
// rows in coverage.rs must exactly equal the set in the plan's §K (Step 0b)
// fenced rust block. On mismatch, panics with both symmetric differences so
// the fix direction (update plan vs. update coverage.rs) is obvious.
fn coverage_rs_matches_plan_assert_impl_all_block() {
let root = repo_root();
let coverage_path = root.join("sqry-core/src/graph/unified/rebuild/coverage.rs");
// A missing file is its own failure mode — report the path we looked at.
let coverage_src = fs::read_to_string(&coverage_path).unwrap_or_else(|e| {
panic!(
"coverage.rs must exist at {} (err: {e})",
coverage_path.display()
)
});
let plan_path = root.join("docs/superpowers/plans/2026-03-19-sqryd-daemon.md");
let plan_src = fs::read_to_string(&plan_path).unwrap_or_else(|e| {
panic!(
"sqryd daemon plan must exist at {} (err: {e})",
plan_path.display()
)
});
let coverage_types = parse_assert_impl_all_types(&coverage_src);
// An empty parse means coverage.rs was gutted, not that coverage is "done".
assert!(
!coverage_types.is_empty(),
"coverage.rs must contain at least one assert_impl_all!(<Type>: NodeIdBearing); \
line — found none. Did the file get accidentally truncated?"
);
let plan_block = extract_plan_assert_block(&plan_src).unwrap_or_else(|| {
panic!(
"Could not locate the § K `assert_impl_all!` block in the plan at {}. \
Expected a fenced rust code block containing \
`assert_impl_all!(<Type>: NodeIdBearing);` lines inside the \
§K — or §`Step 0b` — section.",
plan_path.display()
)
});
let plan_types = parse_assert_impl_all_types(&plan_block);
assert!(
!plan_types.is_empty(),
"Plan §K assert_impl_all! block parsed to an empty type set. Block contents:\n{plan_block}"
);
// Report both directions of the symmetric difference, with remediation hints.
if coverage_types != plan_types {
let only_in_coverage: BTreeSet<_> = coverage_types.difference(&plan_types).collect();
let only_in_plan: BTreeSet<_> = plan_types.difference(&coverage_types).collect();
panic!(
"Gate 0b coverage-matrix drift between plan §K and coverage.rs.\n\n\
Types present in coverage.rs but NOT in plan §K (add row to plan or remove impl): {only_in_coverage:?}\n\n\
Types present in plan §K but NOT in coverage.rs (add `assert_impl_all!` entry and impl): {only_in_plan:?}\n\n\
Plan block parsed at {}:\n{plan_block}\n\n\
coverage.rs parsed at {}:\n{coverage_types:?}\n",
plan_path.display(),
coverage_path.display()
);
}
}
/// Finds the first fenced ```` ```rust ```` code block after the "Step 0b"
/// anchor that contains an `assert_impl_all!` invocation, returning its
/// contents without the fence lines. Blocks lacking the assertion are
/// discarded; returns `None` when no qualifying block (or the anchor) exists.
fn extract_plan_assert_block(plan_src: &str) -> Option<String> {
    let tail = &plan_src[plan_src.find("Step 0b")?..];
    // `Some(buf)` while inside a rust fence; `None` between fences.
    let mut current: Option<String> = None;
    for line in tail.lines() {
        let trimmed = line.trim_start();
        if let Some(buf) = current.take() {
            // Any fence line (including a new ```rust) closes the open block.
            if trimmed.starts_with("```") {
                if buf.contains("assert_impl_all!") {
                    return Some(buf);
                }
                // Wrong block — drop it and keep scanning.
            } else {
                let mut buf = buf;
                buf.push_str(line);
                buf.push('\n');
                current = Some(buf);
            }
        } else if trimmed.starts_with("```rust") || trimmed.starts_with("``` rust") {
            current = Some(String::new());
        }
    }
    None
}
#[test]
fn plan_block_parser_extracts_rust_fenced_block_containing_assert_impl_all() {
    // Two rust fences follow the anchor; only the second holds the assertion,
    // so the extractor must skip the first and return the second.
    let plan_fixture = "Step 0b: intro text\n\
        ```rust\n\
        // some other rust block without the assertion\n\
        let x = 1;\n\
        ```\n\n\
        Some prose.\n\n\
        ```rust\n\
        use static_assertions::assert_impl_all;\n\
        assert_impl_all!(FooType: NodeIdBearing);\n\
        ```\n";
    let block = extract_plan_assert_block(plan_fixture).expect("block must be found");
    assert!(block.contains("assert_impl_all!(FooType: NodeIdBearing);"));
    assert_eq!(
        parse_assert_impl_all_types(&block),
        BTreeSet::from(["FooType".to_string()])
    );
}
#[test]
fn parse_assert_impl_all_types_ignores_commented_rows() {
    // A commented-out assertion must never count toward coverage.
    let fixture = "\
        // assert_impl_all!(CommentedOut: NodeIdBearing);\n\
        assert_impl_all!(LiveRow: NodeIdBearing);\n\
        ";
    let parsed = parse_assert_impl_all_types(fixture);
    assert_eq!(parsed, BTreeSet::from(["LiveRow".to_string()]));
}
#[test]
fn parse_assert_impl_all_types_ignores_wrong_trait() {
    // Only NodeIdBearing assertions are coverage rows; other traits don't count.
    let parsed =
        parse_assert_impl_all_types("assert_impl_all!(UnrelatedType: SomeOtherTrait);\n");
    assert!(parsed.is_empty());
}
#[test]
fn drift_detection_fires_when_coverage_is_missing_a_plan_row() {
    // Plan sanctions A and B; coverage wired up only A — the set comparison
    // must fire, and the difference must pinpoint B.
    let plan_fixture = "\
        use static_assertions::assert_impl_all;\n\
        assert_impl_all!(A: NodeIdBearing);\n\
        assert_impl_all!(B: NodeIdBearing);\n\
        ";
    let coverage_fixture = "\
        use static_assertions::assert_impl_all;\n\
        assert_impl_all!(A: NodeIdBearing);\n\
        // assert_impl_all!(B: NodeIdBearing); // forgot to wire up\n\
        ";
    let plan_set = parse_assert_impl_all_types(plan_fixture);
    let coverage_set = parse_assert_impl_all_types(coverage_fixture);
    assert_ne!(
        plan_set, coverage_set,
        "the drift-detection symmetric-difference check must fire when coverage.rs \
         is missing a plan row"
    );
    let missing: BTreeSet<_> = plan_set.difference(&coverage_set).collect();
    assert_eq!(missing, BTreeSet::from([&"B".to_string()]));
}
#[test]
fn drift_detection_fires_when_plan_is_missing_a_coverage_row() {
    // coverage.rs grew a row the plan never sanctioned — drift must fire and
    // the difference must name the unplanned row.
    let plan_fixture = "\
        use static_assertions::assert_impl_all;\n\
        assert_impl_all!(A: NodeIdBearing);\n\
        ";
    let coverage_fixture = "\
        use static_assertions::assert_impl_all;\n\
        assert_impl_all!(A: NodeIdBearing);\n\
        assert_impl_all!(UnplannedRow: NodeIdBearing);\n\
        ";
    let plan_set = parse_assert_impl_all_types(plan_fixture);
    let coverage_set = parse_assert_impl_all_types(coverage_fixture);
    assert_ne!(plan_set, coverage_set);
    let extra: BTreeSet<_> = coverage_set.difference(&plan_set).collect();
    assert_eq!(extra, BTreeSet::from([&"UnplannedRow".to_string()]));
}
#[test]
// Pins coverage.rs to the exact K.A (8 rows) + active K.B (1 row:
// FileRegistry) type set, so adding or removing a row forces a deliberate,
// reviewed edit here as well as in the plan and module docs.
fn coverage_rs_contains_all_known_k_a_and_active_k_b_types() {
let root = repo_root();
let coverage_path = root.join("sqry-core/src/graph/unified/rebuild/coverage.rs");
let coverage_src = fs::read_to_string(&coverage_path).expect("coverage.rs readable");
let types = parse_assert_impl_all_types(&coverage_src);
// The authoritative row set: eight K.A stores plus FileRegistry from K.B.
let expected: BTreeSet<String> = [
"NodeArena",
"BidirectionalEdgeStore",
"AuxiliaryIndices",
"NodeMetadataStore",
"NodeProvenanceStore",
"ScopeArena",
"AliasTable",
"ShadowTable",
"FileRegistry",
]
.iter()
.map(|s| (*s).to_string())
.collect();
assert_eq!(
types, expected,
"coverage.rs `assert_impl_all!` set must exactly equal the K.A (8) + active K.B (1) row set. \
If a row is being added or removed, update both the plan §K table and the prose in the \
`NodeIdBearing` module docs."
);
}
#[test]
fn no_other_file_asserts_node_id_bearing() {
    // coverage.rs is the single canonical home for NodeIdBearing assertions;
    // any other file containing one would silently split the coverage matrix
    // and blind the drift checks above.
    let root = repo_root();
    let canonical = root.join("sqry-core/src/graph/unified/rebuild/coverage.rs");
    let mut asserting_files: Vec<PathBuf> = Vec::new();
    walk(&root.join("sqry-core/src"), &mut asserting_files);
    let offenders: Vec<&Path> = asserting_files
        .iter()
        .filter(|p| **p != canonical)
        .map(PathBuf::as_path)
        .collect();
    assert!(
        offenders.is_empty(),
        "assert_impl_all!(..: NodeIdBearing) found outside coverage.rs: {offenders:?} — \
         consolidate the coverage matrix in coverage.rs to keep CI drift-detection sound."
    );
}
/// Parses the plan's "Other `CodeGraph` fields that are intentionally NOT in
/// K.A" bullet list into the set of excluded field names.
///
/// A single bullet may name several `` `field: Type` `` declarations separated
/// by top-level commas; prose after an em-dash (`—`) is ignored. Scanning
/// stops at the first blank or non-bullet line once the bullets begin.
/// Returns an empty set when the anchor heading is missing.
fn parse_plan_not_in_ka_fields(plan_src: &str) -> BTreeSet<String> {
    let anchor_needle = "Other `CodeGraph` fields that are intentionally NOT in K.A";
    let Some(anchor) = plan_src.find(anchor_needle) else {
        return BTreeSet::new();
    };
    let mut fields = BTreeSet::new();
    let mut bullets_started = false;
    for line in plan_src[anchor..].lines() {
        let trimmed = line.trim();
        let Some(bullet) = trimmed.strip_prefix("- ") else {
            // The list ends at the first blank or non-bullet line after it starts.
            if bullets_started && (trimmed.is_empty() || !trimmed.starts_with('-')) {
                break;
            }
            continue;
        };
        bullets_started = true;
        // Everything after the em-dash is explanatory prose, not declarations.
        let decl = bullet.find('—').map_or(bullet, |idx| &bullet[..idx]);
        for tok in split_comma_respecting_generics(decl) {
            let stripped = tok.trim().trim_matches('`').trim();
            if let Some((name, _ty)) = stripped.split_once(':') {
                let name = name.trim();
                if !name.is_empty() && is_ident(name) {
                    fields.insert(name.to_string());
                }
            }
        }
    }
    fields
}
/// Field names from the "Field on `CodeGraph`" column of the plan's K.A table.
fn parse_plan_ka_field_names(plan_src: &str) -> BTreeSet<String> {
    const ANCHOR: &str = "K.A";
    const COLUMN: &str = "Field on `CodeGraph`";
    parse_plan_table_column_field_names(plan_src, ANCHOR, COLUMN)
}
/// Active K.B rows. Currently there is exactly one (`files`), keyed off the
/// plan mentioning `FileRegistry.per_file_nodes`; the set is empty otherwise.
fn parse_plan_kb_active_field_names(plan_src: &str) -> BTreeSet<String> {
    plan_src
        .contains("`FileRegistry.per_file_nodes`")
        .then(|| "files".to_string())
        .into_iter()
        .collect()
}
/// Extracts field names from the markdown table that follows `anchor_heading`,
/// reading the backticked identifier in the column titled `column_title`.
///
/// Only the first backtick-delimited span in each cell is considered, and only
/// the segment before the first `.` (so `FileRegistry.per_file_nodes` yields
/// `FileRegistry`). Returns an empty set when the anchor or column is absent.
fn parse_plan_table_column_field_names(
plan_src: &str,
anchor_heading: &str,
column_title: &str,
) -> BTreeSet<String> {
let Some(anchor) = plan_src.find(anchor_heading) else {
return BTreeSet::new();
};
let rest = &plan_src[anchor..];
let mut header_col_idx: Option<usize> = None;
let mut in_rows = false;
let mut out = BTreeSet::new();
for line in rest.lines() {
let trimmed = line.trim();
// Non-table lines before the table are skipped; once data rows have been
// seen, the first non-table line terminates the scan.
if !trimmed.starts_with('|') {
if in_rows {
break;
}
continue;
}
let cells: Vec<&str> = trimmed
.trim_matches('|')
.split('|')
.map(str::trim)
.collect();
// Keep scanning `|` lines until one contains the target column title;
// that line is treated as the header row and fixes the column index.
if header_col_idx.is_none() {
if let Some(idx) = cells.iter().position(|c| c.contains(column_title)) {
header_col_idx = Some(idx);
}
continue;
}
// Skip the `|---|:---:|` separator row (cells of dashes/colons/spaces only).
if cells.iter().all(|c| {
c.chars()
.all(|ch| ch == '-' || ch == ':' || ch.is_whitespace())
}) {
continue;
}
in_rows = true;
let Some(idx) = header_col_idx else { break };
// Rows narrower than the header simply contribute nothing.
let Some(cell) = cells.get(idx) else { continue };
let cell = cell.trim();
// Pull the first `backticked` span out of the target cell, if any.
let Some(start) = cell.find('`') else {
continue;
};
let rest_cell = &cell[start + 1..];
let Some(end) = rest_cell.find('`') else {
continue;
};
let ident_span = &rest_cell[..end];
// `Type.field`-style spans contribute only the leading identifier.
let top = ident_span.split('.').next().unwrap_or("").trim();
if !top.is_empty() && is_ident(top) {
out.insert(top.to_string());
}
}
out
}
/// Returns true when `s` is a plain ASCII identifier: first char alphabetic
/// or `_`, remainder alphanumeric or `_`. Empty strings are not identifiers.
fn is_ident(s: &str) -> bool {
    match s.as_bytes() {
        [] => false,
        [first, rest @ ..] => {
            (first.is_ascii_alphabetic() || *first == b'_')
                && rest.iter().all(|b| b.is_ascii_alphanumeric() || *b == b'_')
        }
    }
}
/// Splits `s` on commas that sit outside any `<...>` generic nesting.
///
/// Interior segments are emitted as-is (even when empty); the trailing
/// segment is emitted only when it contains non-whitespace.
fn split_comma_respecting_generics(s: &str) -> Vec<String> {
    let mut parts = Vec::new();
    let mut current = String::new();
    let mut angle_depth: i32 = 0;
    for ch in s.chars() {
        // Only a depth-0 comma is a separator; it is consumed, not kept.
        if ch == ',' && angle_depth == 0 {
            parts.push(std::mem::take(&mut current));
            continue;
        }
        match ch {
            '<' => angle_depth += 1,
            '>' => angle_depth -= 1,
            _ => {}
        }
        current.push(ch);
    }
    if !current.trim().is_empty() {
        parts.push(current);
    }
    parts
}
#[test]
// Pins the parser output for the real plan's exclusion bullet list to the
// authoritative seven-field set, so a reshaped plan block cannot silently
// change what the classifier treats as "intentionally excluded".
fn plan_not_in_ka_fields_parses_real_plan() {
let root = repo_root();
let plan_path = root.join("docs/superpowers/plans/2026-03-19-sqryd-daemon.md");
let plan_src = fs::read_to_string(&plan_path).expect("plan readable");
let got = parse_plan_not_in_ka_fields(&plan_src);
let expected: BTreeSet<String> = [
"strings",
"edge_provenance",
"scope_provenance_store",
"file_segments",
"fact_epoch",
"epoch",
"confidence",
]
.iter()
.map(|s| (*s).to_string())
.collect();
assert_eq!(
got, expected,
"Parser output for the plan's 'Other fields intentionally NOT in K.A' bullet list \
must equal the authoritative set. If the plan block is being reshaped, update both \
the plan and this expected set."
);
}
#[test]
fn plan_not_in_ka_fields_synthetic_multi_field_bullet() {
    // A single bullet may declare several fields separated by top-level
    // commas; the comma inside `HashMap<String, _>` must not split the token.
    let fixture = "\
Other `CodeGraph` fields that are intentionally NOT in K.A. intro prose.
- `strings: Arc<StringInterner>` — keyed by `StringId`.
- `fact_epoch: u64`, `epoch: u64`, `confidence: HashMap<String, _>` — scalars.
If any of those fields ever gains a NodeId-bearing payload, a new K.A row must be added.
";
    let parsed = parse_plan_not_in_ka_fields(fixture);
    let want: BTreeSet<String> = ["strings", "fact_epoch", "epoch", "confidence"]
        .iter()
        .map(|s| (*s).to_string())
        .collect();
    assert_eq!(parsed, want);
}
#[test]
// Pins the parser output for the real plan's K.A table column to the
// authoritative eight-field set (K.A9/CSR is deliberately absent — derived
// data, covered via `edges`).
fn plan_ka_field_names_parses_real_plan() {
let root = repo_root();
let plan_path = root.join("docs/superpowers/plans/2026-03-19-sqryd-daemon.md");
let plan_src = fs::read_to_string(&plan_path).expect("plan readable");
let got = parse_plan_ka_field_names(&plan_src);
let expected: BTreeSet<String> = [
"nodes",
"edges",
"indices",
"macro_metadata",
"node_provenance",
"scope_arena",
"alias_table",
"shadow_table",
]
.iter()
.map(|s| (*s).to_string())
.collect();
assert_eq!(
got, expected,
"Parser output for plan K.A 'Field on `CodeGraph`' column must equal the \
authoritative set. K.A9 is intentionally absent (CSR is derived; \
covered by edges)."
);
}
/// Outcome of reconciling one `CodeGraph` field against the plan's K.A/K.B
/// rows, the exclusion bullet list, and the coverage.rs assertion set.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum FieldClassification {
/// Named as bearing in plan K.A/K.B and its type is asserted in coverage.rs.
Bearing,
/// Named in the plan's "intentionally NOT in K.A" bullet list.
Excluded,
/// Drift detected; the payload is a human-readable explanation of the fix.
Unknown(String),
}
/// Classifies every `CodeGraph` field by NAME against the plan's bearing
/// (K.A/K.B) and excluded lists, cross-checking bearing fields against the
/// coverage.rs `assert_impl_all!` type set.
///
/// Name-based (rather than type-based) classification is deliberate: a new
/// field reusing an already-covered type must still surface as drift. Any
/// field that is double-listed, missing a coverage impl, or absent from both
/// lists comes back as `Unknown(reason)` so the caller can fail loudly.
fn classify_code_graph_fields(
    fields: &[(String, String)],
    plan_ka_fields: &BTreeSet<String>,
    plan_kb_fields: &BTreeSet<String>,
    plan_not_in_ka: &BTreeSet<String>,
    coverage_types: &BTreeSet<String>,
) -> BTreeMap<String, FieldClassification> {
    fields
        .iter()
        .map(|(name, ty)| {
            let bearing_named = plan_ka_fields.contains(name) || plan_kb_fields.contains(name);
            let excluded_named = plan_not_in_ka.contains(name);
            let type_has_impl = coverage_types.contains(ty);
            let classification = if bearing_named && excluded_named {
                // The two plan lists are mutually exclusive by construction.
                FieldClassification::Unknown(format!(
                    "field `{name}` appears in BOTH the plan K.A/K.B field list AND the \
                     'Other fields intentionally NOT in K.A' bullet list — the two \
                     are mutually exclusive by design"
                ))
            } else if bearing_named && type_has_impl {
                FieldClassification::Bearing
            } else if bearing_named {
                // Named as bearing, but the impl/assertion side is missing.
                FieldClassification::Unknown(format!(
                    "field `{name}: {ty}` is listed in plan K.A/K.B as bearing but its \
                     type `{ty}` does not appear in coverage.rs `assert_impl_all!` — \
                     add an `impl NodeIdBearing for {ty}` and a corresponding \
                     `assert_impl_all!({ty}: NodeIdBearing);` line"
                ))
            } else if excluded_named {
                FieldClassification::Excluded
            } else {
                // Not mentioned anywhere — new field drift.
                FieldClassification::Unknown(format!(
                    "field `{name}: {ty}` is not named in plan K.A, active K.B, or the \
                     'Other fields intentionally NOT in K.A' bullet list — add a K.A/K.B row \
                     (if NodeId-bearing) OR an exclusion bullet (if not)"
                ))
            };
            (name.clone(), classification)
        })
        .collect()
}
#[test]
// Master Gate 0b field-identity check: every real CodeGraph field must
// classify as Bearing or Excluded. Any Unknown is reported together with all
// four parsed inputs so the operator can see which side drifted.
fn every_code_graph_field_is_classified_by_name() {
let root = repo_root();
let graph_path = root.join("sqry-core/src/graph/unified/concurrent/graph.rs");
let graph_src = fs::read_to_string(&graph_path).expect("concurrent/graph.rs readable");
let fields = parse_code_graph_field_types(&graph_src);
// A zero-field parse means the struct moved or was reshaped, not "all clear".
assert!(
!fields.is_empty(),
"Failed to parse any fields from CodeGraph at {}. \
Has the struct layout changed?",
graph_path.display()
);
let plan_path = root.join("docs/superpowers/plans/2026-03-19-sqryd-daemon.md");
let plan_src = fs::read_to_string(&plan_path).expect("plan readable");
let plan_ka = parse_plan_ka_field_names(&plan_src);
let plan_kb = parse_plan_kb_active_field_names(&plan_src);
let plan_not_in_ka = parse_plan_not_in_ka_fields(&plan_src);
let coverage_path = root.join("sqry-core/src/graph/unified/rebuild/coverage.rs");
let coverage_src = fs::read_to_string(&coverage_path).expect("coverage.rs readable");
let coverage_types = parse_assert_impl_all_types(&coverage_src);
let classifications = classify_code_graph_fields(
&fields,
&plan_ka,
&plan_kb,
&plan_not_in_ka,
&coverage_types,
);
// Collect every Unknown with its remediation reason for the failure message.
let unknown: Vec<(String, String)> = classifications
.iter()
.filter_map(|(name, kind)| match kind {
FieldClassification::Unknown(reason) => Some((name.clone(), reason.clone())),
_ => None,
})
.collect();
assert!(
unknown.is_empty(),
"Gate 0b field-identity drift detected on `CodeGraph`:\n{}\n\n\
Plan K.A field names parsed: {:?}\n\
Plan active K.B field names parsed: {:?}\n\
Plan 'Other fields NOT in K.A' names parsed: {:?}\n\
coverage.rs assert_impl_all! types: {:?}\n",
unknown
.iter()
.map(|(n, r)| format!(" - {n}: {r}"))
.collect::<Vec<_>>()
.join("\n"),
plan_ka,
plan_kb,
plan_not_in_ka,
coverage_types,
);
}
#[test]
// The exclusion bullet list must be neither stale (naming removed fields) nor
// incomplete: it must equal exactly the set of real CodeGraph fields the
// classifier marks as Excluded.
fn plan_not_in_ka_matches_classified_excluded_fields() {
let root = repo_root();
let graph_path = root.join("sqry-core/src/graph/unified/concurrent/graph.rs");
let graph_src = fs::read_to_string(&graph_path).expect("concurrent/graph.rs readable");
let fields = parse_code_graph_field_types(&graph_src);
let plan_path = root.join("docs/superpowers/plans/2026-03-19-sqryd-daemon.md");
let plan_src = fs::read_to_string(&plan_path).expect("plan readable");
let plan_ka = parse_plan_ka_field_names(&plan_src);
let plan_kb = parse_plan_kb_active_field_names(&plan_src);
let plan_not_in_ka = parse_plan_not_in_ka_fields(&plan_src);
let coverage_path = root.join("sqry-core/src/graph/unified/rebuild/coverage.rs");
let coverage_src = fs::read_to_string(&coverage_path).expect("coverage.rs readable");
let coverage_types = parse_assert_impl_all_types(&coverage_src);
let classifications = classify_code_graph_fields(
&fields,
&plan_ka,
&plan_kb,
&plan_not_in_ka,
&coverage_types,
);
let classifier_excluded: BTreeSet<String> = classifications
.iter()
.filter_map(|(name, kind)| match kind {
FieldClassification::Excluded => Some(name.clone()),
_ => None,
})
.collect();
// Compare in both directions so the failure names the exact fix.
let only_in_plan: BTreeSet<_> = plan_not_in_ka.difference(&classifier_excluded).collect();
let only_in_struct: BTreeSet<_> = classifier_excluded.difference(&plan_not_in_ka).collect();
assert!(
only_in_plan.is_empty() && only_in_struct.is_empty(),
"Plan 'Other fields intentionally NOT in K.A' bullet list must exactly equal \
the real CodeGraph fields classified as excluded.\n\n\
In plan bullets but not on struct (stale bullet — remove): {only_in_plan:?}\n\
On struct but not in plan bullets (missing bullet — add): {only_in_struct:?}\n"
);
}
#[test]
// Every field name the plan's K.A table / active K.B rows reference must
// still exist on the real CodeGraph struct — catches struct-field renames
// that would otherwise make the plan silently stale.
fn plan_ka_and_kb_field_names_all_exist_on_code_graph() {
let root = repo_root();
let graph_path = root.join("sqry-core/src/graph/unified/concurrent/graph.rs");
let graph_src = fs::read_to_string(&graph_path).expect("concurrent/graph.rs readable");
let fields = parse_code_graph_field_types(&graph_src);
let struct_names: BTreeSet<String> = fields.iter().map(|(n, _)| n.clone()).collect();
let plan_path = root.join("docs/superpowers/plans/2026-03-19-sqryd-daemon.md");
let plan_src = fs::read_to_string(&plan_path).expect("plan readable");
let plan_ka = parse_plan_ka_field_names(&plan_src);
let plan_kb = parse_plan_kb_active_field_names(&plan_src);
let missing_ka: BTreeSet<_> = plan_ka.difference(&struct_names).collect();
assert!(
missing_ka.is_empty(),
"Plan K.A references field name(s) not on CodeGraph: {missing_ka:?}. \
Either rename the struct field back, or update plan K.A and the classifier \
mapping together."
);
let missing_kb: BTreeSet<_> = plan_kb.difference(&struct_names).collect();
assert!(
missing_kb.is_empty(),
"Plan active K.B references field name(s) not on CodeGraph: {missing_kb:?}."
);
}
/// All five inputs to `classify_code_graph_fields`, captured as a synthetic
/// snapshot so the `synthetic_drift_*` tests can mutate one element at a time.
struct BaselineFixture {
// `(field_name, canonicalized_type)` pairs mimicking CodeGraph's fields.
fields: Vec<(String, String)>,
// Field names the plan's K.A table marks as NodeId-bearing.
plan_ka: BTreeSet<String>,
// Field names from active K.B rows.
plan_kb: BTreeSet<String>,
// Field names the plan explicitly excludes from K.A.
plan_not_in_ka: BTreeSet<String>,
// Type names asserted in coverage.rs.
coverage_types: BTreeSet<String>,
}
/// Builds a synthetic snapshot mirroring the real repo state (CodeGraph
/// fields, plan K.A/K.B rows, exclusion bullets, coverage.rs types). The
/// baseline must classify cleanly; drift tests perturb one element at a time.
fn baseline_fixture() -> BaselineFixture {
// (field_name, canonicalized_type) pairs in CodeGraph declaration order.
let fields = vec![
("nodes".into(), "NodeArena".into()),
("edges".into(), "BidirectionalEdgeStore".into()),
("strings".into(), "StringInterner".into()),
("files".into(), "FileRegistry".into()),
("indices".into(), "AuxiliaryIndices".into()),
("macro_metadata".into(), "NodeMetadataStore".into()),
("node_provenance".into(), "NodeProvenanceStore".into()),
("edge_provenance".into(), "EdgeProvenanceStore".into()),
("fact_epoch".into(), "u64".into()),
("epoch".into(), "u64".into()),
(
"confidence".into(),
"HashMap<String, ConfidenceMetadata>".into(),
),
("scope_arena".into(), "ScopeArena".into()),
("alias_table".into(), "AliasTable".into()),
("shadow_table".into(), "ShadowTable".into()),
(
"scope_provenance_store".into(),
"ScopeProvenanceStore".into(),
),
("file_segments".into(), "FileSegmentTable".into()),
];
// Plan K.A: the eight bearing field names.
let plan_ka: BTreeSet<String> = [
"nodes",
"edges",
"indices",
"macro_metadata",
"node_provenance",
"scope_arena",
"alias_table",
"shadow_table",
]
.iter()
.map(|s| (*s).to_string())
.collect();
// Active K.B: just `files`.
let plan_kb: BTreeSet<String> = ["files"].iter().map(|s| (*s).to_string()).collect();
// The plan's exclusion bullet list.
let plan_not_in_ka: BTreeSet<String> = [
"strings",
"edge_provenance",
"scope_provenance_store",
"file_segments",
"fact_epoch",
"epoch",
"confidence",
]
.iter()
.map(|s| (*s).to_string())
.collect();
// Types asserted in coverage.rs (K.A types + FileRegistry).
let coverage_types: BTreeSet<String> = [
"NodeArena",
"BidirectionalEdgeStore",
"AuxiliaryIndices",
"NodeMetadataStore",
"NodeProvenanceStore",
"ScopeArena",
"AliasTable",
"ShadowTable",
"FileRegistry",
]
.iter()
.map(|s| (*s).to_string())
.collect();
BaselineFixture {
fields,
plan_ka,
plan_kb,
plan_not_in_ka,
coverage_types,
}
}
impl BaselineFixture {
/// Runs the classifier over this fixture's five-part snapshot.
fn classify(&self) -> BTreeMap<String, FieldClassification> {
classify_code_graph_fields(
&self.fields,
&self.plan_ka,
&self.plan_kb,
&self.plan_not_in_ka,
&self.coverage_types,
)
}
}
#[test]
fn synthetic_baseline_classifies_every_real_field_cleanly() {
    // The baseline fixture mirrors today's real state, so no field in it may
    // classify as Unknown — that would invalidate every drift test below.
    let classifications = baseline_fixture().classify();
    let unknown: Vec<_> = classifications
        .iter()
        .filter_map(|(name, kind)| {
            matches!(kind, FieldClassification::Unknown(_)).then(|| name.clone())
        })
        .collect();
    assert!(
        unknown.is_empty(),
        "baseline fixture must classify every field cleanly, got Unknown: {unknown:?}"
    );
}
#[test]
fn synthetic_drift_new_field_reusing_bearing_type_is_caught() {
    // A brand-new field whose type already has a bearing impl must still be
    // flagged: classification keys on field name, not type name.
    let mut fixture = baseline_fixture();
    fixture
        .fields
        .push(("alias_table_2".into(), "AliasTable".into()));
    match fixture.classify().get("alias_table_2") {
        Some(FieldClassification::Unknown(msg)) => {
            assert!(
                msg.contains("not named in plan K.A"),
                "unexpected Unknown reason for alias_table_2: {msg}"
            );
        }
        other => panic!(
            "alias_table_2 (new field reusing AliasTable) must be classified as Unknown; \
             got {other:?}. This is the exact drift shape the iter-2 reviewer called out: \
             type-name-only classification was hiding new-field drift."
        ),
    }
}
#[test]
fn synthetic_drift_new_field_with_new_type_is_caught() {
    // A field added with a never-before-seen type must surface as Unknown.
    let mut fixture = baseline_fixture();
    fixture
        .fields
        .push(("brand_new_table".into(), "BrandNewBearingStore".into()));
    match fixture.classify().get("brand_new_table") {
        Some(FieldClassification::Unknown(_)) => {}
        other => panic!("brand_new_table must be Unknown; got {other:?}"),
    }
}
#[test]
fn synthetic_drift_new_unclassified_scalar_field_is_caught() {
    // Even a plain scalar addition needs an explicit exclusion bullet.
    let mut fixture = baseline_fixture();
    fixture
        .fields
        .push(("rebuild_generation".into(), "u64".into()));
    match fixture.classify().get("rebuild_generation") {
        Some(FieldClassification::Unknown(_)) => {}
        other => panic!("rebuild_generation must be Unknown; got {other:?}"),
    }
}
#[test]
fn synthetic_drift_plan_row_without_coverage_impl_is_caught() {
    // A bearing row whose type lost its coverage.rs assertion must produce
    // the coverage-side error message, not silently pass.
    let mut fixture = baseline_fixture();
    fixture.coverage_types.remove("AliasTable");
    match fixture.classify().get("alias_table") {
        Some(FieldClassification::Unknown(msg)) => {
            assert!(
                msg.contains("does not appear in coverage.rs"),
                "expected coverage-side error message, got: {msg}"
            );
        }
        other => panic!("alias_table must be Unknown when impl is missing; got {other:?}"),
    }
}
#[test]
fn synthetic_drift_field_listed_as_both_bearing_and_excluded_is_caught() {
    // Listing a field as bearing AND excluded is contradictory by design and
    // must trip the dedicated double-classification error.
    let mut fixture = baseline_fixture();
    fixture.plan_ka.insert("strings".to_string());
    fixture.plan_not_in_ka.insert("strings".to_string());
    match fixture.classify().get("strings") {
        Some(FieldClassification::Unknown(msg)) => {
            assert!(
                msg.contains("appears in BOTH"),
                "expected double-classification error, got: {msg}"
            );
        }
        other => panic!("strings must be Unknown when bi-classified; got {other:?}"),
    }
}
#[test]
fn synthetic_drift_plan_bullet_removed_while_field_remains_is_caught() {
    // Dropping the exclusion bullet while the struct field stays must flag
    // the now-unclassified field.
    let mut fixture = baseline_fixture();
    fixture.plan_not_in_ka.remove("strings");
    match fixture.classify().get("strings") {
        Some(FieldClassification::Unknown(msg)) => {
            assert!(
                msg.contains("not named in plan"),
                "expected 'not named in plan' error, got: {msg}"
            );
        }
        other => panic!("strings must be Unknown when bullet is removed; got {other:?}"),
    }
}
/// Parses the field list of `pub struct CodeGraph { ... }` out of raw source,
/// returning `(field_name, canonicalized_type)` pairs in declaration order.
///
/// Blank lines and comment-only lines are skipped; a `pub`/visibility prefix
/// on the name side is dropped; `Arc<T>` wrappers are unwrapped via
/// [`canonicalize_type`]. Returns an empty vec when the struct anchor or its
/// closing `\n}` cannot be located.
fn parse_code_graph_field_types(src: &str) -> Vec<(String, String)> {
    let mut fields = Vec::new();
    let anchor = "pub struct CodeGraph {";
    let Some(start) = src.find(anchor) else {
        return fields;
    };
    let after = &src[start + anchor.len()..];
    let Some(end_off) = after.find("\n}") else {
        return fields;
    };
    for raw in after[..end_off].lines() {
        let line = raw.trim();
        // `starts_with("//")` also covers `///` doc comments.
        if line.is_empty() || line.starts_with("//") {
            continue;
        }
        // BUGFIX: strip a trailing line comment FIRST, then the trailing
        // comma. The previous order left the comma glued to the type on
        // lines like `foo: Bar, // note`, producing type "Bar," and
        // defeating both Arc-unwrapping and coverage matching.
        let line = match line.find("//") {
            Some(idx) => line[..idx].trim_end(),
            None => line,
        };
        let line = line.trim_end_matches(',');
        let Some((name_part, type_part)) = line.split_once(':') else {
            continue;
        };
        // Last whitespace-separated token drops visibility (`pub nodes` -> `nodes`).
        let name = name_part
            .split_whitespace()
            .next_back()
            .expect("non-empty name side")
            .to_string();
        fields.push((name, canonicalize_type(type_part.trim())));
    }
    fields
}
/// Normalizes a field type for coverage matching: `Arc<T>` unwraps to `T`
/// (the NodeIdBearing impl lives on the inner store type); anything else is
/// returned verbatim, trimmed.
fn canonicalize_type(ty: &str) -> String {
    let ty = ty.trim();
    if let Some(rest) = ty.strip_prefix("Arc<")
        && let Some(inner) = rest.strip_suffix('>')
    {
        return inner.trim().to_string();
    }
    ty.to_string()
}
#[test]
fn parse_code_graph_field_types_handles_arc_wrapping() {
    // Arc-wrapped store types unwrap; scalars and generic maps pass through.
    let fixture = "pub struct CodeGraph {\n \
/// doc\n \
nodes: Arc<NodeArena>,\n \
edges: Arc<BidirectionalEdgeStore>,\n \
fact_epoch: u64,\n \
confidence: HashMap<String, ConfidenceMetadata>,\n\
}\n";
    let parsed = parse_code_graph_field_types(fixture);
    let expected: Vec<(String, String)> = vec![
        ("nodes".into(), "NodeArena".into()),
        ("edges".into(), "BidirectionalEdgeStore".into()),
        ("fact_epoch".into(), "u64".into()),
        (
            "confidence".into(),
            "HashMap<String, ConfidenceMetadata>".into(),
        ),
    ];
    assert_eq!(parsed, expected);
}
#[test]
fn split_comma_respecting_generics_handles_nested() {
    // The comma inside `HashMap<String, ConfidenceMetadata>` must not split.
    let parts = split_comma_respecting_generics(
        "`strings: Arc<StringInterner>`, `confidence: HashMap<String, ConfidenceMetadata>`",
    );
    assert_eq!(parts.len(), 2);
    assert!(parts[0].contains("StringInterner"));
    assert!(parts[1].contains("HashMap<String, ConfidenceMetadata>"));
}
use syn::visit::{self, Visit};
use syn::{Expr, File as SynFile, ImplItem, Item};
/// AST visitor recording which `self.<field>` receivers appear as
/// `for _ in self.<field>.all_node_ids()` loop sources — i.e. which K-row
/// fields a residue helper actually iterates.
struct ResidueFieldVisitor {
// Field names observed as `self.<field>.all_node_ids()` loop expressions.
fields: BTreeSet<String>,
}
impl<'ast> Visit<'ast> for ResidueFieldVisitor {
fn visit_expr_for_loop(&mut self, node: &'ast syn::ExprForLoop) {
if let Some(field) = extract_self_field_on_all_node_ids(&node.expr) {
self.fields.insert(field);
}
// Recurse so for-loops nested inside this one are visited too.
visit::visit_expr_for_loop(self, node);
}
}
/// Matches the exact shape `self.<field>.all_node_ids()` (no arguments) and
/// returns the field name.
///
/// Returns `None` for every other expression: different method, arguments
/// present, receiver not a field access, base not `self`, or a tuple-index
/// (unnamed) member.
fn extract_self_field_on_all_node_ids(expr: &Expr) -> Option<String> {
let Expr::MethodCall(mc) = expr else {
return None;
};
if mc.method != "all_node_ids" || !mc.args.is_empty() {
return None;
}
let Expr::Field(field) = mc.receiver.as_ref() else {
return None;
};
let Expr::Path(path) = field.base.as_ref() else {
return None;
};
// `self` parses as a one-segment path; check the last segment's ident.
let is_self = path
.path
.segments
.last()
.map(|s| s.ident == "self")
.unwrap_or(false);
if !is_self {
return None;
}
match &field.member {
syn::Member::Named(ident) => Some(ident.to_string()),
syn::Member::Unnamed(_) => None,
}
}
/// Finds the body of the first function named `fn_name`, searching both free
/// functions and methods inside any `impl` block of the parsed file.
/// Returns `None` when no such function exists.
fn find_fn_body<'a>(file: &'a SynFile, fn_name: &str) -> Option<&'a syn::Block> {
for item in &file.items {
match item {
Item::Fn(f) if f.sig.ident == fn_name => {
return Some(&f.block);
}
Item::Impl(imp) => {
for impl_item in &imp.items {
if let ImplItem::Fn(m) = impl_item
&& m.sig.ident == fn_name
{
return Some(&m.block);
}
}
}
_ => {}
}
}
None
}
/// Parses `src` with syn, locates `fn_name`, and returns the set of field
/// names iterated via `self.<field>.all_node_ids()` inside its body.
///
/// Panics when the source does not parse or the function is absent — both
/// indicate the helper under test moved or broke, which must fail loudly.
fn parse_residue_helper_k_row_fields(src: &str, fn_name: &str) -> BTreeSet<String> {
let file: SynFile = syn::parse_file(src)
.unwrap_or_else(|e| panic!("syn failed to parse source for fn `{fn_name}`: {e}"));
let block = find_fn_body(&file, fn_name)
.unwrap_or_else(|| panic!("function `{fn_name}` not found in parsed source"));
let mut visitor = ResidueFieldVisitor {
fields: BTreeSet::new(),
};
visit::visit_block(&mut visitor, block);
visitor.fields
}
#[test]
// Cross-checks — via syn AST inspection, not string matching — that BOTH
// residue helpers iterate exactly the nine K-row fields, and that they agree
// with each other. A K-row addition must extend both helpers plus the plan.
fn every_k_row_is_covered_by_both_residue_helpers() {
let root = repo_root();
let code_graph_path = root.join("sqry-core/src/graph/unified/concurrent/graph.rs");
let code_graph_src =
fs::read_to_string(&code_graph_path).expect("concurrent/graph.rs readable");
let code_graph_fields =
parse_residue_helper_k_row_fields(&code_graph_src, "assert_no_tombstone_residue_for");
let rebuild_graph_path = root.join("sqry-core/src/graph/unified/rebuild/rebuild_graph.rs");
let rebuild_graph_src =
fs::read_to_string(&rebuild_graph_path).expect("rebuild/rebuild_graph.rs readable");
let rebuild_graph_fields =
parse_residue_helper_k_row_fields(&rebuild_graph_src, "assert_no_tombstone_residue");
// The authoritative nine K-row field names (K.A eight + `files` from K.B).
let expected: BTreeSet<String> = [
"nodes",
"indices",
"edges",
"macro_metadata",
"node_provenance",
"scope_arena",
"alias_table",
"shadow_table",
"files",
]
.iter()
.map(|s| (*s).to_string())
.collect();
assert_eq!(
code_graph_fields, expected,
"CodeGraph::assert_no_tombstone_residue_for must iterate exactly the K-row fields \
{expected:?}, got {code_graph_fields:?}. \
When adding a K-row, extend this helper AND \
RebuildGraph::assert_no_tombstone_residue AND the plan."
);
assert_eq!(
rebuild_graph_fields, expected,
"RebuildGraph::assert_no_tombstone_residue must iterate exactly the K-row fields \
{expected:?}, got {rebuild_graph_fields:?}. \
When adding a K-row, extend this helper AND \
CodeGraph::assert_no_tombstone_residue_for AND the plan."
);
assert_eq!(
code_graph_fields, rebuild_graph_fields,
"Residue-helper K-row drift: CodeGraph has {code_graph_fields:?} but RebuildGraph \
has {rebuild_graph_fields:?}. A future K-row addition must extend BOTH helpers; \
this test exists to enforce that contract."
);
}
#[test]
fn residue_field_visitor_extracts_single_loop() {
    // One qualifying loop inside a method body → exactly one field recorded.
    let src = "
impl Foo {
pub fn bar(&self) {
for _nid in self.only_field.all_node_ids() {}
}
}
";
    let found = parse_residue_helper_k_row_fields(src, "bar");
    assert_eq!(found, BTreeSet::from(["only_field".to_string()]));
}
#[test]
fn residue_field_visitor_ignores_unrelated_calls() {
    // Only `self.<field>.all_node_ids()` loops count; other iteration sources
    // (different methods, free functions) are ignored.
    let src = "
impl Foo {
pub fn bar(&self) {
for _nid in self.a.all_node_ids() {}
for _x in self.b.iter() {} // ignored — not all_node_ids
for _c in other_fn() {} // ignored — not a self.<field>
for _nid in self.c.all_node_ids() {}
}
}
";
    let found = parse_residue_helper_k_row_fields(src, "bar");
    let expected: BTreeSet<String> = ["a", "c"].iter().map(|s| (*s).to_string()).collect();
    assert_eq!(found, expected);
}
fn walk(dir: &Path, found: &mut Vec<PathBuf>) {
let Ok(entries) = fs::read_dir(dir) else {
return;
};
for entry in entries.flatten() {
let path = entry.path();
if path.is_dir() {
walk(&path, found);
continue;
}
if path.extension().and_then(|e| e.to_str()) != Some("rs") {
continue;
}
let Ok(src) = fs::read_to_string(&path) else {
continue;
};
if !parse_assert_impl_all_types(&src).is_empty() {
found.push(path);
}
}
}