use serde_json::{Map, Value, json};
use std::collections::BTreeMap;
use std::path::PathBuf;
use vela_protocol::access_tier::AccessTier;
use vela_protocol::bundle::{
Artifact, Assertion, Author, Conditions, Confidence, ConfidenceKind, ConfidenceMethod, Entity,
Evidence, Extraction, FindingBundle, Flags, Link, NegativeResult, NegativeResultKind,
Provenance, Trajectory, TrajectoryStep, TrajectoryStepKind,
};
use vela_protocol::events::{
self, FindingEventInput, NULL_HASH, StateActor, StateEvent, StateTarget,
};
use vela_protocol::reducer::replay_from_genesis;
const FIXTURE_FRONTIER_COUNT: usize = 3;
const FINDINGS_PER_FRONTIER: usize = 8;
const CASCADE_DEPTH: usize = 5;
fn fixture_timestamp(frontier_idx: usize, event_idx: usize) -> String {
format!(
"2026-05-02T{:02}:{:02}:{:02}Z",
frontier_idx % 24,
(event_idx / 60) % 60,
event_idx % 60
)
}
fn fixture_object_timestamp(frontier_idx: usize, object_idx: usize) -> String {
format!(
"2026-05-02T{:02}:30:{:02}Z",
frontier_idx % 24,
object_idx % 60
)
}
fn pin_negative_result(
mut nr: NegativeResult,
frontier_idx: usize,
object_idx: usize,
) -> NegativeResult {
nr.created = fixture_object_timestamp(frontier_idx, object_idx);
nr.id =
NegativeResult::content_address(&nr.kind, &nr.deposited_by, &nr.created, &nr.conditions);
nr
}
fn pin_trajectory(mut traj: Trajectory, frontier_idx: usize, object_idx: usize) -> Trajectory {
traj.created = fixture_object_timestamp(frontier_idx, object_idx);
traj.id = Trajectory::content_address(&traj.target_findings, &traj.deposited_by, &traj.created);
traj
}
fn pin_artifact(mut artifact: Artifact, frontier_idx: usize, object_idx: usize) -> Artifact {
artifact.created = fixture_object_timestamp(frontier_idx, object_idx);
artifact
}
fn replace_event_id_strings(value: &mut Value, id_map: &BTreeMap<String, String>) {
match value {
Value::String(s) => {
if let Some(replacement) = id_map.get(s) {
*s = replacement.clone();
}
}
Value::Array(items) => {
for item in items {
replace_event_id_strings(item, id_map);
}
}
Value::Object(map) => {
for item in map.values_mut() {
replace_event_id_strings(item, id_map);
}
}
_ => {}
}
}
fn normalize_event_log(
frontier_idx: usize,
event_log: Vec<events::StateEvent>,
) -> Vec<events::StateEvent> {
let mut id_map = BTreeMap::new();
let mut normalized = Vec::with_capacity(event_log.len());
for (event_idx, mut event) in event_log.into_iter().enumerate() {
let old_id = event.id.clone();
event.timestamp = fixture_timestamp(frontier_idx, event_idx);
replace_event_id_strings(&mut event.payload, &id_map);
event.id = events::compute_event_id(&event);
if !old_id.is_empty() {
id_map.insert(old_id, event.id.clone());
}
normalized.push(event);
}
normalized
}
fn make_finding(frontier_idx: usize, finding_idx: usize) -> FindingBundle {
let assertion = Assertion {
text: format!(
"Cross-impl finding {finding_idx} in frontier {frontier_idx}: protein-X activates pathway-Y."
),
assertion_type: "mechanism".into(),
entities: vec![Entity {
name: format!("ProteinX{finding_idx}"),
entity_type: "protein".into(),
identifiers: Map::new(),
canonical_id: None,
candidates: vec![],
aliases: vec![],
resolution_provenance: None,
resolution_confidence: 1.0,
resolution_method: None,
species_context: None,
needs_review: false,
}],
relation: Some("activates".into()),
direction: Some("positive".into()),
causal_claim: None,
causal_evidence_grade: None,
};
let evidence = Evidence {
evidence_type: "experimental".into(),
model_system: "mouse".into(),
species: Some("Mus musculus".into()),
method: "Western blot".into(),
sample_size: Some("n=30".into()),
effect_size: None,
p_value: Some("p<0.05".into()),
replicated: true,
replication_count: Some(3),
evidence_spans: vec![],
};
let conditions = Conditions {
text: "In vitro, mouse microglia".into(),
species_verified: vec!["Mus musculus".into()],
species_unverified: vec![],
in_vitro: true,
in_vivo: false,
human_data: false,
clinical_trial: false,
concentration_range: None,
duration: None,
age_group: None,
cell_type: Some("microglia".into()),
};
let confidence = Confidence {
kind: ConfidenceKind::FrontierEpistemic,
score: 0.7,
basis: "Cross-impl test fixture".into(),
method: ConfidenceMethod::LlmInitial,
components: None,
extraction_confidence: 0.9,
};
let provenance = Provenance {
source_type: "published_paper".into(),
doi: Some(format!(
"10.0000/crossimpl.frontier{frontier_idx:04}.finding{finding_idx:04}"
)),
pmid: None,
pmc: None,
openalex_id: None,
url: None,
title: format!("Cross-impl paper {frontier_idx}-{finding_idx}"),
authors: vec![Author {
name: "Cross-Impl A".into(),
orcid: None,
}],
year: Some(2026),
journal: Some("Cross Journal".into()),
license: None,
publisher: None,
funders: vec![],
extraction: Extraction::default(),
review: None,
citation_count: Some(0),
};
let flags = Flags {
gap: false,
negative_space: false,
contested: false,
retracted: false,
declining: false,
gravity_well: false,
review_state: None,
superseded: false,
signature_threshold: None,
jointly_accepted: false,
};
let mut bundle = FindingBundle::new(
assertion, evidence, conditions, confidence, provenance, flags,
);
bundle.created = fixture_object_timestamp(frontier_idx, finding_idx);
if finding_idx + 1 < FINDINGS_PER_FRONTIER {
let next_id = synthetic_id(frontier_idx, finding_idx + 1);
bundle.links = vec![Link {
target: next_id,
link_type: "supports".into(),
note: "synthetic dependency".into(),
inferred_by: "vela-cross-impl-fixture/0".into(),
created_at: "2026-05-02T00:00:00Z".into(),
mechanism: None,
}];
}
bundle
}
fn synthetic_id(frontier_idx: usize, finding_idx: usize) -> String {
let assertion = Assertion {
text: format!(
"Cross-impl finding {finding_idx} in frontier {frontier_idx}: protein-X activates pathway-Y."
),
assertion_type: "mechanism".into(),
entities: vec![],
relation: None,
direction: None,
causal_claim: None,
causal_evidence_grade: None,
};
let provenance = Provenance {
source_type: "published_paper".into(),
doi: Some(format!(
"10.0000/crossimpl.frontier{frontier_idx:04}.finding{finding_idx:04}"
)),
pmid: None,
pmc: None,
openalex_id: None,
url: None,
title: format!("Cross-impl paper {frontier_idx}-{finding_idx}"),
authors: vec![],
year: None,
journal: None,
license: None,
publisher: None,
funders: vec![],
extraction: Extraction::default(),
review: None,
citation_count: None,
};
FindingBundle::content_address(&assertion, &provenance)
}
fn build_event_log(frontier_idx: usize, findings: &[FindingBundle]) -> Vec<events::StateEvent> {
let mut log = Vec::new();
let actor_id = format!("reviewer:cross-impl-{frontier_idx}");
for f in findings {
let proposal_id = format!("vpr_{}_{}", frontier_idx, &f.id[3..]);
log.push(events::new_finding_event(FindingEventInput {
kind: "finding.asserted",
finding_id: &f.id,
actor_id: &actor_id,
actor_type: "human",
reason: "cross-impl genesis assertion",
before_hash: NULL_HASH,
after_hash: NULL_HASH,
payload: json!({"proposal_id": proposal_id}),
caveats: vec![],
}));
log.push(events::new_finding_event(FindingEventInput {
kind: "finding.reviewed",
finding_id: &f.id,
actor_id: &actor_id,
actor_type: "human",
reason: "cross-impl review",
before_hash: NULL_HASH,
after_hash: NULL_HASH,
payload: json!({"proposal_id": proposal_id, "status": "accepted"}),
caveats: vec![],
}));
}
let root = &findings[0];
let root_proposal = format!("vpr_{}_{}", frontier_idx, &root.id[3..]);
let retract = events::new_finding_event(FindingEventInput {
kind: "finding.retracted",
finding_id: &root.id,
actor_id: &actor_id,
actor_type: "human",
reason: "cross-impl retraction triggers cascade",
before_hash: NULL_HASH,
after_hash: NULL_HASH,
payload: json!({
"proposal_id": root_proposal,
"affected": CASCADE_DEPTH,
}),
caveats: vec![],
});
let retract_event_id = retract.id.clone();
let root_id = root.id.clone();
log.push(retract);
for depth in 1..=CASCADE_DEPTH {
if depth >= findings.len() {
break;
}
let dep = &findings[depth];
let dep_proposal = format!("vpr_{}_{}", frontier_idx, &dep.id[3..]);
log.push(events::new_finding_event(FindingEventInput {
kind: "finding.dependency_invalidated",
finding_id: &dep.id,
actor_id: &actor_id,
actor_type: "human",
reason: "cross-impl cascade",
before_hash: NULL_HASH,
after_hash: NULL_HASH,
payload: json!({
"proposal_id": dep_proposal,
"upstream_finding_id": root_id,
"upstream_event_id": retract_event_id,
"depth": depth as u64,
}),
caveats: vec![],
}));
}
log
}
fn build_review_branches_log(
frontier_idx: usize,
findings: &[FindingBundle],
) -> Vec<events::StateEvent> {
let mut log = Vec::new();
let actor_id = format!("reviewer:review-branches-{frontier_idx}");
let statuses = ["accepted", "contested", "needs_revision", "rejected"];
for (i, f) in findings.iter().enumerate() {
let proposal_id = format!("vpr_{}_{}", frontier_idx, &f.id[3..]);
log.push(events::new_finding_event(FindingEventInput {
kind: "finding.asserted",
finding_id: &f.id,
actor_id: &actor_id,
actor_type: "human",
reason: "review-branch genesis",
before_hash: NULL_HASH,
after_hash: NULL_HASH,
payload: json!({"proposal_id": proposal_id}),
caveats: vec![],
}));
let status = statuses[i % statuses.len()];
log.push(events::new_finding_event(FindingEventInput {
kind: "finding.reviewed",
finding_id: &f.id,
actor_id: &actor_id,
actor_type: "human",
reason: "review-branch coverage",
before_hash: NULL_HASH,
after_hash: NULL_HASH,
payload: json!({"proposal_id": proposal_id, "status": status}),
caveats: vec![],
}));
let (prev, new) = if i % 2 == 0 {
(0.7, 1.0)
} else {
(0.7, 0.42_f64)
};
let revise_reason = format!("revise to {new:.3}");
log.push(events::new_finding_event(FindingEventInput {
kind: "finding.confidence_revised",
finding_id: &f.id,
actor_id: &actor_id,
actor_type: "human",
reason: &revise_reason,
before_hash: NULL_HASH,
after_hash: NULL_HASH,
payload: json!({
"proposal_id": proposal_id,
"previous_score": prev,
"new_score": new,
}),
caveats: vec![],
}));
}
log
}
fn build_annotations_log(
frontier_idx: usize,
findings: &[FindingBundle],
) -> Vec<events::StateEvent> {
let mut log = Vec::new();
let actor_id = format!("reviewer:annotations-{frontier_idx}");
for (i, f) in findings.iter().enumerate() {
let proposal_id = format!("vpr_{}_{}", frontier_idx, &f.id[3..]);
log.push(events::new_finding_event(FindingEventInput {
kind: "finding.asserted",
finding_id: &f.id,
actor_id: &actor_id,
actor_type: "human",
reason: "annotations-fixture genesis",
before_hash: NULL_HASH,
after_hash: NULL_HASH,
payload: json!({"proposal_id": proposal_id}),
caveats: vec![],
}));
let kind = if i < findings.len() / 2 {
"finding.noted"
} else {
"finding.caveated"
};
let annotation_id = format!("ann_{}_{}", frontier_idx, i);
log.push(events::new_finding_event(FindingEventInput {
kind,
finding_id: &f.id,
actor_id: &actor_id,
actor_type: "human",
reason: "annotation coverage",
before_hash: NULL_HASH,
after_hash: NULL_HASH,
payload: json!({
"proposal_id": proposal_id,
"annotation_id": annotation_id,
"text": format!("note {i} on finding {}", &f.id[..8]),
"provenance": {
"doi": format!("10.0000/annot.{frontier_idx}.{i}"),
},
}),
caveats: vec![],
}));
}
if let Some(last) = findings.last() {
let proposal_id = format!("vpr_{}_{}", frontier_idx, &last.id[3..]);
log.push(events::new_finding_event(FindingEventInput {
kind: "finding.rejected",
finding_id: &last.id,
actor_id: &actor_id,
actor_type: "human",
reason: "rejection coverage",
before_hash: NULL_HASH,
after_hash: NULL_HASH,
payload: json!({"proposal_id": proposal_id}),
caveats: vec![],
}));
}
log
}
fn build_negative_results_log(
frontier_idx: usize,
findings: &[FindingBundle],
) -> Vec<events::StateEvent> {
let mut log = Vec::new();
let actor_id = format!("reviewer:negative-results-{frontier_idx}");
for f in findings {
let proposal_id = format!("vpr_{}_{}", frontier_idx, &f.id[3..]);
log.push(events::new_finding_event(FindingEventInput {
kind: "finding.asserted",
finding_id: &f.id,
actor_id: &actor_id,
actor_type: "human",
reason: "negative-results-fixture genesis",
before_hash: NULL_HASH,
after_hash: NULL_HASH,
payload: json!({"proposal_id": proposal_id}),
caveats: vec![],
}));
}
let trial_conditions = Conditions {
text: format!("Phase III RCT, frontier {frontier_idx}"),
species_verified: vec!["Homo sapiens".into()],
species_unverified: vec![],
in_vitro: false,
in_vivo: true,
human_data: true,
clinical_trial: true,
concentration_range: None,
duration: Some("18 months".into()),
age_group: Some("65+".into()),
cell_type: None,
};
let trial_provenance = Provenance {
source_type: "clinical_trial".into(),
doi: None,
pmid: None,
pmc: None,
openalex_id: None,
url: None,
title: format!("Trial readout, frontier {frontier_idx}"),
authors: vec![],
year: Some(2026),
journal: None,
license: None,
publisher: None,
funders: vec![],
extraction: Extraction::default(),
review: None,
citation_count: Some(0),
};
let trial_kind = NegativeResultKind::RegisteredTrial {
endpoint: format!("Primary endpoint frontier {frontier_idx}"),
intervention: "intervention-arm".into(),
comparator: "placebo".into(),
population: "early symptomatic, biomarker-positive".into(),
n_enrolled: 1200,
power: 0.9,
effect_size_ci: (-0.05, 0.05),
effect_size_threshold: Some(0.4),
registry_id: Some(format!("NCT{frontier_idx:08}")),
};
let trial_null = pin_negative_result(
NegativeResult::new(
trial_kind,
vec![findings[0].id.clone()],
format!("trial-pi:cross-impl-{frontier_idx}"),
trial_conditions,
trial_provenance,
"Pre-registered primary endpoint did not meet MCID; CI excludes it.",
),
frontier_idx,
100,
);
let trial_id = trial_null.id.clone();
let trial_proposal = format!("vpr_nr_{frontier_idx}_trial");
log.push(StateEvent {
schema: events::EVENT_SCHEMA.to_string(),
id: String::new(),
kind: "negative_result.asserted".to_string(),
target: StateTarget {
r#type: "negative_result".to_string(),
id: trial_id.clone(),
},
actor: StateActor {
id: actor_id.clone(),
r#type: "human".to_string(),
},
timestamp: chrono::Utc::now().to_rfc3339(),
reason: "deposit informative null from pre-registered trial".into(),
before_hash: NULL_HASH.to_string(),
after_hash: NULL_HASH.to_string(),
payload: json!({
"proposal_id": trial_proposal,
"negative_result": trial_null,
}),
caveats: vec![],
signature: None,
schema_artifact_id: None,
});
let lab_conditions = Conditions {
text: format!("In vitro, frontier {frontier_idx} synthesis attempts"),
species_verified: vec![],
species_unverified: vec![],
in_vitro: true,
in_vivo: false,
human_data: false,
clinical_trial: false,
concentration_range: Some("1-10 mM".into()),
duration: Some("72h".into()),
age_group: None,
cell_type: None,
};
let lab_provenance = Provenance {
source_type: "lab_notebook".into(),
doi: None,
pmid: None,
pmc: None,
openalex_id: None,
url: None,
title: format!("Lab notebook excerpt, frontier {frontier_idx}"),
authors: vec![],
year: Some(2026),
journal: None,
license: None,
publisher: None,
funders: vec![],
extraction: Extraction::default(),
review: None,
citation_count: Some(0),
};
let lab_kind = NegativeResultKind::Exploratory {
reagent: format!("CompoundX-{frontier_idx}"),
observation: "no measurable binding under any tested condition".into(),
attempts: 4,
};
let lab_null = pin_negative_result(
NegativeResult::new(
lab_kind,
vec![],
format!("lab:cross-impl-{frontier_idx}"),
lab_conditions,
lab_provenance,
"Exhausted reasonable parameter sweep; documenting before scope expansion.",
),
frontier_idx,
101,
);
let lab_id = lab_null.id.clone();
let lab_proposal = format!("vpr_nr_{frontier_idx}_lab");
log.push(StateEvent {
schema: events::EVENT_SCHEMA.to_string(),
id: String::new(),
kind: "negative_result.asserted".to_string(),
target: StateTarget {
r#type: "negative_result".to_string(),
id: lab_id.clone(),
},
actor: StateActor {
id: actor_id.clone(),
r#type: "human".to_string(),
},
timestamp: chrono::Utc::now().to_rfc3339(),
reason: "deposit exploratory wet-lab dead end".into(),
before_hash: NULL_HASH.to_string(),
after_hash: NULL_HASH.to_string(),
payload: json!({
"proposal_id": lab_proposal,
"negative_result": lab_null,
}),
caveats: vec![],
signature: None,
schema_artifact_id: None,
});
log.push(StateEvent {
schema: events::EVENT_SCHEMA.to_string(),
id: String::new(),
kind: "negative_result.reviewed".to_string(),
target: StateTarget {
r#type: "negative_result".to_string(),
id: trial_id.clone(),
},
actor: StateActor {
id: format!("reviewer:second-reader-{frontier_idx}"),
r#type: "human".to_string(),
},
timestamp: chrono::Utc::now().to_rfc3339(),
reason: "subgroup analysis suggests effect concentrated in APOE4-positive".into(),
before_hash: NULL_HASH.to_string(),
after_hash: NULL_HASH.to_string(),
payload: json!({
"proposal_id": format!("vpr_nr_{frontier_idx}_trial_review"),
"status": "contested",
}),
caveats: vec![],
signature: None,
schema_artifact_id: None,
});
log.push(StateEvent {
schema: events::EVENT_SCHEMA.to_string(),
id: String::new(),
kind: "negative_result.retracted".to_string(),
target: StateTarget {
r#type: "negative_result".to_string(),
id: lab_id,
},
actor: StateActor {
id: actor_id,
r#type: "human".to_string(),
},
timestamp: chrono::Utc::now().to_rfc3339(),
reason: "reagent batch miscatalogued; retract and re-deposit pending".into(),
before_hash: NULL_HASH.to_string(),
after_hash: NULL_HASH.to_string(),
payload: json!({
"proposal_id": format!("vpr_nr_{frontier_idx}_lab_retract"),
}),
caveats: vec![],
signature: None,
schema_artifact_id: None,
});
log
}
fn build_trajectories_log(
frontier_idx: usize,
findings: &[FindingBundle],
) -> Vec<events::StateEvent> {
let mut log = Vec::new();
let actor_id = format!("reviewer:trajectories-{frontier_idx}");
for f in findings {
let proposal_id = format!("vpr_{}_{}", frontier_idx, &f.id[3..]);
log.push(events::new_finding_event(FindingEventInput {
kind: "finding.asserted",
finding_id: &f.id,
actor_id: &actor_id,
actor_type: "human",
reason: "trajectories-fixture genesis",
before_hash: NULL_HASH,
after_hash: NULL_HASH,
payload: json!({"proposal_id": proposal_id}),
caveats: vec![],
}));
}
let traj1 = pin_trajectory(
Trajectory::new(
vec![findings[0].id.clone()],
format!("agent:scout-{frontier_idx}"),
format!(
"Search path that arrived at finding {}",
&findings[0].id[..8]
),
),
frontier_idx,
200,
);
let traj1_id = traj1.id.clone();
let traj1_value = serde_json::to_value(&traj1).expect("serialize trajectory");
log.push(StateEvent {
schema: events::EVENT_SCHEMA.to_string(),
id: String::new(),
kind: "trajectory.created".to_string(),
target: StateTarget {
r#type: "trajectory".to_string(),
id: traj1_id.clone(),
},
actor: StateActor {
id: actor_id.clone(),
r#type: "human".to_string(),
},
timestamp: chrono::Utc::now().to_rfc3339(),
reason: "open trajectory for cross-impl fixture".into(),
before_hash: NULL_HASH.to_string(),
after_hash: NULL_HASH.to_string(),
payload: json!({
"proposal_id": format!("vpr_traj_{frontier_idx}_open"),
"trajectory": traj1_value,
}),
caveats: vec![],
signature: None,
schema_artifact_id: None,
});
let step_kinds = [
(
TrajectoryStepKind::Hypothesis,
"Considered: protein-X is the key regulator.",
),
(
TrajectoryStepKind::Tried,
"Ran knockout in mouse model; observed partial phenotype.",
),
(
TrajectoryStepKind::RuledOut,
"Ruled out: knockout phenotype attributable to compensating paralog, not protein-X.",
),
];
for (i, (kind, desc)) in step_kinds.iter().enumerate() {
let step = TrajectoryStep::new(
&traj1_id,
kind.clone(),
desc.to_string(),
format!("agent:scout-{frontier_idx}"),
Some(format!("2026-05-04T0{i}:00:00Z")),
vec![],
);
let step_value = serde_json::to_value(&step).expect("serialize step");
log.push(StateEvent {
schema: events::EVENT_SCHEMA.to_string(),
id: String::new(),
kind: "trajectory.step_appended".to_string(),
target: StateTarget {
r#type: "trajectory".to_string(),
id: traj1_id.clone(),
},
actor: StateActor {
id: format!("agent:scout-{frontier_idx}"),
r#type: "agent".to_string(),
},
timestamp: chrono::Utc::now().to_rfc3339(),
reason: format!("append step {i}"),
before_hash: NULL_HASH.to_string(),
after_hash: NULL_HASH.to_string(),
payload: json!({
"proposal_id": format!("vpr_step_{frontier_idx}_{i}"),
"parent_trajectory_id": traj1_id,
"step": step_value,
}),
caveats: vec![],
signature: None,
schema_artifact_id: None,
});
}
log.push(StateEvent {
schema: events::EVENT_SCHEMA.to_string(),
id: String::new(),
kind: "trajectory.reviewed".to_string(),
target: StateTarget {
r#type: "trajectory".to_string(),
id: traj1_id.clone(),
},
actor: StateActor {
id: actor_id.clone(),
r#type: "human".to_string(),
},
timestamp: chrono::Utc::now().to_rfc3339(),
reason: "step 3 needs more support before this is canonical".into(),
before_hash: NULL_HASH.to_string(),
after_hash: NULL_HASH.to_string(),
payload: json!({
"proposal_id": format!("vpr_traj_{frontier_idx}_review"),
"status": "needs_revision",
}),
caveats: vec![],
signature: None,
schema_artifact_id: None,
});
let traj2 = pin_trajectory(
Trajectory::new(
vec![findings[1].id.clone()],
format!("agent:scout-{frontier_idx}"),
format!("Misframed search against finding {}", &findings[1].id[..8]),
),
frontier_idx,
201,
);
let traj2_id = traj2.id.clone();
let traj2_value = serde_json::to_value(&traj2).expect("serialize trajectory");
log.push(StateEvent {
schema: events::EVENT_SCHEMA.to_string(),
id: String::new(),
kind: "trajectory.created".to_string(),
target: StateTarget {
r#type: "trajectory".to_string(),
id: traj2_id.clone(),
},
actor: StateActor {
id: actor_id.clone(),
r#type: "human".to_string(),
},
timestamp: chrono::Utc::now().to_rfc3339(),
reason: "open trajectory before noticing reframe".into(),
before_hash: NULL_HASH.to_string(),
after_hash: NULL_HASH.to_string(),
payload: json!({
"proposal_id": format!("vpr_traj_{frontier_idx}_open2"),
"trajectory": traj2_value,
}),
caveats: vec![],
signature: None,
schema_artifact_id: None,
});
log.push(StateEvent {
schema: events::EVENT_SCHEMA.to_string(),
id: String::new(),
kind: "trajectory.retracted".to_string(),
target: StateTarget {
r#type: "trajectory".to_string(),
id: traj2_id,
},
actor: StateActor {
id: actor_id,
r#type: "human".to_string(),
},
timestamp: chrono::Utc::now().to_rfc3339(),
reason: "premise of the search was wrong; reframing".into(),
before_hash: NULL_HASH.to_string(),
after_hash: NULL_HASH.to_string(),
payload: json!({
"proposal_id": format!("vpr_traj_{frontier_idx}_retract"),
}),
caveats: vec![],
signature: None,
schema_artifact_id: None,
});
log
}
fn build_artifacts_log(frontier_idx: usize, findings: &[FindingBundle]) -> Vec<events::StateEvent> {
let mut log = Vec::new();
let actor_id = format!("reviewer:artifacts-{frontier_idx}");
for f in findings {
let proposal_id = format!("vpr_{}_{}", frontier_idx, &f.id[3..]);
log.push(events::new_finding_event(FindingEventInput {
kind: "finding.asserted",
finding_id: &f.id,
actor_id: &actor_id,
actor_type: "human",
reason: "artifact-fixture genesis",
before_hash: NULL_HASH,
after_hash: NULL_HASH,
payload: json!({"proposal_id": proposal_id}),
caveats: vec![],
}));
}
let provenance = |title: String| Provenance {
source_type: "clinical_trial".into(),
doi: None,
pmid: None,
pmc: None,
openalex_id: None,
url: Some(format!("https://example.org/frontier-{frontier_idx}/trial")),
title,
authors: vec![],
year: Some(2026),
journal: None,
license: Some("CC0-1.0".into()),
publisher: None,
funders: vec![],
extraction: Extraction::default(),
review: None,
citation_count: Some(0),
};
let mut metadata = BTreeMap::new();
metadata.insert("nct_id".to_string(), json!(format!("NCT{frontier_idx:08}")));
metadata.insert("overall_status".to_string(), json!("COMPLETED"));
let trial = pin_artifact(
Artifact::new(
"clinical_trial_record",
format!("Cross-impl trial record {frontier_idx}"),
"sha256:aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
Some(2048),
Some("application/json".into()),
"remote",
Some(format!(
"https://clinicaltrials.gov/api/v2/studies/NCT{frontier_idx:08}"
)),
Some(format!(
"https://clinicaltrials.gov/study/NCT{frontier_idx:08}"
)),
Some("Public domain".into()),
vec![findings[0].id.clone()],
provenance(format!("Cross-impl trial source {frontier_idx}")),
metadata,
AccessTier::Public,
)
.expect("valid trial artifact"),
frontier_idx,
300,
);
let trial_id = trial.id.clone();
log.push(StateEvent {
schema: events::EVENT_SCHEMA.to_string(),
id: String::new(),
kind: "artifact.asserted".to_string(),
target: StateTarget {
r#type: "artifact".to_string(),
id: trial_id.clone(),
},
actor: StateActor {
id: actor_id.clone(),
r#type: "human".to_string(),
},
timestamp: chrono::Utc::now().to_rfc3339(),
reason: "deposit trial registry artifact for cross-impl fixture".into(),
before_hash: NULL_HASH.to_string(),
after_hash: NULL_HASH.to_string(),
payload: json!({
"proposal_id": format!("vpr_artifact_{frontier_idx}_trial"),
"artifact": trial,
}),
caveats: vec![],
signature: None,
schema_artifact_id: None,
});
log.push(StateEvent {
schema: events::EVENT_SCHEMA.to_string(),
id: String::new(),
kind: "artifact.reviewed".to_string(),
target: StateTarget {
r#type: "artifact".to_string(),
id: trial_id.clone(),
},
actor: StateActor {
id: format!("reviewer:artifact-second-reader-{frontier_idx}"),
r#type: "human".to_string(),
},
timestamp: chrono::Utc::now().to_rfc3339(),
reason: "trial registry artifact verified against source locator".into(),
before_hash: NULL_HASH.to_string(),
after_hash: NULL_HASH.to_string(),
payload: json!({
"proposal_id": format!("vpr_artifact_{frontier_idx}_trial_review"),
"status": "accepted",
}),
caveats: vec![],
signature: None,
schema_artifact_id: None,
});
log.push(StateEvent {
schema: events::EVENT_SCHEMA.to_string(),
id: String::new(),
kind: "tier.set".to_string(),
target: StateTarget {
r#type: "artifact".to_string(),
id: trial_id.clone(),
},
actor: StateActor {
id: actor_id.clone(),
r#type: "human".to_string(),
},
timestamp: chrono::Utc::now().to_rfc3339(),
reason: "artifact includes review notes under restricted read tier".into(),
before_hash: NULL_HASH.to_string(),
after_hash: NULL_HASH.to_string(),
payload: json!({
"proposal_id": format!("vpr_artifact_{frontier_idx}_trial_tier"),
"object_type": "artifact",
"object_id": trial_id,
"new_tier": "restricted",
}),
caveats: vec![],
signature: None,
schema_artifact_id: None,
});
let lab_file = pin_artifact(
Artifact::new(
"lab_file",
format!("Cross-impl lab file {frontier_idx}"),
"sha256:bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb",
Some(512),
Some("text/plain".into()),
"pointer",
Some(format!("lab://frontier-{frontier_idx}/notebook-17")),
None,
Some("internal lab note".into()),
vec![],
provenance(format!("Cross-impl lab source {frontier_idx}")),
BTreeMap::new(),
AccessTier::Public,
)
.expect("valid lab artifact"),
frontier_idx,
301,
);
let lab_id = lab_file.id.clone();
log.push(StateEvent {
schema: events::EVENT_SCHEMA.to_string(),
id: String::new(),
kind: "artifact.asserted".to_string(),
target: StateTarget {
r#type: "artifact".to_string(),
id: lab_id.clone(),
},
actor: StateActor {
id: actor_id.clone(),
r#type: "human".to_string(),
},
timestamp: chrono::Utc::now().to_rfc3339(),
reason: "deposit lab file pointer for cross-impl fixture".into(),
before_hash: NULL_HASH.to_string(),
after_hash: NULL_HASH.to_string(),
payload: json!({
"proposal_id": format!("vpr_artifact_{frontier_idx}_lab"),
"artifact": lab_file,
}),
caveats: vec![],
signature: None,
schema_artifact_id: None,
});
log.push(StateEvent {
schema: events::EVENT_SCHEMA.to_string(),
id: String::new(),
kind: "artifact.retracted".to_string(),
target: StateTarget {
r#type: "artifact".to_string(),
id: lab_id,
},
actor: StateActor {
id: actor_id,
r#type: "human".to_string(),
},
timestamp: chrono::Utc::now().to_rfc3339(),
reason: "lab file pointer was superseded by a verified blob".into(),
before_hash: NULL_HASH.to_string(),
after_hash: NULL_HASH.to_string(),
payload: json!({
"proposal_id": format!("vpr_artifact_{frontier_idx}_lab_retract"),
}),
caveats: vec![],
signature: None,
schema_artifact_id: None,
});
log
}
fn build_tier_set_log(frontier_idx: usize, findings: &[FindingBundle]) -> Vec<events::StateEvent> {
let mut log = Vec::new();
let actor_id = format!("reviewer:tier-{frontier_idx}");
for f in findings {
let proposal_id = format!("vpr_{}_{}", frontier_idx, &f.id[3..]);
log.push(events::new_finding_event(FindingEventInput {
kind: "finding.asserted",
finding_id: &f.id,
actor_id: &actor_id,
actor_type: "human",
reason: "tier-set fixture genesis",
before_hash: NULL_HASH,
after_hash: NULL_HASH,
payload: json!({"proposal_id": proposal_id}),
caveats: vec![],
}));
}
let nr_kind = NegativeResultKind::Exploratory {
reagent: format!("ReagentX-{frontier_idx}"),
observation: "no detectable activity at any tested concentration".into(),
attempts: 2,
};
let nr_conditions = Conditions {
text: "in vitro fixture".into(),
species_verified: vec![],
species_unverified: vec![],
in_vitro: true,
in_vivo: false,
human_data: false,
clinical_trial: false,
concentration_range: None,
duration: None,
age_group: None,
cell_type: None,
};
let nr_provenance = Provenance {
source_type: "lab_notebook".into(),
doi: None,
pmid: None,
pmc: None,
openalex_id: None,
url: None,
title: format!("Tier fixture lab note {frontier_idx}"),
authors: vec![],
year: Some(2026),
journal: None,
license: None,
publisher: None,
funders: vec![],
extraction: Extraction::default(),
review: None,
citation_count: Some(0),
};
let nr = pin_negative_result(
NegativeResult::new(
nr_kind,
vec![findings[0].id.clone()],
format!("lab:tier-fixture-{frontier_idx}"),
nr_conditions,
nr_provenance,
"Fixture exploratory null for tier.set coverage.",
),
frontier_idx,
300,
);
let nr_id = nr.id.clone();
log.push(StateEvent {
schema: events::EVENT_SCHEMA.to_string(),
id: String::new(),
kind: "negative_result.asserted".to_string(),
target: StateTarget {
r#type: "negative_result".to_string(),
id: nr_id.clone(),
},
actor: StateActor {
id: actor_id.clone(),
r#type: "human".to_string(),
},
timestamp: chrono::Utc::now().to_rfc3339(),
reason: "deposit null for tier-set fixture".into(),
before_hash: NULL_HASH.to_string(),
after_hash: NULL_HASH.to_string(),
payload: json!({
"proposal_id": format!("vpr_tier_{frontier_idx}_nr"),
"negative_result": nr,
}),
caveats: vec![],
signature: None,
schema_artifact_id: None,
});
let traj = pin_trajectory(
Trajectory::new(
vec![findings[0].id.clone()],
format!("agent:tier-fixture-{frontier_idx}"),
"Fixture trajectory for tier.set coverage.",
),
frontier_idx,
301,
);
let traj_id = traj.id.clone();
log.push(StateEvent {
schema: events::EVENT_SCHEMA.to_string(),
id: String::new(),
kind: "trajectory.created".to_string(),
target: StateTarget {
r#type: "trajectory".to_string(),
id: traj_id.clone(),
},
actor: StateActor {
id: actor_id.clone(),
r#type: "human".to_string(),
},
timestamp: chrono::Utc::now().to_rfc3339(),
reason: "open trajectory for tier-set fixture".into(),
before_hash: NULL_HASH.to_string(),
after_hash: NULL_HASH.to_string(),
payload: json!({
"proposal_id": format!("vpr_tier_{frontier_idx}_traj"),
"trajectory": traj,
}),
caveats: vec![],
signature: None,
schema_artifact_id: None,
});
let reclassifications = [
(
"finding",
findings[0].id.clone(),
"restricted",
"Finding reclassified for IBC review.",
),
(
"negative_result",
nr_id,
"restricted",
"Null reclassified — readout includes capability-relevant detail.",
),
(
"trajectory",
traj_id,
"classified",
"Trajectory reclassified — search path documents synthesis steps above DURC threshold.",
),
];
for (i, (object_type, object_id, new_tier, reason)) in reclassifications.iter().enumerate() {
log.push(StateEvent {
schema: events::EVENT_SCHEMA.to_string(),
id: String::new(),
kind: "tier.set".to_string(),
target: StateTarget {
r#type: object_type.to_string(),
id: object_id.clone(),
},
actor: StateActor {
id: format!("reviewer:ibc-{frontier_idx}"),
r#type: "human".to_string(),
},
timestamp: chrono::Utc::now().to_rfc3339(),
reason: reason.to_string(),
before_hash: NULL_HASH.to_string(),
after_hash: NULL_HASH.to_string(),
payload: json!({
"proposal_id": format!("vpr_tier_set_{frontier_idx}_{i}"),
"object_type": object_type,
"object_id": object_id,
"previous_tier": "public",
"new_tier": new_tier,
}),
caveats: vec![],
signature: None,
schema_artifact_id: None,
});
}
log
}
/// Builds a one-event log exercising the `evidence_atom.locator_repaired`
/// reducer arm. The findings slice is unused (note the `_` binding): the
/// repair targets a synthetic evidence atom id, not a finding.
fn build_locator_repair_log(
    frontier_idx: usize,
    _findings: &[FindingBundle],
) -> Vec<events::StateEvent> {
    let atom_id = format!("vea_fixture_locator_{frontier_idx}");
    let payload = json!({
        "proposal_id": format!("vpr_locator_repair_{frontier_idx}"),
        "source_id": format!("vs_fixture_source_{frontier_idx}"),
        "locator": format!("doi:10.1/fixture-locator-{frontier_idx}"),
    });
    let event = StateEvent {
        schema: events::EVENT_SCHEMA.to_string(),
        id: String::new(),
        kind: "evidence_atom.locator_repaired".to_string(),
        target: StateTarget {
            r#type: "evidence_atom".to_string(),
            id: atom_id,
        },
        actor: StateActor {
            id: format!("agent:vela-curation-bot-{frontier_idx}"),
            r#type: "agent".to_string(),
        },
        timestamp: fixture_timestamp(frontier_idx, 0),
        reason: "Mechanical repair from parent source".to_string(),
        before_hash: NULL_HASH.to_string(),
        after_hash: NULL_HASH.to_string(),
        payload,
        caveats: vec![],
        signature: None,
        schema_artifact_id: None,
    };
    vec![event]
}
/// Builds a one-event log exercising the `finding.span_repaired` reducer arm
/// against the first finding in `findings`.
fn build_span_repair_log(
    frontier_idx: usize,
    findings: &[FindingBundle],
) -> Vec<events::StateEvent> {
    let finding_id = findings[0].id.clone();
    let payload = json!({
        "proposal_id": format!("vpr_span_repair_{frontier_idx}"),
        "section": "abstract",
        "text": format!("Fixture span body for span-repair coverage {frontier_idx}."),
    });
    vec![StateEvent {
        schema: events::EVENT_SCHEMA.to_string(),
        id: String::new(),
        kind: "finding.span_repaired".to_string(),
        target: StateTarget {
            r#type: "finding".to_string(),
            id: finding_id,
        },
        actor: StateActor {
            id: format!("reviewer:span-repair-fixture-{frontier_idx}"),
            r#type: "human".to_string(),
        },
        timestamp: fixture_timestamp(frontier_idx, 0),
        reason: "Mechanical evidence-span repair".to_string(),
        before_hash: NULL_HASH.to_string(),
        after_hash: NULL_HASH.to_string(),
        payload,
        caveats: vec![],
        signature: None,
        schema_artifact_id: None,
    }]
}
/// Builds a one-event log exercising the `finding.entity_resolved` reducer
/// arm against the first finding in `findings`.
fn build_entity_resolve_log(
    frontier_idx: usize,
    findings: &[FindingBundle],
) -> Vec<events::StateEvent> {
    let target = &findings[0];
    // Resolve the finding's first assertion entity when one exists; otherwise
    // fall back to a deterministic synthetic name.
    let entity_name = match target.assertion.entities.first() {
        Some(entity) => entity.name.clone(),
        None => format!("fixture-entity-{frontier_idx}"),
    };
    let payload = json!({
        "proposal_id": format!("vpr_entity_resolve_{frontier_idx}"),
        "entity_name": entity_name,
        "source": "fixture",
        "id": format!("F-{frontier_idx}"),
        "confidence": 0.95,
        "resolution_method": "manual",
        "resolution_provenance": "delegated_human_curation",
    });
    vec![StateEvent {
        schema: events::EVENT_SCHEMA.to_string(),
        id: String::new(),
        kind: "finding.entity_resolved".to_string(),
        target: StateTarget {
            r#type: "finding".to_string(),
            id: target.id.clone(),
        },
        actor: StateActor {
            id: format!("reviewer:entity-resolve-fixture-{frontier_idx}"),
            r#type: "human".to_string(),
        },
        timestamp: fixture_timestamp(frontier_idx, 0),
        reason: "Mechanical entity resolution".to_string(),
        before_hash: NULL_HASH.to_string(),
        after_hash: NULL_HASH.to_string(),
        payload,
        caveats: vec![],
        signature: None,
        schema_artifact_id: None,
    }]
}
/// Builds a one-event log exercising the `finding.entity_added` reducer arm
/// against the first finding in `findings`.
fn build_entity_added_log(
    frontier_idx: usize,
    findings: &[FindingBundle],
) -> Vec<events::StateEvent> {
    let finding_id = findings[0].id.clone();
    let payload = json!({
        "proposal_id": format!("vpr_entity_add_{frontier_idx}"),
        "entity_name": format!("fixture-tag-{frontier_idx}"),
        "entity_type": "other",
        "reason": "cross-impl fixture",
    });
    vec![StateEvent {
        schema: events::EVENT_SCHEMA.to_string(),
        id: String::new(),
        kind: "finding.entity_added".to_string(),
        target: StateTarget {
            r#type: "finding".to_string(),
            id: finding_id,
        },
        actor: StateActor {
            id: format!("reviewer:entity-add-fixture-{frontier_idx}"),
            r#type: "human".to_string(),
        },
        timestamp: fixture_timestamp(frontier_idx, 0),
        reason: "Fixture-level entity-add".to_string(),
        before_hash: NULL_HASH.to_string(),
        after_hash: NULL_HASH.to_string(),
        payload,
        caveats: vec![],
        signature: None,
        schema_artifact_id: None,
    }]
}
fn build_federation_events_log(
frontier_idx: usize,
_findings: &[FindingBundle],
) -> Vec<events::StateEvent> {
let frontier_id = format!("vfr_fixture_{frontier_idx:08x}");
let conflict_event_id = format!("vev_fixture_conflict_{frontier_idx}");
let synced = StateEvent {
schema: events::EVENT_SCHEMA.to_string(),
id: String::new(),
kind: "frontier.synced_with_peer".to_string(),
target: StateTarget {
r#type: "frontier_observation".to_string(),
id: frontier_id.clone(),
},
actor: StateActor {
id: "federation".to_string(),
r#type: "system".to_string(),
},
timestamp: fixture_timestamp(frontier_idx, 0),
reason: "Fixture sync pass".to_string(),
before_hash: NULL_HASH.to_string(),
after_hash: NULL_HASH.to_string(),
payload: json!({
"peer_id": format!("peer:fixture-east-{frontier_idx}"),
"peer_snapshot_hash": "fixture_peer_snapshot",
"our_snapshot_hash": "fixture_our_snapshot",
"divergence_count": 1,
}),
caveats: vec![],
signature: None,
schema_artifact_id: None,
};
let detected = StateEvent {
schema: events::EVENT_SCHEMA.to_string(),
id: conflict_event_id.clone(),
kind: "frontier.conflict_detected".to_string(),
target: StateTarget {
r#type: "frontier_observation".to_string(),
id: frontier_id.clone(),
},
actor: StateActor {
id: "federation".to_string(),
r#type: "system".to_string(),
},
timestamp: fixture_timestamp(frontier_idx, 1),
reason: "Fixture conflict".to_string(),
before_hash: NULL_HASH.to_string(),
after_hash: NULL_HASH.to_string(),
payload: json!({
"peer_id": format!("peer:fixture-east-{frontier_idx}"),
"finding_id": format!("vf_fixture_{frontier_idx}"),
"kind": "verdict_disagreement",
"detail": "Fixture peer disagrees on review_state",
}),
caveats: vec![],
signature: None,
schema_artifact_id: None,
};
let resolved = StateEvent {
schema: events::EVENT_SCHEMA.to_string(),
id: String::new(),
kind: "frontier.conflict_resolved".to_string(),
target: StateTarget {
r#type: "frontier_observation".to_string(),
id: frontier_id.clone(),
},
actor: StateActor {
id: format!("reviewer:fixture-{frontier_idx}"),
r#type: "human".to_string(),
},
timestamp: fixture_timestamp(frontier_idx, 2),
reason: "Fixture resolution".to_string(),
before_hash: NULL_HASH.to_string(),
after_hash: NULL_HASH.to_string(),
payload: json!({
"proposal_id": format!("vpr_fixture_resolve_{frontier_idx}"),
"conflict_event_id": conflict_event_id,
"resolved_by": format!("reviewer:fixture-{frontier_idx}"),
"resolution_note": "Reviewer accepts our view",
}),
caveats: vec![],
signature: None,
schema_artifact_id: None,
};
vec![synced, detected, resolved]
}
/// Builds a one-event log exercising the `bridge.reviewed` kind against a
/// synthetic bridge id (the findings slice is unused).
fn build_bridge_reviewed_log(
    frontier_idx: usize,
    _findings: &[FindingBundle],
) -> Vec<events::StateEvent> {
    let bridge_id = format!("vbr_fixture_{frontier_idx:08x}");
    let payload = json!({
        "bridge_id": bridge_id.clone(),
        "status": "confirmed",
        "note": "Fixture verdict for cross-impl coverage",
    });
    vec![StateEvent {
        schema: events::EVENT_SCHEMA.to_string(),
        id: String::new(),
        kind: "bridge.reviewed".to_string(),
        target: StateTarget {
            r#type: "bridge".to_string(),
            id: bridge_id,
        },
        actor: StateActor {
            id: format!("reviewer:bridge-fixture-{frontier_idx}"),
            r#type: "human".to_string(),
        },
        timestamp: fixture_timestamp(frontier_idx, 0),
        reason: "Fixture bridge review verdict".to_string(),
        before_hash: NULL_HASH.to_string(),
        after_hash: NULL_HASH.to_string(),
        payload,
        caveats: vec![],
        signature: None,
        schema_artifact_id: None,
    }]
}
/// Builds a one-event `replication.deposited` log whose payload carries a
/// fully populated, content-addressed `Replication` targeting the first
/// finding in `findings`.
fn build_replication_deposited_log(
    frontier_idx: usize,
    findings: &[FindingBundle],
) -> Vec<events::StateEvent> {
    use vela_protocol::bundle::Replication;
    let target = &findings[0];
    let attempted_by = format!("lab:fixture-replication-{frontier_idx}");
    let outcome = "replicated".to_string();
    let conditions = Conditions {
        text: format!("Fixture replication conditions {frontier_idx}"),
        species_verified: vec!["Mus musculus".into()],
        species_unverified: vec![],
        in_vitro: true,
        in_vivo: false,
        human_data: false,
        clinical_trial: false,
        concentration_range: None,
        duration: None,
        age_group: None,
        cell_type: Some("microglia".into()),
    };
    let evidence = Evidence {
        evidence_type: "experimental".into(),
        model_system: "mouse".into(),
        species: Some("Mus musculus".into()),
        method: "Independent replication, Western blot".into(),
        sample_size: Some("n=24".into()),
        effect_size: None,
        p_value: Some("p<0.05".into()),
        replicated: true,
        replication_count: Some(1),
        evidence_spans: vec![],
    };
    let provenance = Provenance {
        source_type: "preprint".into(),
        doi: Some(format!(
            "10.0000/crossimpl.replication.frontier{frontier_idx:04}"
        )),
        pmid: None,
        pmc: None,
        openalex_id: None,
        url: None,
        title: format!("Fixture replication report {frontier_idx}"),
        authors: vec![Author {
            name: "Cross-Impl Replicator".into(),
            orcid: None,
        }],
        year: Some(2026),
        journal: Some("Cross Replications".into()),
        license: None,
        publisher: None,
        funders: vec![],
        extraction: Extraction::default(),
        review: None,
        citation_count: Some(0),
    };
    let replication = Replication {
        // Content-addressed over target, attempter, conditions, and outcome,
        // so regenerating the fixture yields a stable id.
        id: Replication::content_address(&target.id, &attempted_by, &conditions, &outcome),
        target_finding: target.id.clone(),
        attempted_by: attempted_by.clone(),
        outcome,
        evidence,
        conditions,
        provenance,
        notes: "Fixture replication note".to_string(),
        created: fixture_object_timestamp(frontier_idx, 0),
        previous_attempt: None,
    };
    vec![StateEvent {
        schema: events::EVENT_SCHEMA.to_string(),
        id: String::new(),
        kind: "replication.deposited".to_string(),
        target: StateTarget {
            r#type: "finding".to_string(),
            id: target.id.clone(),
        },
        actor: StateActor {
            id: attempted_by,
            r#type: "human".to_string(),
        },
        timestamp: fixture_timestamp(frontier_idx, 0),
        reason: "Fixture replication deposit".to_string(),
        before_hash: NULL_HASH.to_string(),
        after_hash: NULL_HASH.to_string(),
        payload: json!({
            "proposal_id": format!("vpr_replication_deposit_{frontier_idx}"),
            "replication": replication,
        }),
        caveats: vec![],
        signature: None,
        schema_artifact_id: None,
    }]
}
/// Builds a one-event `prediction.deposited` log whose payload carries a
/// content-addressed `Prediction` targeting the first finding in `findings`.
fn build_prediction_deposited_log(
    frontier_idx: usize,
    findings: &[FindingBundle],
) -> Vec<events::StateEvent> {
    use vela_protocol::bundle::{ExpectedOutcome, Prediction};
    let target = &findings[0];
    let made_by = format!("forecaster:fixture-prediction-{frontier_idx}");
    let claim_text =
        format!("Fixture prediction {frontier_idx}: replication will confirm at p<0.05.");
    let predicted_at = fixture_object_timestamp(frontier_idx, 0);
    let resolution_criterion =
        "An independent lab posts a replication with the same outcome.".to_string();
    let expected_outcome = ExpectedOutcome::Affirmed;
    let conditions = Conditions {
        text: format!("Fixture prediction conditions {frontier_idx}"),
        species_verified: vec!["Mus musculus".into()],
        species_unverified: vec![],
        in_vitro: true,
        in_vivo: false,
        human_data: false,
        clinical_trial: false,
        concentration_range: None,
        duration: None,
        age_group: None,
        cell_type: Some("microglia".into()),
    };
    let prediction = Prediction {
        // Content-addressed over the claim, forecaster, timestamp, criterion,
        // and expected outcome, so regenerating the fixture is deterministic.
        id: Prediction::content_address(
            &claim_text,
            &made_by,
            &predicted_at,
            &resolution_criterion,
            &expected_outcome,
        ),
        claim_text,
        target_findings: vec![target.id.clone()],
        predicted_at,
        resolves_by: Some("2027-05-02T00:00:00Z".to_string()),
        resolution_criterion,
        expected_outcome,
        made_by: made_by.clone(),
        confidence: 0.6,
        conditions,
        expired_unresolved: false,
    };
    vec![StateEvent {
        schema: events::EVENT_SCHEMA.to_string(),
        id: String::new(),
        kind: "prediction.deposited".to_string(),
        target: StateTarget {
            r#type: "finding".to_string(),
            id: target.id.clone(),
        },
        actor: StateActor {
            id: made_by,
            r#type: "human".to_string(),
        },
        timestamp: fixture_timestamp(frontier_idx, 0),
        reason: "Fixture prediction deposit".to_string(),
        before_hash: NULL_HASH.to_string(),
        after_hash: NULL_HASH.to_string(),
        payload: json!({
            "proposal_id": format!("vpr_prediction_deposit_{frontier_idx}"),
            "prediction": prediction,
        }),
        caveats: vec![],
        signature: None,
        schema_artifact_id: None,
    }]
}
fn finding_state(f: &FindingBundle) -> Value {
let review_state = f
.flags
.review_state
.as_ref()
.map(|s| match s {
vela_protocol::bundle::ReviewState::Accepted => "accepted",
vela_protocol::bundle::ReviewState::Contested => "contested",
vela_protocol::bundle::ReviewState::NeedsRevision => "needs_revision",
vela_protocol::bundle::ReviewState::Rejected => "rejected",
})
.unwrap_or("none");
let mut annotation_ids: Vec<String> = f.annotations.iter().map(|a| a.id.clone()).collect();
annotation_ids.sort();
json!({
"id": f.id,
"retracted": f.flags.retracted,
"contested": f.flags.contested,
"review_state": review_state,
"confidence_score": format!("{:.6}", f.confidence.score),
"annotation_ids": annotation_ids,
"access_tier": f.access_tier.canonical(),
})
}
fn negative_result_state(n: &vela_protocol::bundle::NegativeResult) -> Value {
let review_state = n
.review_state
.as_ref()
.map(|s| match s {
vela_protocol::bundle::ReviewState::Accepted => "accepted",
vela_protocol::bundle::ReviewState::Contested => "contested",
vela_protocol::bundle::ReviewState::NeedsRevision => "needs_revision",
vela_protocol::bundle::ReviewState::Rejected => "rejected",
})
.unwrap_or("none");
json!({
"id": n.id,
"retracted": n.retracted,
"review_state": review_state,
"access_tier": n.access_tier.canonical(),
})
}
fn trajectory_state(t: &vela_protocol::bundle::Trajectory) -> Value {
let review_state = t
.review_state
.as_ref()
.map(|s| match s {
vela_protocol::bundle::ReviewState::Accepted => "accepted",
vela_protocol::bundle::ReviewState::Contested => "contested",
vela_protocol::bundle::ReviewState::NeedsRevision => "needs_revision",
vela_protocol::bundle::ReviewState::Rejected => "rejected",
})
.unwrap_or("none");
let step_ids: Vec<String> = t.steps.iter().map(|s| s.id.clone()).collect();
json!({
"id": t.id,
"retracted": t.retracted,
"review_state": review_state,
"access_tier": t.access_tier.canonical(),
"step_ids": step_ids,
})
}
fn replication_state(r: &vela_protocol::bundle::Replication) -> Value {
json!({
"id": r.id,
"target_finding": r.target_finding,
"outcome": r.outcome,
})
}
fn prediction_state(p: &vela_protocol::bundle::Prediction) -> Value {
json!({
"id": p.id,
"made_by": p.made_by,
"expired_unresolved": p.expired_unresolved,
})
}
fn artifact_state(a: &vela_protocol::bundle::Artifact) -> Value {
let review_state = a
.review_state
.as_ref()
.map(|s| match s {
vela_protocol::bundle::ReviewState::Accepted => "accepted",
vela_protocol::bundle::ReviewState::Contested => "contested",
vela_protocol::bundle::ReviewState::NeedsRevision => "needs_revision",
vela_protocol::bundle::ReviewState::Rejected => "rejected",
})
.unwrap_or("none");
json!({
"id": a.id,
"kind": a.kind,
"retracted": a.retracted,
"review_state": review_state,
"access_tier": a.access_tier.canonical(),
})
}
/// Replays `event_log` from genesis over `findings` and writes one
/// conformance fixture (`cascade-fixture-NN.json`) into `out_dir`.
///
/// The fixture records the genesis findings, the normalized event log, a
/// per-kind event histogram, and the expected post-replay state of every
/// object family (each sorted by id for determinism).
///
/// Takes `&Path` rather than `&PathBuf` (callers passing `&PathBuf` coerce
/// automatically). Panics on replay or I/O failure — this only runs inside
/// the fixture-export test.
fn export_one(
    out_dir: &std::path::Path,
    fixture_idx: usize,
    scenario: &str,
    findings: Vec<FindingBundle>,
    event_log: Vec<events::StateEvent>,
) {
    let event_log = normalize_event_log(fixture_idx, event_log);
    let post = replay_from_genesis(
        findings.clone(),
        event_log.clone(),
        &format!("Cross-Impl Frontier {fixture_idx} ({scenario})"),
        "Cross-implementation reducer fixture",
        "2026-05-02T00:00:00Z",
        "vela-cross-impl/0",
    )
    .expect("replay must succeed");
    // Sort each family by id so the expected-state arrays are stable across
    // reducer-internal orderings.
    let mut sorted = post.findings.clone();
    sorted.sort_by(|a, b| a.id.cmp(&b.id));
    let expected_states: Vec<Value> = sorted.iter().map(finding_state).collect();
    let mut sorted_nrs = post.negative_results.clone();
    sorted_nrs.sort_by(|a, b| a.id.cmp(&b.id));
    let expected_negative_results: Vec<Value> =
        sorted_nrs.iter().map(negative_result_state).collect();
    let mut sorted_trajs = post.trajectories.clone();
    sorted_trajs.sort_by(|a, b| a.id.cmp(&b.id));
    let expected_trajectories: Vec<Value> = sorted_trajs.iter().map(trajectory_state).collect();
    let mut sorted_artifacts = post.artifacts.clone();
    sorted_artifacts.sort_by(|a, b| a.id.cmp(&b.id));
    let expected_artifacts: Vec<Value> = sorted_artifacts.iter().map(artifact_state).collect();
    let mut sorted_replications = post.replications.clone();
    sorted_replications.sort_by(|a, b| a.id.cmp(&b.id));
    let expected_replications: Vec<Value> =
        sorted_replications.iter().map(replication_state).collect();
    let mut sorted_predictions = post.predictions.clone();
    sorted_predictions.sort_by(|a, b| a.id.cmp(&b.id));
    let expected_predictions: Vec<Value> =
        sorted_predictions.iter().map(prediction_state).collect();
    // Per-kind histogram of the event log, so verifiers can check coverage.
    let mut kinds_seen: std::collections::BTreeMap<String, usize> =
        std::collections::BTreeMap::new();
    for ev in &event_log {
        *kinds_seen.entry(ev.kind.clone()).or_default() += 1;
    }
    let kinds_value: Value = serde_json::to_value(&kinds_seen).unwrap();
    let fixture = json!({
        "fixture_version": "4",
        "schema_url": "https://vela.science/schema/cross-impl-reducer-fixture/v4",
        "doctrine": "every reducer implementation must agree on per-kind mutation rules across findings, negative_results, trajectories, artifacts, replications, and predictions",
        "scenario": scenario,
        "frontier_idx": fixture_idx,
        "stats": {
            "findings": findings.len(),
            "negative_results": post.negative_results.len(),
            "trajectories": post.trajectories.len(),
            "artifacts": post.artifacts.len(),
            "replications": post.replications.len(),
            "predictions": post.predictions.len(),
            "events": event_log.len(),
            "cascade_depth": if scenario == "cascade" {
                // saturating_sub guards against underflow should a cascade
                // scenario ever be generated with zero findings.
                CASCADE_DEPTH.min(findings.len().saturating_sub(1))
            } else {
                0
            },
            "kinds_seen": kinds_value,
        },
        "genesis_findings": findings,
        "event_log": event_log,
        "expected_states": expected_states,
        "expected_negative_results": expected_negative_results,
        "expected_trajectories": expected_trajectories,
        "expected_artifacts": expected_artifacts,
        "expected_replications": expected_replications,
        "expected_predictions": expected_predictions,
    });
    let path = out_dir.join(format!("cascade-fixture-{fixture_idx:02}.json"));
    std::fs::write(&path, serde_json::to_string_pretty(&fixture).unwrap()).expect("write fixture");
    eprintln!("wrote {}", path.display());
}
#[test]
fn export_cross_impl_reducer_fixtures() {
    let out_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR"))
        .join("tests")
        .join("fixtures");
    std::fs::create_dir_all(&out_dir).expect("create fixtures dir");
    // Deterministic genesis findings for a given frontier index.
    let make_findings = |frontier_idx: usize| -> Vec<FindingBundle> {
        (0..FINDINGS_PER_FRONTIER)
            .map(|i| make_finding(frontier_idx, i))
            .collect()
    };
    // Frontiers 0..FIXTURE_FRONTIER_COUNT all exercise the cascade scenario.
    for idx in 0..FIXTURE_FRONTIER_COUNT {
        let findings = make_findings(idx);
        let log = build_event_log(idx, &findings);
        export_one(&out_dir, idx, "cascade", findings, log);
    }
    // Every remaining scenario gets its own frontier index after the cascades.
    let idx = FIXTURE_FRONTIER_COUNT;
    let findings = make_findings(idx);
    let log = build_review_branches_log(idx, &findings);
    export_one(&out_dir, idx, "review_branches", findings, log);
    let idx = FIXTURE_FRONTIER_COUNT + 1;
    let findings = make_findings(idx);
    let log = build_annotations_log(idx, &findings);
    export_one(&out_dir, idx, "annotations", findings, log);
    let idx = FIXTURE_FRONTIER_COUNT + 2;
    let findings = make_findings(idx);
    let log = build_negative_results_log(idx, &findings);
    export_one(&out_dir, idx, "negative_results", findings, log);
    let idx = FIXTURE_FRONTIER_COUNT + 3;
    let findings = make_findings(idx);
    let log = build_trajectories_log(idx, &findings);
    export_one(&out_dir, idx, "trajectories", findings, log);
    let idx = FIXTURE_FRONTIER_COUNT + 4;
    let findings = make_findings(idx);
    let log = build_tier_set_log(idx, &findings);
    export_one(&out_dir, idx, "tier_set", findings, log);
    // The artifacts scenario layers the bridge/replication/prediction
    // builders on top of the artifact log in a single event stream.
    let idx = FIXTURE_FRONTIER_COUNT + 5;
    let findings = make_findings(idx);
    let mut log = build_artifacts_log(idx, &findings);
    log.extend(build_bridge_reviewed_log(idx, &findings));
    log.extend(build_replication_deposited_log(idx, &findings));
    log.extend(build_prediction_deposited_log(idx, &findings));
    export_one(&out_dir, idx, "artifacts", findings, log);
    let idx = FIXTURE_FRONTIER_COUNT + 6;
    let findings = make_findings(idx);
    let log = build_span_repair_log(idx, &findings);
    export_one(&out_dir, idx, "span_repair", findings, log);
    let idx = FIXTURE_FRONTIER_COUNT + 7;
    let findings = make_findings(idx);
    let log = build_entity_resolve_log(idx, &findings);
    export_one(&out_dir, idx, "entity_resolve", findings, log);
    let idx = FIXTURE_FRONTIER_COUNT + 8;
    let findings = make_findings(idx);
    let log = build_entity_added_log(idx, &findings);
    export_one(&out_dir, idx, "entity_added", findings, log);
    write_fixtures_manifest(&out_dir);
}
/// Writes `fixtures.manifest.json` into `out_dir`, recording the SHA-256
/// digest and byte length of every `cascade-fixture-*.json` file so a
/// downstream verifier can refuse fixtures whose bytes have drifted.
///
/// Takes `&Path` rather than `&PathBuf` (callers passing `&PathBuf` coerce
/// automatically). Panics on I/O failure — this only runs inside the
/// fixture-export test.
fn write_fixtures_manifest(out_dir: &std::path::Path) {
    use sha2::{Digest, Sha256};
    let mut entries: Vec<serde_json::Value> = Vec::new();
    // Collect only our own fixture files; sort for a deterministic manifest.
    let mut paths: Vec<PathBuf> = std::fs::read_dir(out_dir)
        .expect("read fixtures dir")
        .filter_map(Result::ok)
        .map(|e| e.path())
        .filter(|p| {
            p.extension().is_some_and(|ext| ext == "json")
                && p.file_name()
                    .and_then(|n| n.to_str())
                    .is_some_and(|n| n.starts_with("cascade-fixture-"))
        })
        .collect();
    paths.sort();
    for path in &paths {
        let bytes = std::fs::read(path).expect("read fixture bytes");
        let digest = format!("sha256:{}", hex::encode(Sha256::digest(&bytes)));
        let name = path
            .file_name()
            .and_then(|n| n.to_str())
            .expect("fixture name utf8")
            .to_string();
        entries.push(json!({
            "path": name,
            "sha256": digest,
            "bytes": bytes.len(),
        }));
    }
    let manifest = json!({
        "schema": "vela.conformance-fixtures-manifest.v1",
        "doctrine": "every fixture's SHA-256 is recorded; verify.py refuses to run on a fixture whose bytes drift from the recorded digest. Closes THREAT_MODEL.md A12 (integrity half).",
        "fixtures": entries,
    });
    let manifest_path = out_dir.join("fixtures.manifest.json");
    std::fs::write(
        &manifest_path,
        serde_json::to_string_pretty(&manifest).expect("serialize manifest"),
    )
    .expect("write manifest");
    eprintln!("wrote {}", manifest_path.display());
}
#[test]
fn fixture_coverage_includes_every_reducer_arm() {
    use vela_protocol::reducer::REDUCER_MUTATION_KINDS;
    let frontier_idx = 0;
    let findings: Vec<FindingBundle> = (0..FINDINGS_PER_FRONTIER)
        .map(|i| make_finding(frontier_idx, i))
        .collect();
    // Union of every event kind emitted by every fixture builder.
    let mut all_kinds = std::collections::BTreeSet::<String>::new();
    let mut absorb = |log: Vec<events::StateEvent>| {
        all_kinds.extend(log.into_iter().map(|ev| ev.kind));
    };
    absorb(build_event_log(frontier_idx, &findings));
    absorb(build_review_branches_log(frontier_idx, &findings));
    absorb(build_annotations_log(frontier_idx, &findings));
    absorb(build_negative_results_log(frontier_idx, &findings));
    absorb(build_trajectories_log(frontier_idx, &findings));
    absorb(build_tier_set_log(frontier_idx, &findings));
    absorb(build_artifacts_log(frontier_idx, &findings));
    absorb(build_locator_repair_log(frontier_idx, &findings));
    absorb(build_span_repair_log(frontier_idx, &findings));
    absorb(build_entity_resolve_log(frontier_idx, &findings));
    absorb(build_entity_added_log(frontier_idx, &findings));
    for kind in REDUCER_MUTATION_KINDS {
        assert!(
            all_kinds.contains(*kind),
            "cross-impl fixture coverage missing reducer arm: {kind} \
             (declared in REDUCER_MUTATION_KINDS but not exercised by \
             any fixture builder)"
        );
    }
}
#[test]
fn federation_events_fixture_pairs_conflict_with_resolution() {
    let frontier_idx = 7;
    let findings: Vec<FindingBundle> = (0..FINDINGS_PER_FRONTIER)
        .map(|i| make_finding(frontier_idx, i))
        .collect();
    let log = build_federation_events_log(frontier_idx, &findings);
    assert_eq!(log.len(), 3, "expected synced + detected + resolved");
    let observed_kinds: Vec<&str> = log.iter().map(|e| e.kind.as_str()).collect();
    assert_eq!(
        observed_kinds,
        vec![
            "frontier.synced_with_peer",
            "frontier.conflict_detected",
            "frontier.conflict_resolved",
        ]
    );
    // The resolution event must point back at the detected conflict's id.
    let referenced = log[2]
        .payload
        .get("conflict_event_id")
        .and_then(|v| v.as_str())
        .unwrap_or("");
    assert_eq!(
        log[1].id, referenced,
        "resolved event must reference detected event by id"
    );
}
#[test]
fn v067_v071_events_are_finding_state_noops() {
    use vela_protocol::project;
    use vela_protocol::reducer::apply_event;
    let frontier_idx = 8;
    let findings: Vec<FindingBundle> = (0..FINDINGS_PER_FRONTIER)
        .map(|i| make_finding(frontier_idx, i))
        .collect();
    // Each case pairs a label with a normalized single-kind event log.
    let cases: Vec<(&str, Vec<events::StateEvent>)> = vec![
        (
            "bridge.reviewed",
            normalize_event_log(
                frontier_idx,
                build_bridge_reviewed_log(frontier_idx, &findings),
            ),
        ),
        (
            "replication.deposited",
            normalize_event_log(
                frontier_idx,
                build_replication_deposited_log(frontier_idx, &findings),
            ),
        ),
        (
            "prediction.deposited",
            normalize_event_log(
                frontier_idx,
                build_prediction_deposited_log(frontier_idx, &findings),
            ),
        ),
    ];
    for (label, log) in cases {
        let mut state = project::assemble(
            "v072-noop-fixture",
            findings.clone(),
            0,
            0,
            "Cross-impl no-op coverage for v0.67 + v0.70 + v0.71 event kinds",
        );
        // Snapshot the serialized finding state before applying any events.
        let findings_before_bytes =
            serde_json::to_vec(&state.findings).expect("canonicalize findings_before");
        for event in &log {
            apply_event(&mut state, event)
                .unwrap_or_else(|e| panic!("{label} rejected by reducer: {e}"));
        }
        let findings_after_bytes =
            serde_json::to_vec(&state.findings).expect("canonicalize findings_after");
        assert_eq!(
            findings_before_bytes, findings_after_bytes,
            "{label} mutated Project.findings; expected no-op on finding state"
        );
    }
}
#[test]
fn v067_v071_builders_produce_well_typed_events() {
    let frontier_idx = 9;
    let findings: Vec<FindingBundle> = (0..FINDINGS_PER_FRONTIER)
        .map(|i| make_finding(frontier_idx, i))
        .collect();
    // Extracts `payload.<object_key>.id` from an event, or "" when absent.
    let nested_id = |ev: &events::StateEvent, object_key: &str| -> String {
        ev.payload
            .get(object_key)
            .and_then(|v| v.get("id"))
            .and_then(|v| v.as_str())
            .unwrap_or("")
            .to_string()
    };
    let bridge_log = build_bridge_reviewed_log(frontier_idx, &findings);
    assert_eq!(bridge_log.len(), 1);
    assert_eq!(bridge_log[0].kind, "bridge.reviewed");
    assert_eq!(bridge_log[0].target.r#type, "bridge");
    let status_ok = matches!(
        bridge_log[0].payload.get("status").and_then(|v| v.as_str()),
        Some("confirmed") | Some("refuted")
    );
    assert!(
        status_ok,
        "bridge.reviewed payload.status must be 'confirmed' or 'refuted'"
    );
    let rep_log = build_replication_deposited_log(frontier_idx, &findings);
    assert_eq!(rep_log.len(), 1);
    assert_eq!(rep_log[0].kind, "replication.deposited");
    let rep_id = nested_id(&rep_log[0], "replication");
    assert!(
        rep_id.starts_with("vrep_"),
        "replication.deposited payload.replication.id must start with 'vrep_', got {rep_id:?}"
    );
    let pred_log = build_prediction_deposited_log(frontier_idx, &findings);
    assert_eq!(pred_log.len(), 1);
    assert_eq!(pred_log[0].kind, "prediction.deposited");
    let pred_id = nested_id(&pred_log[0], "prediction");
    assert!(
        pred_id.starts_with("vpred_"),
        "prediction.deposited payload.prediction.id must start with 'vpred_', got {pred_id:?}"
    );
}