use std::collections::{BTreeMap, BTreeSet};
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use serde_json::{Value, json};
use sha2::{Digest, Sha256};
use crate::bundle::FindingBundle;
use crate::canonical;
use crate::project::Project;
/// Schema tag stamped on every event; also the serde default for
/// [`StateEvent::schema`]. Replay flags events with any other tag.
pub const EVENT_SCHEMA: &str = "vela.event.v0.1";
/// Sentinel hash boundary used when no before/after state exists.
pub const NULL_HASH: &str = "sha256:null";
// Event-kind discriminators. These exact strings appear in serialized event
// logs and are matched in `validate_event_payload`; never change a published
// value.
pub const EVENT_KIND_KEY_REVOKE: &str = "key.revoke";
// Negative-result lifecycle.
pub const EVENT_KIND_NEGATIVE_RESULT_ASSERTED: &str = "negative_result.asserted";
pub const EVENT_KIND_NEGATIVE_RESULT_REVIEWED: &str = "negative_result.reviewed";
pub const EVENT_KIND_NEGATIVE_RESULT_RETRACTED: &str = "negative_result.retracted";
// Trajectory lifecycle.
pub const EVENT_KIND_TRAJECTORY_CREATED: &str = "trajectory.created";
pub const EVENT_KIND_TRAJECTORY_STEP_APPENDED: &str = "trajectory.step_appended";
pub const EVENT_KIND_TRAJECTORY_REVIEWED: &str = "trajectory.reviewed";
pub const EVENT_KIND_TRAJECTORY_RETRACTED: &str = "trajectory.retracted";
// Artifact lifecycle.
pub const EVENT_KIND_ARTIFACT_ASSERTED: &str = "artifact.asserted";
pub const EVENT_KIND_ARTIFACT_REVIEWED: &str = "artifact.reviewed";
pub const EVENT_KIND_ARTIFACT_RETRACTED: &str = "artifact.retracted";
// Access tier and repair/annotation events.
pub const EVENT_KIND_TIER_SET: &str = "tier.set";
pub const EVENT_KIND_EVIDENCE_ATOM_LOCATOR_REPAIRED: &str = "evidence_atom.locator_repaired";
pub const EVENT_KIND_FINDING_SPAN_REPAIRED: &str = "finding.span_repaired";
pub const EVENT_KIND_FINDING_ENTITY_RESOLVED: &str = "finding.entity_resolved";
pub const EVENT_KIND_ATTESTATION_RECORDED: &str = "attestation.recorded";
pub const EVENT_KIND_FINDING_ENTITY_ADDED: &str = "finding.entity_added";
// Deposits, bridges, and frontier-level coordination.
pub const EVENT_KIND_REPLICATION_DEPOSITED: &str = "replication.deposited";
pub const EVENT_KIND_PREDICTION_DEPOSITED: &str = "prediction.deposited";
pub const EVENT_KIND_BRIDGE_REVIEWED: &str = "bridge.reviewed";
pub const EVENT_KIND_FRONTIER_CONFLICT_RESOLVED: &str = "frontier.conflict_resolved";
/// The object an event mutates, addressed by type tag and id
/// (e.g. `finding`, `evidence_atom`, `bridge`, `actor`).
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct StateTarget {
    /// Target type tag (`r#type` because `type` is a keyword).
    pub r#type: String,
    /// Id of the targeted object.
    pub id: String,
}
/// Who performed an event's action.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct StateActor {
    /// Actor identifier.
    pub id: String,
    /// Actor type tag (`r#type` because `type` is a keyword).
    pub r#type: String,
}
/// A single append-only entry in a project's event log.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct StateEvent {
    /// Schema tag; defaults to [`EVENT_SCHEMA`] when absent in input.
    #[serde(default = "default_schema")]
    pub schema: String,
    /// Content-addressed id (`vev_<16hex>`), derived by `event_id` from
    /// every field except `id`, `signature`, and `schema_artifact_id`.
    pub id: String,
    /// Event kind discriminator (see the `EVENT_KIND_*` constants).
    pub kind: String,
    /// The object this event mutates.
    pub target: StateTarget,
    /// Who performed the action.
    pub actor: StateActor,
    /// RFC-3339 timestamp assigned at construction time.
    pub timestamp: String,
    /// Human-readable justification; replay flags empty reasons as conflicts.
    pub reason: String,
    /// Hash of the target before this event ([`NULL_HASH`] when none applies).
    pub before_hash: String,
    /// Hash of the target after this event ([`NULL_HASH`] when none applies).
    pub after_hash: String,
    /// Kind-specific JSON payload, checked by `validate_event_payload`.
    #[serde(default)]
    pub payload: Value,
    /// Free-form caveat strings attached to the event.
    #[serde(default)]
    pub caveats: Vec<String>,
    /// Optional detached signature; excluded from the id hash.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub signature: Option<String>,
    /// Optional schema artifact reference; excluded from the id hash.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub schema_artifact_id: Option<String>,
}
/// Borrowed parameter bundle for [`new_finding_event`].
pub struct FindingEventInput<'a> {
    /// Event kind string to stamp on the event.
    pub kind: &'a str,
    /// Id of the finding the event targets.
    pub finding_id: &'a str,
    pub actor_id: &'a str,
    pub actor_type: &'a str,
    pub reason: &'a str,
    /// Finding hash before the change ([`NULL_HASH`] when not applicable).
    pub before_hash: &'a str,
    /// Finding hash after the change ([`NULL_HASH`] when not applicable).
    pub after_hash: &'a str,
    /// Kind-specific payload, moved into the event unchanged.
    pub payload: Value,
    pub caveats: Vec<String>,
}
/// Aggregate statistics over a project's event log, built by [`summarize`].
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct EventLogSummary {
    /// Total number of events.
    pub count: usize,
    /// Event count per kind string.
    pub kinds: BTreeMap<String, usize>,
    /// Earliest timestamp (lexicographic order), if any events exist.
    pub first_timestamp: Option<String>,
    /// Latest timestamp (lexicographic order), if any events exist.
    pub last_timestamp: Option<String>,
    /// Event ids that occur more than once in the log.
    pub duplicate_ids: Vec<String>,
    /// Finding ids targeted by events but absent from the project
    /// (finding.retracted events are exempt).
    pub orphan_targets: Vec<String>,
}
/// Result of validating an event log against current state; see [`replay_report`].
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ReplayReport {
    /// True when no conflicts were detected.
    pub ok: bool,
    /// One of "ok", "conflict", or "no_events".
    pub status: String,
    /// Summary statistics over the log.
    pub event_log: EventLogSummary,
    /// Snapshot hash of the current project state.
    pub source_hash: String,
    /// Hash over the canonical bytes of the whole event list.
    pub event_log_hash: String,
    /// Equals `current_hash` when ok; the literal "unavailable" on conflict.
    pub replayed_hash: String,
    /// Snapshot hash of the current project state (same value as `source_hash`).
    pub current_hash: String,
    /// Human-readable descriptions of every detected conflict.
    pub conflicts: Vec<String>,
}
/// Serde default for [`StateEvent::schema`]: the current schema tag.
fn default_schema() -> String {
    String::from(EVENT_SCHEMA)
}
pub fn new_finding_event(input: FindingEventInput<'_>) -> StateEvent {
let timestamp = Utc::now().to_rfc3339();
let mut event = StateEvent {
schema: EVENT_SCHEMA.to_string(),
id: String::new(),
kind: input.kind.to_string(),
target: StateTarget {
r#type: "finding".to_string(),
id: input.finding_id.to_string(),
},
actor: StateActor {
id: input.actor_id.to_string(),
r#type: input.actor_type.to_string(),
},
timestamp,
reason: input.reason.to_string(),
before_hash: input.before_hash.to_string(),
after_hash: input.after_hash.to_string(),
payload: input.payload,
caveats: input.caveats,
signature: None,
schema_artifact_id: None,
};
event.id = event_id(&event);
event
}
/// Payload for a `key.revoke` event.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct RevocationPayload {
    /// Hex-encoded public key being revoked (validated as 64 hex chars).
    pub revoked_pubkey: String,
    /// RFC-3339 timestamp of the revocation.
    pub revoked_at: String,
    /// Optional replacement key; omitted from serialization when empty.
    #[serde(default, skip_serializing_if = "String::is_empty")]
    pub replacement_pubkey: String,
    /// Optional free-form reason; omitted from serialization when empty.
    #[serde(default, skip_serializing_if = "String::is_empty")]
    pub reason: String,
}
/// Build a `key.revoke` event. The actor both performs and is the target of
/// the revocation; the id is derived last from the event content.
pub fn new_revocation_event(
    actor_id: &str,
    actor_type: &str,
    payload: RevocationPayload,
    reason: &str,
    before_hash: &str,
    after_hash: &str,
) -> StateEvent {
    let payload_value =
        serde_json::to_value(&payload).expect("RevocationPayload serializes to a JSON object");
    // Draft with an empty id; `event_id` hashes the remaining fields.
    let draft = StateEvent {
        schema: EVENT_SCHEMA.to_string(),
        id: String::new(),
        kind: EVENT_KIND_KEY_REVOKE.to_string(),
        target: StateTarget {
            r#type: "actor".to_string(),
            id: actor_id.to_string(),
        },
        actor: StateActor {
            id: actor_id.to_string(),
            r#type: actor_type.to_string(),
        },
        timestamp: Utc::now().to_rfc3339(),
        reason: reason.to_string(),
        before_hash: before_hash.to_string(),
        after_hash: after_hash.to_string(),
        payload: payload_value,
        caveats: Vec::new(),
        signature: None,
        schema_artifact_id: None,
    };
    let id = event_id(&draft);
    StateEvent { id, ..draft }
}
/// Build an `evidence_atom.locator_repaired` event targeting `atom_id`.
pub fn new_evidence_atom_locator_repair_event(
    atom_id: &str,
    actor_id: &str,
    actor_type: &str,
    reason: &str,
    before_hash: &str,
    after_hash: &str,
    payload: Value,
    caveats: Vec<String>,
) -> StateEvent {
    // Draft with an empty id; `event_id` hashes the remaining fields.
    let draft = StateEvent {
        schema: EVENT_SCHEMA.to_string(),
        id: String::new(),
        kind: EVENT_KIND_EVIDENCE_ATOM_LOCATOR_REPAIRED.to_string(),
        target: StateTarget {
            r#type: "evidence_atom".to_string(),
            id: atom_id.to_string(),
        },
        actor: StateActor {
            id: actor_id.to_string(),
            r#type: actor_type.to_string(),
        },
        timestamp: Utc::now().to_rfc3339(),
        reason: reason.to_string(),
        before_hash: before_hash.to_string(),
        after_hash: after_hash.to_string(),
        payload,
        caveats,
        signature: None,
        schema_artifact_id: None,
    };
    let id = event_id(&draft);
    StateEvent { id, ..draft }
}
/// Build a `frontier.conflict_resolved` event targeting a frontier
/// observation. Hash boundaries are [`NULL_HASH`]: this event records a
/// resolution rather than a state transition on a hashed object.
pub fn new_frontier_conflict_resolved_event(
    frontier_id: &str,
    actor_id: &str,
    actor_type: &str,
    reason: &str,
    payload: Value,
    caveats: Vec<String>,
) -> StateEvent {
    // Draft with an empty id; `event_id` hashes the remaining fields.
    let draft = StateEvent {
        schema: EVENT_SCHEMA.to_string(),
        id: String::new(),
        kind: EVENT_KIND_FRONTIER_CONFLICT_RESOLVED.to_string(),
        target: StateTarget {
            r#type: "frontier_observation".to_string(),
            id: frontier_id.to_string(),
        },
        actor: StateActor {
            id: actor_id.to_string(),
            r#type: actor_type.to_string(),
        },
        timestamp: Utc::now().to_rfc3339(),
        reason: reason.to_string(),
        before_hash: NULL_HASH.to_string(),
        after_hash: NULL_HASH.to_string(),
        payload,
        caveats,
        signature: None,
        schema_artifact_id: None,
    };
    let id = event_id(&draft);
    StateEvent { id, ..draft }
}
/// Build a `bridge.reviewed` event targeting `bridge_id`. Hash boundaries
/// are [`NULL_HASH`]: reviews do not transition a hashed object state.
pub fn new_bridge_reviewed_event(
    bridge_id: &str,
    actor_id: &str,
    actor_type: &str,
    reason: &str,
    payload: Value,
    caveats: Vec<String>,
) -> StateEvent {
    // Draft with an empty id; `event_id` hashes the remaining fields.
    let draft = StateEvent {
        schema: EVENT_SCHEMA.to_string(),
        id: String::new(),
        kind: EVENT_KIND_BRIDGE_REVIEWED.to_string(),
        target: StateTarget {
            r#type: "bridge".to_string(),
            id: bridge_id.to_string(),
        },
        actor: StateActor {
            id: actor_id.to_string(),
            r#type: actor_type.to_string(),
        },
        timestamp: Utc::now().to_rfc3339(),
        reason: reason.to_string(),
        before_hash: NULL_HASH.to_string(),
        after_hash: NULL_HASH.to_string(),
        payload,
        caveats,
        signature: None,
        schema_artifact_id: None,
    };
    let id = event_id(&draft);
    StateEvent { id, ..draft }
}
/// SHA-256 content hash (prefixed "sha256:") over an atom's canonical bytes.
pub fn evidence_atom_hash(atom: &crate::sources::EvidenceAtom) -> String {
    let digest = Sha256::digest(canonical::to_canonical_bytes(atom).unwrap_or_default());
    format!("sha256:{}", hex::encode(digest))
}
pub fn evidence_atom_hash_by_id(frontier: &Project, atom_id: &str) -> String {
frontier
.evidence_atoms
.iter()
.find(|atom| atom.id == atom_id)
.map(evidence_atom_hash)
.unwrap_or_else(|| NULL_HASH.to_string())
}
/// SHA-256 content hash (prefixed "sha256:") of a finding's canonical bytes.
/// Links are cleared before hashing, so attaching links does not change a
/// finding's content hash.
pub fn finding_hash(finding: &FindingBundle) -> String {
    let mut stripped = finding.clone();
    stripped.links.clear();
    let digest = Sha256::digest(canonical::to_canonical_bytes(&stripped).unwrap_or_default());
    format!("sha256:{}", hex::encode(digest))
}
pub fn finding_hash_by_id(frontier: &Project, finding_id: &str) -> String {
frontier
.findings
.iter()
.find(|finding| finding.id == finding_id)
.map(finding_hash)
.unwrap_or_else(|| NULL_HASH.to_string())
}
/// SHA-256 over the canonical bytes of the whole event list.
/// Note: returns bare hex, without the "sha256:" prefix used by
/// `finding_hash`/`evidence_atom_hash`.
pub fn event_log_hash(events: &[StateEvent]) -> String {
    let canonical_bytes = canonical::to_canonical_bytes(events).unwrap_or_default();
    hex::encode(Sha256::digest(canonical_bytes))
}
/// Hash a project snapshot, excluding volatile bookkeeping fields.
///
/// The `events`, `signatures`, and `proof_state` keys are removed from the
/// serialized project before hashing, so appending events or signatures does
/// not change the snapshot hash. Returns bare hex (no "sha256:" prefix),
/// matching `event_log_hash`.
pub fn snapshot_hash(frontier: &Project) -> String {
    // Bind mutably in one step (previously an immutable binding was
    // immediately shadowed by `let mut value = value;`).
    let mut value = serde_json::to_value(frontier).unwrap_or(Value::Null);
    if let Value::Object(map) = &mut value {
        map.remove("events");
        map.remove("signatures");
        map.remove("proof_state");
    }
    let bytes = canonical::to_canonical_bytes(&value).unwrap_or_default();
    hex::encode(Sha256::digest(bytes))
}
/// All events in the log whose target is the finding with id `finding_id`,
/// in log order.
pub fn events_for_finding<'a>(frontier: &'a Project, finding_id: &str) -> Vec<&'a StateEvent> {
    let mut matching = Vec::new();
    for event in &frontier.events {
        if event.target.r#type == "finding" && event.target.id == finding_id {
            matching.push(event);
        }
    }
    matching
}
/// Validate the event log against current project state and report conflicts.
///
/// Checks performed:
/// - log-level duplicate ids and orphan targets (from [`summarize`])
/// - per-event: schema tag equals [`EVENT_SCHEMA`], non-empty reason,
///   non-empty hash boundaries, and payload validity
///   ([`validate_event_payload`])
/// - per-target hash chaining: consecutive events (ordered by timestamp,
///   then id) must satisfy `previous.after_hash == next.before_hash`
/// - for finding chains: the materialized finding hash must match the last
///   event's `after_hash`
pub fn replay_report(frontier: &Project) -> ReplayReport {
    let event_log = summarize(frontier);
    let mut conflicts = Vec::new();
    // An empty log is trivially consistent; short-circuit with "no_events".
    if frontier.events.is_empty() {
        let current_hash = snapshot_hash(frontier);
        return ReplayReport {
            ok: true,
            status: "no_events".to_string(),
            event_log,
            source_hash: current_hash.clone(),
            event_log_hash: event_log_hash(&frontier.events),
            replayed_hash: current_hash.clone(),
            current_hash,
            conflicts,
        };
    }
    // Promote summary-level anomalies into conflict entries.
    for duplicate in &event_log.duplicate_ids {
        conflicts.push(format!("duplicate event id: {duplicate}"));
    }
    for orphan in &event_log.orphan_targets {
        conflicts.push(format!("orphan event target: {orphan}"));
    }
    // Per-event checks, while grouping events into per-target chains keyed
    // by "type:id".
    let mut chains = BTreeMap::<String, Vec<&StateEvent>>::new();
    for event in &frontier.events {
        if event.schema != EVENT_SCHEMA {
            conflicts.push(format!(
                "unsupported event schema for {}: {}",
                event.id, event.schema
            ));
        }
        if event.reason.trim().is_empty() {
            conflicts.push(format!("event {} has empty reason", event.id));
        }
        if event.before_hash.trim().is_empty() || event.after_hash.trim().is_empty() {
            conflicts.push(format!("event {} has empty hash boundary", event.id));
        }
        if let Err(err) = validate_event_payload(&event.kind, &event.payload) {
            conflicts.push(format!("event {} payload invalid: {err}", event.id));
        }
        chains
            .entry(format!("{}:{}", event.target.r#type, event.target.id))
            .or_default()
            .push(event);
    }
    // Verify hash chaining within each target's event sequence.
    for (target, events) in chains {
        let mut sorted = events;
        // RFC-3339 timestamps from this module sort lexicographically; ties
        // break on event id for determinism.
        sorted.sort_by(|a, b| a.timestamp.cmp(&b.timestamp).then(a.id.cmp(&b.id)));
        for pair in sorted.windows(2) {
            let previous = pair[0];
            let next = pair[1];
            if previous.after_hash != next.before_hash {
                conflicts.push(format!(
                    "event chain break for {target}: {} after_hash does not match {} before_hash",
                    previous.id, next.id
                ));
            }
        }
        // For findings, the chain tip must agree with materialized state.
        if let Some(last) = sorted.last()
            && last.target.r#type == "finding"
        {
            let current = finding_hash_by_id(frontier, &last.target.id);
            if current != last.after_hash {
                conflicts.push(format!(
                    "materialized finding {} hash does not match last event {}",
                    last.target.id, last.id
                ));
            }
        }
    }
    let current_hash = snapshot_hash(frontier);
    let ok = conflicts.is_empty();
    ReplayReport {
        ok,
        status: if ok { "ok" } else { "conflict" }.to_string(),
        event_log,
        source_hash: current_hash.clone(),
        event_log_hash: event_log_hash(&frontier.events),
        replayed_hash: if ok {
            current_hash.clone()
        } else {
            "unavailable".to_string()
        },
        current_hash,
        conflicts,
    }
}
/// JSON form of [`replay_report`]; falls back to `{"ok": false}` if the
/// report itself fails to serialize.
pub fn replay_report_json(frontier: &Project) -> Value {
    let report = replay_report(frontier);
    match serde_json::to_value(report) {
        Ok(value) => value,
        Err(_) => json!({"ok": false}),
    }
}
/// Build an [`EventLogSummary`] over the project's event log: per-kind
/// counts, first/last timestamps, duplicate event ids, and finding targets
/// that no longer exist in the project (retraction events exempt).
pub fn summarize(frontier: &Project) -> EventLogSummary {
    let known_findings: BTreeSet<&str> = frontier
        .findings
        .iter()
        .map(|finding| finding.id.as_str())
        .collect();
    let mut kinds = BTreeMap::<String, usize>::new();
    let mut seen_ids = BTreeSet::<String>::new();
    let mut duplicates = BTreeSet::<String>::new();
    let mut orphans = BTreeSet::<String>::new();
    let mut timestamps = Vec::<String>::new();
    for event in &frontier.events {
        *kinds.entry(event.kind.clone()).or_default() += 1;
        // `insert` returns false for a repeat id.
        if !seen_ids.insert(event.id.clone()) {
            duplicates.insert(event.id.clone());
        }
        // A finding-targeted event whose finding is gone is an orphan,
        // unless it is the retraction that removed it.
        if event.target.r#type == "finding"
            && !known_findings.contains(event.target.id.as_str())
            && event.kind != "finding.retracted"
        {
            orphans.insert(event.target.id.clone());
        }
        timestamps.push(event.timestamp.clone());
    }
    EventLogSummary {
        count: frontier.events.len(),
        kinds,
        first_timestamp: timestamps.iter().min().cloned(),
        last_timestamp: timestamps.iter().max().cloned(),
        duplicate_ids: duplicates.into_iter().collect(),
        orphan_targets: orphans.into_iter().collect(),
    }
}
/// Check that `value` is a sha256 commitment: an optional "sha256:" prefix
/// followed by exactly 64 ASCII hex digits. Bare 64-hex strings (no prefix)
/// are also accepted; `field` is only used to label the error message.
fn validate_sha256_commitment(field: &str, value: &str) -> Result<(), String> {
    let digits = match value.strip_prefix("sha256:") {
        Some(rest) => rest,
        None => value,
    };
    let well_formed = digits.len() == 64 && digits.bytes().all(|b| b.is_ascii_hexdigit());
    if well_formed {
        Ok(())
    } else {
        Err(format!("{field} must be sha256:<64hex>"))
    }
}
/// Validate an event `payload` against the structural rules for its `kind`.
///
/// Every kind requires `payload` to be a JSON object; per-kind rules then
/// check required fields, id prefixes (`vtr_`, `va_`, `vev_`, `vrep_`,
/// `vpred_`, `vbr_`, `vpf_`), numeric ranges, and enumerated status values.
/// Unknown kinds are rejected.
///
/// # Errors
/// Returns a human-readable description of the first rule violated.
pub fn validate_event_payload(kind: &str, payload: &Value) -> Result<(), String> {
    let object = payload.as_object().ok_or_else(|| {
        if matches!(payload, Value::Null) {
            "payload must be a JSON object (got null)".to_string()
        } else {
            "payload must be a JSON object".to_string()
        }
    })?;
    // Shared accessors for required top-level payload fields.
    let require_str = |key: &str| -> Result<&str, String> {
        object
            .get(key)
            .and_then(Value::as_str)
            .ok_or_else(|| format!("missing required string field '{key}'"))
    };
    let require_f64 = |key: &str| -> Result<f64, String> {
        object
            .get(key)
            .and_then(Value::as_f64)
            .ok_or_else(|| format!("missing required number field '{key}'"))
    };
    match kind {
        "finding.asserted" => {
            require_str("proposal_id")?;
        }
        "finding.reviewed" => {
            require_str("proposal_id")?;
            let status = require_str("status")?;
            if !matches!(
                status,
                "accepted" | "approved" | "contested" | "needs_revision" | "rejected"
            ) {
                return Err(format!("invalid review status '{status}'"));
            }
        }
        "finding.noted" | "finding.caveated" => {
            require_str("proposal_id")?;
            require_str("annotation_id")?;
            let text = require_str("text")?;
            if text.trim().is_empty() {
                return Err("payload.text must be non-empty".to_string());
            }
            // Provenance is optional, but when present it must carry at
            // least one non-blank identifying field.
            if let Some(prov) = object.get("provenance") {
                let prov_obj = prov
                    .as_object()
                    .ok_or("payload.provenance must be a JSON object when present")?;
                let has_id = prov_obj
                    .get("doi")
                    .and_then(Value::as_str)
                    .is_some_and(|s| !s.trim().is_empty())
                    || prov_obj
                        .get("pmid")
                        .and_then(Value::as_str)
                        .is_some_and(|s| !s.trim().is_empty())
                    || prov_obj
                        .get("title")
                        .and_then(Value::as_str)
                        .is_some_and(|s| !s.trim().is_empty());
                if !has_id {
                    return Err(
                        "payload.provenance must include at least one of doi/pmid/title"
                            .to_string(),
                    );
                }
            }
        }
        "finding.confidence_revised" => {
            require_str("proposal_id")?;
            let new_score = require_f64("new_score")?;
            if !(0.0..=1.0).contains(&new_score) {
                return Err(format!("new_score {new_score} out of [0.0, 1.0]"));
            }
            // previous_score must exist and be numeric; its value is unchecked.
            let _ = require_f64("previous_score")?;
        }
        "finding.rejected" => {
            require_str("proposal_id")?;
        }
        "finding.superseded" => {
            require_str("proposal_id")?;
            require_str("new_finding_id")?;
        }
        "finding.retracted" => {
            require_str("proposal_id")?;
            if let Some(affected) = object.get("affected") {
                let _ = affected
                    .as_u64()
                    .ok_or("affected must be a non-negative integer")?;
            }
        }
        "finding.dependency_invalidated" => {
            require_str("upstream_finding_id")?;
            require_str("upstream_event_id")?;
            let depth = object
                .get("depth")
                .and_then(Value::as_u64)
                .ok_or("missing required positive integer 'depth'")?;
            if depth == 0 {
                return Err("depth must be >= 1 (genesis is the source retraction)".to_string());
            }
            require_str("proposal_id")?;
        }
        "frontier.created" => {
            require_str("name")?;
            require_str("creator")?;
        }
        "prediction.expired_unresolved" => {
            require_str("prediction_id")?;
            require_str("resolves_by")?;
            require_str("expired_at")?;
        }
        "frontier.synced_with_peer" => {
            require_str("peer_id")?;
            require_str("peer_snapshot_hash")?;
            require_str("our_snapshot_hash")?;
            let _ = object
                .get("divergence_count")
                .and_then(Value::as_u64)
                .ok_or("missing required non-negative integer 'divergence_count'")?;
        }
        "frontier.conflict_detected" => {
            require_str("peer_id")?;
            require_str("finding_id")?;
            let kind = require_str("kind")?;
            if kind.trim().is_empty() {
                return Err("payload.kind must be a non-empty string".to_string());
            }
        }
        "frontier.conflict_resolved" => {
            let conflict_event_id = require_str("conflict_event_id")?;
            if conflict_event_id.trim().is_empty() {
                return Err("payload.conflict_event_id must be a non-empty string".to_string());
            }
            let resolved_by = require_str("resolved_by")?;
            if resolved_by.trim().is_empty() {
                return Err("payload.resolved_by must be a non-empty string".to_string());
            }
            let note = require_str("resolution_note")?;
            if note.trim().is_empty() {
                return Err("payload.resolution_note must be a non-empty string".to_string());
            }
            // winning_proposal_id is optional; when present it must be a
            // string or explicit null.
            if let Some(value) = object.get("winning_proposal_id")
                && !value.is_null()
                && !value.is_string()
            {
                return Err("payload.winning_proposal_id must be a string when present".to_string());
            }
        }
        "replication.deposited" => {
            let rep = object
                .get("replication")
                .ok_or("payload.replication is required")?;
            if !rep.is_object() {
                return Err("payload.replication must be an object".to_string());
            }
            let id = rep
                .get("id")
                .and_then(Value::as_str)
                .ok_or("payload.replication.id is required (vrep_<hex>)")?;
            if !id.starts_with("vrep_") {
                return Err(format!(
                    "payload.replication.id must start with 'vrep_', got '{id}'"
                ));
            }
        }
        "prediction.deposited" => {
            let pred = object
                .get("prediction")
                .ok_or("payload.prediction is required")?;
            if !pred.is_object() {
                return Err("payload.prediction must be an object".to_string());
            }
            let id = pred
                .get("id")
                .and_then(Value::as_str)
                .ok_or("payload.prediction.id is required (vpred_<hex>)")?;
            if !id.starts_with("vpred_") {
                return Err(format!(
                    "payload.prediction.id must start with 'vpred_', got '{id}'"
                ));
            }
        }
        "bridge.reviewed" => {
            let bridge_id = require_str("bridge_id")?;
            if !bridge_id.starts_with("vbr_") {
                return Err(format!(
                    "payload.bridge_id must start with 'vbr_', got '{bridge_id}'"
                ));
            }
            let status = require_str("status")?;
            if !matches!(status, "confirmed" | "refuted") {
                return Err(format!(
                    "payload.status must be 'confirmed' or 'refuted', got '{status}'"
                ));
            }
            if let Some(value) = object.get("note")
                && !value.is_null()
                && !value.is_string()
            {
                return Err("payload.note must be a string when present".to_string());
            }
        }
        "assertion.reinterpreted_causal" => {
            require_str("proposal_id")?;
            // Both the before and after causal blocks are validated with the
            // same rules, so share a closure.
            let check_block = |block_name: &str| -> Result<(), String> {
                let block = object
                    .get(block_name)
                    .and_then(Value::as_object)
                    .ok_or_else(|| format!("payload.{block_name} must be an object"))?;
                if let Some(claim) = block.get("claim").and_then(Value::as_str)
                    && !crate::bundle::VALID_CAUSAL_CLAIMS.contains(&claim)
                {
                    return Err(format!(
                        "{block_name}.claim '{claim}' not in {:?}",
                        crate::bundle::VALID_CAUSAL_CLAIMS
                    ));
                }
                if let Some(grade) = block.get("grade").and_then(Value::as_str)
                    && !crate::bundle::VALID_CAUSAL_EVIDENCE_GRADES.contains(&grade)
                {
                    return Err(format!(
                        "{block_name}.grade '{grade}' not in {:?}",
                        crate::bundle::VALID_CAUSAL_EVIDENCE_GRADES
                    ));
                }
                Ok(())
            };
            check_block("before")?;
            check_block("after")?;
        }
        "finding.threshold_set" => {
            let threshold = object
                .get("threshold")
                .and_then(Value::as_u64)
                .ok_or("missing required positive integer 'threshold'")?;
            if threshold == 0 {
                return Err("threshold must be >= 1".to_string());
            }
        }
        "finding.threshold_met" => {
            let count = object
                .get("signature_count")
                .and_then(Value::as_u64)
                .ok_or("missing required positive integer 'signature_count'")?;
            let threshold = object
                .get("threshold")
                .and_then(Value::as_u64)
                .ok_or("missing required positive integer 'threshold'")?;
            if count < threshold {
                return Err(format!(
                    "signature_count {count} below threshold {threshold}"
                ));
            }
        }
        EVENT_KIND_KEY_REVOKE => {
            let revoked = require_str("revoked_pubkey")?;
            if revoked.len() != 64 || !revoked.chars().all(|c| c.is_ascii_hexdigit()) {
                return Err(format!(
                    "revoked_pubkey must be 64 hex chars (Ed25519 pubkey), got {} chars",
                    revoked.len()
                ));
            }
            let revoked_at = require_str("revoked_at")?;
            if revoked_at.trim().is_empty() {
                return Err("revoked_at must be a non-empty ISO-8601 timestamp".to_string());
            }
            if DateTime::parse_from_rfc3339(revoked_at).is_err() {
                return Err(format!(
                    "revoked_at must parse as RFC-3339/ISO-8601, got {revoked_at:?}"
                ));
            }
            // replacement_pubkey is optional; when present and non-empty it
            // must be well-formed and differ from the revoked key.
            if let Some(replacement) = object.get("replacement_pubkey")
                && let Some(rep_str) = replacement.as_str()
                && !rep_str.is_empty()
                && (rep_str.len() != 64 || !rep_str.chars().all(|c| c.is_ascii_hexdigit()))
            {
                return Err(format!(
                    "replacement_pubkey must be 64 hex chars when present, got {} chars",
                    rep_str.len()
                ));
            }
            if let Some(replacement) = object.get("replacement_pubkey").and_then(Value::as_str)
                && !replacement.is_empty()
                && replacement.eq_ignore_ascii_case(revoked)
            {
                return Err("replacement_pubkey must differ from revoked_pubkey".to_string());
            }
        }
        EVENT_KIND_NEGATIVE_RESULT_ASSERTED => {
            require_str("proposal_id")?;
            let nr = object
                .get("negative_result")
                .and_then(Value::as_object)
                .ok_or("payload.negative_result must be a JSON object")?;
            // The nested kind object carries its own "kind" discriminator.
            let nr_kind = nr
                .get("kind")
                .and_then(|k| k.as_object())
                .and_then(|k| k.get("kind"))
                .and_then(Value::as_str)
                .ok_or(
                    "payload.negative_result.kind.kind must be 'registered_trial' or 'exploratory'",
                )?;
            match nr_kind {
                "registered_trial" => {
                    let kind_obj = nr
                        .get("kind")
                        .and_then(Value::as_object)
                        .expect("checked above");
                    for k in ["endpoint", "intervention", "comparator", "population"] {
                        let v = kind_obj
                            .get(k)
                            .and_then(Value::as_str)
                            .ok_or_else(|| format!("registered_trial.{k} must be a string"))?;
                        if v.trim().is_empty() {
                            return Err(format!("registered_trial.{k} must be non-empty"));
                        }
                    }
                    let _ = kind_obj
                        .get("n_enrolled")
                        .and_then(Value::as_u64)
                        .ok_or("registered_trial.n_enrolled must be a non-negative integer")?;
                    let power = kind_obj
                        .get("power")
                        .and_then(Value::as_f64)
                        .ok_or("registered_trial.power must be a number on [0, 1]")?;
                    if !(0.0..=1.0).contains(&power) {
                        return Err(format!("registered_trial.power {power} out of [0.0, 1.0]"));
                    }
                    let ci = kind_obj
                        .get("effect_size_ci")
                        .and_then(Value::as_array)
                        .ok_or("registered_trial.effect_size_ci must be a 2-element array [lower, upper]")?;
                    if ci.len() != 2 {
                        return Err(format!(
                            "registered_trial.effect_size_ci must have length 2, got {}",
                            ci.len()
                        ));
                    }
                    let lower = ci[0]
                        .as_f64()
                        .ok_or("registered_trial.effect_size_ci[0] must be a number")?;
                    let upper = ci[1]
                        .as_f64()
                        .ok_or("registered_trial.effect_size_ci[1] must be a number")?;
                    if upper < lower {
                        return Err(format!(
                            "registered_trial.effect_size_ci upper {upper} below lower {lower}"
                        ));
                    }
                }
                "exploratory" => {
                    let kind_obj = nr
                        .get("kind")
                        .and_then(Value::as_object)
                        .expect("checked above");
                    for k in ["reagent", "observation"] {
                        let v = kind_obj
                            .get(k)
                            .and_then(Value::as_str)
                            .ok_or_else(|| format!("exploratory.{k} must be a string"))?;
                        if v.trim().is_empty() {
                            return Err(format!("exploratory.{k} must be non-empty"));
                        }
                    }
                    let attempts = kind_obj
                        .get("attempts")
                        .and_then(Value::as_u64)
                        .ok_or("exploratory.attempts must be a positive integer")?;
                    if attempts == 0 {
                        return Err("exploratory.attempts must be >= 1".to_string());
                    }
                }
                other => {
                    return Err(format!(
                        "negative_result.kind.kind '{other}' must be 'registered_trial' or 'exploratory'"
                    ));
                }
            }
            let depositor = nr
                .get("deposited_by")
                .and_then(Value::as_str)
                .ok_or("payload.negative_result.deposited_by must be a non-empty string")?;
            if depositor.trim().is_empty() {
                return Err("payload.negative_result.deposited_by must be non-empty".to_string());
            }
        }
        EVENT_KIND_NEGATIVE_RESULT_REVIEWED => {
            require_str("proposal_id")?;
            let status = require_str("status")?;
            if !matches!(
                status,
                "accepted" | "approved" | "contested" | "needs_revision" | "rejected"
            ) {
                return Err(format!("invalid review status '{status}'"));
            }
        }
        EVENT_KIND_NEGATIVE_RESULT_RETRACTED => {
            require_str("proposal_id")?;
        }
        EVENT_KIND_TRAJECTORY_CREATED => {
            require_str("proposal_id")?;
            let traj = object
                .get("trajectory")
                .and_then(Value::as_object)
                .ok_or("payload.trajectory must be a JSON object")?;
            let depositor = traj
                .get("deposited_by")
                .and_then(Value::as_str)
                .ok_or("payload.trajectory.deposited_by must be a non-empty string")?;
            if depositor.trim().is_empty() {
                return Err("payload.trajectory.deposited_by must be non-empty".to_string());
            }
            let id = traj
                .get("id")
                .and_then(Value::as_str)
                .ok_or("payload.trajectory.id must be a vtr_<hex>")?;
            if !id.starts_with("vtr_") {
                return Err(format!(
                    "payload.trajectory.id must start with 'vtr_', got '{id}'"
                ));
            }
        }
        EVENT_KIND_TRAJECTORY_STEP_APPENDED => {
            require_str("proposal_id")?;
            let parent = require_str("parent_trajectory_id")?;
            if !parent.starts_with("vtr_") {
                return Err(format!(
                    "parent_trajectory_id must start with 'vtr_', got '{parent}'"
                ));
            }
            let step = object
                .get("step")
                .and_then(Value::as_object)
                .ok_or("payload.step must be a JSON object")?;
            let kind_str = step.get("kind").and_then(Value::as_str).ok_or(
                "payload.step.kind must be one of hypothesis|tried|ruled_out|observed|refined",
            )?;
            if !matches!(
                kind_str,
                "hypothesis" | "tried" | "ruled_out" | "observed" | "refined"
            ) {
                return Err(format!(
                    "payload.step.kind '{kind_str}' must be one of hypothesis|tried|ruled_out|observed|refined"
                ));
            }
            let description = step
                .get("description")
                .and_then(Value::as_str)
                .ok_or("payload.step.description must be a non-empty string")?;
            if description.trim().is_empty() {
                return Err("payload.step.description must be non-empty".to_string());
            }
            let actor = step
                .get("actor")
                .and_then(Value::as_str)
                .ok_or("payload.step.actor must be a non-empty string")?;
            if actor.trim().is_empty() {
                return Err("payload.step.actor must be non-empty".to_string());
            }
        }
        EVENT_KIND_TRAJECTORY_REVIEWED => {
            require_str("proposal_id")?;
            let status = require_str("status")?;
            if !matches!(
                status,
                "accepted" | "approved" | "contested" | "needs_revision" | "rejected"
            ) {
                return Err(format!("invalid review status '{status}'"));
            }
        }
        EVENT_KIND_TRAJECTORY_RETRACTED => {
            require_str("proposal_id")?;
        }
        EVENT_KIND_ARTIFACT_ASSERTED => {
            require_str("proposal_id")?;
            let artifact = object
                .get("artifact")
                .and_then(Value::as_object)
                .ok_or("payload.artifact must be a JSON object")?;
            let id = artifact
                .get("id")
                .and_then(Value::as_str)
                .ok_or("payload.artifact.id must be a va_<hex>")?;
            if !id.starts_with("va_") {
                return Err(format!(
                    "payload.artifact.id must start with 'va_', got '{id}'"
                ));
            }
            // Artifact ids are strictly va_<16hex>, unlike other prefixes
            // which only check the prefix.
            let id_hex = id.trim_start_matches("va_");
            if id_hex.len() != 16 || !id_hex.chars().all(|c| c.is_ascii_hexdigit()) {
                return Err("payload.artifact.id must be va_<16hex>".to_string());
            }
            let kind = artifact
                .get("kind")
                .and_then(Value::as_str)
                .ok_or("payload.artifact.kind must be a string")?;
            if !crate::bundle::valid_artifact_kind(kind) {
                return Err(format!("payload.artifact.kind '{kind}' is not supported"));
            }
            for key in ["name", "content_hash", "storage_mode"] {
                let value = artifact
                    .get(key)
                    .and_then(Value::as_str)
                    .ok_or_else(|| format!("payload.artifact.{key} must be a string"))?;
                if value.trim().is_empty() {
                    return Err(format!("payload.artifact.{key} must be non-empty"));
                }
            }
            let content_hash = artifact
                .get("content_hash")
                .and_then(Value::as_str)
                .expect("content_hash checked above");
            validate_sha256_commitment("payload.artifact.content_hash", content_hash)?;
        }
        EVENT_KIND_ARTIFACT_REVIEWED => {
            require_str("proposal_id")?;
            let status = require_str("status")?;
            if !matches!(
                status,
                "accepted" | "approved" | "contested" | "needs_revision" | "rejected"
            ) {
                return Err(format!("invalid review status '{status}'"));
            }
        }
        EVENT_KIND_ARTIFACT_RETRACTED => {
            require_str("proposal_id")?;
        }
        EVENT_KIND_TIER_SET => {
            require_str("proposal_id")?;
            let object_type = require_str("object_type")?;
            if !matches!(
                object_type,
                "finding" | "negative_result" | "trajectory" | "artifact"
            ) {
                return Err(format!(
                    "tier.set object_type '{object_type}' must be one of finding, negative_result, trajectory, artifact"
                ));
            }
            require_str("object_id")?;
            let new_tier = require_str("new_tier")?;
            crate::access_tier::AccessTier::parse(new_tier)?;
            if let Some(prev) = object.get("previous_tier").and_then(Value::as_str) {
                crate::access_tier::AccessTier::parse(prev)?;
            }
        }
        EVENT_KIND_EVIDENCE_ATOM_LOCATOR_REPAIRED => {
            require_str("proposal_id")?;
            let source_id = require_str("source_id")?;
            if source_id.trim().is_empty() {
                return Err("payload.source_id must be non-empty".to_string());
            }
            let locator = require_str("locator")?;
            if locator.trim().is_empty() {
                return Err("payload.locator must be non-empty".to_string());
            }
        }
        EVENT_KIND_FINDING_SPAN_REPAIRED => {
            require_str("proposal_id")?;
            let section = require_str("section")?;
            if section.trim().is_empty() {
                return Err("payload.section must be non-empty".to_string());
            }
            let text = require_str("text")?;
            if text.trim().is_empty() {
                return Err("payload.text must be non-empty".to_string());
            }
        }
        EVENT_KIND_FINDING_ENTITY_RESOLVED => {
            require_str("proposal_id")?;
            let entity_name = require_str("entity_name")?;
            if entity_name.trim().is_empty() {
                return Err("payload.entity_name must be non-empty".to_string());
            }
            let source = require_str("source")?;
            if source.trim().is_empty() {
                return Err("payload.source must be non-empty".to_string());
            }
            let id = require_str("id")?;
            if id.trim().is_empty() {
                return Err("payload.id must be non-empty".to_string());
            }
            let confidence = require_f64("confidence")?;
            if !(0.0..=1.0).contains(&confidence) {
                return Err(format!("payload.confidence {confidence} out of [0.0, 1.0]"));
            }
        }
        EVENT_KIND_ATTESTATION_RECORDED => {
            let target_id = require_str("target_event_id")?;
            if !target_id.starts_with("vev_") {
                return Err(format!(
                    "payload.target_event_id must start with 'vev_', got '{target_id}'"
                ));
            }
            let attester = require_str("attester_id")?;
            if attester.trim().is_empty() {
                return Err("payload.attester_id must be non-empty".to_string());
            }
            let scope = require_str("scope_note")?;
            if scope.trim().is_empty() {
                return Err("payload.scope_note must be non-empty".to_string());
            }
            if let Some(sig) = object.get("signature")
                && !sig.is_null()
                && !sig.is_string()
            {
                return Err("payload.signature must be a string when present".to_string());
            }
            if let Some(proof) = object.get("proof_id")
                && !proof.is_null()
                && let Some(s) = proof.as_str()
                && !s.starts_with("vpf_")
            {
                return Err(format!(
                    "payload.proof_id must start with 'vpf_' when present, got '{s}'"
                ));
            }
        }
        EVENT_KIND_FINDING_ENTITY_ADDED => {
            require_str("proposal_id")?;
            let entity_name = require_str("entity_name")?;
            if entity_name.trim().is_empty() {
                return Err("payload.entity_name must be non-empty".to_string());
            }
            let entity_type = require_str("entity_type")?;
            const VALID_ENTITY_TYPES: &[&str] = &[
                "gene",
                "protein",
                "compound",
                "disease",
                "cell_type",
                "organism",
                "pathway",
                "assay",
                "anatomical_structure",
                "particle",
                "instrument",
                "dataset",
                "quantity",
                "other",
            ];
            if !VALID_ENTITY_TYPES.contains(&entity_type) {
                return Err(format!(
                    "payload.entity_type '{entity_type}' not in {VALID_ENTITY_TYPES:?}"
                ));
            }
            let reason = require_str("reason")?;
            if reason.trim().is_empty() {
                return Err("payload.reason must be non-empty".to_string());
            }
        }
        other => return Err(format!("unknown event kind '{other}'")),
    }
    Ok(())
}
/// Cross-check a `bridge.reviewed` payload against the frontier's known
/// bridge ids: the payload's `bridge_id` must match one of them.
pub fn validate_bridge_reviewed_against_state(
    payload: &Value,
    known_bridge_ids: &[String],
) -> Result<(), String> {
    let Some(object) = payload.as_object() else {
        return Err("payload must be a JSON object".to_string());
    };
    let Some(bridge_id) = object.get("bridge_id").and_then(Value::as_str) else {
        return Err("missing required string field 'bridge_id'".to_string());
    };
    if known_bridge_ids.iter().any(|id| id == bridge_id) {
        Ok(())
    } else {
        Err(format!(
            "bridge_id '{bridge_id}' not present on this frontier (no matching .vela/bridges/<id>.json)"
        ))
    }
}
/// Public wrapper around the private [`event_id`]: recompute the
/// content-addressed id for `event` (the derivation excludes `id`,
/// `signature`, and `schema_artifact_id`).
pub fn compute_event_id(event: &StateEvent) -> String {
    event_id(event)
}
/// Derive the content-addressed event id: `vev_` followed by the first
/// 16 hex chars of the SHA-256 of the event's canonical JSON.
/// The hashed view intentionally omits `id` (which this derives),
/// `signature`, and `schema_artifact_id`, so those may change without
/// changing the id.
fn event_id(event: &StateEvent) -> String {
    let content = json!({
        "schema": event.schema,
        "kind": event.kind,
        "target": event.target,
        "actor": event.actor,
        "timestamp": event.timestamp,
        "reason": event.reason,
        "before_hash": event.before_hash,
        "after_hash": event.after_hash,
        "payload": event.payload,
        "caveats": event.caveats,
    });
    let bytes = canonical::to_canonical_bytes(&content).unwrap_or_default();
    format!("vev_{}", &hex::encode(Sha256::digest(bytes))[..16])
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::bundle::{
        Assertion, Conditions, Confidence, Evidence, Extraction, FindingBundle, Flags, Provenance,
    };
    use crate::project;

    // Minimal, fully-populated finding fixture shared by the replay tests.
    fn finding() -> FindingBundle {
        FindingBundle::new(
            Assertion {
                text: "LRP1 clears amyloid beta at the BBB".to_string(),
                assertion_type: "mechanism".to_string(),
                entities: Vec::new(),
                relation: None,
                direction: None,
                causal_claim: None,
                causal_evidence_grade: None,
            },
            Evidence {
                evidence_type: "experimental".to_string(),
                model_system: "mouse".to_string(),
                species: Some("Mus musculus".to_string()),
                method: "assay".to_string(),
                sample_size: None,
                effect_size: None,
                p_value: None,
                replicated: false,
                replication_count: None,
                evidence_spans: Vec::new(),
            },
            Conditions {
                text: "mouse model".to_string(),
                species_verified: Vec::new(),
                species_unverified: Vec::new(),
                in_vitro: false,
                in_vivo: true,
                human_data: false,
                clinical_trial: false,
                concentration_range: None,
                duration: None,
                age_group: None,
                cell_type: None,
            },
            Confidence::raw(0.6, "test", 0.8),
            Provenance {
                source_type: "published_paper".to_string(),
                doi: None,
                pmid: None,
                pmc: None,
                openalex_id: None,
                url: None,
                title: "Test source".to_string(),
                authors: Vec::new(),
                year: Some(2026),
                journal: None,
                license: None,
                publisher: None,
                funders: Vec::new(),
                extraction: Extraction::default(),
                review: None,
                citation_count: None,
            },
            Flags {
                gap: false,
                negative_space: false,
                contested: false,
                retracted: false,
                declining: false,
                gravity_well: false,
                review_state: None,
                superseded: false,
                signature_threshold: None,
                jointly_accepted: false,
            },
        )
    }

    // Recomputing the id from identical content must reproduce it exactly.
    #[test]
    fn event_id_is_deterministic_for_content() {
        let event = new_finding_event(FindingEventInput {
            kind: "finding.reviewed",
            finding_id: "vf_test",
            actor_id: "reviewer",
            actor_type: "human",
            reason: "checked",
            before_hash: NULL_HASH,
            after_hash: "sha256:abc",
            payload: json!({"status": "accepted", "proposal_id": "vpr_test"}),
            caveats: vec![],
        });
        let mut same = event.clone();
        // Wipe the id, then recompute it from the (unchanged) content fields.
        same.id = String::new();
        same.id = super::event_id(&same);
        assert_eq!(event.id, same.id);
    }

    // A freshly assembled frontier carries exactly one genesis event and
    // replays cleanly.
    #[test]
    fn genesis_only_event_log_replays_ok() {
        let frontier = project::assemble("test", Vec::new(), 0, 0, "test");
        let report = replay_report(&frontier);
        assert!(report.ok, "{:?}", report.conflicts);
        assert_eq!(report.event_log.count, 1);
        assert_eq!(report.event_log.kinds.get("frontier.created"), Some(&1));
    }

    // The same event appearing twice must surface as a duplicate-id conflict.
    #[test]
    fn replay_detects_duplicate_event_ids() {
        let finding = finding();
        let after_hash = finding_hash(&finding);
        let event = new_finding_event(FindingEventInput {
            kind: "finding.reviewed",
            finding_id: &finding.id,
            actor_id: "reviewer",
            actor_type: "human",
            reason: "checked",
            before_hash: &after_hash,
            after_hash: &after_hash,
            payload: json!({"status": "accepted", "proposal_id": "vpr_test"}),
            caveats: vec![],
        });
        let mut frontier = project::assemble("test", vec![finding], 0, 0, "test");
        // Replace the log with two copies of the identical event.
        frontier.events = vec![event.clone(), event];
        let report = replay_report(&frontier);
        assert!(!report.ok);
        assert_eq!(report.status, "conflict");
        assert!(!report.event_log.duplicate_ids.is_empty());
    }

    // An event targeting a finding absent from the frontier is an orphan.
    #[test]
    fn replay_detects_orphan_targets() {
        let mut frontier = project::assemble("test", Vec::new(), 0, 0, "test");
        frontier.events.push(new_finding_event(FindingEventInput {
            kind: "finding.reviewed",
            finding_id: "vf_missing",
            actor_id: "reviewer",
            actor_type: "human",
            reason: "checked",
            before_hash: NULL_HASH,
            after_hash: "sha256:abc",
            payload: json!({"status": "accepted", "proposal_id": "vpr_test"}),
            caveats: vec![],
        }));
        let report = replay_report(&frontier);
        assert!(!report.ok);
        assert_eq!(report.event_log.orphan_targets, vec!["vf_missing"]);
    }

    // An event whose before/after hashes both match the finding's current
    // hash is a valid no-op boundary and must not be flagged.
    #[test]
    fn replay_accepts_current_hash_boundary() {
        let finding = finding();
        let hash = finding_hash(&finding);
        let event = new_finding_event(FindingEventInput {
            kind: "finding.reviewed",
            finding_id: &finding.id,
            actor_id: "reviewer",
            actor_type: "human",
            reason: "checked",
            before_hash: &hash,
            after_hash: &hash,
            payload: json!({"status": "accepted", "proposal_id": "vpr_test"}),
            caveats: vec![],
        });
        let mut frontier = project::assemble("test", vec![finding], 0, 0, "test");
        frontier.events.push(event);
        let report = replay_report(&frontier);
        assert!(report.ok, "{:?}", report.conflicts);
        assert_eq!(report.status, "ok");
    }

    // frontier.synced_with_peer requires peer_id and divergence_count.
    #[test]
    fn validates_synced_with_peer_payload() {
        // Complete payload: accepted.
        assert!(
            validate_event_payload(
                "frontier.synced_with_peer",
                &json!({
                    "peer_id": "hub:peer",
                    "peer_snapshot_hash": "abc",
                    "our_snapshot_hash": "def",
                    "divergence_count": 3,
                }),
            )
            .is_ok()
        );
        // Missing divergence_count: rejected.
        assert!(
            validate_event_payload(
                "frontier.synced_with_peer",
                &json!({
                    "peer_id": "hub:peer",
                    "peer_snapshot_hash": "abc",
                    "our_snapshot_hash": "def",
                }),
            )
            .is_err()
        );
        // Missing peer_id: rejected.
        assert!(
            validate_event_payload(
                "frontier.synced_with_peer",
                &json!({
                    "peer_snapshot_hash": "abc",
                    "our_snapshot_hash": "def",
                    "divergence_count": 0,
                }),
            )
            .is_err()
        );
    }

    // frontier.conflict_detected requires a non-blank kind and a finding_id.
    #[test]
    fn validates_conflict_detected_payload() {
        // Complete payload: accepted.
        assert!(
            validate_event_payload(
                "frontier.conflict_detected",
                &json!({
                    "peer_id": "hub:peer",
                    "finding_id": "vf_xyz",
                    "kind": "different_review_verdict",
                }),
            )
            .is_ok()
        );
        // Whitespace-only kind: rejected.
        assert!(
            validate_event_payload(
                "frontier.conflict_detected",
                &json!({
                    "peer_id": "hub:peer",
                    "finding_id": "vf_xyz",
                    "kind": " ",
                }),
            )
            .is_err()
        );
        // Missing finding_id: rejected.
        assert!(
            validate_event_payload(
                "frontier.conflict_detected",
                &json!({
                    "peer_id": "hub:peer",
                    "kind": "missing_in_peer",
                }),
            )
            .is_err()
        );
    }

    // artifact.asserted validates artifact id shape and content hash format.
    #[test]
    fn validates_artifact_asserted_payload() {
        let good_hash = "sha256:aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa";
        // Well-formed artifact: accepted.
        assert!(
            validate_event_payload(
                EVENT_KIND_ARTIFACT_ASSERTED,
                &json!({
                    "proposal_id": "vpr_test",
                    "artifact": {
                        "id": "va_1234567890abcdef",
                        "kind": "clinical_trial_record",
                        "name": "NCT test record",
                        "content_hash": good_hash,
                        "storage_mode": "embedded",
                    },
                }),
            )
            .is_ok()
        );
        // Id too short: rejected.
        assert!(
            validate_event_payload(
                EVENT_KIND_ARTIFACT_ASSERTED,
                &json!({
                    "proposal_id": "vpr_test",
                    "artifact": {
                        "id": "va_123",
                        "kind": "clinical_trial_record",
                        "name": "NCT test record",
                        "content_hash": good_hash,
                        "storage_mode": "embedded",
                    },
                }),
            )
            .is_err()
        );
        // Malformed content hash: rejected.
        assert!(
            validate_event_payload(
                EVENT_KIND_ARTIFACT_ASSERTED,
                &json!({
                    "proposal_id": "vpr_test",
                    "artifact": {
                        "id": "va_1234567890abcdef",
                        "kind": "clinical_trial_record",
                        "name": "NCT test record",
                        "content_hash": "sha256:not-a-real-hash",
                        "storage_mode": "embedded",
                    },
                }),
            )
            .is_err()
        );
    }

    // assertion.reinterpreted_causal restricts claim/grade to known vocab.
    #[test]
    fn validates_reinterpreted_causal_payload() {
        // Valid claim with valid grade: accepted.
        assert!(
            validate_event_payload(
                "assertion.reinterpreted_causal",
                &json!({
                    "proposal_id": "vpr_test",
                    "before": {},
                    "after": { "claim": "intervention", "grade": "rct" },
                }),
            )
            .is_ok()
        );
        // Grade is optional: accepted.
        assert!(
            validate_event_payload(
                "assertion.reinterpreted_causal",
                &json!({
                    "proposal_id": "vpr_test",
                    "before": { "claim": "correlation" },
                    "after": { "claim": "mediation" },
                }),
            )
            .is_ok()
        );
        // Unknown claim: rejected.
        assert!(
            validate_event_payload(
                "assertion.reinterpreted_causal",
                &json!({
                    "proposal_id": "vpr_test",
                    "before": {},
                    "after": { "claim": "magic" },
                }),
            )
            .is_err()
        );
        // Unknown grade: rejected.
        assert!(
            validate_event_payload(
                "assertion.reinterpreted_causal",
                &json!({
                    "proposal_id": "vpr_test",
                    "before": {},
                    "after": { "claim": "intervention", "grade": "vibes" },
                }),
            )
            .is_err()
        );
        // Missing proposal_id: rejected.
        assert!(
            validate_event_payload(
                "assertion.reinterpreted_causal",
                &json!({
                    "before": {},
                    "after": { "claim": "intervention" },
                }),
            )
            .is_err()
        );
    }

    // Revocation events target the actor and omit empty optional fields from
    // the canonical JSON encoding.
    #[test]
    fn revocation_event_canonical_shape() {
        use crate::canonical;
        let payload = RevocationPayload {
            revoked_pubkey: "4892f93877e637b5f59af31d9ec6704814842fb278cacb0eb94704baef99455e"
                .to_string(),
            revoked_at: "2026-05-01T17:00:00Z".to_string(),
            replacement_pubkey: "8891a2ab35ca2ed2182ed4e46b6567ce8dacc9985eb496d895578201272a1cd9"
                .to_string(),
            reason: "key file leaked from CI cache".to_string(),
        };
        let event = new_revocation_event(
            "reviewer:will-blair",
            "human",
            payload,
            "rotating compromised key",
            NULL_HASH,
            NULL_HASH,
        );
        assert_eq!(event.kind, EVENT_KIND_KEY_REVOKE);
        assert_eq!(event.target.r#type, "actor");
        assert!(event.id.starts_with("vev_"));
        // Populated fields must appear in the canonical bytes.
        let bytes = canonical::to_canonical_bytes(&event).unwrap();
        let s = std::str::from_utf8(&bytes).unwrap();
        assert!(
            s.contains("\"revoked_pubkey\""),
            "canonical bytes missing revoked_pubkey: {s}"
        );
        assert!(
            s.contains("\"revoked_at\""),
            "canonical bytes missing revoked_at: {s}"
        );
        assert!(
            s.contains("\"replacement_pubkey\""),
            "canonical bytes missing replacement_pubkey: {s}"
        );
        // Empty optional fields must be skipped, not serialized as "".
        let payload_minimal = RevocationPayload {
            revoked_pubkey: "a".repeat(64),
            revoked_at: "2026-05-01T17:00:00Z".to_string(),
            replacement_pubkey: String::new(),
            reason: String::new(),
        };
        let minimal_event = new_revocation_event(
            "reviewer:will-blair",
            "human",
            payload_minimal,
            "scheduled rotation",
            NULL_HASH,
            NULL_HASH,
        );
        let minimal_bytes = canonical::to_canonical_bytes(&minimal_event).unwrap();
        let minimal_s = std::str::from_utf8(&minimal_bytes).unwrap();
        assert!(
            !minimal_s.contains("\"replacement_pubkey\""),
            "empty replacement_pubkey leaked into canonical JSON: {minimal_s}"
        );
        assert!(
            !minimal_s.contains("\"reason\":\"\""),
            "empty payload reason leaked into canonical JSON: {minimal_s}"
        );
    }

    // key.revoke payload rules: 64-hex pubkeys, RFC 3339 timestamp, and a
    // replacement key that differs from the revoked one.
    #[test]
    fn revocation_payload_validation() {
        let good_pubkey = "4892f93877e637b5f59af31d9ec6704814842fb278cacb0eb94704baef99455e";
        let other_pubkey = "8891a2ab35ca2ed2182ed4e46b6567ce8dacc9985eb496d895578201272a1cd9";
        // Minimal valid payload: accepted.
        assert!(
            validate_event_payload(
                EVENT_KIND_KEY_REVOKE,
                &json!({
                    "revoked_pubkey": good_pubkey,
                    "revoked_at": "2026-05-01T17:00:00Z",
                }),
            )
            .is_ok()
        );
        // Full valid payload: accepted.
        assert!(
            validate_event_payload(
                EVENT_KIND_KEY_REVOKE,
                &json!({
                    "revoked_pubkey": good_pubkey,
                    "revoked_at": "2026-05-01T17:00:00Z",
                    "replacement_pubkey": other_pubkey,
                    "reason": "key file leaked",
                }),
            )
            .is_ok()
        );
        // Too-short pubkey: rejected.
        assert!(
            validate_event_payload(
                EVENT_KIND_KEY_REVOKE,
                &json!({
                    "revoked_pubkey": "abc123",
                    "revoked_at": "2026-05-01T17:00:00Z",
                }),
            )
            .is_err()
        );
        // 64 chars but not hex: rejected.
        assert!(
            validate_event_payload(
                EVENT_KIND_KEY_REVOKE,
                &json!({
                    "revoked_pubkey": "ZZ".repeat(32),
                    "revoked_at": "2026-05-01T17:00:00Z",
                }),
            )
            .is_err()
        );
        // Missing revoked_at: rejected.
        assert!(
            validate_event_payload(
                EVENT_KIND_KEY_REVOKE,
                &json!({
                    "revoked_pubkey": good_pubkey,
                }),
            )
            .is_err()
        );
        // Malformed replacement pubkey: rejected.
        assert!(
            validate_event_payload(
                EVENT_KIND_KEY_REVOKE,
                &json!({
                    "revoked_pubkey": good_pubkey,
                    "revoked_at": "2026-05-01T17:00:00Z",
                    "replacement_pubkey": "deadbeef",
                }),
            )
            .is_err()
        );
        // Replacement equal to the revoked key: rejected.
        assert!(
            validate_event_payload(
                EVENT_KIND_KEY_REVOKE,
                &json!({
                    "revoked_pubkey": good_pubkey,
                    "revoked_at": "2026-05-01T17:00:00Z",
                    "replacement_pubkey": good_pubkey,
                }),
            )
            .is_err()
        );
        // Non-RFC-3339 timestamps: all rejected.
        for bad in [
            "yesterday",
            "2026-13-01T00:00:00Z", "2026-05-01", "x",
        ] {
            assert!(
                validate_event_payload(
                    EVENT_KIND_KEY_REVOKE,
                    &json!({
                        "revoked_pubkey": good_pubkey,
                        "revoked_at": bad,
                    }),
                )
                .is_err(),
                "expected revoked_at {bad:?} to fail validation"
            );
        }
    }

    // attestation.recorded payload rules: vev_ target, non-empty attester and
    // scope_note, optional vpf_-prefixed proof id.
    #[test]
    fn attestation_recorded_validator() {
        let good = json!({
            "target_event_id": "vev_abc",
            "attester_id": "reviewer:will-blair",
            "scope_note": "Independent re-verification of the Stupp protocol finding."
        });
        assert!(validate_event_payload(EVENT_KIND_ATTESTATION_RECORDED, &good).is_ok());
        // Optional signature and proof_id, when well-formed: accepted.
        let with_proof = json!({
            "target_event_id": "vev_abc",
            "attester_id": "reviewer:will-blair",
            "scope_note": "Lean-formalized.",
            "signature": "ed25519:cafebabe",
            "proof_id": "vpf_demo"
        });
        assert!(validate_event_payload(EVENT_KIND_ATTESTATION_RECORDED, &with_proof).is_ok());
        // Target without vev_ prefix: rejected.
        let bad_target = json!({
            "target_event_id": "something_else",
            "attester_id": "reviewer:x",
            "scope_note": "x"
        });
        assert!(validate_event_payload(EVENT_KIND_ATTESTATION_RECORDED, &bad_target).is_err());
        // Empty attester_id: rejected.
        let no_attester = json!({
            "target_event_id": "vev_abc",
            "attester_id": "",
            "scope_note": "x"
        });
        assert!(validate_event_payload(EVENT_KIND_ATTESTATION_RECORDED, &no_attester).is_err());
        // proof_id without vpf_ prefix: rejected.
        let bad_proof = json!({
            "target_event_id": "vev_abc",
            "attester_id": "reviewer:x",
            "scope_note": "x",
            "proof_id": "not_a_vpf"
        });
        assert!(validate_event_payload(EVENT_KIND_ATTESTATION_RECORDED, &bad_proof).is_err());
    }

    // finding.entity_added payload rules: known entity_type, non-empty
    // entity_name and reason.
    #[test]
    fn finding_entity_added_validator() {
        let good = json!({
            "proposal_id": "vpr_demo",
            "entity_name": "claudin-5",
            "entity_type": "protein",
            "reason": "Cardinal BBB tight-junction protein; cited in finding source paper."
        });
        assert!(validate_event_payload(EVENT_KIND_FINDING_ENTITY_ADDED, &good).is_ok());
        // Missing reason: rejected.
        let no_reason = json!({
            "proposal_id": "vpr_demo",
            "entity_name": "claudin-5",
            "entity_type": "protein"
        });
        assert!(validate_event_payload(EVENT_KIND_FINDING_ENTITY_ADDED, &no_reason).is_err());
        // Entity type outside the allowed vocabulary: rejected.
        let bad_type = json!({
            "proposal_id": "vpr_demo",
            "entity_name": "claudin-5",
            "entity_type": "fancy_new_thing",
            "reason": "x"
        });
        assert!(validate_event_payload(EVENT_KIND_FINDING_ENTITY_ADDED, &bad_type).is_err());
        // Empty entity_name: rejected.
        let empty_name = json!({
            "proposal_id": "vpr_demo",
            "entity_name": "",
            "entity_type": "protein",
            "reason": "x"
        });
        assert!(validate_event_payload(EVENT_KIND_FINDING_ENTITY_ADDED, &empty_name).is_err());
    }

    // The state-aware bridge check accepts only bridge_ids present in the
    // known-id list.
    #[test]
    fn bridge_reviewed_state_aware_rejects_unknown_id() {
        let known: Vec<String> = vec!["vbr_aaaaaaaaaaaaaaaa".to_string()];
        // Known id: accepted.
        assert!(
            validate_bridge_reviewed_against_state(
                &json!({
                    "bridge_id": "vbr_aaaaaaaaaaaaaaaa",
                    "status": "confirmed",
                }),
                &known,
            )
            .is_ok()
        );
        // Unknown id: rejected with an explanatory message.
        let err = validate_bridge_reviewed_against_state(
            &json!({
                "bridge_id": "vbr_bbbbbbbbbbbbbbbb",
                "status": "confirmed",
            }),
            &known,
        )
        .expect_err("expected unknown bridge_id to be rejected");
        assert!(
            err.contains("not present on this frontier"),
            "error should explain the gap: {err}"
        );
        // Missing bridge_id field: rejected.
        assert!(
            validate_bridge_reviewed_against_state(
                &json!({
                    "status": "confirmed",
                }),
                &known,
            )
            .is_err()
        );
        // Empty known list rejects everything.
        assert!(
            validate_bridge_reviewed_against_state(
                &json!({
                    "bridge_id": "vbr_aaaaaaaaaaaaaaaa",
                    "status": "confirmed",
                }),
                &[],
            )
            .is_err()
        );
    }
}