use std::collections::{BTreeMap, BTreeSet};
use std::path::{Path, PathBuf};
use chrono::Utc;
use serde::{Deserialize, Serialize};
use serde_json::{Value, json};
use sha2::{Digest, Sha256};
use crate::bundle::{Annotation, Artifact, ConfidenceMethod, FindingBundle};
use crate::canonical;
use crate::events::{self, NULL_HASH, StateActor, StateEvent, StateTarget};
use crate::project::{self, Project};
use crate::propagate::{self, PropagationAction};
use crate::repo;
/// Schema identifier stamped on every proposal created by this module.
pub const PROPOSAL_SCHEMA: &str = "vela.proposal.v0.1";
/// A reviewable request to mutate project state (add/review/retract a
/// finding, assert an artifact, append a trajectory step, ...).
///
/// The `id` is content-addressed over the immutable fields (see
/// [`proposal_id`]); review metadata (`status`, `reviewed_by`, timestamps,
/// `applied_event_id`) is excluded from the hash so the id stays stable
/// across the review lifecycle. `status` is one of `pending_review`,
/// `accepted`, `rejected`, `applied` (enforced by validation, not types).
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct StateProposal {
#[serde(default = "default_schema")]
pub schema: String,
// Content-addressed "vpr_<16 hex chars>" identifier; see `proposal_id`.
pub id: String,
// Operation kind, e.g. "finding.add", "artifact.assert".
pub kind: String,
pub target: StateTarget,
pub actor: StateActor,
// RFC 3339 creation timestamp (set by `new_proposal`).
pub created_at: String,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub drafted_at: Option<String>,
pub reason: String,
#[serde(default)]
pub payload: Value,
#[serde(default)]
pub source_refs: Vec<String>,
// Review lifecycle state; see struct-level docs for the valid values.
pub status: String,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub reviewed_by: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub reviewed_at: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub decision_reason: Option<String>,
// Event id recorded once the proposal has been applied to the frontier.
#[serde(default, skip_serializing_if = "Option::is_none")]
pub applied_event_id: Option<String>,
#[serde(default)]
pub caveats: Vec<String>,
// Optional trace of the automated agent run that drafted this proposal.
#[serde(default, skip_serializing_if = "Option::is_none")]
pub agent_run: Option<AgentRun>,
}
/// Provenance trace for a proposal drafted by an automated agent: which
/// agent/model ran, when, with what context, tool calls, and permissions.
/// Empty or absent fields are omitted from the serialized form.
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq)]
pub struct AgentRun {
pub agent: String,
#[serde(default, skip_serializing_if = "String::is_empty")]
pub model: String,
#[serde(default, skip_serializing_if = "String::is_empty")]
pub run_id: String,
#[serde(default, skip_serializing_if = "String::is_empty")]
pub started_at: String,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub finished_at: Option<String>,
// Free-form key/value context captured at run time.
#[serde(default, skip_serializing_if = "BTreeMap::is_empty")]
pub context: BTreeMap<String, String>,
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub tool_calls: Vec<ToolCallTrace>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub permissions: Option<PermissionState>,
}
/// One tool invocation made during an agent run. Inputs and outputs are
/// recorded only as SHA-256 digests, never as raw payloads.
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq)]
pub struct ToolCallTrace {
pub tool: String,
// SHA-256 hex digest of the tool input (empty when unknown).
#[serde(default, skip_serializing_if = "String::is_empty")]
pub input_sha256: String,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub output_sha256: Option<String>,
// Timestamp of the call (empty when unknown).
#[serde(default, skip_serializing_if = "String::is_empty")]
pub at: String,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub duration_ms: Option<u32>,
#[serde(default, skip_serializing_if = "String::is_empty")]
pub status: String,
// Populated only on failed calls.
#[serde(default, skip_serializing_if = "String::is_empty")]
pub error_message: String,
}
/// Data/tool access granted to the agent for a run, plus an optional
/// free-form note. Empty lists/strings are omitted when serialized.
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq)]
pub struct PermissionState {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub data_access: Vec<String>,
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub tool_access: Vec<String>,
#[serde(default, skip_serializing_if = "String::is_empty")]
pub note: String,
}
/// Aggregate statistics over a project's proposals, as produced by
/// [`summary`]: totals per status and per kind, plus any duplicate
/// proposal ids and target ids that could not be resolved.
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq)]
pub struct ProposalSummary {
pub total: usize,
pub pending_review: usize,
pub accepted: usize,
pub rejected: usize,
pub applied: usize,
// Proposal count keyed by `kind` (sorted map for stable output).
#[serde(default)]
pub by_kind: BTreeMap<String, usize>,
// Sorted, deduplicated ids that appeared more than once.
#[serde(default)]
pub duplicate_ids: Vec<String>,
// Sorted, deduplicated target ids of finding/artifact proposals whose
// target does not exist (creation kinds are exempt).
#[serde(default)]
pub invalid_targets: Vec<String>,
}
/// Proof-packet bookkeeping for a project: the most recently exported
/// packet, the timestamp of the newest event at export time, and the
/// reason (if any) the packet is considered stale.
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq)]
pub struct ProofState {
#[serde(default)]
pub latest_packet: ProofPacketState,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub last_event_at_export: Option<String>,
// Set by `mark_proof_stale`; cleared on a fresh export.
#[serde(default, skip_serializing_if = "Option::is_none")]
pub stale_reason: Option<String>,
}
/// Hashes and status of the most recently exported proof packet.
/// `status` is "never_exported" (the `Default`), "current" (set by
/// `record_proof_export`), or "stale" (set by `mark_proof_stale`).
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct ProofPacketState {
pub generated_at: Option<String>,
pub snapshot_hash: Option<String>,
pub event_log_hash: Option<String>,
pub packet_manifest_hash: Option<String>,
pub status: String,
}
impl Default for ProofPacketState {
fn default() -> Self {
Self {
generated_at: None,
snapshot_hash: None,
event_log_hash: None,
packet_manifest_hash: None,
status: "never_exported".to_string(),
}
}
}
/// Outcome of [`create_or_apply`]: the proposal's id, the target id it
/// refers to, the resulting status ("pending_review" or "applied") and,
/// when applied, the id of the generated event.
#[derive(Debug, Clone)]
pub struct CreateProposalResult {
pub proposal_id: String,
// NOTE(review): populated from `proposal.target.id` regardless of target
// type — confirm callers only use this for finding-targeted proposals.
pub finding_id: String,
pub status: String,
pub applied_event_id: Option<String>,
}
/// Counters produced by [`import_from_path`], plus the path written to.
#[derive(Debug, Clone, Default)]
pub struct ImportProposalReport {
// Proposals newly added to the project (includes any later applied).
pub imported: usize,
// Imported proposals that were (re-)applied locally.
pub applied: usize,
// Imported proposals that arrived already rejected.
pub rejected: usize,
// Proposals skipped because their id already existed in the project.
pub duplicates: usize,
// Display form of the project path the merged state was saved to.
pub wrote_to: String,
}
/// Result of [`validate_source`]: `ok` iff every checked proposal was
/// valid. `errors` carries one human-readable message per invalid entry.
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq)]
pub struct ProposalValidationReport {
pub ok: bool,
pub checked: usize,
pub valid: usize,
pub invalid: usize,
#[serde(default)]
pub errors: Vec<String>,
// Ids of proposals that passed the duplicate check (in input order).
#[serde(default)]
pub proposal_ids: Vec<String>,
}
/// Dry-run report of what accepting a proposal would change: affected
/// finding/artifact ids, the events that would be appended, and
/// before/after/delta counts. For already-applied proposals all deltas
/// are zero and `applied_event_id` echoes the recorded event.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct ProposalPreview {
pub proposal_id: String,
pub kind: String,
pub target: StateTarget,
pub reviewer: String,
#[serde(default)]
pub changed_findings: Vec<String>,
#[serde(default)]
pub changed_artifacts: Vec<String>,
#[serde(default)]
pub new_event_ids: Vec<String>,
#[serde(default)]
pub event_kinds: Vec<String>,
pub findings_before: usize,
pub findings_after: usize,
pub findings_delta: isize,
pub artifacts_before: usize,
pub artifacts_after: usize,
pub artifacts_delta: isize,
pub events_before: usize,
pub events_after: usize,
pub events_delta: isize,
// True when applying would invalidate the exported proof packet.
pub proof_would_be_stale: bool,
pub applied_event_id: String,
}
/// Input to [`record_proof_export`]: metadata of a freshly generated
/// proof packet (all fields required, unlike `ProofPacketState`).
#[derive(Debug, Clone)]
pub struct ProofPacketRecord {
pub generated_at: String,
pub snapshot_hash: String,
pub event_log_hash: String,
pub packet_manifest_hash: String,
}
/// Serde default for `StateProposal::schema`.
fn default_schema() -> String {
    String::from(PROPOSAL_SCHEMA)
}
/// Construct a brand-new proposal in `pending_review` status, timestamped
/// now (RFC 3339), with its content-addressed id filled in via
/// [`proposal_id`]. Review metadata and agent trace start empty.
#[allow(clippy::too_many_arguments)]
pub fn new_proposal(
    kind: impl Into<String>,
    target: StateTarget,
    actor_id: impl Into<String>,
    actor_type: impl Into<String>,
    reason: impl Into<String>,
    payload: Value,
    source_refs: Vec<String>,
    caveats: Vec<String>,
) -> StateProposal {
    let actor = StateActor {
        id: actor_id.into(),
        r#type: actor_type.into(),
    };
    let mut draft = StateProposal {
        schema: PROPOSAL_SCHEMA.to_string(),
        id: String::new(),
        kind: kind.into(),
        target,
        actor,
        created_at: Utc::now().to_rfc3339(),
        drafted_at: None,
        reason: reason.into(),
        payload,
        source_refs,
        status: "pending_review".to_string(),
        reviewed_by: None,
        reviewed_at: None,
        decision_reason: None,
        applied_event_id: None,
        caveats,
        agent_run: None,
    };
    // The id is derived from the fields above, so it is filled in last.
    draft.id = proposal_id(&draft);
    draft
}
/// Content-address a proposal: `"vpr_"` + the first 16 hex chars of the
/// SHA-256 of the canonical encoding of its immutable fields.
///
/// Review metadata (status, reviewed_by, timestamps, applied_event_id) is
/// intentionally excluded so the id stays stable through review.
/// NOTE(review): hash stability relies on `canonical::to_canonical_bytes`
/// producing a deterministic key order — confirm it sorts object keys.
pub fn proposal_id(proposal: &StateProposal) -> String {
let preimage = json!({
"schema": proposal.schema,
"kind": proposal.kind,
"target": proposal.target,
"actor": proposal.actor,
"reason": proposal.reason,
"payload": proposal.payload,
"source_refs": proposal.source_refs,
"caveats": proposal.caveats,
});
// A serialization failure degrades to hashing an empty preimage —
// presumably these types always serialize; TODO confirm this is intended.
let bytes = canonical::to_canonical_bytes(&preimage).unwrap_or_default();
format!("vpr_{}", &hex::encode(Sha256::digest(bytes))[..16])
}
/// True when `value` looks like a throwaway/placeholder reviewer identity:
/// empty, a generic word ("reviewer", "user", "unknown"), or any
/// "local-…" id. Matching is whitespace-trimmed and case-insensitive.
pub fn is_placeholder_reviewer(value: &str) -> bool {
    let normalized = value.trim().to_ascii_lowercase();
    // "local-reviewer" / "local-user" from the original list are already
    // covered by the "local-" prefix check, so they are not re-listed.
    normalized.is_empty()
        || matches!(normalized.as_str(), "reviewer" | "user" | "unknown")
        || normalized.starts_with("local-")
}
/// Reject placeholder reviewer identities with an explanatory error;
/// accepts anything [`is_placeholder_reviewer`] does not flag.
pub fn validate_reviewer_identity(value: &str) -> Result<(), String> {
    if !is_placeholder_reviewer(value) {
        return Ok(());
    }
    Err(format!(
        "Reviewer identity '{}' is missing or placeholder. Use a stable named reviewer id.",
        value
    ))
}
/// Aggregate proposal statistics for a project frontier.
///
/// Counts proposals per status and per kind, records duplicate proposal
/// ids, and flags finding/artifact targets that do not resolve to an
/// existing entry (creation kinds "finding.add" / "artifact.assert" are
/// exempt because their targets do not exist yet). Targets of any other
/// type are not checked.
pub fn summary(frontier: &Project) -> ProposalSummary {
    let known_findings = frontier
        .findings
        .iter()
        .map(|finding| finding.id.as_str())
        .collect::<BTreeSet<_>>();
    let known_artifacts = frontier
        .artifacts
        .iter()
        .map(|artifact| artifact.id.as_str())
        .collect::<BTreeSet<_>>();
    let mut report = ProposalSummary::default();
    let mut ids_seen = BTreeSet::new();
    for prop in &frontier.proposals {
        report.total += 1;
        *report.by_kind.entry(prop.kind.clone()).or_default() += 1;
        match prop.status.as_str() {
            "pending_review" => report.pending_review += 1,
            "accepted" => report.accepted += 1,
            "rejected" => report.rejected += 1,
            "applied" => report.applied += 1,
            _ => {}
        }
        if !ids_seen.insert(prop.id.clone()) {
            report.duplicate_ids.push(prop.id.clone());
        }
        let resolvable = match prop.target.r#type.as_str() {
            "finding" => {
                prop.kind == "finding.add" || known_findings.contains(prop.target.id.as_str())
            }
            "artifact" => {
                prop.kind == "artifact.assert" || known_artifacts.contains(prop.target.id.as_str())
            }
            _ => true,
        };
        if !resolvable {
            report.invalid_targets.push(prop.target.id.clone());
        }
    }
    // Stable, deduplicated output for both problem lists.
    for list in [&mut report.duplicate_ids, &mut report.invalid_targets] {
        list.sort();
        list.dedup();
    }
    report
}
/// All proposals in `frontier` whose target is the finding `finding_id`.
pub fn proposals_for_finding<'a>(
    frontier: &'a Project,
    finding_id: &str,
) -> Vec<&'a StateProposal> {
    let mut matched = Vec::new();
    for proposal in &frontier.proposals {
        if proposal.target.r#type == "finding" && proposal.target.id == finding_id {
            matched.push(proposal);
        }
    }
    matched
}
/// Add `proposal` to the project at `path` and optionally apply it
/// immediately.
///
/// If a proposal with the same id already exists, the incoming copy is
/// discarded and the existing one is used. With `apply == true` the
/// proposal is accepted on the spot using its own actor id as the
/// reviewer (self-review) — unless it was already applied, in which case
/// the previously recorded event id is reused. The project is
/// re-persisted either way.
///
/// # Errors
/// Propagates load/save failures, validation failures for new proposals,
/// and application failures from `accept_proposal_in_frontier`.
pub fn create_or_apply(
path: &Path,
proposal: StateProposal,
apply: bool,
) -> Result<CreateProposalResult, String> {
let mut frontier = repo::load_from_path(path)?;
let finding_id = proposal.target.id.clone();
let proposal_id = proposal.id.clone();
let existing_idx = frontier
.proposals
.iter()
.position(|existing| existing.id == proposal_id);
if existing_idx.is_none() {
validate_new_proposal(&frontier, &proposal)?;
frontier.proposals.push(proposal);
}
let applied_event_id = if apply {
// Re-applying an already-applied proposal is a no-op: reuse its event.
if let Some(idx) = existing_idx
&& let Some(existing_event) = frontier.proposals[idx].applied_event_id.clone()
{
Some(existing_event)
} else {
// Self-review path: the proposal's own actor id acts as reviewer.
let reviewer = frontier
.proposals
.iter()
.find(|proposal| proposal.id == proposal_id)
.map(|proposal| proposal.actor.id.clone())
.ok_or_else(|| format!("Proposal not found after insertion: {proposal_id}"))?;
Some(accept_proposal_in_frontier(
&mut frontier,
&proposal_id,
&reviewer,
"Applied locally from proposal creation",
)?)
}
} else {
// Not applying: surface any event id a pre-existing copy already had.
existing_idx.and_then(|idx| frontier.proposals[idx].applied_event_id.clone())
};
if applied_event_id.is_some() {
// Application changed derived state; rebuild the materialized view.
crate::sources::materialize_project(&mut frontier);
} else {
project::recompute_stats(&mut frontier);
}
repo::save_to_path(path, &frontier)?;
Ok(CreateProposalResult {
proposal_id,
finding_id,
status: applied_event_id
.as_ref()
.map_or_else(|| "pending_review".to_string(), |_| "applied".to_string()),
applied_event_id,
})
}
/// Clone the proposals in `frontier`, optionally filtered to a single
/// `status`, ordered by creation timestamp then id (both lexicographic).
pub fn list(frontier: &Project, status: Option<&str>) -> Vec<StateProposal> {
    let mut selected: Vec<StateProposal> = Vec::new();
    for proposal in &frontier.proposals {
        let keep = match status {
            Some(wanted) => proposal.status == wanted,
            None => true,
        };
        if keep {
            selected.push(proposal.clone());
        }
    }
    selected.sort_by(|lhs, rhs| {
        (lhs.created_at.as_str(), lhs.id.as_str()).cmp(&(rhs.created_at.as_str(), rhs.id.as_str()))
    });
    selected
}
/// Look up a proposal by id, or return a "not found" error message.
pub fn show<'a>(frontier: &'a Project, proposal_id: &str) -> Result<&'a StateProposal, String> {
    for proposal in &frontier.proposals {
        if proposal.id == proposal_id {
            return Ok(proposal);
        }
    }
    Err(format!("Proposal not found: {proposal_id}"))
}
/// Load the project at `path` and compute a dry-run preview of applying
/// `proposal_id` as `reviewer`. Nothing is persisted.
///
/// The reviewer identity is validated up front so a placeholder reviewer
/// fails before the project load; `preview_in_frontier` re-validates
/// defensively.
pub fn preview_at_path(
path: &Path,
proposal_id: &str,
reviewer: &str,
) -> Result<ProposalPreview, String> {
validate_reviewer_identity(reviewer)?;
let frontier = repo::load_from_path(path)?;
preview_in_frontier(&frontier, proposal_id, reviewer)
}
/// Compute a dry-run [`ProposalPreview`] for `proposal_id` against an
/// in-memory copy of `frontier`, never persisting anything.
///
/// Already-applied proposals produce a zero-delta preview that echoes the
/// recorded event. `pending_review`/`accepted` proposals are applied to a
/// deep copy of the project and the resulting deltas reported. Any other
/// status is an error, as is a placeholder `reviewer` identity.
pub fn preview_in_frontier(
frontier: &Project,
proposal_id: &str,
reviewer: &str,
) -> Result<ProposalPreview, String> {
validate_reviewer_identity(reviewer)?;
let proposal = frontier
.proposals
.iter()
.find(|proposal| proposal.id == proposal_id)
.ok_or_else(|| format!("Proposal not found: {proposal_id}"))?
.clone();
if proposal.status == "applied" {
// Already applied: nothing would change, so report current counts
// with zero deltas and echo the recorded event.
let applied_event_id = proposal
.applied_event_id
.clone()
.ok_or_else(|| format!("Proposal {} is applied but has no event id", proposal.id))?;
return Ok(ProposalPreview {
proposal_id: proposal.id,
kind: proposal.kind,
changed_findings: changed_targets_for_type(frontier, &proposal.target, "finding"),
changed_artifacts: changed_targets_for_type(frontier, &proposal.target, "artifact"),
new_event_ids: vec![applied_event_id.clone()],
event_kinds: frontier
.events
.iter()
.find(|event| event.id == applied_event_id)
.map(|event| vec![event.kind.clone()])
.unwrap_or_default(),
// `target` is moved into the struct after the borrows above.
target: proposal.target,
reviewer: reviewer.to_string(),
findings_before: frontier.findings.len(),
findings_after: frontier.findings.len(),
findings_delta: 0,
artifacts_before: frontier.artifacts.len(),
artifacts_after: frontier.artifacts.len(),
artifacts_delta: 0,
events_before: frontier.events.len(),
events_after: frontier.events.len(),
events_delta: 0,
proof_would_be_stale: false,
applied_event_id,
});
}
if !matches!(proposal.status.as_str(), "pending_review" | "accepted") {
return Err(format!(
"Proposal {} cannot be previewed from status {}",
proposal.id, proposal.status
));
}
// Deep copy of the frontier via a serde round-trip — presumably used
// because Project lacks (or avoids) a Clone impl; TODO confirm.
let mut preview_state: Project = serde_json::from_value(
serde_json::to_value(frontier).map_err(|e| format!("serialize frontier preview: {e}"))?,
)
.map_err(|e| format!("clone frontier preview: {e}"))?;
let finding_ids_before = preview_state
.findings
.iter()
.map(|finding| finding.id.clone())
.collect::<BTreeSet<_>>();
let artifact_ids_before = preview_state
.artifacts
.iter()
.map(|artifact| artifact.id.clone())
.collect::<BTreeSet<_>>();
let findings_before = preview_state.findings.len();
let artifacts_before = preview_state.artifacts.len();
let events_before = preview_state.events.len();
let event_id = apply_proposal(
&mut preview_state,
&proposal,
reviewer,
"Preview proposal application",
)?;
let findings_after = preview_state.findings.len();
let artifacts_after = preview_state.artifacts.len();
let events_after = preview_state.events.len();
// NOTE(review): assumes apply_proposal only ever appends events, so
// everything past the old length is new — confirm it never reorders.
let new_events = preview_state
.events
.iter()
.skip(events_before)
.cloned()
.collect::<Vec<_>>();
Ok(ProposalPreview {
proposal_id: proposal.id,
kind: proposal.kind,
target: proposal.target,
reviewer: reviewer.to_string(),
changed_findings: changed_finding_ids(&preview_state, &finding_ids_before, &new_events),
changed_artifacts: changed_artifact_ids(&preview_state, &artifact_ids_before, &new_events),
new_event_ids: new_events.iter().map(|event| event.id.clone()).collect(),
event_kinds: new_events.iter().map(|event| event.kind.clone()).collect(),
findings_before,
findings_after,
findings_delta: findings_after as isize - findings_before as isize,
artifacts_before,
artifacts_after,
artifacts_delta: artifacts_after as isize - artifacts_before as isize,
events_before,
events_after,
events_delta: events_after as isize - events_before as isize,
// Any actual application would invalidate the exported proof packet.
proof_would_be_stale: true,
applied_event_id: event_id,
})
}
/// For an already-applied proposal: report the proposal's own target id as
/// "changed" when it is of `target_type` and still resolvable in the
/// frontier; otherwise report nothing.
fn changed_targets_for_type(
    frontier: &Project,
    target: &StateTarget,
    target_type: &str,
) -> Vec<String> {
    // Check the cheap type comparison first so we never scan the
    // findings/artifacts lists for a target of the wrong type (the
    // original computed `known` unconditionally).
    if target.r#type != target_type {
        return Vec::new();
    }
    let known = match target_type {
        "finding" => frontier
            .findings
            .iter()
            .any(|finding| finding.id == target.id),
        "artifact" => frontier
            .artifacts
            .iter()
            .any(|artifact| artifact.id == target.id),
        _ => false,
    };
    if known {
        vec![target.id.clone()]
    } else {
        Vec::new()
    }
}
/// Ids of findings that are new in `preview_state` plus every finding
/// targeted by one of the freshly generated events, deduplicated and
/// returned in sorted order.
fn changed_finding_ids(
    preview_state: &Project,
    finding_ids_before: &BTreeSet<String>,
    new_events: &[StateEvent],
) -> Vec<String> {
    let mut changed: BTreeSet<String> = BTreeSet::new();
    for finding in &preview_state.findings {
        if !finding_ids_before.contains(&finding.id) {
            changed.insert(finding.id.clone());
        }
    }
    changed.extend(
        new_events
            .iter()
            .filter(|event| event.target.r#type == "finding")
            .map(|event| event.target.id.clone()),
    );
    changed.into_iter().collect()
}
/// Ids of artifacts that are new in `preview_state` plus every artifact
/// targeted by one of the freshly generated events, deduplicated and
/// returned in sorted order.
fn changed_artifact_ids(
    preview_state: &Project,
    artifact_ids_before: &BTreeSet<String>,
    new_events: &[StateEvent],
) -> Vec<String> {
    let mut changed: BTreeSet<String> = BTreeSet::new();
    for artifact in &preview_state.artifacts {
        if !artifact_ids_before.contains(&artifact.id) {
            changed.insert(artifact.id.clone());
        }
    }
    changed.extend(
        new_events
            .iter()
            .filter(|event| event.target.r#type == "artifact")
            .map(|event| event.target.id.clone()),
    );
    changed.into_iter().collect()
}
/// Merge proposals from `source` (a JSON file or a directory of JSON
/// files) into the project at `path`.
///
/// Proposals whose id already exists in the project are counted as
/// duplicates and skipped. Newly imported proposals arriving with status
/// "accepted" or "applied" are re-applied locally through
/// `accept_proposal_in_frontier`, which requires `reviewed_by` to be set.
/// Stats are recomputed and the project saved before returning counters.
pub fn import_from_path(path: &Path, source: &Path) -> Result<ImportProposalReport, String> {
    let mut frontier = repo::load_from_path(path)?;
    let proposals = load_proposals(source)?;
    let mut report = ImportProposalReport {
        wrote_to: path.display().to_string(),
        ..ImportProposalReport::default()
    };
    for proposal in proposals {
        if frontier
            .proposals
            .iter()
            .any(|existing| existing.id == proposal.id)
        {
            report.duplicates += 1;
            continue;
        }
        validate_new_proposal(&frontier, &proposal)?;
        frontier.proposals.push(proposal.clone());
        report.imported += 1;
        match proposal.status.as_str() {
            // "accepted" and "applied" imports take the same path; the
            // original duplicated this arm verbatim except for wording,
            // which is preserved via `label` / `status` below.
            status @ ("accepted" | "applied") => {
                let label = if status == "accepted" {
                    "Accepted"
                } else {
                    "Applied"
                };
                let reviewer = proposal
                    .reviewed_by
                    .as_deref()
                    .ok_or_else(|| {
                        format!("{label} proposal {} missing reviewed_by", proposal.id)
                    })?
                    .to_string();
                let reason = proposal
                    .decision_reason
                    .clone()
                    .unwrap_or_else(|| format!("Imported {status} proposal"));
                let _ =
                    accept_proposal_in_frontier(&mut frontier, &proposal.id, &reviewer, &reason)?;
                report.applied += 1;
            }
            "rejected" => report.rejected += 1,
            _ => {}
        }
    }
    project::recompute_stats(&mut frontier);
    repo::save_to_path(path, &frontier)?;
    Ok(report)
}
/// Validate the proposals at `source` without touching any real project:
/// flags duplicate ids and structurally invalid proposals. `ok` in the
/// returned report is true iff nothing was invalid.
pub fn validate_source(source: &Path) -> Result<ProposalValidationReport, String> {
    let proposals = load_proposals(source)?;
    let mut report = ProposalValidationReport {
        checked: proposals.len(),
        ..ProposalValidationReport::default()
    };
    // Shape-check against an empty throwaway project so target-existence
    // rules are exercised uniformly for every proposal.
    let scratch = project::assemble("proposal-validation", Vec::new(), 0, 0, "validate");
    let mut seen_ids = BTreeSet::new();
    for proposal in proposals {
        let first_occurrence = seen_ids.insert(proposal.id.clone());
        if !first_occurrence {
            report.invalid += 1;
            report
                .errors
                .push(format!("Duplicate proposal id {}", proposal.id));
            continue;
        }
        report.proposal_ids.push(proposal.id.clone());
        if let Err(err) = validate_standalone_proposal(&scratch, &proposal) {
            report.invalid += 1;
            report.errors.push(format!("{}: {}", proposal.id, err));
        } else {
            report.valid += 1;
        }
    }
    report.ok = report.invalid == 0;
    Ok(report)
}
/// Serialize the (optionally status-filtered) proposals of the project at
/// `frontier_path` as pretty-printed JSON into `output`. Returns how many
/// proposals were exported.
pub fn export_to_path(
    frontier_path: &Path,
    output: &Path,
    status: Option<&str>,
) -> Result<usize, String> {
    let frontier = repo::load_from_path(frontier_path)?;
    let proposals = list(&frontier, status);
    let exported = proposals.len();
    let payload = serde_json::to_string_pretty(&proposals)
        .map_err(|e| format!("Failed to serialize proposals for export: {e}"))?;
    if let Err(e) = std::fs::write(output, payload) {
        return Err(format!(
            "Failed to write proposal export '{}': {e}",
            output.display()
        ));
    }
    Ok(exported)
}
/// Accept the proposal `proposal_id` in the project at `path`, recording
/// `reviewer` and `reason`, then recompute stats and persist. Returns the
/// id of the event produced by applying the proposal.
pub fn accept_at_path(
    path: &Path,
    proposal_id: &str,
    reviewer: &str,
    reason: &str,
) -> Result<String, String> {
    let mut frontier = repo::load_from_path(path)?;
    let applied_event = accept_proposal_in_frontier(&mut frontier, proposal_id, reviewer, reason)?;
    project::recompute_stats(&mut frontier);
    repo::save_to_path(path, &frontier)?;
    Ok(applied_event)
}
/// Reject the proposal `proposal_id` in the project at `path`, recording
/// `reviewer` and `reason`, then recompute stats and persist.
pub fn reject_at_path(
    path: &Path,
    proposal_id: &str,
    reviewer: &str,
    reason: &str,
) -> Result<(), String> {
    let mut frontier = repo::load_from_path(path)?;
    reject_proposal_in_frontier(&mut frontier, proposal_id, reviewer, reason)?;
    project::recompute_stats(&mut frontier);
    repo::save_to_path(path, &frontier)
}
/// Send the proposal `proposal_id` back for revision in the project at
/// `path`, recording `reviewer` and `reason`, then recompute stats and
/// persist.
pub fn request_revision_at_path(
    path: &Path,
    proposal_id: &str,
    reviewer: &str,
    reason: &str,
) -> Result<(), String> {
    let mut frontier = repo::load_from_path(path)?;
    request_revision_in_frontier(&mut frontier, proposal_id, reviewer, reason)?;
    project::recompute_stats(&mut frontier);
    repo::save_to_path(path, &frontier)
}
pub fn record_proof_export(frontier: &mut Project, record: ProofPacketRecord) {
frontier.proof_state.latest_packet = ProofPacketState {
generated_at: Some(record.generated_at),
snapshot_hash: Some(record.snapshot_hash),
event_log_hash: Some(record.event_log_hash),
packet_manifest_hash: Some(record.packet_manifest_hash),
status: "current".to_string(),
};
frontier.proof_state.last_event_at_export =
frontier.events.last().map(|event| event.timestamp.clone());
frontier.proof_state.stale_reason = None;
}
/// Downgrade the current proof packet to "stale" with `reason`. A project
/// that never exported a packet is left untouched (nothing to invalidate).
pub fn mark_proof_stale(frontier: &mut Project, reason: String) {
    if frontier.proof_state.latest_packet.status == "never_exported" {
        return;
    }
    frontier.proof_state.latest_packet.status = "stale".to_string();
    frontier.proof_state.stale_reason = Some(reason);
}
/// JSON view of a proof state; falls back to a minimal "never_exported"
/// object if serialization somehow fails.
pub fn proof_state_json(proof_state: &ProofState) -> Value {
    match serde_json::to_value(proof_state) {
        Ok(value) => value,
        Err(_) => json!({"status": "never_exported"}),
    }
}
/// Hex SHA-256 digest of the canonical encoding of the full proposal list
/// (hashes empty bytes if canonicalization fails).
pub fn proposal_state_hash(proposals: &[StateProposal]) -> String {
    let canonical_bytes = canonical::to_canonical_bytes(proposals).unwrap_or_default();
    let digest = Sha256::digest(canonical_bytes);
    hex::encode(digest)
}
fn load_proposals(source: &Path) -> Result<Vec<StateProposal>, String> {
if source.is_file() {
let data = std::fs::read_to_string(source)
.map_err(|e| format!("Failed to read proposal file '{}': {e}", source.display()))?;
if let Ok(proposals) = serde_json::from_str::<Vec<StateProposal>>(&data) {
return Ok(proposals);
}
let proposal = serde_json::from_str::<StateProposal>(&data)
.map_err(|e| format!("Failed to parse proposal JSON '{}': {e}", source.display()))?;
return Ok(vec![proposal]);
}
if source.is_dir() {
let mut entries = std::fs::read_dir(source)
.map_err(|e| format!("Failed to read proposal dir '{}': {e}", source.display()))?
.filter_map(|entry| entry.ok().map(|entry| entry.path()))
.filter(|path| path.extension().is_some_and(|ext| ext == "json"))
.collect::<Vec<_>>();
entries.sort();
let mut proposals = Vec::new();
for path in entries {
proposals.extend(load_proposals(&path)?);
}
return Ok(proposals);
}
Err(format!(
"Proposal source does not exist: {}",
source.display()
))
}
/// Gate a proposal before insertion into `frontier`: the schema must
/// match, the id must be unique, and both the structural shape and the
/// review/decision fields must be internally consistent.
fn validate_new_proposal(frontier: &Project, proposal: &StateProposal) -> Result<(), String> {
    if proposal.schema != PROPOSAL_SCHEMA {
        return Err(format!("Unsupported proposal schema '{}'", proposal.schema));
    }
    let id_taken = frontier
        .proposals
        .iter()
        .any(|existing| existing.id == proposal.id);
    if id_taken {
        return Err(format!("Duplicate proposal id {}", proposal.id));
    }
    validate_proposal_shape(frontier, proposal)?;
    validate_decision_state(proposal)
}
fn validate_proposal_shape(frontier: &Project, proposal: &StateProposal) -> Result<(), String> {
if !matches!(
proposal.target.r#type.as_str(),
"finding"
| "artifact"
| "negative_result"
| "trajectory"
| "evidence_atom"
| "frontier_observation"
) {
return Err(format!(
"Unsupported proposal target type '{}'; valid: finding, artifact, negative_result, trajectory, evidence_atom, frontier_observation",
proposal.target.r#type
));
}
if proposal.reason.trim().is_empty() {
return Err("Proposal reason must be non-empty".to_string());
}
if !matches!(
proposal.status.as_str(),
"pending_review" | "accepted" | "rejected" | "applied"
) {
return Err(format!("Unsupported proposal status '{}'", proposal.status));
}
match proposal.kind.as_str() {
"finding.add" => {
let finding_value = proposal
.payload
.get("finding")
.ok_or("finding.add proposal missing payload.finding")?
.clone();
let finding: FindingBundle = serde_json::from_value(finding_value)
.map_err(|e| format!("Invalid finding.add payload: {e}"))?;
if finding.id != proposal.target.id {
return Err(format!(
"finding.add target {} does not match payload finding {}",
proposal.target.id, finding.id
));
}
if frontier
.findings
.iter()
.any(|existing| existing.id == proposal.target.id)
{
return Err(format!(
"Refusing to add duplicate finding with existing finding ID {}",
proposal.target.id
));
}
}
"finding.review" => {
require_existing_finding(frontier, &proposal.target.id)?;
let status = proposal
.payload
.get("status")
.and_then(Value::as_str)
.ok_or("finding.review proposal missing payload.status")?;
if !matches!(
status,
"accepted" | "approved" | "contested" | "needs_revision" | "rejected"
) {
return Err(format!("Unsupported review proposal status '{status}'"));
}
}
"finding.caveat" => {
require_existing_finding(frontier, &proposal.target.id)?;
let text = proposal
.payload
.get("text")
.and_then(Value::as_str)
.ok_or("finding.caveat proposal missing payload.text")?;
if text.trim().is_empty() {
return Err("finding.caveat payload.text must be non-empty".to_string());
}
}
"finding.note" => {
require_existing_finding(frontier, &proposal.target.id)?;
let text = proposal
.payload
.get("text")
.and_then(Value::as_str)
.ok_or("finding.note proposal missing payload.text")?;
if text.trim().is_empty() {
return Err("finding.note payload.text must be non-empty".to_string());
}
}
"finding.confidence_revise" => {
require_existing_finding(frontier, &proposal.target.id)?;
let score = proposal
.payload
.get("confidence")
.and_then(Value::as_f64)
.ok_or("finding.confidence_revise proposal missing payload.confidence")?;
if !(0.0..=1.0).contains(&score) {
return Err(
"finding.confidence_revise confidence must be between 0.0 and 1.0".to_string(),
);
}
}
"finding.reject" => {
require_existing_finding(frontier, &proposal.target.id)?;
}
"finding.retract" => {
let idx = require_existing_finding(frontier, &proposal.target.id)?;
if frontier.findings[idx].flags.retracted {
return Err(format!(
"Finding {} is already retracted",
proposal.target.id
));
}
}
"finding.supersede" => {
let idx = require_existing_finding(frontier, &proposal.target.id)?;
if frontier.findings[idx].flags.superseded {
return Err(format!(
"Finding {} is already superseded",
proposal.target.id
));
}
let new_finding_value = proposal
.payload
.get("new_finding")
.ok_or("finding.supersede proposal missing payload.new_finding")?
.clone();
let new_finding: FindingBundle = serde_json::from_value(new_finding_value)
.map_err(|e| format!("Invalid finding.supersede payload.new_finding: {e}"))?;
if new_finding.id == proposal.target.id {
return Err(
"finding.supersede new_finding has same content address as the superseded target — change assertion text, type, or provenance to derive a distinct vf_…".to_string(),
);
}
if frontier
.findings
.iter()
.any(|existing| existing.id == new_finding.id)
{
return Err(format!(
"Refusing to add superseding finding with existing finding ID {}",
new_finding.id
));
}
}
"artifact.assert" => {
if proposal.target.r#type != "artifact" {
return Err(format!(
"artifact.assert proposal target.type must be 'artifact', got '{}'",
proposal.target.r#type
));
}
let artifact_value = proposal
.payload
.get("artifact")
.ok_or("artifact.assert proposal missing payload.artifact")?
.clone();
let artifact: Artifact = serde_json::from_value(artifact_value)
.map_err(|e| format!("Invalid artifact.assert payload: {e}"))?;
if artifact.id != proposal.target.id {
return Err(format!(
"artifact.assert target {} does not match payload id {}",
proposal.target.id, artifact.id
));
}
if frontier.artifacts.iter().any(|a| a.id == artifact.id) {
return Err(format!(
"Refusing to add duplicate artifact with existing id {}",
artifact.id
));
}
}
"negative_result.assert" => {
if proposal.target.r#type != "negative_result" {
return Err(format!(
"negative_result.assert proposal target.type must be 'negative_result', got '{}'",
proposal.target.r#type
));
}
let nr_value = proposal
.payload
.get("negative_result")
.ok_or("negative_result.assert proposal missing payload.negative_result")?
.clone();
let nr: crate::bundle::NegativeResult = serde_json::from_value(nr_value)
.map_err(|e| format!("Invalid negative_result.assert payload: {e}"))?;
if nr.id != proposal.target.id {
return Err(format!(
"negative_result.assert target {} does not match payload id {}",
proposal.target.id, nr.id
));
}
if frontier.negative_results.iter().any(|n| n.id == nr.id) {
return Err(format!(
"Refusing to add duplicate negative_result with existing id {}",
nr.id
));
}
}
"trajectory.create" => {
if proposal.target.r#type != "trajectory" {
return Err(format!(
"trajectory.create proposal target.type must be 'trajectory', got '{}'",
proposal.target.r#type
));
}
let traj_value = proposal
.payload
.get("trajectory")
.ok_or("trajectory.create proposal missing payload.trajectory")?
.clone();
let traj: crate::bundle::Trajectory = serde_json::from_value(traj_value)
.map_err(|e| format!("Invalid trajectory.create payload: {e}"))?;
if traj.id != proposal.target.id {
return Err(format!(
"trajectory.create target {} does not match payload id {}",
proposal.target.id, traj.id
));
}
if frontier.trajectories.iter().any(|t| t.id == traj.id) {
return Err(format!(
"Refusing to add duplicate trajectory with existing id {}",
traj.id
));
}
}
"finding.span_repair" => {
if proposal.target.r#type != "finding" {
return Err(format!(
"finding.span_repair target.type must be 'finding', got '{}'",
proposal.target.r#type
));
}
require_existing_finding(frontier, &proposal.target.id)?;
let section = proposal
.payload
.get("section")
.and_then(Value::as_str)
.ok_or("finding.span_repair proposal missing payload.section")?;
if section.trim().is_empty() {
return Err("finding.span_repair payload.section must be non-empty".to_string());
}
let text = proposal
.payload
.get("text")
.and_then(Value::as_str)
.ok_or("finding.span_repair proposal missing payload.text")?;
if text.trim().is_empty() {
return Err("finding.span_repair payload.text must be non-empty".to_string());
}
}
"finding.entity_resolve" => {
if proposal.target.r#type != "finding" {
return Err(format!(
"finding.entity_resolve target.type must be 'finding', got '{}'",
proposal.target.r#type
));
}
let f_idx = require_existing_finding(frontier, &proposal.target.id)?;
let entity_name = proposal
.payload
.get("entity_name")
.and_then(Value::as_str)
.ok_or("finding.entity_resolve proposal missing payload.entity_name")?;
if entity_name.trim().is_empty() {
return Err(
"finding.entity_resolve payload.entity_name must be non-empty".to_string(),
);
}
let _e_idx = frontier.findings[f_idx]
.assertion
.entities
.iter()
.position(|e| e.name == entity_name)
.ok_or_else(|| {
format!(
"finding.entity_resolve entity_name '{entity_name}' not in finding {}",
proposal.target.id
)
})?;
let source = proposal
.payload
.get("source")
.and_then(Value::as_str)
.ok_or("finding.entity_resolve proposal missing payload.source")?;
if source.trim().is_empty() {
return Err("finding.entity_resolve payload.source must be non-empty".to_string());
}
let id = proposal
.payload
.get("id")
.and_then(Value::as_str)
.ok_or("finding.entity_resolve proposal missing payload.id")?;
if id.trim().is_empty() {
return Err("finding.entity_resolve payload.id must be non-empty".to_string());
}
let confidence = proposal
.payload
.get("confidence")
.and_then(Value::as_f64)
.ok_or("finding.entity_resolve proposal missing payload.confidence")?;
if !(0.0..=1.0).contains(&confidence) {
return Err(format!(
"finding.entity_resolve confidence {confidence} out of [0.0, 1.0]"
));
}
}
"finding.entity_add" => {
if proposal.target.r#type != "finding" {
return Err(format!(
"finding.entity_add target.type must be 'finding', got '{}'",
proposal.target.r#type
));
}
let _f_idx = require_existing_finding(frontier, &proposal.target.id)?;
let entity_name = proposal
.payload
.get("entity_name")
.and_then(Value::as_str)
.ok_or("finding.entity_add proposal missing payload.entity_name")?;
if entity_name.trim().is_empty() {
return Err("finding.entity_add payload.entity_name must be non-empty".to_string());
}
let entity_type = proposal
.payload
.get("entity_type")
.and_then(Value::as_str)
.ok_or("finding.entity_add proposal missing payload.entity_type")?;
const VALID_ENTITY_TYPES: &[&str] = &[
"gene",
"protein",
"compound",
"disease",
"cell_type",
"organism",
"pathway",
"assay",
"anatomical_structure",
"particle",
"instrument",
"dataset",
"quantity",
"other",
];
if !VALID_ENTITY_TYPES.contains(&entity_type) {
return Err(format!(
"finding.entity_add payload.entity_type '{entity_type}' not in {VALID_ENTITY_TYPES:?}"
));
}
let reason_text = proposal
.payload
.get("reason")
.and_then(Value::as_str)
.ok_or("finding.entity_add proposal missing payload.reason")?;
if reason_text.trim().is_empty() {
return Err("finding.entity_add payload.reason must be non-empty".to_string());
}
}
"evidence_atom.locator_repair" => {
if proposal.target.r#type != "evidence_atom" {
return Err(format!(
"evidence_atom.locator_repair target.type must be 'evidence_atom', got '{}'",
proposal.target.r#type
));
}
let atom_id = proposal.target.id.as_str();
let atom = frontier
.evidence_atoms
.iter()
.find(|atom| atom.id == atom_id)
.ok_or_else(|| {
format!("evidence_atom.locator_repair targets unknown atom {atom_id}")
})?;
let locator = proposal
.payload
.get("locator")
.and_then(Value::as_str)
.ok_or("evidence_atom.locator_repair proposal missing payload.locator")?;
if locator.trim().is_empty() {
return Err(
"evidence_atom.locator_repair payload.locator must be non-empty".to_string(),
);
}
let source_id = proposal
.payload
.get("source_id")
.and_then(Value::as_str)
.ok_or("evidence_atom.locator_repair proposal missing payload.source_id")?;
if source_id.trim().is_empty() {
return Err(
"evidence_atom.locator_repair payload.source_id must be non-empty".to_string(),
);
}
if atom.source_id != source_id {
return Err(format!(
"evidence_atom.locator_repair payload.source_id '{source_id}' does not match atom.source_id '{}'",
atom.source_id
));
}
if let Some(existing) = &atom.locator
&& existing == locator
{
return Err(format!(
"evidence_atom {atom_id} already carries locator '{existing}'"
));
}
if let Some(existing) = &atom.locator
&& existing != locator
{
return Err(format!(
"evidence_atom {atom_id} already carries locator '{existing}'; refusing to overwrite with '{locator}'"
));
}
}
"trajectory.step_append" => {
if proposal.target.r#type != "trajectory" {
return Err(format!(
"trajectory.step_append proposal target.type must be 'trajectory', got '{}'",
proposal.target.r#type
));
}
let parent_id = proposal.target.id.as_str();
let parent_idx = frontier
.trajectories
.iter()
.position(|t| t.id == parent_id)
.ok_or_else(|| {
format!("trajectory.step_append targets unknown trajectory {parent_id}")
})?;
let step_value = proposal
.payload
.get("step")
.ok_or("trajectory.step_append proposal missing payload.step")?
.clone();
let step: crate::bundle::TrajectoryStep = serde_json::from_value(step_value)
.map_err(|e| format!("Invalid trajectory.step_append payload.step: {e}"))?;
if frontier.trajectories[parent_idx]
.steps
.iter()
.any(|s| s.id == step.id)
{
return Err(format!(
"Refusing to add duplicate step with existing id {} on trajectory {}",
step.id, parent_id
));
}
}
"frontier.conflict_resolve" => {
if proposal.target.r#type != "frontier_observation" {
return Err(format!(
"frontier.conflict_resolve target.type must be 'frontier_observation', got '{}'",
proposal.target.r#type
));
}
let conflict_event_id = proposal
.payload
.get("conflict_event_id")
.and_then(Value::as_str)
.ok_or("frontier.conflict_resolve proposal missing payload.conflict_event_id")?;
if conflict_event_id.trim().is_empty() {
return Err(
"frontier.conflict_resolve payload.conflict_event_id must be non-empty"
.to_string(),
);
}
let conflict_event = frontier
.events
.iter()
.find(|e| e.id == conflict_event_id)
.ok_or_else(|| {
format!(
"frontier.conflict_resolve targets unknown event id '{conflict_event_id}'"
)
})?;
if conflict_event.kind != "frontier.conflict_detected" {
return Err(format!(
"frontier.conflict_resolve target event '{conflict_event_id}' has kind '{}', expected 'frontier.conflict_detected'",
conflict_event.kind
));
}
if frontier.events.iter().any(|e| {
e.kind == "frontier.conflict_resolved"
&& e.payload.get("conflict_event_id").and_then(Value::as_str)
== Some(conflict_event_id)
}) {
return Err(format!(
"Conflict event '{conflict_event_id}' already has a recorded resolution"
));
}
let note = proposal
.payload
.get("resolution_note")
.and_then(Value::as_str)
.ok_or("frontier.conflict_resolve proposal missing payload.resolution_note")?;
if note.trim().is_empty() {
return Err(
"frontier.conflict_resolve payload.resolution_note must be non-empty"
.to_string(),
);
}
if let Some(value) = proposal.payload.get("winning_proposal_id")
&& !value.is_null()
&& value.as_str().is_none()
{
return Err(
"frontier.conflict_resolve payload.winning_proposal_id must be a string when present"
.to_string(),
);
}
}
other => {
return Err(format!("Unsupported proposal kind '{other}'"));
}
}
Ok(())
}
/// Checks that a proposal's review metadata is consistent with its lifecycle
/// status:
///   - "pending_review" needs no review metadata;
///   - any reviewed status ("accepted", "applied", "rejected",
///     "needs_revision") requires a valid reviewer identity and a non-empty
///     decision reason;
///   - "applied" additionally requires the id of the event it produced.
fn validate_decision_state(proposal: &StateProposal) -> Result<(), String> {
    match proposal.status.as_str() {
        "pending_review" => Ok(()),
        // "needs_revision" is produced by request_revision_in_frontier with the
        // same reviewer/reviewed_at/decision_reason metadata as a rejection, so
        // it is validated alongside the other reviewed states instead of
        // falling through to the "unsupported status" error.
        "accepted" | "applied" | "rejected" | "needs_revision" => {
            let reviewer = proposal
                .reviewed_by
                .as_deref()
                .ok_or_else(|| format!("Proposal {} missing reviewed_by", proposal.id))?;
            validate_reviewer_identity(reviewer)?;
            if proposal
                .decision_reason
                .as_deref()
                .is_none_or(|reason| reason.trim().is_empty())
            {
                return Err(format!("Proposal {} missing decision_reason", proposal.id));
            }
            // Only applied proposals must point back at the event that applied
            // them.
            if proposal.status == "applied" && proposal.applied_event_id.is_none() {
                return Err(format!(
                    "Applied proposal {} missing applied_event_id",
                    proposal.id
                ));
            }
            Ok(())
        }
        other => Err(format!("Unsupported proposal status '{}'", other)),
    }
}
/// Structurally validates a proposal in isolation (no frontier lookups):
/// schema version, target type, per-kind payload shape, and the consistency
/// of its review/decision metadata. Unlike the in-frontier validation, this
/// cannot verify that the targeted objects actually exist.
fn validate_standalone_proposal(
    _frontier: &Project,
    proposal: &StateProposal,
) -> Result<(), String> {
    if proposal.schema != PROPOSAL_SCHEMA {
        return Err(format!("Unsupported proposal schema '{}'", proposal.schema));
    }
    if !matches!(
        proposal.target.r#type.as_str(),
        "finding" | "evidence_atom" | "frontier_observation"
    ) {
        return Err(
            "Only finding, evidence_atom, and frontier_observation proposals are supported in v0"
                .to_string(),
        );
    }
    if proposal.reason.trim().is_empty() {
        return Err("Proposal reason must be non-empty".to_string());
    }
    // Every finding.* kind must target a finding. Previously only span_repair,
    // entity_resolve, and entity_add enforced this, so e.g. a finding.caveat
    // aimed at an evidence_atom slipped through; this guard makes the check
    // uniform (its message format matches the per-kind checks it subsumes).
    if proposal.kind.starts_with("finding.") && proposal.target.r#type != "finding" {
        return Err(format!(
            "{} target.type must be 'finding', got '{}'",
            proposal.kind, proposal.target.r#type
        ));
    }
    match proposal.kind.as_str() {
        "finding.add" => {
            // The payload must embed the full finding, and the finding's id
            // must match the proposal target.
            let finding_value = proposal
                .payload
                .get("finding")
                .ok_or("finding.add proposal missing payload.finding")?
                .clone();
            let finding: FindingBundle = serde_json::from_value(finding_value)
                .map_err(|e| format!("Invalid finding.add payload: {e}"))?;
            if finding.id != proposal.target.id {
                return Err(format!(
                    "finding.add target {} does not match payload finding {}",
                    proposal.target.id, finding.id
                ));
            }
        }
        "finding.review" => {
            let status = proposal
                .payload
                .get("status")
                .and_then(Value::as_str)
                .ok_or("finding.review proposal missing payload.status")?;
            if !matches!(
                status,
                "accepted" | "approved" | "contested" | "needs_revision" | "rejected"
            ) {
                return Err(format!("Unsupported review proposal status '{status}'"));
            }
        }
        "finding.caveat" => {
            let text = proposal
                .payload
                .get("text")
                .and_then(Value::as_str)
                .ok_or("finding.caveat proposal missing payload.text")?;
            if text.trim().is_empty() {
                return Err("finding.caveat payload.text must be non-empty".to_string());
            }
        }
        "finding.note" => {
            let text = proposal
                .payload
                .get("text")
                .and_then(Value::as_str)
                .ok_or("finding.note proposal missing payload.text")?;
            if text.trim().is_empty() {
                return Err("finding.note payload.text must be non-empty".to_string());
            }
        }
        "finding.confidence_revise" => {
            let score = proposal
                .payload
                .get("confidence")
                .and_then(Value::as_f64)
                .ok_or("finding.confidence_revise proposal missing payload.confidence")?;
            if !(0.0..=1.0).contains(&score) {
                return Err(
                    "finding.confidence_revise confidence must be between 0.0 and 1.0".to_string(),
                );
            }
        }
        // These kinds carry no payload beyond the shared checks above.
        "finding.reject" | "finding.retract" => {}
        "finding.supersede" => {
            let new_finding_value = proposal
                .payload
                .get("new_finding")
                .ok_or("finding.supersede proposal missing payload.new_finding")?
                .clone();
            let new_finding: FindingBundle = serde_json::from_value(new_finding_value)
                .map_err(|e| format!("Invalid finding.supersede payload.new_finding: {e}"))?;
            if new_finding.id == proposal.target.id {
                return Err(
                    "finding.supersede new_finding has same content address as the superseded target"
                        .to_string(),
                );
            }
        }
        "finding.span_repair" => {
            // Target-type enforcement is handled by the finding.* guard above.
            let section = proposal
                .payload
                .get("section")
                .and_then(Value::as_str)
                .ok_or("finding.span_repair proposal missing payload.section")?;
            if section.trim().is_empty() {
                return Err("finding.span_repair payload.section must be non-empty".to_string());
            }
            let text = proposal
                .payload
                .get("text")
                .and_then(Value::as_str)
                .ok_or("finding.span_repair proposal missing payload.text")?;
            if text.trim().is_empty() {
                return Err("finding.span_repair payload.text must be non-empty".to_string());
            }
        }
        "finding.entity_resolve" => {
            let entity_name = proposal
                .payload
                .get("entity_name")
                .and_then(Value::as_str)
                .ok_or("finding.entity_resolve proposal missing payload.entity_name")?;
            if entity_name.trim().is_empty() {
                return Err(
                    "finding.entity_resolve payload.entity_name must be non-empty".to_string(),
                );
            }
            let source = proposal
                .payload
                .get("source")
                .and_then(Value::as_str)
                .ok_or("finding.entity_resolve proposal missing payload.source")?;
            if source.trim().is_empty() {
                return Err("finding.entity_resolve payload.source must be non-empty".to_string());
            }
            let id = proposal
                .payload
                .get("id")
                .and_then(Value::as_str)
                .ok_or("finding.entity_resolve proposal missing payload.id")?;
            if id.trim().is_empty() {
                return Err("finding.entity_resolve payload.id must be non-empty".to_string());
            }
            let confidence = proposal
                .payload
                .get("confidence")
                .and_then(Value::as_f64)
                .ok_or("finding.entity_resolve proposal missing payload.confidence")?;
            if !(0.0..=1.0).contains(&confidence) {
                return Err(format!(
                    "finding.entity_resolve confidence {confidence} out of [0.0, 1.0]"
                ));
            }
        }
        "finding.entity_add" => {
            let entity_name = proposal
                .payload
                .get("entity_name")
                .and_then(Value::as_str)
                .ok_or("finding.entity_add proposal missing payload.entity_name")?;
            if entity_name.trim().is_empty() {
                return Err("finding.entity_add payload.entity_name must be non-empty".to_string());
            }
            let entity_type = proposal
                .payload
                .get("entity_type")
                .and_then(Value::as_str)
                .ok_or("finding.entity_add proposal missing payload.entity_type")?;
            const VALID_ENTITY_TYPES: &[&str] = &[
                "gene",
                "protein",
                "compound",
                "disease",
                "cell_type",
                "organism",
                "pathway",
                "assay",
                "anatomical_structure",
                "particle",
                "instrument",
                "dataset",
                "quantity",
                "other",
            ];
            if !VALID_ENTITY_TYPES.contains(&entity_type) {
                return Err(format!(
                    "finding.entity_add payload.entity_type '{entity_type}' not in {VALID_ENTITY_TYPES:?}"
                ));
            }
            let reason = proposal
                .payload
                .get("reason")
                .and_then(Value::as_str)
                .ok_or("finding.entity_add proposal missing payload.reason")?;
            if reason.trim().is_empty() {
                return Err("finding.entity_add payload.reason must be non-empty".to_string());
            }
        }
        "evidence_atom.locator_repair" => {
            if proposal.target.r#type != "evidence_atom" {
                return Err(format!(
                    "evidence_atom.locator_repair target.type must be 'evidence_atom', got '{}'",
                    proposal.target.r#type
                ));
            }
            let locator = proposal
                .payload
                .get("locator")
                .and_then(Value::as_str)
                .ok_or("evidence_atom.locator_repair proposal missing payload.locator")?;
            if locator.trim().is_empty() {
                return Err(
                    "evidence_atom.locator_repair payload.locator must be non-empty".to_string(),
                );
            }
            let source_id = proposal
                .payload
                .get("source_id")
                .and_then(Value::as_str)
                .ok_or("evidence_atom.locator_repair proposal missing payload.source_id")?;
            if source_id.trim().is_empty() {
                return Err(
                    "evidence_atom.locator_repair payload.source_id must be non-empty".to_string(),
                );
            }
        }
        "frontier.conflict_resolve" => {
            if proposal.target.r#type != "frontier_observation" {
                return Err(format!(
                    "frontier.conflict_resolve target.type must be 'frontier_observation', got '{}'",
                    proposal.target.r#type
                ));
            }
            let conflict_event_id = proposal
                .payload
                .get("conflict_event_id")
                .and_then(Value::as_str)
                .ok_or("frontier.conflict_resolve proposal missing payload.conflict_event_id")?;
            if conflict_event_id.trim().is_empty() {
                return Err(
                    "frontier.conflict_resolve payload.conflict_event_id must be non-empty"
                        .to_string(),
                );
            }
            let note = proposal
                .payload
                .get("resolution_note")
                .and_then(Value::as_str)
                .ok_or("frontier.conflict_resolve proposal missing payload.resolution_note")?;
            if note.trim().is_empty() {
                return Err(
                    "frontier.conflict_resolve payload.resolution_note must be non-empty"
                        .to_string(),
                );
            }
        }
        other => return Err(format!("Unsupported proposal kind '{other}'")),
    }
    validate_decision_state(proposal)
}
/// Looks up a finding by id in the frontier, returning its index in
/// `frontier.findings` or an error naming the missing id.
fn require_existing_finding(frontier: &Project, finding_id: &str) -> Result<usize, String> {
    for (idx, finding) in frontier.findings.iter().enumerate() {
        if finding.id == finding_id {
            return Ok(idx);
        }
    }
    Err(format!("Finding not found: {finding_id}"))
}
/// Accepts a proposal by id and immediately applies it to the frontier,
/// returning the id of the event produced by the application.
///
/// Guard rails:
///   - a rejected proposal can never be accepted;
///   - an already-applied proposal short-circuits and returns its recorded
///     event id instead of being applied twice.
fn accept_proposal_in_frontier(
    frontier: &mut Project,
    proposal_id: &str,
    reviewer: &str,
    reason: &str,
) -> Result<String, String> {
    validate_reviewer_identity(reviewer)?;
    if reason.trim().is_empty() {
        return Err("Decision reason must be non-empty".to_string());
    }
    let index = frontier
        .proposals
        .iter()
        .position(|proposal| proposal.id == proposal_id)
        .ok_or_else(|| format!("Proposal not found: {proposal_id}"))?;
    let status = frontier.proposals[index].status.clone();
    // Rejection is terminal.
    if status == "rejected" {
        return Err(format!("Cannot accept rejected proposal {}", proposal_id));
    }
    // Idempotent re-accept of an applied proposal: return the recorded event.
    if status == "applied" {
        return frontier.proposals[index]
            .applied_event_id
            .clone()
            .ok_or_else(|| format!("Proposal {} is applied but has no event id", proposal_id));
    }
    // Validate against current frontier state before mutating anything.
    let proposal = frontier.proposals[index].clone();
    validate_proposal_shape(frontier, &proposal)?;
    // Record the review decision first, then apply.
    frontier.proposals[index].status = "accepted".to_string();
    frontier.proposals[index].reviewed_by = Some(reviewer.to_string());
    frontier.proposals[index].reviewed_at = Some(Utc::now().to_rfc3339());
    frontier.proposals[index].decision_reason = Some(reason.to_string());
    // NOTE(review): if apply_proposal fails here, the proposal is left in
    // status "accepted" with review metadata but no applied_event_id —
    // presumably so a retry can re-apply; confirm this is intentional.
    let event_id = apply_proposal(frontier, &proposal, reviewer, reason)?;
    frontier.proposals[index].status = "applied".to_string();
    frontier.proposals[index].applied_event_id = Some(event_id.clone());
    Ok(event_id)
}
/// Marks a proposal as rejected, recording who rejected it and why.
/// Rejection is allowed from "pending_review" or "accepted"; proposals that
/// are already rejected or already applied cannot be rejected again.
fn reject_proposal_in_frontier(
    frontier: &mut Project,
    proposal_id: &str,
    reviewer: &str,
    reason: &str,
) -> Result<(), String> {
    validate_reviewer_identity(reviewer)?;
    if reason.trim().is_empty() {
        return Err("Decision reason must be non-empty".to_string());
    }
    let index = frontier
        .proposals
        .iter()
        .position(|p| p.id == proposal_id)
        .ok_or_else(|| format!("Proposal not found: {proposal_id}"))?;
    let proposal = &mut frontier.proposals[index];
    match proposal.status.as_str() {
        "pending_review" | "accepted" => {}
        "rejected" => return Err(format!("Proposal {} is already rejected", proposal_id)),
        "applied" => return Err(format!("Proposal {} is already applied", proposal_id)),
        other => return Err(format!("Unsupported proposal status '{}'", other)),
    }
    proposal.status = "rejected".to_string();
    proposal.reviewed_by = Some(reviewer.to_string());
    proposal.reviewed_at = Some(Utc::now().to_rfc3339());
    proposal.decision_reason = Some(reason.to_string());
    Ok(())
}
/// Moves a pending proposal to "needs_revision", recording reviewer and reason.
///
/// Only "pending_review" proposals may be sent back for revision. Every other
/// known status now gets a precise error; previously "accepted" fell into the
/// generic "Unsupported proposal status" catch-all, which misreported a
/// perfectly supported status as unknown when the real problem was an invalid
/// transition.
fn request_revision_in_frontier(
    frontier: &mut Project,
    proposal_id: &str,
    reviewer: &str,
    reason: &str,
) -> Result<(), String> {
    validate_reviewer_identity(reviewer)?;
    if reason.trim().is_empty() {
        return Err("Decision reason must be non-empty".to_string());
    }
    let index = frontier
        .proposals
        .iter()
        .position(|proposal| proposal.id == proposal_id)
        .ok_or_else(|| format!("Proposal not found: {proposal_id}"))?;
    match frontier.proposals[index].status.as_str() {
        "pending_review" => {}
        "needs_revision" => {
            return Err(format!("Proposal {} already needs revision", proposal_id));
        }
        "accepted" => {
            // An accepted proposal is past review; it cannot be sent back.
            return Err(format!(
                "Proposal {} is already accepted and cannot be sent back for revision",
                proposal_id
            ));
        }
        "rejected" => {
            return Err(format!("Proposal {} is already rejected", proposal_id));
        }
        "applied" => {
            return Err(format!("Proposal {} is already applied", proposal_id));
        }
        other => {
            return Err(format!("Unsupported proposal status '{}'", other));
        }
    }
    frontier.proposals[index].status = "needs_revision".to_string();
    frontier.proposals[index].reviewed_by = Some(reviewer.to_string());
    frontier.proposals[index].reviewed_at = Some(Utc::now().to_rfc3339());
    frontier.proposals[index].decision_reason = Some(reason.to_string());
    Ok(())
}
fn apply_proposal(
frontier: &mut Project,
proposal: &StateProposal,
reviewer: &str,
decision_reason: &str,
) -> Result<String, String> {
if proposal.kind.as_str() == "finding.retract" {
let events = apply_retract(frontier, proposal, reviewer, decision_reason)?;
let primary_id = events
.first()
.map(|event| event.id.clone())
.ok_or_else(|| "apply_retract returned no events".to_string())?;
for event in events {
frontier.events.push(event);
}
mark_proof_stale(
frontier,
format!("Applied proposal {} after latest proof export", proposal.id),
);
return Ok(primary_id);
}
if proposal.kind.as_str() == "finding.confidence_revise" {
let events = apply_confidence_revise(frontier, proposal, reviewer, decision_reason)?;
let primary_id = events
.first()
.map(|event| event.id.clone())
.ok_or_else(|| "apply_confidence_revise returned no events".to_string())?;
for event in events {
frontier.events.push(event);
}
mark_proof_stale(
frontier,
format!("Applied proposal {} after latest proof export", proposal.id),
);
return Ok(primary_id);
}
let event = match proposal.kind.as_str() {
"finding.add" => apply_add(frontier, proposal, reviewer, decision_reason)?,
"finding.review" => apply_review(frontier, proposal, reviewer, decision_reason)?,
"finding.caveat" => apply_caveat(frontier, proposal, reviewer, decision_reason)?,
"finding.note" => apply_note(frontier, proposal, reviewer, decision_reason)?,
"finding.reject" => apply_reject(frontier, proposal, reviewer, decision_reason)?,
"finding.supersede" => apply_supersede(frontier, proposal, reviewer, decision_reason)?,
"artifact.assert" => apply_artifact_assert(frontier, proposal, reviewer, decision_reason)?,
"negative_result.assert" => {
apply_negative_result_assert(frontier, proposal, reviewer, decision_reason)?
}
"trajectory.create" => {
apply_trajectory_create(frontier, proposal, reviewer, decision_reason)?
}
"trajectory.step_append" => {
apply_trajectory_step_append(frontier, proposal, reviewer, decision_reason)?
}
"evidence_atom.locator_repair" => {
apply_evidence_atom_locator_repair(frontier, proposal, reviewer, decision_reason)?
}
"finding.span_repair" => {
apply_finding_span_repair(frontier, proposal, reviewer, decision_reason)?
}
"finding.entity_resolve" => {
apply_finding_entity_resolve(frontier, proposal, reviewer, decision_reason)?
}
"finding.entity_add" => {
apply_finding_entity_add(frontier, proposal, reviewer, decision_reason)?
}
"frontier.conflict_resolve" => {
apply_frontier_conflict_resolve(frontier, proposal, reviewer, decision_reason)?
}
other => return Err(format!("Unsupported proposal kind '{other}'")),
};
let event_id = event.id.clone();
frontier.events.push(event);
mark_proof_stale(
frontier,
format!("Applied proposal {} after latest proof export", proposal.id),
);
Ok(event_id)
}
/// Applies a `finding.supersede` proposal: inserts the replacement finding,
/// links it back to the superseded finding, flips the old finding's
/// `superseded` flag, and emits a `finding.superseded` event on the OLD
/// finding (its before/after hashes capture the flag flip).
fn apply_supersede(
    frontier: &mut Project,
    proposal: &StateProposal,
    reviewer: &str,
    _decision_reason: &str,
) -> Result<StateEvent, String> {
    use crate::bundle::Link;
    let old_id = proposal.target.id.clone();
    let new_finding_value = proposal
        .payload
        .get("new_finding")
        .ok_or("finding.supersede proposal missing payload.new_finding")?
        .clone();
    let mut new_finding: FindingBundle = serde_json::from_value(new_finding_value)
        .map_err(|e| format!("Invalid finding.supersede payload.new_finding: {e}"))?;
    let old_idx = find_finding_index(frontier, &old_id)?;
    // A finding may only be superseded once.
    if frontier.findings[old_idx].flags.superseded {
        return Err(format!(
            "Refusing to supersede already-superseded finding {old_id}"
        ));
    }
    // An identical id means an identical content address — nothing changed.
    if new_finding.id == old_id {
        return Err(
            "Refusing to supersede with a finding that has the same content address as the old finding (assertion / type / provenance_id are unchanged)".to_string(),
        );
    }
    // The replacement must not collide with any finding already present.
    if frontier
        .findings
        .iter()
        .any(|existing| existing.id == new_finding.id)
    {
        return Err(format!(
            "Refusing to add superseding finding with existing finding ID {}",
            new_finding.id
        ));
    }
    // Hash the old finding before mutating it so the event records the
    // transition.
    let before_hash = events::finding_hash(&frontier.findings[old_idx]);
    // Ensure the replacement carries a "supersedes" link to the old finding,
    // respecting one the proposer may already have included.
    let already_links_old = new_finding
        .links
        .iter()
        .any(|l| l.target == old_id && l.link_type == "supersedes");
    if !already_links_old {
        new_finding.links.push(Link {
            target: old_id.clone(),
            link_type: "supersedes".to_string(),
            note: format!(
                "Supersedes {old_id} via finding.supersede proposal {}.",
                proposal.id
            ),
            inferred_by: "reviewer".to_string(),
            created_at: Utc::now().to_rfc3339(),
            mechanism: None,
        });
    }
    let new_finding_id = new_finding.id.clone();
    frontier.findings.push(new_finding);
    frontier.findings[old_idx].flags.superseded = true;
    let after_hash = events::finding_hash(&frontier.findings[old_idx]);
    Ok(events::new_finding_event(events::FindingEventInput {
        kind: "finding.superseded",
        finding_id: &old_id,
        actor_id: reviewer,
        actor_type: "human",
        reason: &proposal.reason,
        before_hash: &before_hash,
        after_hash: &after_hash,
        payload: json!({
            "proposal_id": proposal.id,
            "new_finding_id": new_finding_id,
        }),
        caveats: proposal.caveats.clone(),
    }))
}
/// Applies a `finding.add` proposal: deserializes the finding from the
/// payload, inserts it into the frontier, and returns a `finding.asserted`
/// event (before hash is NULL_HASH since the finding did not exist before).
fn apply_add(
    frontier: &mut Project,
    proposal: &StateProposal,
    reviewer: &str,
    _decision_reason: &str,
) -> Result<StateEvent, String> {
    let raw = proposal
        .payload
        .get("finding")
        .ok_or("finding.add proposal missing payload.finding")?
        .clone();
    let finding: FindingBundle = serde_json::from_value(raw)
        .map_err(|e| format!("Invalid finding.add payload: {e}"))?;
    let finding_id = finding.id.clone();
    // Reject ids already present in the frontier.
    let duplicate = frontier.findings.iter().any(|f| f.id == finding_id);
    if duplicate {
        return Err(format!(
            "Refusing to add duplicate finding with existing finding ID {finding_id}"
        ));
    }
    frontier.findings.push(finding);
    // The after hash is computed from the finding as stored in the frontier.
    let after_hash = events::finding_hash_by_id(frontier, &finding_id);
    Ok(events::new_finding_event(events::FindingEventInput {
        kind: "finding.asserted",
        finding_id: &finding_id,
        actor_id: reviewer,
        actor_type: "human",
        reason: &proposal.reason,
        before_hash: NULL_HASH,
        after_hash: &after_hash,
        payload: json!({
            "proposal_id": proposal.id,
        }),
        caveats: proposal.caveats.clone(),
    }))
}
/// Applies an `artifact.assert` proposal: deserializes the artifact, appends
/// it to the frontier, and builds the artifact-asserted event by hand (both
/// event hashes stay NULL_HASH for this kind).
fn apply_artifact_assert(
    frontier: &mut Project,
    proposal: &StateProposal,
    reviewer: &str,
    _decision_reason: &str,
) -> Result<StateEvent, String> {
    let raw = proposal
        .payload
        .get("artifact")
        .ok_or("artifact.assert proposal missing payload.artifact")?
        .clone();
    let artifact: Artifact = serde_json::from_value(raw)
        .map_err(|e| format!("Invalid artifact.assert payload: {e}"))?;
    let artifact_id = artifact.id.clone();
    if frontier.artifacts.iter().any(|a| a.id == artifact_id) {
        return Err(format!(
            "Refusing to add duplicate artifact with existing id {artifact_id}"
        ));
    }
    frontier.artifacts.push(artifact.clone());
    // Reviewer ids prefixed "agent:" are treated as automated actors.
    let actor_type = if reviewer.starts_with("agent:") {
        "agent"
    } else {
        "human"
    };
    let mut event = StateEvent {
        schema: events::EVENT_SCHEMA.to_string(),
        // Filled in below once the event content is final.
        id: String::new(),
        kind: events::EVENT_KIND_ARTIFACT_ASSERTED.to_string(),
        target: StateTarget {
            r#type: "artifact".to_string(),
            id: artifact_id,
        },
        actor: StateActor {
            id: reviewer.to_string(),
            r#type: actor_type.to_string(),
        },
        timestamp: Utc::now().to_rfc3339(),
        reason: proposal.reason.clone(),
        before_hash: NULL_HASH.to_string(),
        after_hash: NULL_HASH.to_string(),
        payload: json!({
            "proposal_id": proposal.id,
            "artifact": artifact,
        }),
        caveats: proposal.caveats.clone(),
        signature: None,
        schema_artifact_id: None,
    };
    events::validate_event_payload(&event.kind, &event.payload)?;
    event.id = events::compute_event_id(&event);
    Ok(event)
}
/// Applies a `finding.review` proposal: maps the payload status onto a
/// `ReviewState`, updates the finding's review flags, and emits a
/// `finding.reviewed` event capturing the before/after finding hashes.
fn apply_review(
    frontier: &mut Project,
    proposal: &StateProposal,
    reviewer: &str,
    _decision_reason: &str,
) -> Result<StateEvent, String> {
    use crate::bundle::ReviewState;
    let finding_id = proposal.target.id.as_str();
    let idx = find_finding_index(frontier, finding_id)?;
    let before_hash = events::finding_hash(&frontier.findings[idx]);
    let Some(status) = proposal.payload.get("status").and_then(Value::as_str) else {
        return Err("finding.review proposal missing payload.status".to_string());
    };
    // "approved" is accepted as a synonym for "accepted".
    let new_state = match status {
        "accepted" | "approved" => ReviewState::Accepted,
        "contested" => ReviewState::Contested,
        "needs_revision" => ReviewState::NeedsRevision,
        "rejected" => ReviewState::Rejected,
        other => return Err(format!("Unknown review proposal status '{other}'")),
    };
    let finding = &mut frontier.findings[idx];
    finding.flags.contested = new_state.implies_contested();
    finding.flags.review_state = Some(new_state);
    let after_hash = events::finding_hash(&frontier.findings[idx]);
    Ok(events::new_finding_event(events::FindingEventInput {
        kind: "finding.reviewed",
        finding_id,
        actor_id: reviewer,
        actor_type: "human",
        reason: &proposal.reason,
        before_hash: &before_hash,
        after_hash: &after_hash,
        payload: json!({
            "status": status,
            "proposal_id": proposal.id,
        }),
        caveats: proposal.caveats.clone(),
    }))
}
/// Applies a `finding.caveat` proposal: records the caveat text as an
/// annotation on the target finding and emits a `finding.caveated` event
/// whose `reason` is the caveat text itself (not the proposal reason).
fn apply_caveat(
    frontier: &mut Project,
    proposal: &StateProposal,
    reviewer: &str,
    _decision_reason: &str,
) -> Result<StateEvent, String> {
    let finding_id = proposal.target.id.as_str();
    let idx = find_finding_index(frontier, finding_id)?;
    let before_hash = events::finding_hash(&frontier.findings[idx]);
    let timestamp = Utc::now().to_rfc3339();
    let Some(caveat_text) = proposal.payload.get("text").and_then(Value::as_str) else {
        return Err("finding.caveat proposal missing payload.text".to_string());
    };
    let provenance = extract_annotation_provenance(&proposal.payload);
    let ann_id = annotation_id(finding_id, caveat_text, reviewer, &timestamp);
    let annotation = Annotation {
        id: ann_id.clone(),
        text: caveat_text.to_string(),
        author: reviewer.to_string(),
        timestamp,
        provenance: provenance.clone(),
    };
    frontier.findings[idx].annotations.push(annotation);
    let after_hash = events::finding_hash(&frontier.findings[idx]);
    let mut payload = json!({
        "annotation_id": ann_id,
        "text": caveat_text,
        "proposal_id": proposal.id,
    });
    // Provenance is optional; attach it to the event payload only when given.
    if let Some(prov) = &provenance {
        payload["provenance"] = serde_json::to_value(prov).unwrap_or(Value::Null);
    }
    Ok(events::new_finding_event(events::FindingEventInput {
        kind: "finding.caveated",
        finding_id,
        actor_id: reviewer,
        actor_type: "human",
        reason: caveat_text,
        before_hash: &before_hash,
        after_hash: &after_hash,
        payload,
        caveats: proposal.caveats.clone(),
    }))
}
/// Applies a `finding.note` proposal: appends the note text as an annotation
/// on the target finding and emits a `finding.noted` event whose `reason` is
/// the note text itself (not the proposal reason).
fn apply_note(
    frontier: &mut Project,
    proposal: &StateProposal,
    reviewer: &str,
    _decision_reason: &str,
) -> Result<StateEvent, String> {
    let finding_id = proposal.target.id.as_str();
    let idx = find_finding_index(frontier, finding_id)?;
    let before_hash = events::finding_hash(&frontier.findings[idx]);
    let stamp = Utc::now().to_rfc3339();
    let Some(note_text) = proposal.payload.get("text").and_then(Value::as_str) else {
        return Err("finding.note proposal missing payload.text".to_string());
    };
    let provenance = extract_annotation_provenance(&proposal.payload);
    let note_id = annotation_id(finding_id, note_text, reviewer, &stamp);
    let annotation = Annotation {
        id: note_id.clone(),
        text: note_text.to_string(),
        author: reviewer.to_string(),
        timestamp: stamp,
        provenance: provenance.clone(),
    };
    frontier.findings[idx].annotations.push(annotation);
    let after_hash = events::finding_hash(&frontier.findings[idx]);
    let mut payload = json!({
        "annotation_id": note_id,
        "text": note_text,
        "proposal_id": proposal.id,
    });
    // Provenance is optional; attach it to the event payload only when given.
    if let Some(prov) = &provenance {
        payload["provenance"] = serde_json::to_value(prov).unwrap_or(Value::Null);
    }
    Ok(events::new_finding_event(events::FindingEventInput {
        kind: "finding.noted",
        finding_id,
        actor_id: reviewer,
        actor_type: "human",
        reason: note_text,
        before_hash: &before_hash,
        after_hash: &after_hash,
        payload,
        caveats: proposal.caveats.clone(),
    }))
}
/// Applies a `finding.entity_resolve` proposal: attaches a canonical id
/// (source + id + confidence) to a named entity on the target finding, marks
/// the entity as no longer needing review, and emits a
/// `finding.entity_resolved` event.
fn apply_finding_entity_resolve(
    frontier: &mut Project,
    proposal: &StateProposal,
    reviewer: &str,
    _decision_reason: &str,
) -> Result<StateEvent, String> {
    use crate::bundle::{ResolutionMethod, ResolvedId};
    let finding_id = proposal.target.id.as_str();
    // Required payload fields: entity_name, source, id, confidence.
    let entity_name = proposal
        .payload
        .get("entity_name")
        .and_then(Value::as_str)
        .ok_or("finding.entity_resolve proposal missing payload.entity_name")?
        .to_string();
    let source = proposal
        .payload
        .get("source")
        .and_then(Value::as_str)
        .ok_or("finding.entity_resolve proposal missing payload.source")?
        .to_string();
    let id = proposal
        .payload
        .get("id")
        .and_then(Value::as_str)
        .ok_or("finding.entity_resolve proposal missing payload.id")?
        .to_string();
    let confidence = proposal
        .payload
        .get("confidence")
        .and_then(Value::as_f64)
        .ok_or("finding.entity_resolve proposal missing payload.confidence")?;
    // Optional: the exact name the resolver matched against.
    let matched_name = proposal
        .payload
        .get("matched_name")
        .and_then(Value::as_str)
        .map(str::to_string);
    // Optional provenance/method with defaults for human-curated resolutions.
    let provenance = proposal
        .payload
        .get("resolution_provenance")
        .and_then(Value::as_str)
        .unwrap_or("delegated_human_curation")
        .to_string();
    let method_str = proposal
        .payload
        .get("resolution_method")
        .and_then(Value::as_str)
        .unwrap_or("manual");
    let method = match method_str {
        "exact_match" => ResolutionMethod::ExactMatch,
        "fuzzy_match" => ResolutionMethod::FuzzyMatch,
        "llm_inference" => ResolutionMethod::LlmInference,
        "manual" => ResolutionMethod::Manual,
        other => {
            return Err(format!(
                "finding.entity_resolve unknown resolution_method '{other}'"
            ));
        }
    };
    let f_idx = find_finding_index(frontier, finding_id)?;
    // Entities are addressed by name within the finding's assertion.
    let e_idx = frontier.findings[f_idx]
        .assertion
        .entities
        .iter()
        .position(|e| e.name == entity_name)
        .ok_or_else(|| {
            format!("finding.entity_resolve entity '{entity_name}' not in finding {finding_id}")
        })?;
    // Hash before mutating so the event can record the transition.
    let before_hash = events::finding_hash(&frontier.findings[f_idx]);
    let entity = &mut frontier.findings[f_idx].assertion.entities[e_idx];
    entity.canonical_id = Some(ResolvedId {
        source: source.clone(),
        id: id.clone(),
        confidence,
        matched_name: matched_name.clone(),
    });
    entity.resolution_method = Some(method);
    entity.resolution_provenance = Some(provenance.clone());
    entity.resolution_confidence = confidence;
    entity.needs_review = false;
    let after_hash = events::finding_hash(&frontier.findings[f_idx]);
    let mut payload = json!({
        "proposal_id": proposal.id,
        "entity_name": entity_name,
        "source": source,
        "id": id,
        "confidence": confidence,
        "resolution_method": method_str,
        "resolution_provenance": provenance,
    });
    // matched_name is only echoed into the event payload when present.
    if let Some(m) = matched_name {
        payload["matched_name"] = serde_json::Value::String(m);
    }
    Ok(events::new_finding_event(events::FindingEventInput {
        kind: "finding.entity_resolved",
        finding_id,
        actor_id: reviewer,
        actor_type: "human",
        reason: &proposal.reason,
        before_hash: &before_hash,
        after_hash: &after_hash,
        payload,
        caveats: proposal.caveats.clone(),
    }))
}
/// Applies a `finding.entity_add` proposal: adds an unresolved entity with the
/// given name/type to the target finding's assertion. If an entity with that
/// name already exists, the mutation is skipped and the emitted
/// `finding.entity_added` event is flagged as an idempotent no-op.
fn apply_finding_entity_add(
    frontier: &mut Project,
    proposal: &StateProposal,
    reviewer: &str,
    _decision_reason: &str,
) -> Result<StateEvent, String> {
    use crate::bundle::Entity;
    let finding_id = proposal.target.id.as_str();
    let get_str = |key: &str| proposal.payload.get(key).and_then(Value::as_str);
    let entity_name = get_str("entity_name")
        .ok_or("finding.entity_add proposal missing payload.entity_name")?
        .to_string();
    let entity_type = get_str("entity_type")
        .ok_or("finding.entity_add proposal missing payload.entity_type")?
        .to_string();
    let reason_text = get_str("reason")
        .ok_or("finding.entity_add proposal missing payload.reason")?
        .to_string();
    let idx = find_finding_index(frontier, finding_id)?;
    let already_present = frontier.findings[idx]
        .assertion
        .entities
        .iter()
        .any(|e| e.name == entity_name);
    let before_hash = events::finding_hash(&frontier.findings[idx]);
    if !already_present {
        frontier.findings[idx].assertion.entities.push(Entity {
            name: entity_name.clone(),
            entity_type: entity_type.clone(),
            identifiers: serde_json::Map::new(),
            canonical_id: None,
            candidates: Vec::new(),
            aliases: Vec::new(),
            resolution_provenance: None,
            resolution_confidence: 1.0,
            resolution_method: None,
            species_context: None,
            needs_review: false,
        });
    }
    let after_hash = events::finding_hash(&frontier.findings[idx]);
    Ok(events::new_finding_event(events::FindingEventInput {
        kind: "finding.entity_added",
        finding_id,
        actor_id: reviewer,
        actor_type: "human",
        reason: &proposal.reason,
        before_hash: &before_hash,
        after_hash: &after_hash,
        payload: json!({
            "proposal_id": proposal.id,
            "entity_name": entity_name,
            "entity_type": entity_type,
            "reason": reason_text,
            "idempotent_noop": already_present,
        }),
        caveats: proposal.caveats.clone(),
    }))
}
/// Apply a `finding.span_repair` proposal: append a `(section, text)`
/// evidence span to the target finding. Unlike entity additions, an exact
/// duplicate span is rejected with an error rather than treated as a no-op.
fn apply_finding_span_repair(
    frontier: &mut Project,
    proposal: &StateProposal,
    reviewer: &str,
    _decision_reason: &str,
) -> Result<StateEvent, String> {
    let finding_id = proposal.target.id.as_str();
    let section = match proposal.payload.get("section").and_then(Value::as_str) {
        Some(s) => s.to_string(),
        None => return Err("finding.span_repair proposal missing payload.section".to_string()),
    };
    let text = match proposal.payload.get("text").and_then(Value::as_str) {
        Some(t) => t.to_string(),
        None => return Err("finding.span_repair proposal missing payload.text".to_string()),
    };
    let f_idx = find_finding_index(frontier, finding_id)?;
    // Reject exact duplicates: re-adding an identical span would silently
    // bloat the evidence list.
    let is_duplicate = frontier.findings[f_idx]
        .evidence
        .evidence_spans
        .iter()
        .any(|span| {
            span.get("section").and_then(Value::as_str) == Some(section.as_str())
                && span.get("text").and_then(Value::as_str) == Some(text.as_str())
        });
    if is_duplicate {
        return Err(format!(
            "finding {finding_id} already carries an identical (section, text) span"
        ));
    }
    let before_hash = events::finding_hash(&frontier.findings[f_idx]);
    frontier.findings[f_idx]
        .evidence
        .evidence_spans
        .push(json!({"section": section, "text": text}));
    let after_hash = events::finding_hash(&frontier.findings[f_idx]);
    Ok(events::new_finding_event(events::FindingEventInput {
        kind: "finding.span_repaired",
        finding_id,
        actor_id: reviewer,
        actor_type: "human",
        reason: &proposal.reason,
        before_hash: &before_hash,
        after_hash: &after_hash,
        payload: json!({
            "proposal_id": proposal.id,
            "section": section,
            "text": text,
        }),
        caveats: proposal.caveats.clone(),
    }))
}
/// Apply an `evidence_atom.locator_repair` proposal: fill in a missing
/// locator on an evidence atom. Refuses to run when the payload's source_id
/// disagrees with the atom, or when the atom already carries any locator
/// (identical or different). On success the "missing evidence locator"
/// caveat is removed.
fn apply_evidence_atom_locator_repair(
    frontier: &mut Project,
    proposal: &StateProposal,
    reviewer: &str,
    _decision_reason: &str,
) -> Result<StateEvent, String> {
    let atom_id = proposal.target.id.as_str();
    let locator = proposal
        .payload
        .get("locator")
        .and_then(Value::as_str)
        .ok_or("evidence_atom.locator_repair proposal missing payload.locator")?
        .to_string();
    let source_id = proposal
        .payload
        .get("source_id")
        .and_then(Value::as_str)
        .ok_or("evidence_atom.locator_repair proposal missing payload.source_id")?
        .to_string();
    let atom_idx = frontier
        .evidence_atoms
        .iter()
        .position(|atom| atom.id == atom_id)
        .ok_or_else(|| format!("evidence_atom.locator_repair targets unknown atom {atom_id}"))?;
    {
        let atom = &frontier.evidence_atoms[atom_idx];
        // Guard: payload must name the same source the atom already records.
        if atom.source_id != source_id {
            return Err(format!(
                "evidence_atom.locator_repair payload.source_id '{source_id}' does not match atom.source_id '{}'",
                atom.source_id
            ));
        }
        // Guard: never overwrite an existing locator; the message
        // distinguishes an identical value from a conflicting one.
        match &atom.locator {
            Some(existing) if existing == &locator => {
                return Err(format!(
                    "evidence_atom {atom_id} already carries locator '{existing}'"
                ));
            }
            Some(existing) => {
                return Err(format!(
                    "evidence_atom {atom_id} already carries locator '{existing}'; refusing to overwrite with '{locator}'"
                ));
            }
            None => {}
        }
    }
    let before_hash = events::evidence_atom_hash(&frontier.evidence_atoms[atom_idx]);
    {
        let atom = &mut frontier.evidence_atoms[atom_idx];
        atom.locator = Some(locator.clone());
        atom.caveats.retain(|c| c != "missing evidence locator");
    }
    let after_hash = events::evidence_atom_hash(&frontier.evidence_atoms[atom_idx]);
    Ok(events::new_evidence_atom_locator_repair_event(
        atom_id,
        reviewer,
        "human",
        &proposal.reason,
        &before_hash,
        &after_hash,
        json!({
            "proposal_id": proposal.id,
            "locator": locator,
            "source_id": source_id,
        }),
        proposal.caveats.clone(),
    ))
}
/// Apply a `frontier.conflict_resolve` proposal: record a resolution for a
/// previously emitted `frontier.conflict_detected` event.
///
/// Validates that the referenced event exists and has the expected kind, and
/// that no `frontier.conflict_resolved` event already references it (one
/// resolution per conflict). Emits the resolution event; the frontier state
/// itself is not otherwise mutated.
///
/// Fix: the original cloned the entire conflict `StateEvent` even though
/// only its `kind` field is ever inspected — a redundant deep clone of a
/// potentially large payload. We borrow the kind instead.
fn apply_frontier_conflict_resolve(
    frontier: &mut Project,
    proposal: &StateProposal,
    reviewer: &str,
    _decision_reason: &str,
) -> Result<StateEvent, String> {
    let conflict_event_id = proposal
        .payload
        .get("conflict_event_id")
        .and_then(Value::as_str)
        .ok_or("frontier.conflict_resolve proposal missing payload.conflict_event_id")?
        .to_string();
    let resolution_note = proposal
        .payload
        .get("resolution_note")
        .and_then(Value::as_str)
        .ok_or("frontier.conflict_resolve proposal missing payload.resolution_note")?
        .to_string();
    let winning_proposal_id = proposal
        .payload
        .get("winning_proposal_id")
        .and_then(Value::as_str)
        .map(|s| s.to_string());
    // Only the event's kind is needed; borrow it rather than cloning the
    // whole event.
    let conflict_kind = frontier
        .events
        .iter()
        .find(|e| e.id == conflict_event_id)
        .map(|e| e.kind.as_str())
        .ok_or_else(|| {
            format!("frontier.conflict_resolve targets unknown event id '{conflict_event_id}'")
        })?;
    if conflict_kind != "frontier.conflict_detected" {
        return Err(format!(
            "frontier.conflict_resolve target event '{conflict_event_id}' has kind '{}', expected 'frontier.conflict_detected'",
            conflict_kind
        ));
    }
    // Enforce at most one resolution per conflict event.
    if frontier.events.iter().any(|e| {
        e.kind == "frontier.conflict_resolved"
            && e.payload.get("conflict_event_id").and_then(Value::as_str)
                == Some(&conflict_event_id)
    }) {
        return Err(format!(
            "Conflict event '{conflict_event_id}' already has a recorded resolution"
        ));
    }
    let mut payload = json!({
        "proposal_id": proposal.id,
        "conflict_event_id": conflict_event_id,
        "resolved_by": reviewer,
        "resolution_note": resolution_note,
    });
    if let Some(wpid) = &winning_proposal_id {
        payload["winning_proposal_id"] = json!(wpid);
    }
    let frontier_id = frontier.frontier_id();
    Ok(events::new_frontier_conflict_resolved_event(
        &frontier_id,
        reviewer,
        "human",
        &proposal.reason,
        payload,
        proposal.caveats.clone(),
    ))
}
/// Parse an optional `provenance` object out of a proposal payload, keeping
/// it only when it deserializes cleanly AND carries at least one identifier;
/// otherwise returns `None`.
fn extract_annotation_provenance(payload: &Value) -> Option<crate::bundle::ProvenanceRef> {
    let parsed: crate::bundle::ProvenanceRef =
        serde_json::from_value(payload.get("provenance")?.clone()).ok()?;
    parsed.has_identifier().then_some(parsed)
}
/// Apply a `finding.confidence_revise` proposal: overwrite the target
/// finding's confidence score with the payload value and, when the score
/// drops across the 0.5 threshold, cascade a dependency invalidation to
/// downstream findings. Returns the source event followed by one
/// `finding.dependency_invalidated` event per affected dependent.
///
/// NOTE: statement order here is load-bearing — pre-cascade hashes must be
/// captured before any mutation, and the cascade must run before the
/// after-hashes are computed.
fn apply_confidence_revise(
    frontier: &mut Project,
    proposal: &StateProposal,
    reviewer: &str,
    _decision_reason: &str,
) -> Result<Vec<StateEvent>, String> {
    let finding_id = proposal.target.id.as_str();
    let idx = find_finding_index(frontier, finding_id)?;
    let now = Utc::now().to_rfc3339();
    let previous = frontier.findings[idx].confidence.score;
    let new_score = proposal
        .payload
        .get("confidence")
        .and_then(Value::as_f64)
        .ok_or("finding.confidence_revise proposal missing payload.confidence")?;
    // The cascade fires only on a downward crossing of the 0.5 threshold.
    let cascade_threshold_crossed = previous >= 0.5 && new_score < 0.5;
    // Snapshot every finding's hash BEFORE mutation so cascade events can
    // report accurate before-hashes for dependents.
    let pre_cascade_hashes: std::collections::HashMap<String, String> = if cascade_threshold_crossed
    {
        frontier
            .findings
            .iter()
            .map(|finding| (finding.id.clone(), events::finding_hash(finding)))
            .collect()
    } else {
        std::collections::HashMap::new()
    };
    let before_hash = events::finding_hash(&frontier.findings[idx]);
    // Record the revision as an expert judgment with an explanatory basis.
    frontier.findings[idx].confidence.score = new_score;
    frontier.findings[idx].confidence.basis = format!(
        "expert revision from {:.3} to {:.3}: {}",
        previous, new_score, proposal.reason
    );
    frontier.findings[idx].confidence.method = ConfidenceMethod::ExpertJudgment;
    frontier.findings[idx].updated = Some(now.clone());
    // Propagate the reduction through declared dependency links; this
    // mutates dependents, so it must happen before after-hashes are taken.
    let cascade = if cascade_threshold_crossed {
        Some(propagate::propagate_correction(
            frontier,
            finding_id,
            PropagationAction::ConfidenceReduced { new_score },
        ))
    } else {
        None
    };
    let after_hash = events::finding_hash(&frontier.findings[idx]);
    let source_event = events::new_finding_event(events::FindingEventInput {
        kind: "finding.confidence_revised",
        finding_id,
        actor_id: reviewer,
        actor_type: "human",
        reason: &proposal.reason,
        before_hash: &before_hash,
        after_hash: &after_hash,
        payload: json!({
            "previous_score": previous,
            "new_score": new_score,
            "updated_at": now,
            "proposal_id": proposal.id,
            "cascade_fired": cascade_threshold_crossed,
            "affected": cascade.as_ref().map(|c| c.affected).unwrap_or(0),
        }),
        caveats: proposal.caveats.clone(),
    });
    let source_event_id = source_event.id.clone();
    let mut emitted = vec![source_event];
    if let Some(cascade) = cascade {
        // Depth is 1-based: level 0 of the cascade holds direct dependents.
        for (depth_idx, level) in cascade.cascade.iter().enumerate() {
            let depth = (depth_idx as u32) + 1;
            for dep_id in level {
                let before = pre_cascade_hashes
                    .get(dep_id)
                    .cloned()
                    .unwrap_or_else(|| events::NULL_HASH.to_string());
                let after = events::finding_hash_by_id(frontier, dep_id);
                emitted.push(events::new_finding_event(events::FindingEventInput {
                    kind: "finding.dependency_invalidated",
                    finding_id: dep_id,
                    actor_id: reviewer,
                    actor_type: "human",
                    reason: &format!(
                        "Upstream finding {finding_id} confidence reduced to {new_score:.2}; cascade depth {depth}"
                    ),
                    before_hash: &before,
                    after_hash: &after,
                    payload: json!({
                        "upstream_finding_id": finding_id,
                        "upstream_event_id": source_event_id,
                        "depth": depth,
                        "new_score": new_score,
                        "previous_score": previous,
                        "proposal_id": proposal.id,
                    }),
                    caveats: vec![],
                }));
            }
        }
    }
    Ok(emitted)
}
/// Apply a `finding.reject` proposal: flag the target finding as contested
/// and emit a `finding.rejected` event carrying the before/after hashes.
/// Rejection never deletes the finding, so the record stays auditable.
fn apply_reject(
    frontier: &mut Project,
    proposal: &StateProposal,
    reviewer: &str,
    _decision_reason: &str,
) -> Result<StateEvent, String> {
    let finding_id = proposal.target.id.as_str();
    let f_idx = find_finding_index(frontier, finding_id)?;
    let before_hash = events::finding_hash(&frontier.findings[f_idx]);
    frontier.findings[f_idx].flags.contested = true;
    let after_hash = events::finding_hash(&frontier.findings[f_idx]);
    let payload = json!({
        "proposal_id": proposal.id,
        "status": "rejected",
    });
    Ok(events::new_finding_event(events::FindingEventInput {
        kind: "finding.rejected",
        finding_id,
        actor_id: reviewer,
        actor_type: "human",
        reason: &proposal.reason,
        before_hash: &before_hash,
        after_hash: &after_hash,
        payload,
        caveats: proposal.caveats.clone(),
    }))
}
/// Apply a `finding.retract` proposal: retract the target finding, propagate
/// the retraction through declared dependency links, and emit the
/// `finding.retracted` event followed by one `finding.dependency_invalidated`
/// event per cascaded dependent (depth is 1-based). Errors if the finding is
/// already retracted.
fn apply_retract(
    frontier: &mut Project,
    proposal: &StateProposal,
    reviewer: &str,
    _decision_reason: &str,
) -> Result<Vec<StateEvent>, String> {
    let finding_id = proposal.target.id.as_str();
    let idx = find_finding_index(frontier, finding_id)?;
    if frontier.findings[idx].flags.retracted {
        return Err(format!("Finding {finding_id} is already retracted"));
    }
    // Snapshot every finding's hash before the cascade mutates dependents so
    // each cascade event can carry an accurate before-hash.
    let pre_cascade_hashes: std::collections::HashMap<String, String> = frontier
        .findings
        .iter()
        .map(|finding| (finding.id.clone(), events::finding_hash(finding)))
        .collect();
    let before_hash = events::finding_hash(&frontier.findings[idx]);
    // propagate_correction mutates the frontier (marks the target retracted
    // and dependents contested), so after-hashes are looked up afterwards.
    let cascade =
        propagate::propagate_correction(frontier, finding_id, PropagationAction::Retracted);
    let after_hash = events::finding_hash_by_id(frontier, finding_id);
    let source_event = events::new_finding_event(events::FindingEventInput {
        kind: "finding.retracted",
        finding_id,
        actor_id: reviewer,
        actor_type: "human",
        reason: &proposal.reason,
        before_hash: &before_hash,
        after_hash: &after_hash,
        payload: json!({
            "proposal_id": proposal.id,
            "affected": cascade.affected,
            "cascade": cascade.cascade,
        }),
        caveats: vec!["Retraction impact is simulated over declared dependency links.".to_string()],
    });
    let source_event_id = source_event.id.clone();
    let mut emitted = vec![source_event];
    // One invalidation event per dependent, grouped by cascade depth.
    for (depth_idx, level) in cascade.cascade.iter().enumerate() {
        let depth = (depth_idx as u32) + 1;
        for dep_id in level {
            let before = pre_cascade_hashes
                .get(dep_id)
                .cloned()
                .unwrap_or_else(|| events::NULL_HASH.to_string());
            let after = events::finding_hash_by_id(frontier, dep_id);
            emitted.push(events::new_finding_event(events::FindingEventInput {
                kind: "finding.dependency_invalidated",
                finding_id: dep_id,
                actor_id: reviewer,
                actor_type: "human",
                reason: &format!("Upstream finding {finding_id} retracted; cascade depth {depth}"),
                before_hash: &before,
                after_hash: &after,
                payload: json!({
                    "upstream_finding_id": finding_id,
                    "upstream_event_id": source_event_id,
                    "depth": depth,
                    "proposal_id": proposal.id,
                }),
                caveats: vec![],
            }));
        }
    }
    Ok(emitted)
}
/// Locate a finding by id in the frontier, returning its index or a
/// human-readable error naming the missing id.
fn find_finding_index(frontier: &Project, finding_id: &str) -> Result<usize, String> {
    match frontier.findings.iter().position(|f| f.id == finding_id) {
        Some(found) => Ok(found),
        None => Err(format!("Finding not found: {finding_id}")),
    }
}
/// Apply a `negative_result.assert` proposal: deserialize the payload's
/// negative result, refuse duplicate ids, append it to the frontier, and
/// emit a content-addressed `negative_result.asserted` event.
fn apply_negative_result_assert(
    frontier: &mut Project,
    proposal: &StateProposal,
    reviewer: &str,
    _decision_reason: &str,
) -> Result<StateEvent, String> {
    let raw = proposal
        .payload
        .get("negative_result")
        .ok_or("negative_result.assert proposal missing payload.negative_result")?
        .clone();
    let parsed: crate::bundle::NegativeResult = serde_json::from_value(raw.clone())
        .map_err(|e| format!("Invalid negative_result.assert payload: {e}"))?;
    let duplicate = frontier
        .negative_results
        .iter()
        .any(|existing| existing.id == parsed.id);
    if duplicate {
        return Err(format!(
            "Refusing to add duplicate negative_result with existing id {}",
            parsed.id
        ));
    }
    let target_id = parsed.id.clone();
    frontier.negative_results.push(parsed);
    // Build the event with a placeholder id, then fill in the
    // content-addressed id computed over the finished event.
    let mut event = StateEvent {
        schema: events::EVENT_SCHEMA.to_string(),
        id: String::new(),
        kind: events::EVENT_KIND_NEGATIVE_RESULT_ASSERTED.to_string(),
        target: StateTarget {
            r#type: "negative_result".to_string(),
            id: target_id,
        },
        actor: StateActor {
            id: reviewer.to_string(),
            r#type: "human".to_string(),
        },
        timestamp: Utc::now().to_rfc3339(),
        reason: proposal.reason.clone(),
        before_hash: NULL_HASH.to_string(),
        after_hash: NULL_HASH.to_string(),
        payload: json!({
            "proposal_id": proposal.id,
            "negative_result": raw,
        }),
        caveats: proposal.caveats.clone(),
        signature: None,
        schema_artifact_id: None,
    };
    event.id = events::compute_event_id(&event);
    Ok(event)
}
/// Apply a `trajectory.create` proposal: deserialize payload.trajectory,
/// refuse duplicate ids, append it to the frontier, and emit a
/// content-addressed `trajectory.created` event.
fn apply_trajectory_create(
    frontier: &mut Project,
    proposal: &StateProposal,
    reviewer: &str,
    _decision_reason: &str,
) -> Result<StateEvent, String> {
    let raw = proposal
        .payload
        .get("trajectory")
        .ok_or("trajectory.create proposal missing payload.trajectory")?
        .clone();
    let parsed: crate::bundle::Trajectory = serde_json::from_value(raw.clone())
        .map_err(|e| format!("Invalid trajectory.create payload: {e}"))?;
    if frontier
        .trajectories
        .iter()
        .any(|existing| existing.id == parsed.id)
    {
        return Err(format!(
            "Refusing to add duplicate trajectory with existing id {}",
            parsed.id
        ));
    }
    let target_id = parsed.id.clone();
    frontier.trajectories.push(parsed);
    // Placeholder id first; the real id is content-addressed over the
    // finished event value.
    let mut event = StateEvent {
        schema: events::EVENT_SCHEMA.to_string(),
        id: String::new(),
        kind: events::EVENT_KIND_TRAJECTORY_CREATED.to_string(),
        target: StateTarget {
            r#type: "trajectory".to_string(),
            id: target_id,
        },
        actor: StateActor {
            id: reviewer.to_string(),
            r#type: "human".to_string(),
        },
        timestamp: Utc::now().to_rfc3339(),
        reason: proposal.reason.clone(),
        before_hash: NULL_HASH.to_string(),
        after_hash: NULL_HASH.to_string(),
        payload: json!({
            "proposal_id": proposal.id,
            "trajectory": raw,
        }),
        caveats: proposal.caveats.clone(),
        signature: None,
        schema_artifact_id: None,
    };
    event.id = events::compute_event_id(&event);
    Ok(event)
}
/// Apply a `trajectory.step_append` proposal: deserialize payload.step and
/// append it to the target trajectory, refusing duplicate step ids. Emits a
/// content-addressed `trajectory.step_appended` event naming the parent.
fn apply_trajectory_step_append(
    frontier: &mut Project,
    proposal: &StateProposal,
    reviewer: &str,
    _decision_reason: &str,
) -> Result<StateEvent, String> {
    let parent_id = proposal.target.id.clone();
    let parent_idx = frontier
        .trajectories
        .iter()
        .position(|t| t.id == parent_id)
        .ok_or_else(|| format!("trajectory.step_append targets unknown trajectory {parent_id}"))?;
    let raw_step = proposal
        .payload
        .get("step")
        .ok_or("trajectory.step_append proposal missing payload.step")?
        .clone();
    let step: crate::bundle::TrajectoryStep = serde_json::from_value(raw_step.clone())
        .map_err(|e| format!("Invalid trajectory.step_append payload.step: {e}"))?;
    let parent = &mut frontier.trajectories[parent_idx];
    if parent.steps.iter().any(|existing| existing.id == step.id) {
        return Err(format!(
            "Refusing to add duplicate step with existing id {} on trajectory {}",
            step.id, parent_id
        ));
    }
    parent.steps.push(step);
    // Placeholder id first; the real id is content-addressed over the
    // finished event value.
    let mut event = StateEvent {
        schema: events::EVENT_SCHEMA.to_string(),
        id: String::new(),
        kind: events::EVENT_KIND_TRAJECTORY_STEP_APPENDED.to_string(),
        target: StateTarget {
            r#type: "trajectory".to_string(),
            id: parent_id.clone(),
        },
        actor: StateActor {
            id: reviewer.to_string(),
            r#type: "human".to_string(),
        },
        timestamp: Utc::now().to_rfc3339(),
        reason: proposal.reason.clone(),
        before_hash: NULL_HASH.to_string(),
        after_hash: NULL_HASH.to_string(),
        payload: json!({
            "proposal_id": proposal.id,
            "parent_trajectory_id": parent_id,
            "step": raw_step,
        }),
        caveats: proposal.caveats.clone(),
        signature: None,
        schema_artifact_id: None,
    };
    event.id = events::compute_event_id(&event);
    Ok(event)
}
/// Deterministic annotation id: "ann_" plus the first 16 hex characters of
/// the SHA-256 of "finding_id|text|author|timestamp".
fn annotation_id(finding_id: &str, text: &str, author: &str, timestamp: &str) -> String {
    let preimage = format!("{finding_id}|{text}|{author}|{timestamp}");
    let digest_hex = hex::encode(Sha256::digest(preimage.as_bytes()));
    format!("ann_{}", &digest_hex[..16])
}
/// SHA-256 of a manifest file's raw bytes, rendered as lowercase hex.
/// Read failures are reported with the offending path in the message.
pub fn manifest_hash(path: &Path) -> Result<String, String> {
    match std::fs::read(path) {
        Ok(bytes) => Ok(hex::encode(Sha256::digest(bytes))),
        Err(e) => Err(format!("Failed to read manifest '{}': {e}", path.display())),
    }
}
/// Directory under the repo root where proposal files are stored.
pub fn repo_proposals_dir(root: &Path) -> PathBuf {
    let mut dir = root.to_path_buf();
    dir.push(".vela/proposals");
    dir
}
#[cfg(test)]
mod tests {
use super::*;
use crate::bundle::{
Assertion, Conditions, Confidence, ConfidenceKind, ConfidenceMethod, Entity, Evidence,
Extraction, Flags, Provenance,
};
use crate::project;
use tempfile::TempDir;
/// Test fixture: a minimal but fully-populated finding with a single LRP1
/// protein entity, mouse in-vivo conditions, and a 0.7 expert-judgment
/// confidence. Only `id` varies between calls.
fn finding(id: &str) -> FindingBundle {
    FindingBundle {
        id: id.to_string(),
        version: 1,
        previous_version: None,
        assertion: Assertion {
            text: "Test finding".to_string(),
            assertion_type: "mechanism".to_string(),
            entities: vec![Entity {
                name: "LRP1".to_string(),
                entity_type: "protein".to_string(),
                identifiers: serde_json::Map::new(),
                canonical_id: None,
                candidates: Vec::new(),
                aliases: Vec::new(),
                resolution_provenance: None,
                resolution_confidence: 1.0,
                resolution_method: None,
                species_context: None,
                needs_review: false,
            }],
            relation: None,
            direction: None,
            causal_claim: None,
            causal_evidence_grade: None,
        },
        evidence: Evidence {
            evidence_type: "experimental".to_string(),
            model_system: String::new(),
            species: None,
            method: "manual".to_string(),
            sample_size: None,
            effect_size: None,
            p_value: None,
            replicated: false,
            replication_count: None,
            evidence_spans: Vec::new(),
        },
        conditions: Conditions {
            text: "mouse".to_string(),
            species_verified: Vec::new(),
            species_unverified: Vec::new(),
            in_vitro: false,
            in_vivo: true,
            human_data: false,
            clinical_trial: false,
            concentration_range: None,
            duration: None,
            age_group: None,
            cell_type: None,
        },
        confidence: Confidence {
            kind: ConfidenceKind::FrontierEpistemic,
            score: 0.7,
            basis: "test".to_string(),
            method: ConfidenceMethod::ExpertJudgment,
            components: None,
            extraction_confidence: 1.0,
        },
        provenance: Provenance {
            source_type: "published_paper".to_string(),
            doi: None,
            pmid: None,
            pmc: None,
            openalex_id: None,
            url: None,
            title: "Test".to_string(),
            authors: Vec::new(),
            year: Some(2024),
            journal: None,
            license: None,
            publisher: None,
            funders: Vec::new(),
            extraction: Extraction::default(),
            review: None,
            citation_count: None,
        },
        flags: Flags {
            gap: false,
            negative_space: false,
            contested: false,
            retracted: false,
            declining: false,
            gravity_well: false,
            review_state: None,
            superseded: false,
            signature_threshold: None,
            jointly_accepted: false,
        },
        links: Vec::new(),
        annotations: Vec::new(),
        attachments: Vec::new(),
        created: "2026-04-23T00:00:00Z".to_string(),
        updated: None,
        access_tier: crate::access_tier::AccessTier::Public,
    }
}
/// A proposal created without auto-apply is stored as pending and must not
/// touch the frontier's findings.
#[test]
fn pending_review_proposal_does_not_mutate_frontier() {
    let tmp = TempDir::new().unwrap();
    let path = tmp.path().join("frontier.json");
    let frontier = project::assemble("test", vec![finding("vf_test")], 0, 0, "test");
    repo::save_to_path(&path, &frontier).unwrap();
    let proposal = new_proposal(
        "finding.review",
        StateTarget {
            r#type: "finding".to_string(),
            id: "vf_test".to_string(),
        },
        "reviewer:test",
        "human",
        "Mouse-only evidence",
        json!({"status": "contested"}),
        Vec::new(),
        Vec::new(),
    );
    // apply = false: record the proposal only.
    create_or_apply(&path, proposal, false).unwrap();
    let loaded = repo::load_from_path(&path).unwrap();
    // Only the frontier-creation event exists; the proposal is stored but
    // the finding is untouched.
    assert_eq!(loaded.events.len(), 1);
    assert_eq!(loaded.proposals.len(), 1);
    assert!(!loaded.findings[0].flags.contested);
}
/// Applying a review proposal flags the finding, records the proposal as
/// applied, and marks any previously exported proof packet stale.
#[test]
fn applied_proposal_emits_event_and_stales_proof() {
    let tmp = TempDir::new().unwrap();
    let path = tmp.path().join("frontier.json");
    let mut frontier = project::assemble("test", vec![finding("vf_test")], 0, 0, "test");
    // Pre-record a proof export so staleness can be observed after apply.
    record_proof_export(
        &mut frontier,
        ProofPacketRecord {
            generated_at: "2026-04-23T00:00:00Z".to_string(),
            snapshot_hash: "a".repeat(64),
            event_log_hash: "b".repeat(64),
            packet_manifest_hash: "c".repeat(64),
        },
    );
    repo::save_to_path(&path, &frontier).unwrap();
    let proposal = new_proposal(
        "finding.review",
        StateTarget {
            r#type: "finding".to_string(),
            id: "vf_test".to_string(),
        },
        "reviewer:test",
        "human",
        "Mouse-only evidence",
        json!({"status": "contested"}),
        Vec::new(),
        Vec::new(),
    );
    // apply = true: the proposal is accepted immediately.
    create_or_apply(&path, proposal, true).unwrap();
    let loaded = repo::load_from_path(&path).unwrap();
    assert_eq!(loaded.events.len(), 2);
    assert!(loaded.findings[0].flags.contested);
    assert_eq!(loaded.proposals[0].status, "applied");
    assert_eq!(loaded.proof_state.latest_packet.status, "stale");
}
/// Previewing a pending proposal reports the objects and events it WOULD
/// change without persisting any mutation or accepting the proposal.
#[test]
fn preview_reports_changed_objects_and_event_kind_without_mutation() {
    let tmp = TempDir::new().unwrap();
    let path = tmp.path().join("frontier.json");
    let frontier = project::assemble("test", vec![finding("vf_test")], 0, 0, "test");
    repo::save_to_path(&path, &frontier).unwrap();
    let proposal = new_proposal(
        "finding.review",
        StateTarget {
            r#type: "finding".to_string(),
            id: "vf_test".to_string(),
        },
        "reviewer:test",
        "human",
        "Mouse-only evidence",
        json!({"status": "contested"}),
        Vec::new(),
        Vec::new(),
    );
    let proposal_id = create_or_apply(&path, proposal, false).unwrap().proposal_id;
    let preview = preview_at_path(&path, &proposal_id, "reviewer:test").unwrap();
    assert_eq!(preview.changed_findings, vec!["vf_test"]);
    assert!(preview.changed_artifacts.is_empty());
    assert_eq!(preview.event_kinds, vec!["finding.reviewed"]);
    // The single simulated event is exactly the would-be applied event.
    assert_eq!(
        preview.new_event_ids,
        vec![preview.applied_event_id.clone()]
    );
    assert_eq!(preview.events_delta, 1);
    let loaded = repo::load_from_path(&path).unwrap();
    assert_eq!(loaded.events.len(), 1, "preview must not mutate events");
    assert_eq!(
        loaded.proposals[0].status, "pending_review",
        "preview must not accept the proposal"
    );
}
/// A pending (un-applied) note proposal must leave the finding's
/// annotations untouched.
#[test]
fn pending_note_proposal_does_not_mutate_annotations() {
    let tmp = TempDir::new().unwrap();
    let path = tmp.path().join("frontier.json");
    let frontier = project::assemble("test", vec![finding("vf_test")], 0, 0, "test");
    repo::save_to_path(&path, &frontier).unwrap();
    let proposal = new_proposal(
        "finding.note",
        StateTarget {
            r#type: "finding".to_string(),
            id: "vf_test".to_string(),
        },
        "reviewer:test",
        "human",
        "Track mouse-only evidence",
        json!({"text": "Track mouse-only evidence"}),
        Vec::new(),
        Vec::new(),
    );
    create_or_apply(&path, proposal, false).unwrap();
    let loaded = repo::load_from_path(&path).unwrap();
    assert_eq!(loaded.events.len(), 1);
    assert_eq!(loaded.proposals.len(), 1);
    assert!(loaded.findings[0].annotations.is_empty());
    assert_eq!(loaded.proposals[0].kind, "finding.note");
}
/// Applying a note proposal appends an annotation, emits `finding.noted`,
/// links the applied event id back onto the proposal, and stales the proof
/// packet.
#[test]
fn applied_note_emits_noted_event_and_stales_proof() {
    let tmp = TempDir::new().unwrap();
    let path = tmp.path().join("frontier.json");
    let mut frontier = project::assemble("test", vec![finding("vf_test")], 0, 0, "test");
    // Pre-record a proof export so staleness can be observed after apply.
    record_proof_export(
        &mut frontier,
        ProofPacketRecord {
            generated_at: "2026-04-23T00:00:00Z".to_string(),
            snapshot_hash: "a".repeat(64),
            event_log_hash: "b".repeat(64),
            packet_manifest_hash: "c".repeat(64),
        },
    );
    repo::save_to_path(&path, &frontier).unwrap();
    let proposal = new_proposal(
        "finding.note",
        StateTarget {
            r#type: "finding".to_string(),
            id: "vf_test".to_string(),
        },
        "reviewer:test",
        "human",
        "Track mouse-only evidence",
        json!({"text": "Track mouse-only evidence"}),
        Vec::new(),
        Vec::new(),
    );
    let result = create_or_apply(&path, proposal, true).unwrap();
    let loaded = repo::load_from_path(&path).unwrap();
    assert_eq!(loaded.events.len(), 2);
    assert_eq!(loaded.events[1].kind, "finding.noted");
    assert_eq!(loaded.findings[0].annotations.len(), 1);
    assert_eq!(loaded.proposals[0].status, "applied");
    assert_eq!(
        loaded.proposals[0].applied_event_id,
        result.applied_event_id
    );
    assert_eq!(loaded.proof_state.latest_packet.status, "stale");
}
/// Retracting a finding with a two-level dependency chain
/// (dep2 -> dep1 -> src) must emit one `finding.dependency_invalidated`
/// event per dependent, with 1-based depths and a back-reference to the
/// source retraction event, and flag the dependents contested.
#[test]
fn retract_emits_per_dependent_cascade_events() {
    let tmp = TempDir::new().unwrap();
    let path = tmp.path().join("frontier.json");
    let mut src = finding("vf_src");
    let mut dep1 = finding("vf_dep1");
    let mut dep2 = finding("vf_dep2");
    src.assertion.text = "src finding".into();
    dep1.assertion.text = "dep1 finding".into();
    dep2.assertion.text = "dep2 finding".into();
    // dep1 depends on src directly; dep2 only transitively via dep1.
    dep1.add_link("vf_src", "supports", "");
    dep2.add_link("vf_dep1", "depends", "");
    let frontier = project::assemble("test", vec![src, dep1, dep2], 0, 0, "test");
    repo::save_to_path(&path, &frontier).unwrap();
    let proposal = new_proposal(
        "finding.retract",
        StateTarget {
            r#type: "finding".to_string(),
            id: "vf_src".to_string(),
        },
        "reviewer:test",
        "human",
        "Source paper retracted by publisher",
        json!({}),
        Vec::new(),
        Vec::new(),
    );
    create_or_apply(&path, proposal, true).unwrap();
    let loaded = repo::load_from_path(&path).unwrap();
    // frontier.created + retraction + two cascade events.
    assert_eq!(loaded.events.len(), 4, "{:?}", loaded.events);
    let kinds: Vec<&str> = loaded.events.iter().map(|e| e.kind.as_str()).collect();
    assert_eq!(kinds[0], "frontier.created");
    assert_eq!(kinds[1], "finding.retracted");
    assert_eq!(kinds[2], "finding.dependency_invalidated");
    assert_eq!(kinds[3], "finding.dependency_invalidated");
    let source_event_id = loaded.events[1].id.clone();
    let dep1_event = &loaded.events[2];
    let dep2_event = &loaded.events[3];
    assert_eq!(dep1_event.target.id, "vf_dep1");
    assert_eq!(dep2_event.target.id, "vf_dep2");
    // Cascade events point back at the retraction event.
    assert_eq!(
        dep1_event
            .payload
            .get("upstream_event_id")
            .and_then(|v| v.as_str()),
        Some(source_event_id.as_str())
    );
    assert_eq!(
        dep1_event.payload.get("depth").and_then(|v| v.as_u64()),
        Some(1)
    );
    assert_eq!(
        dep2_event.payload.get("depth").and_then(|v| v.as_u64()),
        Some(2)
    );
    let dep1 = loaded.findings.iter().find(|f| f.id == "vf_dep1").unwrap();
    let dep2 = loaded.findings.iter().find(|f| f.id == "vf_dep2").unwrap();
    assert!(dep1.flags.contested);
    assert!(dep2.flags.contested);
    let src = loaded.findings.iter().find(|f| f.id == "vf_src").unwrap();
    assert!(src.flags.retracted);
}
/// Two proposals identical except for `created_at` must hash to the same
/// content-addressed id.
#[test]
fn proposal_id_is_content_addressed_independent_of_created_at() {
    let target = StateTarget {
        r#type: "finding".to_string(),
        id: "vf_test".to_string(),
    };
    let mut a = new_proposal(
        "finding.review",
        target.clone(),
        "reviewer:test",
        "human",
        "scope narrower than claim",
        json!({"status": "contested"}),
        Vec::new(),
        Vec::new(),
    );
    let mut b = new_proposal(
        "finding.review",
        target,
        "reviewer:test",
        "human",
        "scope narrower than claim",
        json!({"status": "contested"}),
        Vec::new(),
        Vec::new(),
    );
    // Force different timestamps, then recompute both ids.
    a.created_at = "2026-04-25T00:00:00Z".to_string();
    b.created_at = "2026-09-12T17:32:00Z".to_string();
    a.id = proposal_id(&a);
    b.id = proposal_id(&b);
    assert_eq!(a.id, b.id, "vpr_… must not depend on created_at");
}
/// Submitting the same proposal twice (e.g. an agent retry) must be a no-op
/// the second time: same proposal id, same applied event, no duplicates.
#[test]
fn create_or_apply_is_idempotent_under_repeated_calls() {
    let tmp = TempDir::new().unwrap();
    let path = tmp.path().join("frontier.json");
    let frontier = project::assemble("test", vec![finding("vf_test")], 0, 0, "test");
    repo::save_to_path(&path, &frontier).unwrap();
    // Factory so each call submits a byte-identical fresh proposal.
    let make = || {
        new_proposal(
            "finding.review",
            StateTarget {
                r#type: "finding".to_string(),
                id: "vf_test".to_string(),
            },
            "reviewer:test",
            "human",
            "agent retry test",
            json!({"status": "contested"}),
            Vec::new(),
            Vec::new(),
        )
    };
    let first = create_or_apply(&path, make(), true).unwrap();
    let second = create_or_apply(&path, make(), true).unwrap();
    assert_eq!(first.proposal_id, second.proposal_id);
    assert_eq!(first.applied_event_id, second.applied_event_id);
    let loaded = repo::load_from_path(&path).unwrap();
    assert_eq!(
        loaded.proposals.len(),
        1,
        "second create_or_apply must not insert a duplicate proposal"
    );
    assert_eq!(
        loaded.events.len(),
        2,
        "second create_or_apply must not emit a duplicate event"
    );
}
/// Accepting a proposal that was already auto-applied must return the same
/// applied event id instead of emitting a second event.
#[test]
fn accepting_applied_proposal_is_idempotent() {
    let tmp = TempDir::new().unwrap();
    let path = tmp.path().join("frontier.json");
    let frontier = project::assemble("test", vec![finding("vf_test")], 0, 0, "test");
    repo::save_to_path(&path, &frontier).unwrap();
    let proposal = new_proposal(
        "finding.review",
        StateTarget {
            r#type: "finding".to_string(),
            id: "vf_test".to_string(),
        },
        "reviewer:test",
        "human",
        "Mouse-only evidence",
        json!({"status": "contested"}),
        Vec::new(),
        Vec::new(),
    );
    let created = create_or_apply(&path, proposal, true).unwrap();
    let first_event = created.applied_event_id.clone().unwrap();
    let second_event =
        accept_at_path(&path, &created.proposal_id, "reviewer:test", "same").unwrap();
    assert_eq!(first_event, second_event);
}
/// v0.13 behavior: applying a `finding.add` proposal must materialize the
/// derived source records, evidence atoms, and condition records inline at
/// apply time (not lazily on next load).
#[test]
fn v0_13_apply_materializes_source_records_inline() {
    let tmp = TempDir::new().unwrap();
    let path = tmp.path().join("frontier.json");
    let mut frontier = project::assemble("test", vec![], 0, 0, "test");
    repo::save_to_path(&path, &frontier).unwrap();
    let f = finding("vf_v013_inline_src");
    let proposal = new_proposal(
        "finding.add",
        StateTarget {
            r#type: "finding".to_string(),
            id: f.id.clone(),
        },
        "reviewer:test",
        "human",
        "Manual finding for v0.13 source-record materialization test",
        json!({"finding": f}),
        Vec::new(),
        Vec::new(),
    );
    create_or_apply(&path, proposal, true).unwrap();
    let loaded = repo::load_from_path(&path).unwrap();
    assert!(
        !loaded.sources.is_empty(),
        "v0.13: source_records should materialize inline at apply time"
    );
    assert!(
        !loaded.evidence_atoms.is_empty(),
        "v0.13: evidence_atoms should materialize inline at apply time"
    );
    assert!(
        !loaded.condition_records.is_empty(),
        "v0.13: condition_records should materialize inline at apply time"
    );
    assert_eq!(loaded.stats.source_count, loaded.sources.len());
    // Silence the unused-mut lint on the pre-save frontier handle.
    let _ = &mut frontier;
}
/// Test helper: build a replacement finding plus the matching
/// `finding.supersede` payload. The replacement id is derived
/// deterministically from the byte sum of `old_id`, so repeated calls with
/// the same old id produce the same new id.
fn make_supersede_payload(old_id: &str, new_text: &str) -> (FindingBundle, Value) {
    let mut new_finding = finding("vf_supersede_new");
    new_finding.assertion.text = new_text.to_string();
    new_finding.id = format!(
        "vf_{:0>16}",
        old_id
            .bytes()
            .fold(0u64, |acc, b| acc.wrapping_add(b as u64))
    );
    let payload = json!({"new_finding": new_finding.clone()});
    (new_finding, payload)
}
/// v0.14 supersede: the replacement finding is inserted with an
/// auto-injected `supersedes` link, the old finding is flagged superseded,
/// and a `finding.superseded` event names the replacement.
#[test]
fn v0_14_supersede_creates_new_finding_and_marks_old() {
    let tmp = TempDir::new().unwrap();
    let path = tmp.path().join("frontier.json");
    let mut frontier = project::assemble("test", vec![finding("vf_old")], 0, 0, "test");
    repo::save_to_path(&path, &frontier).unwrap();
    let (new_finding, payload) = make_supersede_payload("vf_old", "Newer claim");
    let proposal = new_proposal(
        "finding.supersede",
        StateTarget {
            r#type: "finding".to_string(),
            id: "vf_old".to_string(),
        },
        "reviewer:test",
        "human",
        "Newer evidence updates the wording",
        payload,
        Vec::new(),
        Vec::new(),
    );
    let result = create_or_apply(&path, proposal, true).unwrap();
    assert!(result.applied_event_id.is_some());
    let loaded = repo::load_from_path(&path).unwrap();
    let old = loaded.findings.iter().find(|f| f.id == "vf_old").unwrap();
    assert!(
        old.flags.superseded,
        "old finding should be flagged superseded"
    );
    let new_f = loaded
        .findings
        .iter()
        .find(|f| f.id == new_finding.id)
        .expect("new finding should be in frontier");
    assert!(
        new_f
            .links
            .iter()
            .any(|l| l.target == "vf_old" && l.link_type == "supersedes"),
        "new finding should have an auto-injected supersedes link to old finding"
    );
    let supersede_event = loaded
        .events
        .iter()
        .find(|e| e.kind == "finding.superseded")
        .expect("a finding.superseded event should be emitted");
    assert_eq!(supersede_event.target.id, "vf_old");
    assert_eq!(
        supersede_event.payload["new_finding_id"].as_str(),
        Some(new_finding.id.as_str())
    );
    // Silence the unused-mut lint on the pre-save frontier handle.
    let _ = &mut frontier;
}
/// v0.14 supersede: a finding that is already flagged superseded cannot be
/// superseded a second time.
#[test]
fn v0_14_supersede_refuses_already_superseded() {
    let tmp = TempDir::new().unwrap();
    let path = tmp.path().join("frontier.json");
    let mut old = finding("vf_already_done");
    old.flags.superseded = true;
    let frontier = project::assemble("test", vec![old], 0, 0, "test");
    repo::save_to_path(&path, &frontier).unwrap();
    let (_, payload) = make_supersede_payload("vf_already_done", "Newer wording");
    let proposal = new_proposal(
        "finding.supersede",
        StateTarget {
            r#type: "finding".to_string(),
            id: "vf_already_done".to_string(),
        },
        "reviewer:test",
        "human",
        "Attempt to double-supersede",
        payload,
        Vec::new(),
        Vec::new(),
    );
    let result = create_or_apply(&path, proposal, true);
    assert!(
        result.is_err(),
        "double-supersede should be refused; got {result:?}"
    );
}
/// v0.14 supersede: a replacement finding that reuses the old finding's id
/// (same content address) must be rejected.
#[test]
fn v0_14_supersede_refuses_same_content_address() {
    let tmp = TempDir::new().unwrap();
    let path = tmp.path().join("frontier.json");
    let frontier = project::assemble("test", vec![finding("vf_same")], 0, 0, "test");
    repo::save_to_path(&path, &frontier).unwrap();
    let mut new_finding = finding("vf_same");
    new_finding.assertion.text = "Different text but reused id".to_string();
    let proposal = new_proposal(
        "finding.supersede",
        StateTarget {
            r#type: "finding".to_string(),
            id: "vf_same".to_string(),
        },
        "reviewer:test",
        "human",
        "Same id, should fail",
        json!({"new_finding": new_finding}),
        Vec::new(),
        Vec::new(),
    );
    let result = create_or_apply(&path, proposal, true);
    assert!(
        result.is_err(),
        "supersede with same content address should be refused; got {result:?}"
    );
}
#[test]
fn agent_run_none_skips_serialization() {
    // When agent_run is None the field must be entirely absent from the
    // canonical JSON (skip_serializing_if = "Option::is_none"), so that
    // proposals with and without run metadata canonicalize consistently.
    let proposal = new_proposal(
        "finding.add",
        StateTarget {
            r#type: "finding".to_string(),
            id: "vf_test0000000000".to_string(),
        },
        "reviewer:will-blair",
        "human",
        "test",
        json!({}),
        Vec::new(),
        Vec::new(),
    );
    let bytes = canonical::to_canonical_bytes(&proposal).unwrap();
    let canon = String::from_utf8(bytes).unwrap();
    assert!(
        !canon.contains("agent_run"),
        "proposal without agent_run leaked the field into canonical JSON: {}",
        canon
    );
}
#[test]
fn agent_run_does_not_change_proposal_id() {
    // proposal_id is computed over a preimage that must exclude agent_run:
    // attaching run metadata after drafting may not change the id.
    let mut proposal = new_proposal(
        "finding.add",
        StateTarget {
            r#type: "finding".to_string(),
            id: "vf_test0000000000".to_string(),
        },
        "agent:literature-scout",
        "agent",
        "scout extracted this from paper_014",
        json!({}),
        vec!["src_paper_014".to_string()],
        Vec::new(),
    );
    // Capture the id before attaching run metadata, then recompute after.
    let original_id = proposal.id.clone();
    proposal.agent_run = Some(AgentRun {
        agent: "literature-scout".to_string(),
        model: "claude-opus-4-7".to_string(),
        run_id: "vrun_abc1234567890def".to_string(),
        started_at: "2026-04-26T01:23:45Z".to_string(),
        finished_at: Some("2026-04-26T01:24:10Z".to_string()),
        context: [
            ("input_folder".to_string(), "./papers".to_string()),
            ("pdf_count".to_string(), "12".to_string()),
        ]
        .into_iter()
        .collect(),
        tool_calls: Vec::new(),
        permissions: None,
    });
    let recomputed_id = proposal_id(&proposal);
    assert_eq!(
        original_id, recomputed_id,
        "agent_run leaked into proposal_id preimage"
    );
}
#[test]
fn agent_run_empty_tool_calls_and_permissions_skip_serialization() {
    // An AgentRun with no tool calls and no permissions must not emit
    // either key into the canonical encoding (skip_serializing_if).
    let mut proposal = new_proposal(
        "finding.add",
        StateTarget {
            r#type: "finding".to_string(),
            id: "vf_test0000000000".to_string(),
        },
        "agent:scout",
        "agent",
        "test",
        json!({}),
        Vec::new(),
        Vec::new(),
    );
    proposal.agent_run = Some(AgentRun {
        agent: "scout".to_string(),
        model: "claude-opus-4-7".to_string(),
        run_id: "vrun_x".to_string(),
        started_at: "2026-04-26T01:00:00Z".to_string(),
        finished_at: None,
        context: BTreeMap::new(),
        tool_calls: Vec::new(),
        permissions: None,
    });
    let bytes = canonical::to_canonical_bytes(&proposal).unwrap();
    let canon = String::from_utf8(bytes).unwrap();
    for field in ["tool_calls", "permissions"] {
        assert!(
            !canon.contains(field),
            "empty {} leaked into canonical JSON: {}",
            field,
            canon
        );
    }
}
#[test]
fn agent_run_populated_tool_calls_and_permissions_roundtrip() {
    // Populated tool_calls (one ok, one failed) and permissions must survive
    // a canonical-bytes round trip, and a successful call's empty
    // error_message must be skipped rather than serialized as "".
    let mut p = new_proposal(
        "finding.add",
        StateTarget {
            r#type: "finding".to_string(),
            id: "vf_test0000000000".to_string(),
        },
        "agent:scout",
        "agent",
        "test",
        json!({}),
        Vec::new(),
        Vec::new(),
    );
    p.agent_run = Some(AgentRun {
        agent: "scout".to_string(),
        model: "claude-opus-4-7".to_string(),
        run_id: "vrun_x".to_string(),
        started_at: "2026-04-26T01:00:00Z".to_string(),
        finished_at: None,
        context: BTreeMap::new(),
        tool_calls: vec![
            // Successful call: empty error_message should be omitted.
            ToolCallTrace {
                tool: "pubmed_search".to_string(),
                input_sha256: "a".repeat(64),
                output_sha256: Some("b".repeat(64)),
                at: "2026-04-26T01:00:05Z".to_string(),
                duration_ms: Some(842),
                status: "ok".to_string(),
                error_message: String::new(),
            },
            // Failed call: status and error_message must round-trip intact.
            ToolCallTrace {
                tool: "arxiv_fetch".to_string(),
                input_sha256: "c".repeat(64),
                output_sha256: None,
                at: "2026-04-26T01:00:18Z".to_string(),
                duration_ms: Some(1200),
                status: "error".to_string(),
                error_message: "HTTP 503 from arxiv.org; retry budget exhausted".to_string(),
            },
        ],
        permissions: Some(PermissionState {
            data_access: vec!["pubmed:".to_string(), "frontier:vfr_bd91".to_string()],
            tool_access: vec!["pubmed_search".to_string(), "arxiv_fetch".to_string()],
            note: "read-only access to BBB Flagship".to_string(),
        }),
    });
    let bytes = canonical::to_canonical_bytes(&p).unwrap();
    let json: serde_json::Value =
        serde_json::from_slice(&bytes).expect("canonical bytes round-trip");
    assert_eq!(
        json["agent_run"]["tool_calls"][0]["tool"], "pubmed_search",
        "tool_calls did not survive the round trip: {json}"
    );
    assert_eq!(
        json["agent_run"]["permissions"]["data_access"][0], "pubmed:",
        "permissions did not survive the round trip: {json}"
    );
    assert_eq!(
        json["agent_run"]["tool_calls"][1]["status"], "error",
        "failed tool call status did not survive: {json}"
    );
    assert_eq!(
        json["agent_run"]["tool_calls"][1]["error_message"],
        "HTTP 503 from arxiv.org; retry budget exhausted",
        "error_message did not survive the round trip: {json}"
    );
    // Only the failed call carries a non-empty error_message, so a
    // whole-document scan proves the successful call's empty field was
    // skipped. The previous windowed check (`find("pubmed_search")` plus a
    // +200-byte slice) could panic on an out-of-range index and anchored on
    // an ambiguous substring: "pubmed_search" also appears under
    // permissions.tool_access, which sorts before "tool_calls" in key-sorted
    // canonical output, so the window could miss the tool-call object.
    let raw = std::str::from_utf8(&bytes).unwrap();
    assert!(
        !raw.contains("\"error_message\":\"\""),
        "successful tool call leaked an empty error_message: {raw}"
    );
}
}