use std::collections::BTreeMap;
use std::path::Path;
use chrono::Utc;
use ed25519_dalek::{Signer, SigningKey, Verifier, VerifyingKey};
use serde::{Deserialize, Serialize};
use crate::bundle::FindingBundle;
use crate::project::Project;
use crate::repo;
/// A detached ed25519 signature over the canonical JSON form of one finding.
///
/// Envelopes live alongside findings in the project (`Project::signatures`)
/// rather than inside them, so a finding can carry signatures from multiple
/// independent signers.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SignedEnvelope {
    /// Id of the finding this signature covers.
    pub finding_id: String,
    /// Hex-encoded 64-byte ed25519 signature.
    pub signature: String,
    /// Hex-encoded 32-byte ed25519 public key of the signer.
    pub public_key: String,
    /// RFC 3339 timestamp recorded when the envelope was created.
    pub signed_at: String,
    /// Signature algorithm identifier (this module always writes "ed25519").
    pub algorithm: String,
}
/// A registered signer identity: binds an actor id to an ed25519 public key.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct ActorRecord {
    /// Stable actor identifier (matched against `StateEvent::actor.id` when
    /// signing events in `sign_frontier`).
    pub id: String,
    /// Hex-encoded 32-byte ed25519 public key.
    pub public_key: String,
    /// Signature algorithm; defaults to "ed25519" when absent in stored data.
    #[serde(default = "default_algorithm")]
    pub algorithm: String,
    /// Creation timestamp (presumably RFC 3339 — format not enforced here).
    pub created_at: String,
    /// Optional capability tier; see `actor_can_auto_apply` for the semantics
    /// of "auto-notes".
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tier: Option<String>,
    /// Optional ORCID of the human behind this actor (see `validate_orcid`).
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub orcid: Option<String>,
    /// Optional access clearance for tiered content.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub access_clearance: Option<crate::access_tier::AccessTier>,
}
/// Validate and normalize an ORCID identifier.
///
/// Accepts the bare `0000-0000-0000-000X` form as well as the common
/// prefixes `https://orcid.org/`, `http://orcid.org/`, and `orcid:`.
/// A lowercase `x` check digit is accepted and normalized to the canonical
/// uppercase form in the returned value.
///
/// Returns the canonical 19-character ORCID on success.
///
/// # Errors
/// Returns a descriptive error naming the first structural problem found
/// (wrong length, wrong group shape, or an illegal character).
///
/// NOTE: this checks structure only; the ISO 7064 mod 11-2 check digit
/// value is not verified.
pub fn validate_orcid(s: &str) -> Result<String, String> {
    let trimmed = s.trim();
    // Strip the accepted URL / URN prefixes down to the bare identifier.
    let bare = trimmed
        .strip_prefix("https://orcid.org/")
        .or_else(|| trimmed.strip_prefix("http://orcid.org/"))
        .or_else(|| trimmed.strip_prefix("orcid:"))
        .unwrap_or(trimmed);
    if bare.len() != 19 {
        return Err(format!(
            "ORCID must be 19 chars (0000-0000-0000-000X), got {}",
            bare.len()
        ));
    }
    let mut groups = bare.split('-');
    for i in 0..4 {
        let g = groups
            .next()
            .ok_or_else(|| format!("ORCID missing group {} of 4", i + 1))?;
        if g.len() != 4 {
            return Err(format!(
                "ORCID group {} must be 4 chars, got {}",
                i + 1,
                g.len()
            ));
        }
        for (j, c) in g.chars().enumerate() {
            // Only the final position of the final group may hold the X
            // check digit; everything else must be an ASCII digit.
            let allow_x = i == 3 && j == 3;
            if !c.is_ascii_digit() && !(allow_x && (c == 'X' || c == 'x')) {
                return Err(format!(
                    "ORCID character '{c}' at group {} pos {} not a digit (or X check digit)",
                    i + 1,
                    j + 1
                ));
            }
        }
    }
    if groups.next().is_some() {
        return Err("ORCID has too many hyphenated groups".to_string());
    }
    // Normalize a lowercase check digit to the canonical uppercase form
    // (digits and hyphens are unaffected).
    Ok(bare.to_ascii_uppercase())
}
/// Serde default for `ActorRecord::algorithm` on records that omit the field.
fn default_algorithm() -> String {
    String::from("ed25519")
}
/// Whether `actor` is allowed to auto-apply a proposal of the given `kind`.
///
/// Currently only actors on the "auto-notes" tier may auto-apply, and only
/// for `finding.note` proposals; everything else requires manual handling.
#[must_use]
pub fn actor_can_auto_apply(actor: &ActorRecord, kind: &str) -> bool {
    match (actor.tier.as_deref(), kind) {
        (Some("auto-notes"), "finding.note") => true,
        _ => false,
    }
}
/// Summary of signature verification over a whole project, produced by
/// `verify_frontier_data`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct VerifyReport {
    /// Total number of findings in the project.
    pub total_findings: usize,
    /// Number of signature envelopes examined (valid + invalid) — note this
    /// counts envelopes, not distinct findings.
    pub signed: usize,
    /// Number of findings with no signature envelope at all.
    pub unsigned: usize,
    /// Envelopes that verified successfully.
    pub valid: usize,
    /// Envelopes that failed (bad signature, unexpected key, or unknown
    /// finding id).
    pub invalid: usize,
    /// Distinct public keys that produced at least one valid signature.
    pub signers: Vec<String>,
    /// Findings declaring a `signature_threshold` policy.
    #[serde(default)]
    pub findings_with_threshold: usize,
    /// Findings whose signature threshold is currently met.
    #[serde(default)]
    pub jointly_accepted: usize,
}
/// Generate a fresh ed25519 keypair and write it into `output_dir` as
/// `private.key` / `public.key`, both as hex-encoded text.
///
/// On Unix the private key file is restricted to owner read/write (0o600)
/// after writing, since it is secret material.
///
/// Returns the hex-encoded public key.
///
/// # Errors
/// Returns a descriptive error string if the directory cannot be created or
/// either file cannot be written (or, on Unix, if permissions cannot be set).
pub fn generate_keypair(output_dir: &Path) -> Result<String, String> {
    use rand::rngs::OsRng;
    std::fs::create_dir_all(output_dir)
        .map_err(|e| format!("Failed to create output directory: {e}"))?;
    let signing_key = SigningKey::generate(&mut OsRng);
    let verifying_key = signing_key.verifying_key();
    let private_hex = hex::encode(signing_key.to_bytes());
    let public_hex = hex::encode(verifying_key.to_bytes());
    let private_path = output_dir.join("private.key");
    let public_path = output_dir.join("public.key");
    std::fs::write(&private_path, &private_hex)
        .map_err(|e| format!("Failed to write private key: {e}"))?;
    // The private key is a secret: lock the file down to the owning user so
    // other local users cannot read it.
    #[cfg(unix)]
    {
        use std::os::unix::fs::PermissionsExt;
        std::fs::set_permissions(&private_path, std::fs::Permissions::from_mode(0o600))
            .map_err(|e| format!("Failed to set private key permissions: {e}"))?;
    }
    std::fs::write(&public_path, &public_hex)
        .map_err(|e| format!("Failed to write public key: {e}"))?;
    Ok(public_hex)
}
/// Produce the canonical (key-sorted) JSON string for a finding — the exact
/// byte sequence that finding signatures cover.
///
/// The derived `flags.jointly_accepted` field is stripped before signing, so
/// that recomputing joint acceptance does not invalidate signatures.
pub fn canonical_json(finding: &FindingBundle) -> Result<String, String> {
    let mut value = serde_json::to_value(finding)
        .map_err(|e| format!("Failed to serialize finding: {e}"))?;
    if let Some(serde_json::Value::Object(flags)) = value.get_mut("flags") {
        flags.remove("jointly_accepted");
    }
    serde_json::to_string(&sort_value(&value))
        .map_err(|e| format!("Failed to produce canonical JSON: {e}"))
}
fn sort_value(v: &serde_json::Value) -> serde_json::Value {
match v {
serde_json::Value::Object(map) => {
let sorted: BTreeMap<String, serde_json::Value> = map
.iter()
.map(|(k, v)| (k.clone(), sort_value(v)))
.collect();
serde_json::to_value(sorted).unwrap()
}
serde_json::Value::Array(arr) => {
serde_json::Value::Array(arr.iter().map(sort_value).collect())
}
other => other.clone(),
}
}
/// Load a hex-encoded ed25519 private key (32 bytes) from a text file.
fn load_signing_key(path: &Path) -> Result<SigningKey, String> {
    let contents =
        std::fs::read_to_string(path).map_err(|e| format!("Failed to read private key: {e}"))?;
    let decoded =
        hex::decode(contents.trim()).map_err(|e| format!("Invalid hex in private key: {e}"))?;
    match <[u8; 32]>::try_from(decoded) {
        Ok(raw) => Ok(SigningKey::from_bytes(&raw)),
        Err(_) => Err("Private key must be exactly 32 bytes".to_string()),
    }
}
/// Public wrapper around the private [`load_signing_key`] for callers in
/// other modules.
pub fn load_signing_key_from_path(path: &Path) -> Result<SigningKey, String> {
    load_signing_key(path)
}
/// Sign raw bytes with the given key and return the 64-byte signature.
pub fn sign_bytes(signing_key: &SigningKey, bytes: &[u8]) -> [u8; 64] {
    let signature = signing_key.sign(bytes);
    signature.to_bytes()
}
/// Hex-encoded public key corresponding to a signing key.
pub fn pubkey_hex(signing_key: &SigningKey) -> String {
    let verifying = signing_key.verifying_key();
    hex::encode(verifying.to_bytes())
}
/// Load a hex-encoded ed25519 public key from a text file.
fn load_verifying_key(path: &Path) -> Result<VerifyingKey, String> {
    let contents =
        std::fs::read_to_string(path).map_err(|e| format!("Failed to read public key: {e}"))?;
    parse_verifying_key(contents.trim())
}
/// Parse a hex string into a validated ed25519 public key.
fn parse_verifying_key(hex_str: &str) -> Result<VerifyingKey, String> {
    let decoded = hex::decode(hex_str).map_err(|e| format!("Invalid hex in public key: {e}"))?;
    let raw: [u8; 32] = match decoded.try_into() {
        Ok(raw) => raw,
        Err(_) => return Err("Public key must be exactly 32 bytes".to_string()),
    };
    VerifyingKey::from_bytes(&raw).map_err(|e| format!("Invalid public key: {e}"))
}
/// Sign a finding's canonical JSON form, producing a detached envelope.
///
/// Signing the canonical form keeps signatures stable across differences in
/// serialization key order.
pub fn sign_finding(
    finding: &FindingBundle,
    signing_key: &SigningKey,
) -> Result<SignedEnvelope, String> {
    let canonical = canonical_json(finding)?;
    let sig = signing_key.sign(canonical.as_bytes());
    Ok(SignedEnvelope {
        finding_id: finding.id.clone(),
        signature: hex::encode(sig.to_bytes()),
        public_key: hex::encode(signing_key.verifying_key().to_bytes()),
        signed_at: Utc::now().to_rfc3339(),
        algorithm: "ed25519".to_string(),
    })
}
/// Check whether `envelope` is a valid signature over `finding`.
///
/// Returns `Ok(false)` for a mismatched finding id or a failed signature
/// check; returns `Err` only for malformed envelope data (bad hex, wrong
/// signature length, invalid public key).
pub fn verify_finding(finding: &FindingBundle, envelope: &SignedEnvelope) -> Result<bool, String> {
    // An envelope for a different finding can never verify.
    if envelope.finding_id != finding.id {
        return Ok(false);
    }
    let verifying_key = parse_verifying_key(&envelope.public_key)?;
    let raw =
        hex::decode(&envelope.signature).map_err(|e| format!("Invalid signature hex: {e}"))?;
    let fixed: [u8; 64] = raw.try_into().map_err(|_| "Signature must be 64 bytes")?;
    let signature = ed25519_dalek::Signature::from_bytes(&fixed);
    let canonical = canonical_json(finding)?;
    Ok(verifying_key
        .verify(canonical.as_bytes(), &signature)
        .is_ok())
}
/// Like [`verify_finding`], but additionally requires the envelope to have
/// been produced by the expected public key.
#[allow(dead_code)]
pub fn verify_finding_with_pubkey(
    finding: &FindingBundle,
    envelope: &SignedEnvelope,
    expected_pubkey: &str,
) -> Result<bool, String> {
    // Pin the signer identity before doing any cryptographic work.
    let key_matches = envelope.public_key == expected_pubkey;
    if !key_matches {
        return Ok(false);
    }
    verify_finding(finding, envelope)
}
/// Build the canonical byte preimage that an event signature covers.
///
/// Only the fields listed below are signed. The `signature` field itself is
/// deliberately excluded so attaching a signature does not change the signed
/// bytes (other fields visible on `StateEvent`, e.g. `schema_artifact_id`,
/// are likewise left out — changing this list would invalidate all existing
/// event signatures).
pub fn event_signing_bytes(event: &crate::events::StateEvent) -> Result<Vec<u8>, String> {
    use serde_json::json;
    let preimage = json!({
        "schema": event.schema,
        "id": event.id,
        "kind": event.kind,
        "target": event.target,
        "actor": event.actor,
        "timestamp": event.timestamp,
        "reason": event.reason,
        "before_hash": event.before_hash,
        "after_hash": event.after_hash,
        "payload": event.payload,
        "caveats": event.caveats,
    });
    crate::canonical::to_canonical_bytes(&preimage)
}
/// Sign a state event's canonical preimage; returns the hex signature.
pub fn sign_event(
    event: &crate::events::StateEvent,
    signing_key: &SigningKey,
) -> Result<String, String> {
    let preimage = event_signing_bytes(event)?;
    Ok(hex::encode(signing_key.sign(&preimage).to_bytes()))
}
/// Verify an event's embedded signature against an expected public key.
///
/// Returns `Err` if the event carries no signature at all or if any field is
/// malformed; returns `Ok(false)` when the signature simply does not verify.
pub fn verify_event_signature(
    event: &crate::events::StateEvent,
    expected_pubkey_hex: &str,
) -> Result<bool, String> {
    let Some(signature_hex) = event.signature.as_deref() else {
        return Err(format!("event {} has no signature field", event.id));
    };
    let verifying_key = parse_verifying_key(expected_pubkey_hex)?;
    let raw = hex::decode(signature_hex).map_err(|e| format!("invalid signature hex: {e}"))?;
    let fixed: [u8; 64] = raw.try_into().map_err(|_| "Signature must be 64 bytes")?;
    let signature = ed25519_dalek::Signature::from_bytes(&fixed);
    let preimage = event_signing_bytes(event)?;
    Ok(verifying_key.verify(&preimage, &signature).is_ok())
}
/// Build the canonical byte preimage that a proposal signature covers.
///
/// Only the fields listed below are signed — changing this list would
/// invalidate all existing proposal signatures.
pub fn proposal_signing_bytes(
    proposal: &crate::proposals::StateProposal,
) -> Result<Vec<u8>, String> {
    use serde_json::json;
    let preimage = json!({
        "schema": proposal.schema,
        "id": proposal.id,
        "kind": proposal.kind,
        "target": proposal.target,
        "actor": proposal.actor,
        "created_at": proposal.created_at,
        "reason": proposal.reason,
        "payload": proposal.payload,
        "source_refs": proposal.source_refs,
        "caveats": proposal.caveats,
    });
    crate::canonical::to_canonical_bytes(&preimage)
}
/// Sign a proposal's canonical preimage; returns the hex signature.
pub fn sign_proposal(
    proposal: &crate::proposals::StateProposal,
    signing_key: &SigningKey,
) -> Result<String, String> {
    let preimage = proposal_signing_bytes(proposal)?;
    let signature = signing_key.sign(&preimage);
    Ok(hex::encode(signature.to_bytes()))
}
/// Verify a detached proposal signature against an expected public key.
///
/// Returns `Ok(false)` on verification failure; `Err` only for malformed
/// key or signature encodings.
pub fn verify_proposal_signature(
    proposal: &crate::proposals::StateProposal,
    signature_hex: &str,
    expected_pubkey_hex: &str,
) -> Result<bool, String> {
    let verifying_key = parse_verifying_key(expected_pubkey_hex)?;
    let raw = hex::decode(signature_hex).map_err(|e| format!("invalid signature hex: {e}"))?;
    let fixed: [u8; 64] = raw.try_into().map_err(|_| "Signature must be 64 bytes")?;
    let signature = ed25519_dalek::Signature::from_bytes(&fixed);
    let preimage = proposal_signing_bytes(proposal)?;
    Ok(verifying_key.verify(&preimage, &signature).is_ok())
}
/// Verify a signature over arbitrary pre-computed signing bytes.
///
/// Generic building block for callers that assemble their own preimage.
pub fn verify_action_signature(
    signing_bytes: &[u8],
    signature_hex: &str,
    expected_pubkey_hex: &str,
) -> Result<bool, String> {
    let verifying_key = parse_verifying_key(expected_pubkey_hex)?;
    let raw = hex::decode(signature_hex).map_err(|e| format!("invalid signature hex: {e}"))?;
    let fixed: [u8; 64] = raw.try_into().map_err(|_| "Signature must be 64 bytes")?;
    let signature = ed25519_dalek::Signature::from_bytes(&fixed);
    Ok(verifying_key.verify(signing_bytes, &signature).is_ok())
}
/// Sign every finding in the frontier file with the key at
/// `private_key_path`, plus any unsigned human events attributable to that
/// key, then refresh joint-acceptance flags and save the file in place.
///
/// Existing still-valid signatures from this key are kept; stale ones
/// (finding content changed since signing) are dropped and re-issued.
///
/// Returns the number of new signatures written (findings + events).
pub fn sign_frontier(frontier_path: &Path, private_key_path: &Path) -> Result<usize, String> {
    let mut frontier: Project = repo::load_from_path(frontier_path)?;
    let signing_key = load_signing_key(private_key_path)?;
    let our_pubkey_hex = hex::encode(signing_key.verifying_key().to_bytes());
    let mut signed_count = 0usize;
    let finding_by_id = frontier
        .findings
        .iter()
        .map(|finding| (finding.id.as_str(), finding))
        .collect::<std::collections::HashMap<_, _>>();
    // Partition our existing envelopes: still valid vs stale (finding
    // content changed, or the finding no longer exists).
    let mut already_signed_by_us = std::collections::HashSet::new();
    let mut stale_signed_by_us = std::collections::HashSet::new();
    for envelope in &frontier.signatures {
        if envelope.public_key != our_pubkey_hex {
            continue;
        }
        let valid = finding_by_id
            .get(envelope.finding_id.as_str())
            .and_then(|finding| verify_finding(finding, envelope).ok())
            .unwrap_or(false);
        if valid {
            already_signed_by_us.insert(envelope.finding_id.clone());
        } else {
            stale_signed_by_us.insert(envelope.finding_id.clone());
        }
    }
    if !stale_signed_by_us.is_empty() {
        // Drop only OUR stale envelopes; other signers' envelopes stay.
        frontier.signatures.retain(|envelope| {
            envelope.public_key != our_pubkey_hex
                || !stale_signed_by_us.contains(&envelope.finding_id)
        });
        // A finding that had both a valid and a stale envelope from us must
        // still be re-signed, so remove it from the "already signed" set.
        already_signed_by_us.retain(|finding_id| !stale_signed_by_us.contains(finding_id));
    }
    for finding in &frontier.findings {
        if already_signed_by_us.contains(&finding.id) {
            continue;
        }
        let envelope = sign_finding(finding, &signing_key)?;
        frontier.signatures.push(envelope);
        signed_count += 1;
    }
    // Also sign unsigned events whose actor is a registered human actor
    // bound to this public key.
    let actor_ids_for_key: std::collections::HashSet<String> = frontier
        .actors
        .iter()
        .filter(|actor| actor.public_key == our_pubkey_hex)
        .map(|actor| actor.id.clone())
        .collect();
    if !actor_ids_for_key.is_empty() {
        for event in &mut frontier.events {
            if event.signature.is_some()
                || event.actor.r#type != "human"
                || !actor_ids_for_key.contains(&event.actor.id)
            {
                continue;
            }
            event.signature = Some(sign_event(event, &signing_key)?);
            signed_count += 1;
        }
    }
    refresh_jointly_accepted(&mut frontier);
    repo::save_to_path(frontier_path, &frontier)?;
    Ok(signed_count)
}
/// Distinct public keys with at least one valid signature on `finding_id`.
///
/// Duplicate envelopes from the same key are counted once; verification is
/// skipped for keys already accepted.
#[must_use]
pub fn signers_for(project: &Project, finding_id: &str) -> Vec<String> {
    let Some(finding) = project.findings.iter().find(|f| f.id == finding_id) else {
        return Vec::new();
    };
    let mut seen = std::collections::HashSet::new();
    for envelope in &project.signatures {
        if envelope.finding_id != finding_id || seen.contains(&envelope.public_key) {
            continue;
        }
        if verify_finding(finding, envelope).unwrap_or(false) {
            seen.insert(envelope.public_key.clone());
        }
    }
    seen.into_iter().collect()
}
/// Number of unique public keys with a valid signature on `finding_id`.
#[must_use]
pub fn valid_signature_count(project: &Project, finding_id: &str) -> usize {
    let signers = signers_for(project, finding_id);
    signers.len()
}
/// Whether `finding_id` has enough unique valid signers to satisfy its
/// declared signature threshold.
///
/// Returns `false` for an unknown finding or when no threshold policy is
/// set (the single-signature regime never counts as "jointly accepted").
#[must_use]
pub fn threshold_met(project: &Project, finding_id: &str) -> bool {
    let finding = match project.findings.iter().find(|f| f.id == finding_id) {
        Some(f) => f,
        None => return false,
    };
    match finding.flags.signature_threshold {
        Some(required) => valid_signature_count(project, finding_id) >= required as usize,
        None => false,
    }
}
/// Recompute `flags.jointly_accepted` for every finding from the current
/// signature set.
pub fn refresh_jointly_accepted(project: &mut Project) {
    // Two passes: evaluate thresholds with shared access first, then write
    // the flags — threshold_met needs to borrow the whole project immutably.
    let accepted: Vec<bool> = project
        .findings
        .iter()
        .map(|f| threshold_met(project, &f.id))
        .collect();
    for (finding, met) in project.findings.iter_mut().zip(accepted) {
        finding.flags.jointly_accepted = met;
    }
}
/// Load a frontier file and verify all of its signatures.
///
/// See [`verify_frontier_data`] for the meaning of `pubkey_path`.
pub fn verify_frontier(
    frontier_path: &Path,
    pubkey_path: Option<&Path>,
) -> Result<VerifyReport, String> {
    let project: Project = repo::load_from_path(frontier_path)?;
    verify_frontier_data(&project, pubkey_path)
}
/// Verify every signature envelope in `frontier` and summarize the results.
///
/// When `pubkey_path` is given, envelopes from any other key count as
/// invalid; otherwise signatures from any key are accepted. Note that
/// `signed`/`valid`/`invalid` count envelopes while `unsigned` counts
/// findings (see `VerifyReport`).
pub fn verify_frontier_data(
    frontier: &Project,
    pubkey_path: Option<&Path>,
) -> Result<VerifyReport, String> {
    let expected_pubkey = match pubkey_path {
        Some(path) => {
            let key = load_verifying_key(path)?;
            Some(hex::encode(key.to_bytes()))
        }
        None => None,
    };
    let finding_map: std::collections::HashMap<&str, &FindingBundle> = frontier
        .findings
        .iter()
        .map(|f| (f.id.as_str(), f))
        .collect();
    let mut valid = 0usize;
    let mut invalid = 0usize;
    let mut signers: std::collections::HashSet<String> = std::collections::HashSet::new();
    // Findings with at least one envelope (valid or not) are "signed".
    let mut findings_with_signature: std::collections::HashSet<&str> =
        std::collections::HashSet::new();
    for envelope in &frontier.signatures {
        // Wrong key: counts as invalid, but the finding is still signed.
        if let Some(ref expected) = expected_pubkey
            && &envelope.public_key != expected
        {
            invalid += 1;
            findings_with_signature.insert(envelope.finding_id.as_str());
            continue;
        }
        // Envelope referencing a finding we don't have: invalid, and it does
        // not mark any finding as signed.
        let Some(finding) = finding_map.get(envelope.finding_id.as_str()) else {
            invalid += 1;
            continue;
        };
        findings_with_signature.insert(envelope.finding_id.as_str());
        match verify_finding(finding, envelope) {
            Ok(true) => {
                valid += 1;
                signers.insert(envelope.public_key.clone());
            }
            _ => {
                invalid += 1;
            }
        }
    }
    let unsigned = frontier
        .findings
        .iter()
        .filter(|f| !findings_with_signature.contains(f.id.as_str()))
        .count();
    // Joint-acceptance stats for findings declaring a threshold policy.
    let mut findings_with_threshold = 0usize;
    let mut jointly_accepted = 0usize;
    for f in &frontier.findings {
        if f.flags.signature_threshold.is_some() {
            findings_with_threshold += 1;
            if threshold_met(frontier, &f.id) {
                jointly_accepted += 1;
            }
        }
    }
    Ok(VerifyReport {
        total_findings: frontier.findings.len(),
        signed: valid + invalid,
        unsigned,
        valid,
        invalid,
        signers: signers.into_iter().collect(),
        findings_with_threshold,
        jointly_accepted,
    })
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::bundle::*;

    /// Fully-populated finding fixture shared by the signing tests.
    fn sample_finding() -> FindingBundle {
        FindingBundle::new(
            Assertion {
                text: "NLRP3 activates IL-1B".into(),
                assertion_type: "mechanism".into(),
                entities: vec![Entity {
                    name: "NLRP3".into(),
                    entity_type: "protein".into(),
                    identifiers: serde_json::Map::new(),
                    canonical_id: None,
                    candidates: vec![],
                    aliases: vec![],
                    resolution_provenance: None,
                    resolution_confidence: 1.0,
                    resolution_method: None,
                    species_context: None,
                    needs_review: false,
                }],
                relation: Some("activates".into()),
                direction: Some("positive".into()),
                causal_claim: None,
                causal_evidence_grade: None,
            },
            Evidence {
                evidence_type: "experimental".into(),
                model_system: "mouse".into(),
                species: Some("Mus musculus".into()),
                method: "Western blot".into(),
                sample_size: Some("n=30".into()),
                effect_size: None,
                p_value: Some("p<0.05".into()),
                replicated: true,
                replication_count: Some(3),
                evidence_spans: vec![],
            },
            Conditions {
                text: "In vitro, mouse microglia".into(),
                species_verified: vec!["Mus musculus".into()],
                species_unverified: vec![],
                in_vitro: true,
                in_vivo: false,
                human_data: false,
                clinical_trial: false,
                concentration_range: None,
                duration: None,
                age_group: None,
                cell_type: Some("microglia".into()),
            },
            Confidence::raw(0.85, "Experimental with replication", 0.9),
            Provenance {
                source_type: "published_paper".into(),
                doi: Some("10.1234/test".into()),
                pmid: None,
                pmc: None,
                openalex_id: None,
                url: None,
                title: "Test Paper".into(),
                authors: vec![Author {
                    name: "Smith J".into(),
                    orcid: None,
                }],
                year: Some(2024),
                journal: Some("Nature".into()),
                license: None,
                publisher: None,
                funders: vec![],
                extraction: Extraction::default(),
                review: None,
                citation_count: Some(100),
            },
            Flags {
                gap: false,
                negative_space: false,
                contested: false,
                retracted: false,
                declining: false,
                gravity_well: false,
                review_state: None,
                superseded: false,
                signature_threshold: None,
                jointly_accepted: false,
            },
        )
    }

    /// Fresh random ed25519 keypair for tests.
    fn test_keypair() -> SigningKey {
        use rand::rngs::OsRng;
        SigningKey::generate(&mut OsRng)
    }

    #[test]
    fn keygen_produces_valid_files() {
        let dir = std::env::temp_dir().join("vela_test_keygen");
        let _ = std::fs::remove_dir_all(&dir);
        let pubkey = generate_keypair(&dir).unwrap();
        // 32 bytes hex-encoded = 64 chars.
        assert_eq!(pubkey.len(), 64);
        let private_hex = std::fs::read_to_string(dir.join("private.key")).unwrap();
        let public_hex = std::fs::read_to_string(dir.join("public.key")).unwrap();
        assert_eq!(private_hex.len(), 64);
        assert_eq!(public_hex, pubkey);
        let _ = std::fs::remove_dir_all(&dir);
    }

    #[test]
    fn sign_and_verify_roundtrip() {
        let finding = sample_finding();
        let key = test_keypair();
        let envelope = sign_finding(&finding, &key).unwrap();
        assert_eq!(envelope.finding_id, finding.id);
        assert_eq!(envelope.algorithm, "ed25519");
        // 64-byte signature hex-encoded = 128 chars.
        assert_eq!(envelope.signature.len(), 128);
        let valid = verify_finding(&finding, &envelope).unwrap();
        assert!(valid, "Signature should verify against original finding");
    }

    #[test]
    fn tampered_finding_fails_verification() {
        let finding = sample_finding();
        let key = test_keypair();
        let envelope = sign_finding(&finding, &key).unwrap();
        let mut tampered = finding.clone();
        tampered.assertion.text = "Tampered assertion text".into();
        let valid = verify_finding(&tampered, &envelope).unwrap();
        assert!(!valid, "Tampered finding should fail verification");
    }

    #[test]
    fn sign_frontier_replaces_stale_same_key_signature() {
        let dir = tempfile::tempdir().unwrap();
        let frontier_path = dir.path().join("frontier.json");
        let private_key_path = dir.path().join("private.key");
        let key = test_keypair();
        std::fs::write(&private_key_path, hex::encode(key.to_bytes())).unwrap();
        // Sign, then mutate the finding so the envelope becomes stale.
        let mut finding = sample_finding();
        let stale_envelope = sign_finding(&finding, &key).unwrap();
        finding.assertion.text = "NLRP3 activates IL-1B under revised scope".into();
        let mut frontier = empty_project(vec![finding], vec![stale_envelope]);
        crate::repo::save_to_path(&frontier_path, &frontier).unwrap();
        let signed = sign_frontier(&frontier_path, &private_key_path).unwrap();
        assert_eq!(signed, 1);
        frontier = crate::repo::load_from_path(&frontier_path).unwrap();
        let report = verify_frontier_data(&frontier, None).unwrap();
        // The stale envelope must be replaced, not accumulated.
        assert_eq!(report.valid, 1);
        assert_eq!(report.invalid, 0);
        assert_eq!(frontier.signatures.len(), 1);
    }

    #[test]
    fn wrong_key_fails_verification() {
        let finding = sample_finding();
        let key1 = test_keypair();
        let key2 = test_keypair();
        let envelope = sign_finding(&finding, &key1).unwrap();
        let pubkey2_hex = hex::encode(key2.verifying_key().to_bytes());
        let valid = verify_finding_with_pubkey(&finding, &envelope, &pubkey2_hex).unwrap();
        assert!(!valid, "Wrong public key should fail verification");
    }

    #[test]
    fn canonical_json_is_deterministic() {
        let finding = sample_finding();
        let json1 = canonical_json(&finding).unwrap();
        let json2 = canonical_json(&finding).unwrap();
        assert_eq!(json1, json2, "Canonical JSON must be deterministic");
    }

    #[test]
    fn registered_actor_signed_event_roundtrip() {
        use crate::events::{
            EVENT_SCHEMA, NULL_HASH, StateActor, StateEvent, StateTarget, compute_event_id,
        };
        let key = test_keypair();
        let pubkey_hex = hex::encode(key.verifying_key().to_bytes());
        let mut event = StateEvent {
            schema: EVENT_SCHEMA.to_string(),
            id: String::new(),
            kind: "finding.reviewed".to_string(),
            target: StateTarget {
                r#type: "finding".to_string(),
                id: "vf_test".to_string(),
            },
            actor: StateActor {
                id: "reviewer:registered".to_string(),
                r#type: "human".to_string(),
            },
            timestamp: "2026-04-25T00:00:00Z".to_string(),
            reason: "phase-m round-trip test".to_string(),
            before_hash: NULL_HASH.to_string(),
            after_hash: "sha256:abc".to_string(),
            payload: serde_json::json!({"status": "accepted", "proposal_id": "vpr_test"}),
            caveats: vec![],
            signature: None,
            schema_artifact_id: None,
        };
        event.id = compute_event_id(&event);
        event.signature = Some(sign_event(&event, &key).unwrap());
        assert!(verify_event_signature(&event, &pubkey_hex).unwrap());
        // Tampering with a signed field must break verification.
        let mut tampered = event.clone();
        tampered.reason = "different reason".to_string();
        assert!(!verify_event_signature(&tampered, &pubkey_hex).unwrap());
    }

    #[test]
    fn verify_frontier_data_reports_correctly() {
        // Two findings, only one of them signed.
        let f1 = sample_finding();
        let mut f2 = sample_finding();
        f2.id = "vf_other_id_12345".into();
        f2.assertion.text = "Different finding".into();
        let key = test_keypair();
        let env1 = sign_finding(&f1, &key).unwrap();
        let frontier = Project {
            vela_version: "0.1.0".into(),
            schema: "test".into(),
            frontier_id: None,
            project: crate::project::ProjectMeta {
                name: "test".into(),
                description: "test".into(),
                compiled_at: "2024-01-01T00:00:00Z".into(),
                compiler: "vela/0.2.0".into(),
                papers_processed: 0,
                errors: 0,
                dependencies: Vec::new(),
            },
            stats: crate::project::ProjectStats {
                findings: 2,
                links: 0,
                replicated: 0,
                unreplicated: 2,
                avg_confidence: 0.85,
                gaps: 0,
                negative_space: 0,
                contested: 0,
                categories: std::collections::HashMap::new(),
                link_types: std::collections::HashMap::new(),
                human_reviewed: 0,
                review_event_count: 0,
                confidence_update_count: 0,
                event_count: 0,
                source_count: 0,
                evidence_atom_count: 0,
                condition_record_count: 0,
                proposal_count: 0,
                confidence_distribution: crate::project::ConfidenceDistribution {
                    high_gt_80: 2,
                    medium_60_80: 0,
                    low_lt_60: 0,
                },
            },
            findings: vec![f1, f2],
            sources: vec![],
            evidence_atoms: vec![],
            condition_records: vec![],
            review_events: vec![],
            confidence_updates: vec![],
            events: vec![],
            proposals: vec![],
            proof_state: Default::default(),
            signatures: vec![env1],
            actors: vec![],
            replications: vec![],
            datasets: vec![],
            code_artifacts: vec![],
            artifacts: vec![],
            predictions: vec![],
            resolutions: vec![],
            peers: vec![],
            negative_results: vec![],
            trajectories: vec![],
        };
        let report = verify_frontier_data(&frontier, None).unwrap();
        assert_eq!(report.total_findings, 2);
        assert_eq!(report.signed, 1);
        assert_eq!(report.unsigned, 1);
        assert_eq!(report.valid, 1);
        assert_eq!(report.invalid, 0);
        assert_eq!(report.signers.len(), 1);
    }

    /// Minimal project wrapper around the given findings and signatures.
    fn empty_project(findings: Vec<FindingBundle>, signatures: Vec<SignedEnvelope>) -> Project {
        Project {
            vela_version: "0.37.0".into(),
            schema: "test".into(),
            frontier_id: None,
            project: crate::project::ProjectMeta {
                name: "test".into(),
                description: "test".into(),
                compiled_at: "2026-04-27T00:00:00Z".into(),
                compiler: "vela/0.37.0".into(),
                papers_processed: 0,
                errors: 0,
                dependencies: Vec::new(),
            },
            stats: crate::project::ProjectStats::default(),
            findings,
            sources: vec![],
            evidence_atoms: vec![],
            condition_records: vec![],
            review_events: vec![],
            confidence_updates: vec![],
            events: vec![],
            proposals: vec![],
            proof_state: Default::default(),
            signatures,
            actors: vec![],
            replications: vec![],
            datasets: vec![],
            code_artifacts: vec![],
            artifacts: vec![],
            predictions: vec![],
            resolutions: vec![],
            peers: vec![],
            negative_results: vec![],
            trajectories: vec![],
        }
    }

    #[test]
    fn signers_for_dedupes_by_pubkey() {
        let mut f = sample_finding();
        f.flags.signature_threshold = Some(2);
        let key1 = test_keypair();
        let key2 = test_keypair();
        let env1 = sign_finding(&f, &key1).unwrap();
        let env1_dup = sign_finding(&f, &key1).unwrap();
        let env2 = sign_finding(&f, &key2).unwrap();
        let project = empty_project(vec![f.clone()], vec![env1, env1_dup, env2]);
        let signers = signers_for(&project, &f.id);
        assert_eq!(signers.len(), 2, "duplicate pubkey must be counted once");
    }

    #[test]
    fn threshold_met_requires_k_unique_signers() {
        let mut f = sample_finding();
        f.flags.signature_threshold = Some(2);
        let key1 = test_keypair();
        let env1 = sign_finding(&f, &key1).unwrap();
        let project_one = empty_project(vec![f.clone()], vec![env1.clone()]);
        assert!(!threshold_met(&project_one, &f.id), "1 of 2 not met");
        let key2 = test_keypair();
        let env2 = sign_finding(&f, &key2).unwrap();
        let project_two = empty_project(vec![f.clone()], vec![env1, env2]);
        assert!(threshold_met(&project_two, &f.id), "2 of 2 met");
    }

    #[test]
    fn threshold_none_reports_not_met() {
        let f = sample_finding();
        let key = test_keypair();
        let env = sign_finding(&f, &key).unwrap();
        let project = empty_project(vec![f.clone()], vec![env]);
        assert!(
            !threshold_met(&project, &f.id),
            "no policy → never met (single-sig regime)"
        );
    }

    #[test]
    fn refresh_jointly_accepted_sets_flag() {
        let mut f = sample_finding();
        f.flags.signature_threshold = Some(1);
        let key = test_keypair();
        let env = sign_finding(&f, &key).unwrap();
        let mut project = empty_project(vec![f.clone()], vec![env]);
        refresh_jointly_accepted(&mut project);
        assert!(project.findings[0].flags.jointly_accepted);
    }

    #[test]
    fn invalid_signature_does_not_count_toward_threshold() {
        let mut f = sample_finding();
        f.flags.signature_threshold = Some(2);
        let key1 = test_keypair();
        let key2 = test_keypair();
        let env1 = sign_finding(&f, &key1).unwrap();
        // Corrupt the second envelope's signature bytes.
        let mut env2_tampered = sign_finding(&f, &key2).unwrap();
        env2_tampered.signature = "00".repeat(64);
        let project = empty_project(vec![f.clone()], vec![env1, env2_tampered]);
        assert_eq!(valid_signature_count(&project, &f.id), 1);
        assert!(!threshold_met(&project, &f.id));
    }

    #[test]
    fn verify_report_surfaces_threshold_counts() {
        let mut f = sample_finding();
        f.flags.signature_threshold = Some(1);
        let key = test_keypair();
        let env = sign_finding(&f, &key).unwrap();
        let project = empty_project(vec![f.clone()], vec![env]);
        let report = verify_frontier_data(&project, None).unwrap();
        assert_eq!(report.findings_with_threshold, 1);
        assert_eq!(report.jointly_accepted, 1);
    }

    #[test]
    fn validate_orcid_accepts_canonical_form() {
        assert_eq!(
            validate_orcid("0000-0001-2345-6789").unwrap(),
            "0000-0001-2345-6789"
        );
    }

    #[test]
    fn validate_orcid_accepts_check_digit_x() {
        assert_eq!(
            validate_orcid("0000-0001-5109-393X").unwrap(),
            "0000-0001-5109-393X"
        );
    }

    #[test]
    fn validate_orcid_strips_url_prefix() {
        assert_eq!(
            validate_orcid("https://orcid.org/0000-0001-2345-6789").unwrap(),
            "0000-0001-2345-6789"
        );
    }

    #[test]
    fn validate_orcid_strips_orcid_prefix() {
        assert_eq!(
            validate_orcid("orcid:0000-0001-2345-6789").unwrap(),
            "0000-0001-2345-6789"
        );
    }

    #[test]
    fn validate_orcid_rejects_short() {
        assert!(validate_orcid("0000-0001").is_err());
    }

    #[test]
    fn validate_orcid_rejects_letters_in_non_check_position() {
        assert!(validate_orcid("0000-A001-2345-6789").is_err());
    }

    #[test]
    fn validate_orcid_rejects_x_in_first_three_groups() {
        assert!(validate_orcid("000X-0001-2345-6789").is_err());
    }

    #[test]
    fn validate_orcid_rejects_extra_groups() {
        assert!(validate_orcid("0000-0001-2345-6789-9999").is_err());
    }
}