use std::collections::BTreeMap;
use std::future::Future;
use std::path::{Path, PathBuf};
use std::pin::Pin;
use std::sync::OnceLock;

use clap::{Parser, Subcommand};
use colored::Colorize;
use reqwest::Client;
use serde::Serialize;
use serde_json::{Value, json};
use sha2::{Digest, Sha256};

use crate::cli_style as style;
use crate::{
benchmark, bridge, bundle, carina_validate, conformance, decision, diff, events, export,
frontier_repo, impact, lint, normalize, packet, project, propagate, proposals, repo, review,
search, serve, sign, signals, sources, state, state_integrity, tensions, validate,
};
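/// Top-level parser for the `vela` binary; clap derives the full subcommand
/// tree from `Commands` below.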
#[derive(Parser)]
#[command(name = "vela", version)]
#[command(about = "Portable frontier state for science")]
struct Cli {
#[command(subcommand)]
command: Commands,
}
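/// Every `vela` subcommand. clap maps variant names to kebab-case on the
/// command line, so e.g. `Commands::CompileNotes` runs as `vela compile-notes`
/// and `Commands::ReviewPending` as `vela review-pending`.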
#[derive(Subcommand)]
enum Commands {
Scout {
folder: PathBuf,
#[arg(long)]
frontier: PathBuf,
#[arg(short, long)]
backend: Option<String>,
#[arg(long)]
dry_run: bool,
#[arg(long)]
json: bool,
},
CompileNotes {
vault: PathBuf,
#[arg(long)]
frontier: PathBuf,
#[arg(short, long)]
backend: Option<String>,
#[arg(long)]
max_files: Option<usize>,
#[arg(long)]
max_items_per_category: Option<usize>,
#[arg(long)]
dry_run: bool,
#[arg(long)]
json: bool,
},
CompileCode {
root: PathBuf,
#[arg(long)]
frontier: PathBuf,
#[arg(short, long)]
backend: Option<String>,
#[arg(long)]
max_files: Option<usize>,
#[arg(long)]
dry_run: bool,
#[arg(long)]
json: bool,
},
ReviewPending {
#[arg(long)]
frontier: PathBuf,
#[arg(short, long)]
backend: Option<String>,
#[arg(long)]
max_proposals: Option<usize>,
#[arg(long, default_value = "1")]
batch_size: usize,
#[arg(long)]
dry_run: bool,
#[arg(long)]
json: bool,
},
FindTensions {
#[arg(long)]
frontier: PathBuf,
#[arg(short, long)]
backend: Option<String>,
#[arg(long)]
max_findings: Option<usize>,
#[arg(long)]
dry_run: bool,
#[arg(long)]
json: bool,
},
PlanExperiments {
#[arg(long)]
frontier: PathBuf,
#[arg(short, long)]
backend: Option<String>,
#[arg(long)]
max_findings: Option<usize>,
#[arg(long)]
dry_run: bool,
#[arg(long)]
json: bool,
},
CompileData {
root: PathBuf,
#[arg(long)]
frontier: PathBuf,
#[arg(short, long)]
backend: Option<String>,
#[arg(long)]
sample_rows: Option<usize>,
#[arg(long)]
dry_run: bool,
#[arg(long)]
json: bool,
},
Check {
source: Option<PathBuf>,
#[arg(long)]
schema: bool,
#[arg(long)]
stats: bool,
#[arg(long)]
conformance: bool,
#[arg(long, default_value = "tests/conformance")]
conformance_dir: PathBuf,
#[arg(long)]
all: bool,
#[arg(long)]
schema_only: bool,
#[arg(long)]
strict: bool,
#[arg(long)]
fix: bool,
#[arg(long)]
json: bool,
},
Integrity {
frontier: PathBuf,
#[arg(long)]
json: bool,
},
Impact {
frontier: PathBuf,
finding_id: String,
#[arg(long)]
depth: Option<usize>,
#[arg(long)]
json: bool,
},
Discord {
frontier: PathBuf,
#[arg(long)]
json: bool,
#[arg(long)]
kind: Option<String>,
},
Normalize {
source: PathBuf,
#[arg(short, long)]
out: Option<PathBuf>,
#[arg(long)]
write: bool,
#[arg(long)]
dry_run: bool,
#[arg(long)]
rewrite_ids: bool,
#[arg(long)]
id_map: Option<PathBuf>,
#[arg(long)]
resync_provenance: bool,
#[arg(long)]
json: bool,
},
Proof {
frontier: PathBuf,
#[arg(long, short = 'o', default_value = "proof-packet")]
out: PathBuf,
#[arg(long, default_value = "bbb-alzheimer")]
template: String,
#[arg(long)]
gold: Option<PathBuf>,
#[arg(long)]
record_proof_state: bool,
#[arg(long)]
json: bool,
},
Repo {
#[command(subcommand)]
action: RepoAction,
},
Serve {
#[arg(required_unless_present_any = ["frontiers", "setup"])]
frontier: Option<PathBuf>,
#[arg(long)]
frontiers: Option<PathBuf>,
#[arg(short, long)]
backend: Option<String>,
#[arg(long)]
http: Option<u16>,
#[arg(long)]
setup: bool,
#[arg(long)]
check_tools: bool,
#[arg(long)]
json: bool,
#[arg(long)]
workbench: bool,
},
Status {
frontier: PathBuf,
#[arg(long)]
json: bool,
},
Log {
frontier: PathBuf,
#[arg(long, default_value = "20")]
limit: usize,
#[arg(long)]
kind: Option<String>,
#[arg(long)]
json: bool,
},
Inbox {
frontier: PathBuf,
#[arg(long)]
kind: Option<String>,
#[arg(long, default_value = "30")]
limit: usize,
#[arg(long)]
json: bool,
},
Ask {
frontier: PathBuf,
#[arg(trailing_var_arg = true)]
question: Vec<String>,
#[arg(long)]
json: bool,
},
Stats {
frontier: PathBuf,
#[arg(long)]
json: bool,
},
Search {
query: String,
#[arg(long)]
source: Option<PathBuf>,
#[arg(long)]
entity: Option<String>,
#[arg(long)]
r#type: Option<String>,
#[arg(long)]
all: Option<PathBuf>,
#[arg(long, default_value = "20")]
limit: usize,
#[arg(long)]
json: bool,
},
Tensions {
source: PathBuf,
#[arg(long)]
both_high: bool,
#[arg(long)]
cross_domain: bool,
#[arg(long, default_value = "20")]
top: usize,
#[arg(long)]
json: bool,
},
Gaps {
#[command(subcommand)]
action: GapsAction,
},
Bridge {
#[arg(required = true)]
inputs: Vec<PathBuf>,
#[arg(long, default_value = "true", action = clap::ArgAction::Set)]
novelty: bool,
#[arg(long, default_value = "30")]
top: usize,
},
Export {
frontier: PathBuf,
#[arg(short, long, default_value = "csv")]
format: String,
#[arg(short, long)]
output: Option<PathBuf>,
},
Packet {
#[command(subcommand)]
action: PacketAction,
},
Verify {
path: PathBuf,
#[arg(long)]
json: bool,
},
Bench {
frontier: Option<PathBuf>,
#[arg(long)]
gold: Option<PathBuf>,
#[arg(long)]
candidate: Option<PathBuf>,
#[arg(long)]
sources: Option<PathBuf>,
#[arg(long)]
threshold: Option<f64>,
#[arg(long)]
report: Option<PathBuf>,
#[arg(long)]
entity_gold: Option<PathBuf>,
#[arg(long)]
link_gold: Option<PathBuf>,
#[arg(long)]
suite: Option<PathBuf>,
#[arg(long)]
suite_ready: bool,
#[arg(long)]
min_f1: Option<f64>,
#[arg(long)]
min_precision: Option<f64>,
#[arg(long)]
min_recall: Option<f64>,
#[arg(long)]
no_thresholds: bool,
#[arg(long)]
json: bool,
},
Conformance {
#[arg(default_value = "tests/conformance")]
dir: PathBuf,
},
Version,
Sign {
#[command(subcommand)]
action: SignAction,
},
Actor {
#[command(subcommand)]
action: ActorAction,
},
Federation {
#[command(subcommand)]
action: FederationAction,
},
Causal {
#[command(subcommand)]
action: CausalAction,
},
Frontier {
#[command(subcommand)]
action: FrontierAction,
},
Queue {
#[command(subcommand)]
action: QueueAction,
},
Registry {
#[command(subcommand)]
action: RegistryAction,
},
Init {
#[arg(default_value = ".")]
path: PathBuf,
#[arg(long, default_value = "unnamed")]
name: String,
#[arg(long, default_value = "default")]
template: String,
#[arg(long)]
no_git: bool,
#[arg(long)]
json: bool,
},
Quickstart {
#[arg(default_value = "demo")]
path: PathBuf,
#[arg(long, default_value = "Quickstart frontier")]
name: String,
#[arg(long, default_value = "reviewer:you")]
reviewer: String,
#[arg(long)]
assertion: Option<String>,
#[arg(long)]
keys_out: Option<PathBuf>,
#[arg(long)]
json: bool,
},
Lock {
path: PathBuf,
#[arg(long)]
check: bool,
#[arg(long)]
json: bool,
},
Doc {
path: PathBuf,
#[arg(long)]
out: Option<PathBuf>,
#[arg(long)]
json: bool,
},
Import {
frontier: PathBuf,
#[arg(long)]
into: Option<PathBuf>,
},
Diff {
target: String,
frontier_b: Option<PathBuf>,
#[arg(long)]
frontier: Option<PathBuf>,
#[arg(long, default_value = "reviewer:preview")]
reviewer: String,
#[arg(long)]
json: bool,
#[arg(long)]
quiet: bool,
},
Proposals {
#[command(subcommand)]
action: ProposalAction,
},
ArtifactToState {
frontier: PathBuf,
packet: PathBuf,
#[arg(long)]
actor: String,
#[arg(long)]
apply_artifacts: bool,
#[arg(long)]
json: bool,
},
BridgeKit {
#[command(subcommand)]
action: BridgeKitAction,
},
SourceAdapter {
#[command(subcommand)]
action: SourceAdapterAction,
},
RuntimeAdapter {
#[command(subcommand)]
action: RuntimeAdapterAction,
},
Finding {
#[command(subcommand)]
command: FindingCommands,
},
Link {
#[command(subcommand)]
action: LinkAction,
},
Workbench {
#[arg(default_value = ".")]
path: PathBuf,
#[arg(long, default_value_t = 3850)]
port: u16,
#[arg(long)]
no_open: bool,
},
Bridges {
#[command(subcommand)]
action: BridgesAction,
},
Entity {
#[command(subcommand)]
action: EntityAction,
},
Review {
frontier: PathBuf,
finding_id: String,
#[arg(long)]
status: Option<String>,
#[arg(long)]
reason: Option<String>,
#[arg(long)]
reviewer: String,
#[arg(long)]
apply: bool,
#[arg(long)]
json: bool,
},
Note {
frontier: PathBuf,
finding_id: String,
#[arg(long)]
text: String,
#[arg(long)]
author: String,
#[arg(long)]
apply: bool,
#[arg(long)]
json: bool,
},
Caveat {
frontier: PathBuf,
finding_id: String,
#[arg(long)]
text: String,
#[arg(long)]
author: String,
#[arg(long)]
apply: bool,
#[arg(long)]
json: bool,
},
Revise {
frontier: PathBuf,
finding_id: String,
#[arg(long)]
confidence: f64,
#[arg(long)]
reason: String,
#[arg(long)]
reviewer: String,
#[arg(long)]
apply: bool,
#[arg(long)]
json: bool,
},
Reject {
frontier: PathBuf,
finding_id: String,
#[arg(long)]
reason: String,
#[arg(long)]
reviewer: String,
#[arg(long)]
apply: bool,
#[arg(long)]
json: bool,
},
History {
frontier: PathBuf,
finding_id: String,
#[arg(long)]
json: bool,
#[arg(long, value_name = "RFC3339_TIMESTAMP")]
as_of: Option<String>,
},
ImportEvents {
source: PathBuf,
#[arg(long)]
into: PathBuf,
#[arg(long)]
json: bool,
},
Retract {
source: PathBuf,
finding_id: String,
#[arg(long)]
reason: String,
#[arg(long)]
reviewer: String,
#[arg(long)]
apply: bool,
#[arg(long)]
json: bool,
},
EntityAdd {
frontier: PathBuf,
finding_id: String,
#[arg(long)]
entity: String,
#[arg(long)]
entity_type: String,
#[arg(long)]
reviewer: String,
#[arg(long)]
reason: String,
#[arg(long)]
apply: bool,
#[arg(long)]
json: bool,
},
EntityResolve {
frontier: PathBuf,
finding_id: String,
#[arg(long)]
entity: String,
#[arg(long)]
source: String,
#[arg(long)]
id: String,
#[arg(long)]
confidence: f64,
#[arg(long)]
matched_name: Option<String>,
#[arg(long, default_value = "manual")]
resolution_method: String,
#[arg(long)]
reviewer: String,
#[arg(long)]
reason: String,
#[arg(long)]
apply: bool,
#[arg(long)]
json: bool,
},
SourceFetch {
identifier: String,
#[arg(long)]
cache: Option<PathBuf>,
#[arg(long)]
out: Option<PathBuf>,
#[arg(long)]
refresh: bool,
#[arg(long)]
json: bool,
},
SpanRepair {
frontier: PathBuf,
finding_id: String,
#[arg(long)]
section: String,
#[arg(long)]
text: String,
#[arg(long)]
reviewer: String,
#[arg(long)]
reason: String,
#[arg(long)]
apply: bool,
#[arg(long)]
json: bool,
},
LocatorRepair {
frontier: PathBuf,
atom_id: String,
#[arg(long)]
locator: Option<String>,
#[arg(long)]
reviewer: String,
#[arg(long)]
reason: String,
#[arg(long)]
apply: bool,
#[arg(long)]
json: bool,
},
Propagate {
frontier: PathBuf,
#[arg(long)]
retract: Option<String>,
#[arg(long)]
reduce_confidence: Option<String>,
#[arg(long)]
to: Option<f64>,
#[arg(short, long)]
output: Option<PathBuf>,
},
Replicate {
frontier: PathBuf,
target: String,
#[arg(long)]
outcome: String,
#[arg(long)]
by: String,
#[arg(long)]
conditions: String,
#[arg(long)]
source_title: String,
#[arg(long)]
doi: Option<String>,
#[arg(long)]
pmid: Option<String>,
#[arg(long)]
sample_size: Option<String>,
#[arg(long, default_value = "")]
note: String,
#[arg(long)]
previous_attempt: Option<String>,
#[arg(long, default_value_t = false)]
no_cascade: bool,
#[arg(long)]
json: bool,
},
Replications {
frontier: PathBuf,
#[arg(long)]
target: Option<String>,
#[arg(long)]
json: bool,
},
DatasetAdd {
frontier: PathBuf,
#[arg(long)]
name: String,
#[arg(long)]
version: Option<String>,
#[arg(long)]
content_hash: String,
#[arg(long)]
url: Option<String>,
#[arg(long)]
license: Option<String>,
#[arg(long)]
source_title: String,
#[arg(long)]
doi: Option<String>,
#[arg(long)]
row_count: Option<u64>,
#[arg(long)]
json: bool,
},
Datasets {
frontier: PathBuf,
#[arg(long)]
json: bool,
},
CodeAdd {
frontier: PathBuf,
#[arg(long)]
language: String,
#[arg(long)]
repo_url: Option<String>,
#[arg(long)]
commit: Option<String>,
#[arg(long)]
path: String,
#[arg(long)]
content_hash: String,
#[arg(long)]
line_start: Option<u32>,
#[arg(long)]
line_end: Option<u32>,
#[arg(long)]
entry_point: Option<String>,
#[arg(long)]
json: bool,
},
CodeArtifacts {
frontier: PathBuf,
#[arg(long)]
json: bool,
},
ArtifactAdd {
frontier: PathBuf,
#[arg(long)]
kind: String,
#[arg(long)]
name: String,
#[arg(long)]
file: Option<PathBuf>,
#[arg(long)]
url: Option<String>,
#[arg(long)]
content_hash: Option<String>,
#[arg(long)]
media_type: Option<String>,
#[arg(long)]
license: Option<String>,
#[arg(long)]
source_title: Option<String>,
#[arg(long)]
source_url: Option<String>,
#[arg(long)]
doi: Option<String>,
#[arg(long)]
target: Vec<String>,
#[arg(long)]
metadata: Vec<String>,
#[arg(long, default_value = "public")]
access_tier: String,
#[arg(long, default_value = "reviewer:manual")]
deposited_by: String,
#[arg(long, default_value = "artifact deposit")]
reason: String,
#[arg(long)]
json: bool,
},
Artifacts {
frontier: PathBuf,
#[arg(long)]
target: Option<String>,
#[arg(long)]
json: bool,
},
ArtifactAudit {
frontier: PathBuf,
#[arg(long)]
json: bool,
},
DecisionBrief {
frontier: PathBuf,
#[arg(long)]
json: bool,
},
TrialSummary {
frontier: PathBuf,
#[arg(long)]
json: bool,
},
SourceVerification {
frontier: PathBuf,
#[arg(long)]
json: bool,
},
SourceIngestPlan {
frontier: PathBuf,
#[arg(long)]
json: bool,
},
ClinicalTrialImport {
frontier: PathBuf,
nct_id: String,
#[arg(long)]
input_json: Option<PathBuf>,
#[arg(long)]
target: Vec<String>,
#[arg(long, default_value = "reviewer:manual")]
deposited_by: String,
#[arg(long, default_value = "clinical trial record import")]
reason: String,
#[arg(long, default_value = "ClinicalTrials.gov public record")]
license: String,
#[arg(long)]
json: bool,
},
NegativeResultAdd {
frontier: PathBuf,
#[arg(long)]
kind: String,
#[arg(long)]
deposited_by: String,
#[arg(long)]
reason: String,
#[arg(long)]
conditions_text: String,
#[arg(long, default_value = "")]
notes: String,
#[arg(long)]
target: Vec<String>,
#[arg(long)]
endpoint: Option<String>,
#[arg(long)]
intervention: Option<String>,
#[arg(long)]
comparator: Option<String>,
#[arg(long)]
population: Option<String>,
#[arg(long)]
n_enrolled: Option<u32>,
#[arg(long)]
power: Option<f64>,
#[arg(long)]
ci_lower: Option<f64>,
#[arg(long)]
ci_upper: Option<f64>,
#[arg(long)]
effect_size_threshold: Option<f64>,
#[arg(long)]
registry_id: Option<String>,
#[arg(long)]
reagent: Option<String>,
#[arg(long)]
observation: Option<String>,
#[arg(long)]
attempts: Option<u32>,
#[arg(long)]
source_title: String,
#[arg(long)]
doi: Option<String>,
#[arg(long)]
url: Option<String>,
#[arg(long)]
year: Option<i32>,
#[arg(long)]
json: bool,
},
NegativeResults {
frontier: PathBuf,
#[arg(long)]
target: Option<String>,
#[arg(long)]
json: bool,
},
TrajectoryCreate {
frontier: PathBuf,
#[arg(long)]
deposited_by: String,
#[arg(long)]
reason: String,
#[arg(long)]
target: Vec<String>,
#[arg(long, default_value = "")]
notes: String,
#[arg(long)]
json: bool,
},
TrajectoryStep {
frontier: PathBuf,
trajectory_id: String,
#[arg(long)]
kind: String,
#[arg(long)]
description: String,
#[arg(long)]
actor: String,
#[arg(long)]
reason: String,
#[arg(long)]
reference: Vec<String>,
#[arg(long)]
json: bool,
},
Trajectories {
frontier: PathBuf,
#[arg(long)]
target: Option<String>,
#[arg(long)]
json: bool,
},
TierSet {
frontier: PathBuf,
#[arg(long)]
object_type: String,
#[arg(long)]
object_id: String,
#[arg(long)]
tier: String,
#[arg(long)]
actor: String,
#[arg(long)]
reason: String,
#[arg(long)]
json: bool,
},
Predict {
frontier: PathBuf,
#[arg(long)]
by: String,
#[arg(long)]
claim: String,
#[arg(long)]
criterion: String,
#[arg(long)]
resolves_by: Option<String>,
#[arg(long)]
confidence: f64,
#[arg(long, default_value = "")]
target: String,
#[arg(long, default_value = "affirmed")]
outcome: String,
#[arg(long, default_value = "")]
conditions: String,
#[arg(long)]
json: bool,
},
Resolve {
frontier: PathBuf,
prediction: String,
#[arg(long)]
outcome: String,
#[arg(long)]
matched: bool,
#[arg(long)]
by: String,
#[arg(long, default_value = "1.0")]
confidence: f64,
#[arg(long, default_value = "")]
source_title: String,
#[arg(long)]
doi: Option<String>,
#[arg(long)]
json: bool,
},
Predictions {
frontier: PathBuf,
#[arg(long)]
by: Option<String>,
#[arg(long)]
open: bool,
#[arg(long)]
json: bool,
},
Calibration {
frontier: PathBuf,
#[arg(long)]
actor: Option<String>,
#[arg(long)]
json: bool,
},
PredictionsExpire {
frontier: PathBuf,
#[arg(long)]
now: Option<String>,
#[arg(long)]
dry_run: bool,
#[arg(long)]
json: bool,
},
Consensus {
frontier: PathBuf,
target: String,
#[arg(long, default_value = "composite")]
weighting: String,
#[arg(long)]
causal_claim: Option<String>,
#[arg(long)]
causal_grade_min: Option<String>,
#[arg(long)]
json: bool,
},
Ingest {
path: String,
#[arg(long)]
frontier: PathBuf,
#[arg(short, long)]
backend: Option<String>,
#[arg(long)]
actor: Option<String>,
#[arg(long)]
dry_run: bool,
#[arg(long)]
json: bool,
},
Propose {
frontier: PathBuf,
finding_id: String,
#[arg(long)]
status: String,
#[arg(long)]
reason: String,
#[arg(long)]
reviewer: String,
#[arg(long)]
apply: bool,
#[arg(long)]
json: bool,
},
Accept {
frontier: PathBuf,
proposal_id: String,
#[arg(long)]
reviewer: String,
#[arg(long)]
reason: String,
#[arg(long)]
json: bool,
},
Attest {
frontier: PathBuf,
#[arg(long)]
event: Option<String>,
#[arg(long)]
attester: Option<String>,
#[arg(long)]
scope_note: Option<String>,
#[arg(long)]
proof_id: Option<String>,
#[arg(long)]
signature: Option<String>,
#[arg(long)]
key: Option<PathBuf>,
#[arg(long)]
json: bool,
},
Lineage {
frontier: PathBuf,
finding_id: String,
#[arg(long, value_name = "RFC3339_TIMESTAMP")]
as_of: Option<String>,
#[arg(long)]
json: bool,
},
Carina {
#[command(subcommand)]
action: CarinaAction,
},
Atlas {
#[command(subcommand)]
action: AtlasAction,
},
Constellation {
#[command(subcommand)]
action: ConstellationAction,
},
}
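/// Actions under `vela atlas`. Dispatch happens in `cmd_atlas` below through
/// handlers looked up from the `ATLAS_*_HANDLER` statics, so atlas support
/// stays an optional, separately registered component.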
#[derive(Subcommand)]
enum AtlasAction {
Init {
name: String,
#[arg(long, value_delimiter = ',', num_args = 1..)]
frontiers: Vec<PathBuf>,
#[arg(long, default_value = "general")]
domain: String,
#[arg(long)]
scope_note: Option<String>,
#[arg(long, default_value = "atlases")]
atlases_root: PathBuf,
#[arg(long)]
json: bool,
},
Materialize {
name: String,
#[arg(long, default_value = "atlases")]
atlases_root: PathBuf,
#[arg(long)]
json: bool,
},
Serve {
name: String,
#[arg(long, default_value = "atlases")]
atlases_root: PathBuf,
#[arg(long, default_value_t = 3848)]
port: u16,
#[arg(long)]
no_open: bool,
},
Update {
name: String,
#[arg(long, value_delimiter = ',')]
add_frontier: Vec<PathBuf>,
#[arg(long, value_delimiter = ',')]
remove_vfr_id: Vec<String>,
#[arg(long, default_value = "atlases")]
atlases_root: PathBuf,
#[arg(long)]
json: bool,
},
}
#[derive(Subcommand)]
enum ConstellationAction {
Init {
name: String,
#[arg(long, value_delimiter = ',', num_args = 1..)]
atlases: Vec<PathBuf>,
#[arg(long)]
scope_note: Option<String>,
#[arg(long, default_value = "constellations")]
constellations_root: PathBuf,
#[arg(long)]
json: bool,
},
Materialize {
name: String,
#[arg(long, default_value = "constellations")]
constellations_root: PathBuf,
#[arg(long)]
json: bool,
},
Serve {
name: String,
#[arg(long, default_value = "constellations")]
constellations_root: PathBuf,
#[arg(long, default_value_t = 3849)]
port: u16,
#[arg(long)]
no_open: bool,
},
}
#[derive(Subcommand)]
enum CarinaAction {
Validate {
path: PathBuf,
#[arg(long)]
primitive: Option<String>,
#[arg(long)]
json: bool,
},
List {
#[arg(long)]
json: bool,
},
Schema { primitive: String },
}
#[derive(Subcommand)]
enum PacketAction {
Inspect {
path: PathBuf,
#[arg(long)]
json: bool,
},
Validate {
path: PathBuf,
#[arg(long)]
json: bool,
},
}
#[derive(Subcommand)]
enum SignAction {
GenerateKeypair {
#[arg(long, default_value = ".vela/keys")]
out: PathBuf,
#[arg(long)]
json: bool,
},
Apply {
frontier: PathBuf,
#[arg(long)]
private_key: PathBuf,
#[arg(long)]
json: bool,
},
Verify {
frontier: PathBuf,
#[arg(long)]
public_key: Option<PathBuf>,
#[arg(long)]
json: bool,
},
ThresholdSet {
frontier: PathBuf,
finding_id: String,
#[arg(long)]
to: u32,
#[arg(long)]
json: bool,
},
}
#[derive(Subcommand)]
enum ActorAction {
Add {
frontier: PathBuf,
id: String,
#[arg(long)]
pubkey: String,
#[arg(long)]
tier: Option<String>,
#[arg(long)]
orcid: Option<String>,
#[arg(long)]
clearance: Option<String>,
#[arg(long)]
json: bool,
},
List {
frontier: PathBuf,
#[arg(long)]
json: bool,
},
}
#[derive(Subcommand)]
enum CausalAction {
Audit {
frontier: PathBuf,
#[arg(long)]
problems_only: bool,
#[arg(long)]
json: bool,
},
Effect {
frontier: PathBuf,
source: String,
#[arg(long)]
on: String,
#[arg(long)]
json: bool,
},
Graph {
frontier: PathBuf,
#[arg(long)]
node: Option<String>,
#[arg(long)]
json: bool,
},
Counterfactual {
frontier: PathBuf,
intervene_on: String,
#[arg(long)]
set_to: f64,
#[arg(long)]
target: String,
#[arg(long)]
json: bool,
},
}
#[derive(Subcommand)]
enum BridgesAction {
Derive {
frontier_a: PathBuf,
#[arg(long, default_value = "a")]
label_a: String,
frontier_b: PathBuf,
#[arg(long, default_value = "b")]
label_b: String,
#[arg(long)]
json: bool,
},
List {
frontier: PathBuf,
#[arg(long)]
status: Option<String>,
#[arg(long)]
json: bool,
},
Show {
frontier: PathBuf,
bridge_id: String,
#[arg(long)]
json: bool,
},
Confirm {
frontier: PathBuf,
bridge_id: String,
#[arg(long)]
reviewer: Option<String>,
#[arg(long)]
note: Option<String>,
#[arg(long)]
json: bool,
},
Refute {
frontier: PathBuf,
bridge_id: String,
#[arg(long)]
reviewer: Option<String>,
#[arg(long)]
note: Option<String>,
#[arg(long)]
json: bool,
},
}
#[derive(Subcommand)]
enum FederationAction {
PeerAdd {
frontier: PathBuf,
id: String,
#[arg(long)]
url: String,
#[arg(long)]
pubkey: String,
#[arg(long, default_value = "")]
note: String,
#[arg(long)]
json: bool,
},
PeerList {
frontier: PathBuf,
#[arg(long)]
json: bool,
},
PeerRemove {
frontier: PathBuf,
id: String,
#[arg(long)]
json: bool,
},
Sync {
frontier: PathBuf,
peer_id: String,
#[arg(long)]
url: Option<String>,
#[arg(long)]
via_hub: bool,
#[arg(long)]
vfr_id: Option<String>,
#[arg(long)]
allow_cross_vfr: bool,
#[arg(long)]
dry_run: bool,
#[arg(long)]
json: bool,
},
PushResolution {
frontier: PathBuf,
conflict_event_id: String,
#[arg(long = "to")]
to: String,
#[arg(long)]
key: Option<PathBuf>,
#[arg(long)]
vfr_id: Option<String>,
#[arg(long)]
json: bool,
},
}
#[derive(Subcommand)]
enum FrontierAction {
New {
path: PathBuf,
#[arg(long)]
name: String,
#[arg(long, default_value = "")]
description: String,
#[arg(long)]
force: bool,
#[arg(long)]
json: bool,
},
Materialize {
frontier: PathBuf,
#[arg(long)]
json: bool,
},
AddDep {
frontier: PathBuf,
vfr_id: String,
#[arg(long)]
locator: String,
#[arg(long)]
snapshot: String,
#[arg(long)]
name: Option<String>,
#[arg(long)]
json: bool,
},
ListDeps {
frontier: PathBuf,
#[arg(long)]
json: bool,
},
RemoveDep {
frontier: PathBuf,
vfr_id: String,
#[arg(long)]
json: bool,
},
RefreshDeps {
frontier: PathBuf,
#[arg(long, default_value = "https://vela-hub.fly.dev")]
from: String,
#[arg(long)]
dry_run: bool,
#[arg(long)]
json: bool,
},
Diff {
frontier: PathBuf,
#[arg(long)]
since: Option<String>,
#[arg(long)]
week: Option<String>,
#[arg(long)]
json: bool,
},
}
#[derive(Subcommand)]
enum RepoAction {
Status {
frontier: PathBuf,
#[arg(long)]
json: bool,
},
Doctor {
frontier: PathBuf,
#[arg(long)]
json: bool,
},
}
#[derive(Subcommand)]
enum QueueAction {
List {
#[arg(long)]
queue_file: Option<PathBuf>,
#[arg(long)]
json: bool,
},
Sign {
#[arg(long)]
actor: String,
#[arg(long)]
key: PathBuf,
#[arg(long)]
queue_file: Option<PathBuf>,
#[arg(long, alias = "all")]
yes_to_all: bool,
#[arg(long)]
json: bool,
},
Clear {
#[arg(long)]
queue_file: Option<PathBuf>,
#[arg(long)]
json: bool,
},
}
#[derive(Subcommand)]
enum RegistryAction {
List {
#[arg(long)]
from: Option<String>,
#[arg(long)]
json: bool,
},
Publish {
frontier: PathBuf,
#[arg(long)]
owner: String,
#[arg(long)]
key: PathBuf,
#[arg(long)]
locator: Option<String>,
#[arg(long)]
to: Option<String>,
#[arg(long)]
json: bool,
},
DependsOn {
vfr_id: String,
#[arg(long, default_value = "https://vela-hub.fly.dev")]
from: String,
#[arg(long)]
json: bool,
},
Mirror {
vfr_id: String,
#[arg(long)]
from: String,
#[arg(long)]
to: String,
#[arg(long)]
json: bool,
},
Pull {
vfr_id: String,
#[arg(long)]
from: Option<String>,
#[arg(long)]
out: PathBuf,
#[arg(long)]
transitive: bool,
#[arg(long, default_value = "4")]
depth: usize,
#[arg(long)]
json: bool,
},
}
#[derive(Subcommand)]
enum GapsAction {
Rank {
frontier: PathBuf,
#[arg(long, default_value = "10")]
top: usize,
#[arg(long)]
domain: Option<String>,
#[arg(long)]
json: bool,
},
}
#[derive(Subcommand)]
enum LinkAction {
Add {
frontier: PathBuf,
#[arg(long)]
from: String,
#[arg(long)]
to: String,
#[arg(long, default_value = "supports")]
r#type: String,
#[arg(long, default_value = "")]
note: String,
#[arg(long, default_value = "reviewer")]
inferred_by: String,
#[arg(long)]
no_check_target: bool,
#[arg(long)]
json: bool,
},
}
#[derive(Subcommand)]
enum EntityAction {
Resolve {
frontier: PathBuf,
#[arg(long)]
force: bool,
#[arg(long)]
json: bool,
},
List {
#[arg(long)]
json: bool,
},
}
#[derive(Subcommand)]
enum FindingCommands {
Add {
frontier: PathBuf,
#[arg(long)]
assertion: String,
#[arg(long, default_value = "mechanism")]
r#type: String,
#[arg(long, default_value = "manual finding")]
source: String,
#[arg(long, default_value = "expert_assertion")]
source_type: String,
#[arg(long)]
author: String,
#[arg(long, default_value = "0.3")]
confidence: f64,
#[arg(long, default_value = "theoretical")]
evidence_type: String,
#[arg(long, default_value = "")]
entities: String,
#[arg(long)]
entities_reviewed: bool,
#[arg(long)]
evidence_span: Vec<String>,
#[arg(long)]
gap: bool,
#[arg(long)]
negative_space: bool,
#[arg(long)]
doi: Option<String>,
#[arg(long)]
pmid: Option<String>,
#[arg(long)]
year: Option<i32>,
#[arg(long)]
journal: Option<String>,
#[arg(long)]
url: Option<String>,
#[arg(long)]
source_authors: Option<String>,
#[arg(long)]
conditions_text: Option<String>,
#[arg(long)]
species: Option<String>,
#[arg(long)]
in_vivo: bool,
#[arg(long)]
in_vitro: bool,
#[arg(long)]
human_data: bool,
#[arg(long)]
clinical_trial: bool,
#[arg(long)]
json: bool,
#[arg(long)]
apply: bool,
},
Supersede {
frontier: PathBuf,
old_id: String,
#[arg(long)]
assertion: String,
#[arg(long, default_value = "mechanism")]
r#type: String,
#[arg(long, default_value = "manual finding")]
source: String,
#[arg(long, default_value = "expert_assertion")]
source_type: String,
#[arg(long)]
author: String,
#[arg(long)]
reason: String,
#[arg(long, default_value = "0.5")]
confidence: f64,
#[arg(long, default_value = "experimental")]
evidence_type: String,
#[arg(long, default_value = "")]
entities: String,
#[arg(long)]
doi: Option<String>,
#[arg(long)]
pmid: Option<String>,
#[arg(long)]
year: Option<i32>,
#[arg(long)]
journal: Option<String>,
#[arg(long)]
url: Option<String>,
#[arg(long)]
source_authors: Option<String>,
#[arg(long)]
conditions_text: Option<String>,
#[arg(long)]
species: Option<String>,
#[arg(long)]
in_vivo: bool,
#[arg(long)]
in_vitro: bool,
#[arg(long)]
human_data: bool,
#[arg(long)]
clinical_trial: bool,
#[arg(long)]
json: bool,
#[arg(long)]
apply: bool,
},
CausalSet {
frontier: PathBuf,
finding_id: String,
#[arg(long)]
claim: String,
#[arg(long)]
grade: Option<String>,
#[arg(long)]
actor: String,
#[arg(long)]
reason: String,
#[arg(long)]
json: bool,
},
}
#[derive(Subcommand)]
enum ProposalAction {
List {
frontier: PathBuf,
#[arg(long)]
status: Option<String>,
#[arg(long)]
json: bool,
},
Show {
frontier: PathBuf,
proposal_id: String,
#[arg(long)]
json: bool,
},
Preview {
frontier: PathBuf,
proposal_id: String,
#[arg(long, default_value = "reviewer:preview")]
reviewer: String,
#[arg(long)]
json: bool,
},
Import {
frontier: PathBuf,
source: PathBuf,
#[arg(long)]
json: bool,
},
Validate {
source: PathBuf,
#[arg(long)]
json: bool,
},
Export {
frontier: PathBuf,
output: PathBuf,
#[arg(long)]
status: Option<String>,
#[arg(long)]
json: bool,
},
Accept {
frontier: PathBuf,
proposal_id: String,
#[arg(long)]
reviewer: String,
#[arg(long)]
reason: String,
#[arg(long)]
json: bool,
},
Reject {
frontier: PathBuf,
proposal_id: String,
#[arg(long)]
reviewer: String,
#[arg(long)]
reason: String,
#[arg(long)]
json: bool,
},
}
#[derive(Subcommand)]
enum SourceAdapterAction {
Run {
frontier: PathBuf,
adapter: String,
#[arg(long)]
actor: String,
#[arg(long = "entry")]
entries: Vec<String>,
#[arg(long)]
priority: Option<String>,
#[arg(long)]
include_excluded: bool,
#[arg(long)]
allow_partial: bool,
#[arg(long)]
dry_run: bool,
#[arg(long)]
input_dir: Option<PathBuf>,
#[arg(long)]
apply_artifacts: bool,
#[arg(long)]
json: bool,
},
}
#[derive(Subcommand)]
enum RuntimeAdapterAction {
Run {
frontier: PathBuf,
adapter: String,
#[arg(long)]
input: PathBuf,
#[arg(long)]
actor: String,
#[arg(long)]
dry_run: bool,
#[arg(long)]
apply_artifacts: bool,
#[arg(long)]
json: bool,
},
}
#[derive(Subcommand)]
enum BridgeKitAction {
Validate {
source: PathBuf,
#[arg(long)]
json: bool,
},
VerifyProvenance {
packet: PathBuf,
#[arg(long)]
json: bool,
},
}
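/// Entry point for the CLI: loads a local `.env` (if present) into the
/// environment, parses arguments, and dispatches each subcommand to its
/// `cmd_*` handler.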
pub async fn run_command() {
dotenvy::dotenv().ok();
match Cli::parse().command {
Commands::Scout {
folder,
frontier,
backend,
dry_run,
json,
} => {
cmd_scout(&folder, &frontier, backend.as_deref(), dry_run, json).await;
}
Commands::CompileNotes {
vault,
frontier,
backend,
max_files,
max_items_per_category,
dry_run,
json,
} => {
cmd_compile_notes(
&vault,
&frontier,
backend.as_deref(),
max_files,
max_items_per_category,
dry_run,
json,
)
.await;
}
Commands::CompileCode {
root,
frontier,
backend,
max_files,
dry_run,
json,
} => {
cmd_compile_code(
&root,
&frontier,
backend.as_deref(),
max_files,
dry_run,
json,
)
.await;
}
Commands::CompileData {
root,
frontier,
backend,
sample_rows,
dry_run,
json,
} => {
cmd_compile_data(
&root,
&frontier,
backend.as_deref(),
sample_rows,
dry_run,
json,
)
.await;
}
Commands::ReviewPending {
frontier,
backend,
max_proposals,
batch_size,
dry_run,
json,
} => {
cmd_review_pending(
&frontier,
backend.as_deref(),
max_proposals,
batch_size,
dry_run,
json,
)
.await;
}
Commands::FindTensions {
frontier,
backend,
max_findings,
dry_run,
json,
} => {
cmd_find_tensions(&frontier, backend.as_deref(), max_findings, dry_run, json).await;
}
Commands::PlanExperiments {
frontier,
backend,
max_findings,
dry_run,
json,
} => {
cmd_plan_experiments(&frontier, backend.as_deref(), max_findings, dry_run, json).await;
}
Commands::Check {
source,
schema,
stats,
conformance,
conformance_dir,
all,
schema_only,
strict,
fix,
json,
} => cmd_check(
source.as_deref(),
schema,
stats,
conformance,
&conformance_dir,
all,
schema_only,
strict,
fix,
json,
),
Commands::Integrity { frontier, json } => cmd_integrity(&frontier, json),
Commands::Impact {
frontier,
finding_id,
depth,
json,
} => cmd_impact(&frontier, &finding_id, depth, json),
Commands::Discord {
frontier,
json,
kind,
} => cmd_discord(&frontier, json, kind.as_deref()),
Commands::Normalize {
source,
out,
write,
dry_run,
rewrite_ids,
id_map,
resync_provenance,
json,
} => cmd_normalize(
&source,
out.as_deref(),
write,
dry_run,
rewrite_ids,
id_map.as_deref(),
resync_provenance,
json,
),
Commands::Proof {
frontier,
out,
template,
gold,
record_proof_state,
json,
} => cmd_proof(
&frontier,
&out,
&template,
gold.as_deref(),
record_proof_state,
json,
),
Commands::Repo { action } => cmd_repo(action),
Commands::Serve {
frontier,
frontiers,
backend,
http,
setup,
check_tools,
json,
workbench,
} => {
if setup {
cmd_mcp_setup(frontier.as_deref(), frontiers.as_deref());
} else if check_tools {
let source =
serve::ProjectSource::from_args(frontier.as_deref(), frontiers.as_deref());
match serve::check_tools(source) {
Ok(report) => {
if json {
println!(
"{}",
serde_json::to_string_pretty(&report)
.expect("failed to serialize tool check report")
);
} else {
print_tool_check_report(&report);
}
}
Err(e) => fail(&format!("Tool check failed: {e}")),
}
} else {
let source =
serve::ProjectSource::from_args(frontier.as_deref(), frontiers.as_deref());
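// --workbench implies HTTP serving; fall back to port 3848 when --http
// was not given explicitly.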
let resolved_port = if workbench {
Some(http.unwrap_or(3848))
} else {
http
};
if let Some(port) = resolved_port {
serve::run_http(source, backend.as_deref(), port, workbench).await;
} else {
serve::run(source, backend.as_deref()).await;
}
}
}
Commands::Status { frontier, json } => cmd_status(&frontier, json),
Commands::Log {
frontier,
limit,
kind,
json,
} => cmd_log(&frontier, limit, kind.as_deref(), json),
Commands::Inbox {
frontier,
kind,
limit,
json,
} => cmd_inbox(&frontier, kind.as_deref(), limit, json),
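// `question` is a trailing var-arg, so the question does not need quoting
// on the command line; the words are re-joined with spaces below.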
Commands::Ask {
frontier,
question,
json,
} => cmd_ask(&frontier, &question.join(" "), json),
Commands::Stats { frontier, json } => {
if json {
print_stats_json(&frontier);
} else {
cmd_stats(&frontier);
}
}
Commands::Search {
source,
query,
entity,
r#type,
all,
limit,
json,
} => cmd_search(
source.as_deref(),
&query,
entity.as_deref(),
r#type.as_deref(),
all.as_deref(),
limit,
json,
),
Commands::Tensions {
source,
both_high,
cross_domain,
top,
json,
} => cmd_tensions(&source, both_high, cross_domain, top, json),
Commands::Gaps { action } => cmd_gaps(action),
Commands::Bridge {
inputs,
novelty,
top,
} => cmd_bridge(&inputs, novelty, top).await,
Commands::Export {
frontier,
format,
output,
} => export::run(&frontier, &format, output.as_deref()),
Commands::Packet { action } => cmd_packet(action),
Commands::Verify { path, json } => cmd_verify(&path, json),
Commands::Bench {
frontier,
gold,
candidate,
sources,
threshold,
report,
entity_gold,
link_gold,
suite,
suite_ready,
min_f1,
min_precision,
min_recall,
no_thresholds,
json,
} => {
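// Two modes: with --candidate this benchmarks an agent run against a gold
// frontier (--gold becomes mandatory); otherwise run the standard bench
// suite via BenchArgs.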
if let Some(cand) = candidate {
let Some(g) = gold else {
eprintln!(
"{} `vela bench --candidate <…>` requires `--gold <…>`",
style::err_prefix()
);
std::process::exit(2);
};
cmd_agent_bench(
&g,
&cand,
sources.as_deref(),
threshold,
report.as_deref(),
json,
);
} else {
cmd_bench(BenchArgs {
frontier,
gold,
entity_gold,
link_gold,
suite,
suite_ready,
min_f1,
min_precision,
min_recall,
no_thresholds,
json,
});
}
}
Commands::Conformance { dir } => {
let _ = conformance::run(&dir);
}
Commands::Version => println!("vela 0.36.0"),
Commands::Sign { action } => cmd_sign(action),
Commands::Actor { action } => cmd_actor(action),
Commands::Federation { action } => cmd_federation(action),
Commands::Causal { action } => cmd_causal(action),
Commands::Frontier { action } => cmd_frontier(action),
Commands::Queue { action } => cmd_queue(action),
Commands::Registry { action } => cmd_registry(action),
Commands::Init {
path,
name,
template,
no_git,
json,
} => cmd_init(&path, &name, &template, !no_git, json),
Commands::Quickstart {
path,
name,
reviewer,
assertion,
keys_out,
json,
} => cmd_quickstart(
&path,
&name,
&reviewer,
assertion.as_deref(),
keys_out.as_deref(),
json,
),
Commands::Lock { path, check, json } => cmd_lock(&path, check, json),
Commands::Doc { path, out, json } => cmd_doc(&path, out.as_deref(), json),
Commands::Import { frontier, into } => cmd_import(&frontier, into.as_deref()),
Commands::Diff {
target,
frontier_b,
frontier,
reviewer,
json,
quiet,
} => {
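// A `vpr_*` target means "preview this proposal against the frontier";
// any other target is treated as a path to frontier A for a two-frontier
// diff.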
if target.starts_with("vpr_") {
let frontier_root = frontier
.or(frontier_b)
.unwrap_or_else(|| std::path::PathBuf::from("."));
let preview = proposals::preview_at_path(&frontier_root, &target, &reviewer)
.unwrap_or_else(|e| fail_return(&e));
let payload = json!({
"ok": true,
"command": "diff.proposal",
"frontier": frontier_root.display().to_string(),
"proposal_id": target,
"preview": preview,
});
if json {
println!(
"{}",
serde_json::to_string_pretty(&payload)
.expect("failed to serialize diff preview")
);
} else {
println!("vela diff · proposal preview");
println!(" proposal: {}", target);
println!(" kind: {}", preview.kind);
println!(
" findings: {} -> {}",
preview.findings_before, preview.findings_after
);
println!(
" artifacts: {} -> {}",
preview.artifacts_before, preview.artifacts_after
);
println!(
" events: {} -> {}",
preview.events_before, preview.events_after
);
if !preview.changed_findings.is_empty() {
println!(
" findings changed: {}",
preview.changed_findings.join(", ")
);
}
}
} else {
let frontier_a = std::path::PathBuf::from(&target);
let b = frontier_b.unwrap_or_else(|| {
fail_return(
"diff: two-frontier mode needs a second positional path; for proposal preview pass a `vpr_*` id",
)
});
diff::run(&frontier_a, &b, json, quiet);
}
}
Commands::Proposals { action } => cmd_proposals(action),
Commands::ArtifactToState {
frontier,
packet,
actor,
apply_artifacts,
json,
} => cmd_artifact_to_state(&frontier, &packet, &actor, apply_artifacts, json),
Commands::BridgeKit { action } => cmd_bridge_kit(action).await,
Commands::SourceAdapter { action } => cmd_source_adapter(action).await,
Commands::RuntimeAdapter { action } => cmd_runtime_adapter(action),
Commands::Link { action } => cmd_link(action),
Commands::Workbench {
path,
port,
no_open,
} => {
if let Err(e) = crate::workbench::run(path, port, !no_open).await {
fail(&e);
}
}
Commands::Bridges { action } => cmd_bridges(action),
Commands::Entity { action } => cmd_entity(action),
Commands::Finding { command } => match command {
FindingCommands::Add {
frontier,
assertion,
r#type,
source,
source_type,
author,
confidence,
evidence_type,
entities,
entities_reviewed,
evidence_span,
gap,
negative_space,
doi,
pmid,
year,
journal,
url,
source_authors,
conditions_text,
species,
in_vivo,
in_vitro,
human_data,
clinical_trial,
json,
apply,
} => {
validate_enum_arg("--type", &r#type, bundle::VALID_ASSERTION_TYPES);
validate_enum_arg(
"--evidence-type",
&evidence_type,
bundle::VALID_EVIDENCE_TYPES,
);
validate_enum_arg(
"--source-type",
&source_type,
bundle::VALID_PROVENANCE_SOURCE_TYPES,
);
let parsed_entities = parse_entities(&entities);
let parsed_evidence_spans = parse_evidence_spans(&evidence_span);
for (name, etype) in &parsed_entities {
if !bundle::VALID_ENTITY_TYPES.contains(&etype.as_str()) {
fail(&format!(
"invalid entity type '{}' for '{}'. Valid: {}",
etype,
name,
bundle::VALID_ENTITY_TYPES.join(", "),
));
}
}
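// Author and species lists are semicolon-separated on the CLI, e.g.
// --source-authors "Doe J; Smith A" (values shown are illustrative).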
let parsed_source_authors = source_authors
.map(|s| {
s.split(';')
.map(|a| a.trim().to_string())
.filter(|a| !a.is_empty())
.collect()
})
.unwrap_or_default();
let parsed_species = species
.map(|s| {
s.split(';')
.map(|a| a.trim().to_string())
.filter(|a| !a.is_empty())
.collect()
})
.unwrap_or_default();
let report = state::add_finding(
&frontier,
state::FindingDraftOptions {
text: assertion,
assertion_type: r#type,
source,
source_type,
author,
confidence,
evidence_type,
entities: parsed_entities,
doi,
pmid,
year,
journal,
url,
source_authors: parsed_source_authors,
conditions_text,
species: parsed_species,
in_vivo,
in_vitro,
human_data,
clinical_trial,
entities_reviewed,
evidence_spans: parsed_evidence_spans,
gap,
negative_space,
},
apply,
)
.unwrap_or_else(|e| fail_return(&e));
print_state_report(&report, json);
}
FindingCommands::Supersede {
frontier,
old_id,
assertion,
r#type,
source,
source_type,
author,
reason,
confidence,
evidence_type,
entities,
doi,
pmid,
year,
journal,
url,
source_authors,
conditions_text,
species,
in_vivo,
in_vitro,
human_data,
clinical_trial,
json,
apply,
} => {
validate_enum_arg("--type", &r#type, bundle::VALID_ASSERTION_TYPES);
validate_enum_arg(
"--evidence-type",
&evidence_type,
bundle::VALID_EVIDENCE_TYPES,
);
validate_enum_arg(
"--source-type",
&source_type,
bundle::VALID_PROVENANCE_SOURCE_TYPES,
);
let parsed_entities = parse_entities(&entities);
for (name, etype) in &parsed_entities {
if !bundle::VALID_ENTITY_TYPES.contains(&etype.as_str()) {
fail(&format!(
"invalid entity type '{}' for '{}'. Valid: {}",
etype,
name,
bundle::VALID_ENTITY_TYPES.join(", "),
));
}
}
let parsed_source_authors = source_authors
.map(|s| {
s.split(';')
.map(|a| a.trim().to_string())
.filter(|a| !a.is_empty())
.collect()
})
.unwrap_or_default();
let parsed_species = species
.map(|s| {
s.split(';')
.map(|a| a.trim().to_string())
.filter(|a| !a.is_empty())
.collect()
})
.unwrap_or_default();
let report = state::supersede_finding(
&frontier,
&old_id,
&reason,
state::FindingDraftOptions {
text: assertion,
assertion_type: r#type,
source,
source_type,
author,
confidence,
evidence_type,
entities: parsed_entities,
doi,
pmid,
year,
journal,
url,
source_authors: parsed_source_authors,
conditions_text,
species: parsed_species,
in_vivo,
in_vitro,
human_data,
clinical_trial,
entities_reviewed: false,
evidence_spans: Vec::new(),
gap: false,
negative_space: false,
},
apply,
)
.unwrap_or_else(|e| fail_return(&e));
print_state_report(&report, json);
}
FindingCommands::CausalSet {
frontier,
finding_id,
claim,
grade,
actor,
reason,
json,
} => {
if !bundle::VALID_CAUSAL_CLAIMS.contains(&claim.as_str()) {
fail(&format!(
"invalid --claim '{claim}'; valid: {:?}",
bundle::VALID_CAUSAL_CLAIMS
));
}
if let Some(g) = grade.as_deref()
&& !bundle::VALID_CAUSAL_EVIDENCE_GRADES.contains(&g)
{
fail(&format!(
"invalid --grade '{g}'; valid: {:?}",
bundle::VALID_CAUSAL_EVIDENCE_GRADES
));
}
let report = state::set_causal(
&frontier,
&finding_id,
&claim,
grade.as_deref(),
&actor,
&reason,
)
.unwrap_or_else(|e| fail_return(&e));
print_state_report(&report, json);
}
},
Commands::Review {
frontier,
finding_id,
status,
reason,
reviewer,
apply,
json,
} => {
let status = status.unwrap_or_else(|| fail_return("--status is required for review"));
let reason = reason.unwrap_or_else(|| fail_return("--reason is required for review"));
let report = state::review_finding(
&frontier,
&finding_id,
state::ReviewOptions {
status,
reason,
reviewer,
},
apply,
)
.unwrap_or_else(|e| fail_return(&e));
print_state_report(&report, json);
}
Commands::Note {
frontier,
finding_id,
text,
author,
apply,
json,
} => {
let report = state::add_note(&frontier, &finding_id, &text, &author, apply)
.unwrap_or_else(|e| fail_return(&e));
print_state_report(&report, json);
}
Commands::Caveat {
frontier,
finding_id,
text,
author,
apply,
json,
} => {
let report = state::caveat_finding(&frontier, &finding_id, &text, &author, apply)
.unwrap_or_else(|e| fail_return(&e));
print_state_report(&report, json);
}
Commands::Revise {
frontier,
finding_id,
confidence,
reason,
reviewer,
apply,
json,
} => {
let report = state::revise_confidence(
&frontier,
&finding_id,
state::ReviseOptions {
confidence,
reason,
reviewer,
},
apply,
)
.unwrap_or_else(|e| fail_return(&e));
print_state_report(&report, json);
}
Commands::Reject {
frontier,
finding_id,
reason,
reviewer,
apply,
json,
} => {
let report = state::reject_finding(&frontier, &finding_id, &reviewer, &reason, apply)
.unwrap_or_else(|e| fail_return(&e));
print_state_report(&report, json);
}
Commands::History {
frontier,
finding_id,
json,
as_of,
} => {
let payload = state::history_as_of(&frontier, &finding_id, as_of.as_deref())
.unwrap_or_else(|e| fail_return(&e));
if json {
println!(
"{}",
serde_json::to_string_pretty(&payload)
.expect("failed to serialize history response")
);
} else {
print_history(&payload);
}
}
Commands::ImportEvents { source, into, json } => {
let report =
review::import_review_events(&source, &into).unwrap_or_else(|e| fail_return(&e));
if json {
println!(
"{}",
serde_json::to_string_pretty(&json!({
"ok": true,
"command": "import-events",
"source": report.source,
"target": into.display().to_string(),
"summary": {
"imported": report.imported,
"new": report.new,
"duplicate": report.duplicate,
"canonical_events_imported": report.events_imported,
"canonical_events_new": report.events_new,
"canonical_events_duplicate": report.events_duplicate,
}
}))
.expect("failed to serialize import-events response")
);
} else {
println!("{report}");
}
}
Commands::Retract {
source,
finding_id,
reason,
reviewer,
apply,
json,
} => {
let report = state::retract_finding(&source, &finding_id, &reviewer, &reason, apply)
.unwrap_or_else(|e| fail_return(&e));
print_state_report(&report, json);
}
Commands::LocatorRepair {
frontier,
atom_id,
locator,
reviewer,
reason,
apply,
json,
} => {
cmd_locator_repair(
&frontier,
&atom_id,
locator.as_deref(),
&reviewer,
&reason,
apply,
json,
);
}
Commands::SourceFetch {
identifier,
cache,
out,
refresh,
json,
} => {
cmd_source_fetch(&identifier, cache.as_deref(), out.as_deref(), refresh, json).await;
}
Commands::SpanRepair {
frontier,
finding_id,
section,
text,
reviewer,
reason,
apply,
json,
} => {
cmd_span_repair(
&frontier,
&finding_id,
&section,
&text,
&reviewer,
&reason,
apply,
json,
);
}
Commands::EntityAdd {
frontier,
finding_id,
entity,
entity_type,
reviewer,
reason,
apply,
json,
} => {
let report = state::add_finding_entity(
&frontier,
&finding_id,
&entity,
&entity_type,
&reviewer,
&reason,
apply,
)
.unwrap_or_else(|e| fail_return(&e));
print_state_report(&report, json);
}
Commands::EntityResolve {
frontier,
finding_id,
entity,
source,
id,
confidence,
matched_name,
resolution_method,
reviewer,
reason,
apply,
json,
} => {
cmd_entity_resolve(
&frontier,
&finding_id,
&entity,
&source,
&id,
confidence,
matched_name.as_deref(),
&resolution_method,
&reviewer,
&reason,
apply,
json,
);
}
Commands::Propagate {
frontier,
retract,
reduce_confidence,
to,
output,
} => cmd_propagate(&frontier, retract, reduce_confidence, to, output.as_deref()),
Commands::Replicate {
frontier,
target,
outcome,
by,
conditions,
source_title,
doi,
pmid,
sample_size,
note,
previous_attempt,
no_cascade,
json,
} => cmd_replicate(
&frontier,
&target,
&outcome,
&by,
&conditions,
&source_title,
doi.as_deref(),
pmid.as_deref(),
sample_size.as_deref(),
&note,
previous_attempt.as_deref(),
no_cascade,
json,
),
Commands::Replications {
frontier,
target,
json,
} => cmd_replications(&frontier, target.as_deref(), json),
Commands::DatasetAdd {
frontier,
name,
version,
content_hash,
url,
license,
source_title,
doi,
row_count,
json,
} => cmd_dataset_add(
&frontier,
&name,
version.as_deref(),
&content_hash,
url.as_deref(),
license.as_deref(),
&source_title,
doi.as_deref(),
row_count,
json,
),
Commands::Datasets { frontier, json } => cmd_datasets(&frontier, json),
Commands::CodeAdd {
frontier,
language,
repo_url,
commit,
path,
content_hash,
line_start,
line_end,
entry_point,
json,
} => cmd_code_add(
&frontier,
&language,
repo_url.as_deref(),
commit.as_deref(),
&path,
&content_hash,
line_start,
line_end,
entry_point.as_deref(),
json,
),
Commands::CodeArtifacts { frontier, json } => cmd_code_artifacts(&frontier, json),
Commands::ArtifactAdd {
frontier,
kind,
name,
file,
url,
content_hash,
media_type,
license,
source_title,
source_url,
doi,
target,
metadata,
access_tier,
deposited_by,
reason,
json,
} => cmd_artifact_add(
&frontier,
&kind,
&name,
file.as_deref(),
url.as_deref(),
content_hash.as_deref(),
media_type.as_deref(),
license.as_deref(),
source_title.as_deref(),
source_url.as_deref(),
doi.as_deref(),
target,
metadata,
&access_tier,
&deposited_by,
&reason,
json,
),
Commands::Artifacts {
frontier,
target,
json,
} => cmd_artifacts(&frontier, target.as_deref(), json),
Commands::ArtifactAudit { frontier, json } => cmd_artifact_audit(&frontier, json),
Commands::DecisionBrief { frontier, json } => cmd_decision_brief(&frontier, json),
Commands::TrialSummary { frontier, json } => cmd_trial_summary(&frontier, json),
Commands::SourceVerification { frontier, json } => cmd_source_verification(&frontier, json),
Commands::SourceIngestPlan { frontier, json } => cmd_source_ingest_plan(&frontier, json),
Commands::ClinicalTrialImport {
frontier,
nct_id,
input_json,
target,
deposited_by,
reason,
license,
json,
} => {
cmd_clinical_trial_import(
&frontier,
&nct_id,
input_json.as_deref(),
target,
&deposited_by,
&reason,
&license,
json,
)
.await
}
Commands::NegativeResultAdd {
frontier,
kind,
deposited_by,
reason,
conditions_text,
notes,
target,
endpoint,
intervention,
comparator,
population,
n_enrolled,
power,
ci_lower,
ci_upper,
effect_size_threshold,
registry_id,
reagent,
observation,
attempts,
source_title,
doi,
url,
year,
json,
} => cmd_negative_result_add(
&frontier,
&kind,
&deposited_by,
&reason,
&conditions_text,
&notes,
target,
endpoint.as_deref(),
intervention.as_deref(),
comparator.as_deref(),
population.as_deref(),
n_enrolled,
power,
ci_lower,
ci_upper,
effect_size_threshold,
registry_id.as_deref(),
reagent.as_deref(),
observation.as_deref(),
attempts,
&source_title,
doi.as_deref(),
url.as_deref(),
year,
json,
),
Commands::NegativeResults {
frontier,
target,
json,
} => cmd_negative_results(&frontier, target.as_deref(), json),
Commands::TrajectoryCreate {
frontier,
deposited_by,
reason,
target,
notes,
json,
} => cmd_trajectory_create(&frontier, &deposited_by, &reason, target, &notes, json),
Commands::TrajectoryStep {
frontier,
trajectory_id,
kind,
description,
actor,
reason,
reference,
json,
} => cmd_trajectory_step(
&frontier,
&trajectory_id,
&kind,
&description,
&actor,
&reason,
reference,
json,
),
Commands::Trajectories {
frontier,
target,
json,
} => cmd_trajectories(&frontier, target.as_deref(), json),
Commands::TierSet {
frontier,
object_type,
object_id,
tier,
actor,
reason,
json,
} => cmd_tier_set(
&frontier,
&object_type,
&object_id,
&tier,
&actor,
&reason,
json,
),
Commands::Predict {
frontier,
by,
claim,
criterion,
resolves_by,
confidence,
target,
outcome,
conditions,
json,
} => cmd_predict(
&frontier,
&by,
&claim,
&criterion,
resolves_by.as_deref(),
confidence,
&target,
&outcome,
&conditions,
json,
),
Commands::Resolve {
frontier,
prediction,
outcome,
matched,
by,
confidence,
source_title,
doi,
json,
} => cmd_resolve(
&frontier,
&prediction,
&outcome,
matched,
&by,
confidence,
&source_title,
doi.as_deref(),
json,
),
Commands::Predictions {
frontier,
by,
open,
json,
} => cmd_predictions(&frontier, by.as_deref(), open, json),
Commands::Calibration {
frontier,
actor,
json,
} => cmd_calibration(&frontier, actor.as_deref(), json),
Commands::PredictionsExpire {
frontier,
now,
dry_run,
json,
} => cmd_predictions_expire(&frontier, now.as_deref(), dry_run, json),
Commands::Consensus {
frontier,
target,
weighting,
causal_claim,
causal_grade_min,
json,
} => cmd_consensus(
&frontier,
&target,
&weighting,
causal_claim.as_deref(),
causal_grade_min.as_deref(),
json,
),
Commands::Ingest {
path,
frontier,
backend,
actor,
dry_run,
json,
} => {
cmd_ingest(
&path,
&frontier,
backend.as_deref(),
actor.as_deref(),
dry_run,
json,
)
.await
}
Commands::Propose {
frontier,
finding_id,
status,
reason,
reviewer,
apply,
json,
} => {
let options = state::ReviewOptions {
status,
reason,
reviewer,
};
let report = state::review_finding(&frontier, &finding_id, options, apply)
.unwrap_or_else(|e| fail_return(&e));
print_state_report(&report, json);
}
Commands::Accept {
frontier,
proposal_id,
reviewer,
reason,
json,
} => {
let event_id = proposals::accept_at_path(&frontier, &proposal_id, &reviewer, &reason)
.unwrap_or_else(|e| fail_return(&e));
let payload = json!({
"ok": true,
"command": "accept",
"frontier": frontier.display().to_string(),
"proposal_id": proposal_id,
"reviewer": reviewer,
"applied_event_id": event_id,
});
if json {
println!(
"{}",
serde_json::to_string_pretty(&payload)
.expect("failed to serialize accept response")
);
} else {
println!(
"{} accepted and applied proposal {}",
style::ok("ok"),
proposal_id
);
println!(" event: {}", event_id);
}
}
Commands::Attest {
frontier,
event,
attester,
scope_note,
proof_id,
signature,
key,
json,
} => {
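// Two modes: --event attests a single event (requires --attester and
// --scope-note), while the default frontier-wide mode signs every finding
// with the private key at --key.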
if let Some(target_event_id) = event {
let attester_id = attester.unwrap_or_else(|| {
fail_return("attest: --attester is required in per-event mode")
});
let scope = scope_note.unwrap_or_else(|| {
fail_return("attest: --scope-note is required in per-event mode")
});
let attestation_event_id = state::record_attestation(
&frontier,
&target_event_id,
&attester_id,
&scope,
proof_id.as_deref(),
signature.as_deref(),
)
.unwrap_or_else(|e| fail_return(&e));
if json {
let payload = json!({
"ok": true,
"command": "attest.event",
"frontier": frontier.display().to_string(),
"target_event_id": target_event_id,
"attestation_event_id": attestation_event_id,
"attester_id": attester_id,
});
println!(
"{}",
serde_json::to_string_pretty(&payload)
.expect("failed to serialize attest.event response")
);
} else {
println!(
"{} attested {} by {} ({})",
style::ok("ok"),
target_event_id,
attester_id,
attestation_event_id
);
}
return;
}
let key_path = key.unwrap_or_else(|| {
fail_return(
"attest: --key is required in frontier-wide mode (or pass --event for per-event mode)",
)
});
let count =
sign::sign_frontier(&frontier, &key_path).unwrap_or_else(|e| fail_return(&e));
let payload = json!({
"ok": true,
"command": "attest",
"frontier": frontier.display().to_string(),
"private_key": key_path.display().to_string(),
"signed": count,
});
if json {
println!(
"{}",
serde_json::to_string_pretty(&payload)
.expect("failed to serialize attest response")
);
} else {
println!(
"{} {count} findings in {}",
style::ok("attested"),
frontier.display()
);
}
}
Commands::Lineage {
frontier,
finding_id,
as_of,
json,
} => {
let payload = state::history_as_of(&frontier, &finding_id, as_of.as_deref())
.unwrap_or_else(|e| fail_return(&e));
if json {
println!(
"{}",
serde_json::to_string_pretty(&payload)
.expect("failed to serialize lineage response")
);
} else {
print_history(&payload);
}
}
Commands::Carina { action } => cmd_carina(action),
Commands::Atlas { action } => cmd_atlas(action).await,
Commands::Constellation { action } => cmd_constellation(action).await,
}
}
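/// Dispatches `vela atlas` actions. Each handler lives in a `OnceLock`
/// static registered by the optional atlas integration; if nothing was
/// registered, the command fails with an explanatory message.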
async fn cmd_atlas(action: AtlasAction) {
match action {
AtlasAction::Init {
name,
frontiers,
domain,
scope_note,
atlases_root,
json,
} => match ATLAS_INIT_HANDLER.get() {
Some(handler) => {
handler(atlases_root, name, domain, scope_note, frontiers, json).await;
}
None => fail("vela atlas init: handler not registered (built without vela-atlas)"),
},
AtlasAction::Materialize {
name,
atlases_root,
json,
} => match ATLAS_MATERIALIZE_HANDLER.get() {
Some(handler) => handler(atlases_root, name, json).await,
None => fail("vela atlas materialize: handler not registered"),
},
AtlasAction::Serve {
name,
atlases_root,
port,
no_open,
} => {
match ATLAS_SERVE_HANDLER.get() {
Some(handler) => handler(atlases_root, name, port, !no_open).await,
None => fail("vela atlas serve: handler not registered"),
}
}
AtlasAction::Update {
name,
add_frontier,
remove_vfr_id,
atlases_root,
json,
} => match ATLAS_UPDATE_HANDLER.get() {
Some(handler) => {
handler(atlases_root, name, add_frontier, remove_vfr_id, json).await;
}
None => fail("vela atlas update: handler not registered"),
},
}
}
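/// Dispatches `vela constellation` actions through the same registered-handler
/// pattern as `cmd_atlas`.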
async fn cmd_constellation(action: ConstellationAction) {
match action {
ConstellationAction::Init {
name,
atlases,
scope_note,
constellations_root,
json,
} => match CONSTELLATION_INIT_HANDLER.get() {
Some(handler) => {
handler(constellations_root, name, scope_note, atlases, json).await;
}
None => fail(
"vela constellation init: handler not registered (built without vela-constellation)",
),
},
ConstellationAction::Materialize {
name,
constellations_root,
json,
} => match CONSTELLATION_MATERIALIZE_HANDLER.get() {
Some(handler) => handler(constellations_root, name, json).await,
None => fail("vela constellation materialize: handler not registered"),
},
ConstellationAction::Serve {
name,
constellations_root,
port,
no_open,
} => match CONSTELLATION_SERVE_HANDLER.get() {
Some(handler) => handler(constellations_root, name, port, !no_open).await,
None => fail("vela constellation serve: handler not registered"),
},
}
}
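/// Handles `vela carina`: list the primitives bundled with this build,
/// print a primitive's schema, or validate a JSON document against one
/// (or all) of them.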
fn cmd_carina(action: CarinaAction) {
match action {
CarinaAction::List { json } => {
if json {
println!(
"{}",
serde_json::to_string_pretty(&json!({
"ok": true,
"command": "carina.list",
"primitives": carina_validate::PRIMITIVE_NAMES,
}))
.expect("failed to serialize carina.list")
);
} else {
println!("Carina primitives bundled with this build:");
for name in carina_validate::PRIMITIVE_NAMES {
println!(" · {name}");
}
}
}
CarinaAction::Schema { primitive } => match carina_validate::schema_text(&primitive) {
Some(text) => print!("{text}"),
None => fail(&format!("carina: unknown primitive '{primitive}'")),
},
CarinaAction::Validate {
path,
primitive,
json,
} => {
let text = std::fs::read_to_string(&path)
.unwrap_or_else(|e| fail_return(&format!("read {}: {e}", path.display())));
let value: Value = serde_json::from_str(&text)
.unwrap_or_else(|e| fail_return(&format!("parse {}: {e}", path.display())));
type CarinaValidateOutcome = Result<Option<&'static str>, Vec<String>>;
let mut report: Vec<(String, CarinaValidateOutcome)> = Vec::new();
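// A top-level "primitives" object (with no --primitive override) means
// "validate each child against the primitive named by its key"; otherwise
// validate the whole document, auto-detecting the primitive if needed.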
if value.get("primitives").and_then(Value::as_object).is_some() && primitive.is_none() {
let primitives = value.get("primitives").and_then(Value::as_object).unwrap();
for (key, child) in primitives {
let outcome = carina_validate::validate(key, child)
.map(|()| carina_validate::detect_primitive(child));
report.push((key.clone(), outcome));
}
} else {
let outcome = match primitive.as_deref() {
Some(name) => carina_validate::validate(name, &value).map(|()| {
carina_validate::PRIMITIVE_NAMES
.iter()
.copied()
.find(|p| *p == name)
}),
None => carina_validate::validate_auto(&value).map(Some),
};
let label = primitive.clone().unwrap_or_else(|| "<auto>".to_string());
report.push((label, outcome));
}
let total = report.len();
let pass = report.iter().filter(|(_, r)| r.is_ok()).count();
let failed = total - pass;
if json {
let entries: Vec<Value> = report
.iter()
.map(|(label, r)| match r {
Ok(name) => json!({
"key": label,
"primitive": name,
"ok": true,
}),
Err(errs) => json!({
"key": label,
"ok": false,
"errors": errs,
}),
})
.collect();
println!(
"{}",
serde_json::to_string_pretty(&json!({
"ok": fail == 0,
"command": "carina.validate",
"file": path.display().to_string(),
"total": total,
"passed": pass,
"failed": fail,
"entries": entries,
}))
.expect("failed to serialize carina.validate")
);
} else {
for (label, r) in &report {
match r {
Ok(Some(name)) => println!(" {} {label} (as {name})", style::ok("ok")),
Ok(None) => println!(" {} {label}", style::ok("ok")),
Err(errs) => {
println!(" {} {label}", style::lost("fail"));
for e in errs {
println!(" {e}");
}
}
}
}
println!();
if failed == 0 {
println!("{} {pass}/{total} valid", style::ok("carina.validate"));
} else {
println!(
"{} {pass}/{total} valid · {fail} failed",
style::lost("carina.validate")
);
}
}
if failed > 0 {
std::process::exit(1);
}
}
}
}
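/// `vela consensus`: aggregate findings similar to a `vf_` target into a
/// weighted consensus confidence with a 95% credible interval, optionally
/// filtered by causal claim type and minimum causal evidence grade; the top
/// constituents are listed by descending weight.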
fn cmd_consensus(
frontier: &Path,
target: &str,
weighting_str: &str,
causal_claim: Option<&str>,
causal_grade_min: Option<&str>,
json: bool,
) {
use crate::bundle::{CausalClaim, CausalEvidenceGrade};
if !target.starts_with("vf_") {
fail(&format!("target `{target}` is not a vf_ finding id"));
}
let scheme =
crate::aggregate::WeightingScheme::parse(weighting_str).unwrap_or_else(|e| fail_return(&e));
let parsed_claim = match causal_claim {
None => None,
Some("correlation") => Some(CausalClaim::Correlation),
Some("mediation") => Some(CausalClaim::Mediation),
Some("intervention") => Some(CausalClaim::Intervention),
Some(other) => fail_return(&format!(
"invalid --causal-claim '{other}'; valid: correlation | mediation | intervention"
)),
};
let parsed_grade = match causal_grade_min {
None => None,
Some("theoretical") => Some(CausalEvidenceGrade::Theoretical),
Some("observational") => Some(CausalEvidenceGrade::Observational),
Some("quasi_experimental") => Some(CausalEvidenceGrade::QuasiExperimental),
Some("rct") => Some(CausalEvidenceGrade::Rct),
Some(other) => fail_return(&format!(
"invalid --causal-grade-min '{other}'; valid: theoretical | observational | quasi_experimental | rct"
)),
};
let filter = crate::aggregate::AggregateFilter {
causal_claim: parsed_claim,
causal_grade_min: parsed_grade,
};
let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
let result = crate::aggregate::consensus_for_with_filter(&project, target, scheme, &filter)
.unwrap_or_else(|| fail_return(&format!("target `{target}` not in frontier")));
if json {
println!(
"{}",
serde_json::to_string_pretty(&result).expect("serialize consensus")
);
return;
}
println!();
println!(
" {}",
format!(
"VELA · CONSENSUS · {} ({})",
result.target, result.weighting
)
.to_uppercase()
.dimmed()
);
println!(" {}", style::tick_row(60));
println!(
" target: {}",
truncate(&result.target_assertion, 80)
);
println!(" similar findings: {}", result.n_findings);
println!(
" consensus: {:.3} ({:.3} – {:.3} 95% credible)",
result.consensus_confidence, result.credible_interval_lo, result.credible_interval_hi
);
println!();
println!(" constituents (sorted by weight):");
let mut sorted = result.constituents.clone();
sorted.sort_by(|a, b| {
b.weight
.partial_cmp(&a.weight)
.unwrap_or(std::cmp::Ordering::Equal)
});
for c in sorted.iter().take(10) {
let repls = if c.n_replications > 0 {
format!(" ({}r {}f)", c.n_replicated, c.n_failed_replications)
} else {
String::new()
};
println!(
" · w={:.2} raw={:.2} adj={:.2}{}",
c.weight, c.raw_score, c.adjusted_score, repls
);
println!(" {}", truncate(&c.assertion_text, 88));
}
if result.constituents.len() > 10 {
println!(" ... ({} more)", result.constituents.len() - 10);
}
}
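/// Parse an expected-outcome spec: `affirmed`, `falsified`, `cat:LABEL`, or
/// `quant:VALUE±TOL UNITS` (`+/-` also accepted in place of `±`; units may be
/// omitted), e.g. `quant:0.42±0.05 mg/mL`.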
fn parse_expected_outcome(s: &str) -> Result<crate::bundle::ExpectedOutcome, String> {
let trimmed = s.trim();
if trimmed.eq_ignore_ascii_case("affirmed") {
return Ok(crate::bundle::ExpectedOutcome::Affirmed);
}
if trimmed.eq_ignore_ascii_case("falsified") {
return Ok(crate::bundle::ExpectedOutcome::Falsified);
}
if let Some(rest) = trimmed.strip_prefix("cat:") {
return Ok(crate::bundle::ExpectedOutcome::Categorical {
value: rest.to_string(),
});
}
if let Some(rest) = trimmed.strip_prefix("quant:") {
let (vt, units) = rest.split_once(' ').unwrap_or((rest, ""));
let (val_s, tol_s) = vt
.split_once('±')
.or_else(|| vt.split_once("+/-"))
.ok_or_else(|| format!("expected `quant:VALUE±TOL UNITS`, got `quant:{rest}`"))?;
let value: f64 = val_s
.parse()
.map_err(|e| format!("bad quant value `{val_s}`: {e}"))?;
let tolerance: f64 = tol_s
.parse()
.map_err(|e| format!("bad quant tolerance `{tol_s}`: {e}"))?;
return Ok(crate::bundle::ExpectedOutcome::Quantitative {
value,
tolerance,
units: units.to_string(),
});
}
Err(format!(
"unknown outcome `{s}`; expected one of: affirmed | falsified | quant:V±T units | cat:label"
))
}
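/// `vela predict`: record a falsifiable prediction against one or more `vf_`
/// findings. Confidence must lie in [0, 1] and every target must already
/// exist in the frontier; condition flags (in vitro, in vivo, human data,
/// clinical trial) are inferred from keywords in the conditions text, and a
/// prediction whose derived id already exists is skipped rather than rewritten.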
#[allow(clippy::too_many_arguments)]
fn cmd_predict(
frontier: &Path,
by: &str,
claim: &str,
criterion: &str,
resolves_by: Option<&str>,
confidence: f64,
target_csv: &str,
outcome: &str,
conditions_text: &str,
json: bool,
) {
if !(0.0..=1.0).contains(&confidence) {
fail(&format!("confidence must be in [0, 1]; got {confidence}"));
}
let expected = parse_expected_outcome(outcome).unwrap_or_else(|e| fail_return(&e));
let mut project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
let targets: Vec<String> = target_csv
.split(',')
.map(|s| s.trim().to_string())
.filter(|s| !s.is_empty())
.collect();
for t in &targets {
if !t.starts_with("vf_") {
fail(&format!("target `{t}` is not a vf_ id"));
}
if !project.findings.iter().any(|f| f.id == *t) {
fail(&format!("target `{t}` not present in frontier"));
}
}
let lower = conditions_text.to_lowercase();
let conditions = crate::bundle::Conditions {
text: conditions_text.to_string(),
species_verified: Vec::new(),
species_unverified: Vec::new(),
in_vitro: lower.contains("in vitro"),
in_vivo: lower.contains("in vivo"),
human_data: lower.contains("human") || lower.contains("clinical"),
clinical_trial: lower.contains("clinical trial") || lower.contains("phase "),
concentration_range: None,
duration: None,
age_group: None,
cell_type: None,
};
let prediction = crate::bundle::Prediction::new(
claim.to_string(),
targets,
None,
resolves_by.map(|s| s.to_string()),
criterion.to_string(),
expected,
by.to_string(),
confidence,
conditions,
);
if project.predictions.iter().any(|p| p.id == prediction.id) {
if json {
println!(
"{}",
serde_json::to_string_pretty(&json!({
"ok": false,
"command": "predict",
"reason": "prediction_already_exists",
"id": prediction.id,
}))
.expect("serialize")
);
} else {
println!(
"{} prediction {} already exists in {}; skipping.",
style::warn("predict"),
prediction.id,
frontier.display()
);
}
return;
}
let new_id = prediction.id.clone();
project.predictions.push(prediction);
repo::save_to_path(frontier, &project).unwrap_or_else(|e| fail_return(&e));
if json {
println!(
"{}",
serde_json::to_string_pretty(&json!({
"ok": true,
"command": "predict",
"id": new_id,
"made_by": by,
"confidence": confidence,
"frontier": frontier.display().to_string(),
}))
.expect("serialize predict result")
);
} else {
println!();
println!(
" {}",
format!("VELA · PREDICT · {}", new_id)
.to_uppercase()
.dimmed()
);
println!(" {}", style::tick_row(60));
println!(" by: {by}");
println!(" confidence: {confidence:.3}");
if let Some(d) = resolves_by {
println!(" resolves by: {d}");
}
println!(" outcome: {outcome}");
println!(" claim: {}", truncate(claim, 88));
println!();
println!(
" {} prediction recorded in {}",
style::ok("ok"),
frontier.display()
);
}
}
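/// `vela resolve`: attach a resolution to an existing `vpred_` prediction,
/// recording the actual outcome, whether it matched, and who resolved it.
/// Duplicate resolution ids are skipped rather than overwritten.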
#[allow(clippy::too_many_arguments)]
fn cmd_resolve(
frontier: &Path,
prediction_id: &str,
actual_outcome: &str,
matched: bool,
by: &str,
confidence: f64,
source_title: &str,
doi: Option<&str>,
json: bool,
) {
if !prediction_id.starts_with("vpred_") {
fail(&format!("prediction `{prediction_id}` is not a vpred_ id"));
}
if !(0.0..=1.0).contains(&confidence) {
fail(&format!("confidence must be in [0, 1]; got {confidence}"));
}
let mut project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
if !project.predictions.iter().any(|p| p.id == prediction_id) {
fail(&format!(
"prediction `{prediction_id}` not present in frontier"
));
}
let evidence = crate::bundle::Evidence {
evidence_type: "experimental".to_string(),
model_system: String::new(),
species: None,
method: "prediction_resolution".to_string(),
sample_size: None,
effect_size: None,
p_value: None,
replicated: false,
replication_count: None,
evidence_spans: if source_title.is_empty() {
Vec::new()
} else {
vec![serde_json::json!({"text": source_title})]
},
};
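// `doi` is accepted by the CLI but not currently recorded on the resolution.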
let _ = doi;
let resolution = crate::bundle::Resolution::new(
prediction_id.to_string(),
actual_outcome.to_string(),
matched,
by.to_string(),
evidence,
confidence,
);
if project.resolutions.iter().any(|r| r.id == resolution.id) {
if json {
println!(
"{}",
serde_json::to_string_pretty(&json!({
"ok": false,
"command": "resolve",
"reason": "resolution_already_exists",
"id": resolution.id,
}))
.expect("serialize")
);
} else {
println!(
"{} resolution {} already exists in {}; skipping.",
style::warn("resolve"),
resolution.id,
frontier.display()
);
}
return;
}
let new_id = resolution.id.clone();
project.resolutions.push(resolution);
repo::save_to_path(frontier, &project).unwrap_or_else(|e| fail_return(&e));
if json {
println!(
"{}",
serde_json::to_string_pretty(&json!({
"ok": true,
"command": "resolve",
"id": new_id,
"prediction": prediction_id,
"matched": matched,
"frontier": frontier.display().to_string(),
}))
.expect("serialize resolve result")
);
} else {
println!();
println!(
" {}",
format!("VELA · RESOLVE · {}", new_id)
.to_uppercase()
.dimmed()
);
println!(" {}", style::tick_row(60));
println!(" prediction: {prediction_id}");
println!(
" matched: {}",
if matched {
style::ok("yes")
} else {
style::lost("no")
}
);
println!(" by: {by}");
println!(" outcome: {}", truncate(actual_outcome, 80));
println!();
println!(
" {} resolution recorded in {}",
style::ok("ok"),
frontier.display()
);
}
}
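/// `vela predictions`: list predictions in the frontier, optionally filtered
/// by author or restricted to open (unresolved) ones, sorted by deadline.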
fn cmd_predictions(frontier: &Path, by: Option<&str>, open: bool, json: bool) {
let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
let resolved_ids: std::collections::HashSet<&str> = project
.resolutions
.iter()
.map(|r| r.prediction_id.as_str())
.collect();
let mut filtered: Vec<&crate::bundle::Prediction> = project
.predictions
.iter()
.filter(|p| by.is_none_or(|b| p.made_by == b))
.filter(|p| !open || !resolved_ids.contains(p.id.as_str()))
.collect();
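// Sort by deadline; predictions with no resolves_by date sink to the end via the "9999" sentinel.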
filtered.sort_by(|a, b| {
a.resolves_by
.as_deref()
.unwrap_or("9999")
.cmp(b.resolves_by.as_deref().unwrap_or("9999"))
});
if json {
let payload: Vec<serde_json::Value> = filtered
.iter()
.map(|p| {
json!({
"id": p.id,
"claim_text": p.claim_text,
"made_by": p.made_by,
"confidence": p.confidence,
"predicted_at": p.predicted_at,
"resolves_by": p.resolves_by,
"expected_outcome": p.expected_outcome,
"resolved": resolved_ids.contains(p.id.as_str()),
})
})
.collect();
println!(
"{}",
serde_json::to_string_pretty(&json!({
"ok": true,
"command": "predictions",
"frontier": frontier.display().to_string(),
"count": payload.len(),
"predictions": payload,
}))
.expect("serialize predictions")
);
return;
}
println!();
println!(
" {}",
format!("VELA · PREDICTIONS · {}", frontier.display())
.to_uppercase()
.dimmed()
);
println!(" {}", style::tick_row(60));
if filtered.is_empty() {
println!(" (no predictions matching filters)");
return;
}
for p in &filtered {
let resolved = resolved_ids.contains(p.id.as_str());
let chip = if resolved {
style::ok("resolved")
} else {
style::warn("open")
};
let deadline = p.resolves_by.as_deref().unwrap_or("(no deadline)");
println!(
" · {} {} by {} → {}",
p.id.dimmed(),
chip,
p.made_by,
deadline,
);
println!(" claim: {}", truncate(&p.claim_text, 90));
println!(" confidence: {:.2}", p.confidence);
}
}
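/// `vela predictions expire`: mark overdue predictions as expired relative to
/// the current time or an RFC 3339 `--now` override. In dry-run mode the
/// expiry pass runs against a throwaway copy and only reports what would change.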
fn cmd_predictions_expire(frontier: &Path, now_override: Option<&str>, dry_run: bool, json: bool) {
use chrono::DateTime;
let now_dt = match now_override {
Some(s) => DateTime::parse_from_rfc3339(s)
.map(|dt| dt.with_timezone(&chrono::Utc))
.unwrap_or_else(|e| fail_return(&format!("invalid --now '{s}': {e}"))),
None => chrono::Utc::now(),
};
if dry_run {
let mut probe = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
let report = crate::calibration::expire_overdue_predictions(&mut probe, now_dt);
if json {
println!(
"{}",
serde_json::to_string_pretty(&json!({
"ok": true,
"command": "predictions.expire",
"dry_run": true,
"report": report,
}))
.expect("serialize predictions.expire (dry-run)")
);
} else {
println!(
"{} dry-run @ {}: {} would expire, {} already expired, {} resolved, {} still open",
style::ok("ok"),
report.now,
report.newly_expired.len(),
report.already_expired.len(),
report.already_resolved.len(),
report.still_open.len(),
);
for id in &report.newly_expired {
println!(" · {id}");
}
}
return;
}
let mut project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
let report = crate::calibration::expire_overdue_predictions(&mut project, now_dt);
repo::save_to_path(frontier, &project).unwrap_or_else(|e| fail_return(&e));
if json {
println!(
"{}",
serde_json::to_string_pretty(&json!({
"ok": true,
"command": "predictions.expire",
"report": report,
}))
.expect("serialize predictions.expire")
);
} else {
println!(
"{} @ {}: {} newly expired, {} already expired, {} resolved, {} still open",
style::ok("expired"),
report.now,
report.newly_expired.len(),
report.already_expired.len(),
report.already_resolved.len(),
report.still_open.len(),
);
for id in &report.newly_expired {
println!(" · {id}");
}
}
}
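/// `vela calibration`: report forecasting skill per actor (hit rate, Brier
/// score, log score) computed from recorded predictions and their resolutions.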
fn cmd_calibration(frontier: &Path, actor: Option<&str>, json: bool) {
let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
let records = match actor {
Some(a) => {
crate::calibration::calibration_for_actor(a, &project.predictions, &project.resolutions)
.map(|r| vec![r])
.unwrap_or_default()
}
None => crate::calibration::calibration_records(&project.predictions, &project.resolutions),
};
if json {
println!(
"{}",
serde_json::to_string_pretty(&json!({
"ok": true,
"command": "calibration",
"frontier": frontier.display().to_string(),
"filter_actor": actor,
"records": records,
}))
.expect("serialize calibration")
);
return;
}
println!();
println!(
" {}",
format!("VELA · CALIBRATION · {}", frontier.display())
.to_uppercase()
.dimmed()
);
println!(" {}", style::tick_row(60));
if records.is_empty() {
println!(" (no calibration records)");
return;
}
for r in &records {
println!(" · {}", r.actor);
println!(
" predictions: {} resolved: {} hits: {}",
r.n_predictions, r.n_resolved, r.n_hit
);
match r.hit_rate {
Some(h) => println!(" hit rate: {:.1}%", h * 100.0),
None => println!(" hit rate: n/a"),
}
match r.brier_score {
Some(b) => println!(
" brier: {:.4} (lower is better; 0.25 = chance)",
b
),
None => println!(" brier: n/a"),
}
match r.log_score {
Some(l) => println!(
" log score: {:.4} (higher is better; 0 = perfect)",
l
),
None => println!(" log score: n/a"),
}
}
}
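/// `vela dataset add`: register a dataset under a content hash with
/// manually curated provenance; a dataset whose id already exists is skipped.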
#[allow(clippy::too_many_arguments)]
fn cmd_dataset_add(
frontier: &Path,
name: &str,
version: Option<&str>,
content_hash: &str,
url: Option<&str>,
license: Option<&str>,
source_title: &str,
doi: Option<&str>,
row_count: Option<u64>,
json: bool,
) {
let mut project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
let provenance = crate::bundle::Provenance {
source_type: "data_release".to_string(),
doi: doi.map(|s| s.to_string()),
pmid: None,
pmc: None,
openalex_id: None,
url: url.map(|s| s.to_string()),
title: source_title.to_string(),
authors: Vec::new(),
year: None,
journal: None,
license: license.map(|s| s.to_string()),
publisher: None,
funders: Vec::new(),
extraction: crate::bundle::Extraction {
method: "manual_curation".to_string(),
model: None,
model_version: None,
extracted_at: chrono::Utc::now().to_rfc3339(),
extractor_version: env!("CARGO_PKG_VERSION").to_string(),
},
review: None,
citation_count: None,
};
let mut dataset = crate::bundle::Dataset::new(
name.to_string(),
version.map(|s| s.to_string()),
content_hash.to_string(),
url.map(|s| s.to_string()),
license.map(|s| s.to_string()),
provenance,
);
dataset.row_count = row_count;
if project.datasets.iter().any(|d| d.id == dataset.id) {
if json {
println!(
"{}",
serde_json::to_string_pretty(&json!({
"ok": false,
"command": "dataset.add",
"reason": "dataset_already_exists",
"id": dataset.id,
}))
.expect("serialize")
);
} else {
println!(
"{} dataset {} already exists in {}; skipping.",
style::warn("dataset"),
dataset.id,
frontier.display()
);
}
return;
}
let new_id = dataset.id.clone();
project.datasets.push(dataset);
repo::save_to_path(frontier, &project).unwrap_or_else(|e| fail_return(&e));
if json {
println!(
"{}",
serde_json::to_string_pretty(&json!({
"ok": true,
"command": "dataset.add",
"id": new_id,
"name": name,
"version": version,
"frontier": frontier.display().to_string(),
}))
.expect("failed to serialize dataset.add result")
);
} else {
println!();
println!(
" {}",
format!("VELA · DATASET · {}", new_id)
.to_uppercase()
.dimmed()
);
println!(" {}", style::tick_row(60));
println!(" name: {name}");
if let Some(v) = version {
println!(" version: {v}");
}
println!(" content_hash: {content_hash}");
if let Some(u) = url {
println!(" url: {u}");
}
println!(" source: {source_title}");
println!();
println!(
" {} dataset recorded in {}",
style::ok("ok"),
frontier.display()
);
}
}
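/// `vela negative-result add`: deposit a negative result. `registered_trial`
/// requires endpoint, intervention, comparator, population, enrollment, power,
/// and an effect-size CI; `exploratory` requires reagent, observation, and
/// attempt count.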
#[allow(clippy::too_many_arguments)]
fn cmd_negative_result_add(
frontier: &Path,
kind: &str,
deposited_by: &str,
reason: &str,
conditions_text: &str,
notes: &str,
targets: Vec<String>,
endpoint: Option<&str>,
intervention: Option<&str>,
comparator: Option<&str>,
population: Option<&str>,
n_enrolled: Option<u32>,
power: Option<f64>,
ci_lower: Option<f64>,
ci_upper: Option<f64>,
effect_size_threshold: Option<f64>,
registry_id: Option<&str>,
reagent: Option<&str>,
observation: Option<&str>,
attempts: Option<u32>,
source_title: &str,
doi: Option<&str>,
url: Option<&str>,
year: Option<i32>,
json: bool,
) {
let nr_kind = match kind {
"registered_trial" => {
let endpoint =
endpoint.unwrap_or_else(|| fail_return("--endpoint required for registered_trial"));
let intervention = intervention
.unwrap_or_else(|| fail_return("--intervention required for registered_trial"));
let comparator = comparator
.unwrap_or_else(|| fail_return("--comparator required for registered_trial"));
let population = population
.unwrap_or_else(|| fail_return("--population required for registered_trial"));
let n_enrolled = n_enrolled
.unwrap_or_else(|| fail_return("--n-enrolled required for registered_trial"));
let power =
power.unwrap_or_else(|| fail_return("--power required for registered_trial"));
let ci_lower =
ci_lower.unwrap_or_else(|| fail_return("--ci-lower required for registered_trial"));
let ci_upper =
ci_upper.unwrap_or_else(|| fail_return("--ci-upper required for registered_trial"));
crate::bundle::NegativeResultKind::RegisteredTrial {
endpoint: endpoint.to_string(),
intervention: intervention.to_string(),
comparator: comparator.to_string(),
population: population.to_string(),
n_enrolled,
power,
effect_size_ci: (ci_lower, ci_upper),
effect_size_threshold,
registry_id: registry_id.map(|s| s.to_string()),
}
}
"exploratory" => {
let reagent =
reagent.unwrap_or_else(|| fail_return("--reagent required for exploratory"));
let observation = observation
.unwrap_or_else(|| fail_return("--observation required for exploratory"));
let attempts =
attempts.unwrap_or_else(|| fail_return("--attempts required for exploratory"));
crate::bundle::NegativeResultKind::Exploratory {
reagent: reagent.to_string(),
observation: observation.to_string(),
attempts,
}
}
other => fail_return(&format!(
"--kind must be 'registered_trial' or 'exploratory', got '{other}'"
)),
};
let conditions = crate::bundle::Conditions {
text: conditions_text.to_string(),
species_verified: Vec::new(),
species_unverified: Vec::new(),
in_vitro: false,
in_vivo: false,
human_data: false,
clinical_trial: matches!(kind, "registered_trial"),
concentration_range: None,
duration: None,
age_group: None,
cell_type: None,
};
let provenance = crate::bundle::Provenance {
source_type: if matches!(kind, "registered_trial") {
"clinical_trial".to_string()
} else {
"lab_notebook".to_string()
},
doi: doi.map(|s| s.to_string()),
pmid: None,
pmc: None,
openalex_id: None,
url: url.map(|s| s.to_string()),
title: source_title.to_string(),
authors: Vec::new(),
year,
journal: None,
license: None,
publisher: None,
funders: Vec::new(),
extraction: crate::bundle::Extraction {
method: "manual_curation".to_string(),
model: None,
model_version: None,
extracted_at: chrono::Utc::now().to_rfc3339(),
extractor_version: env!("CARGO_PKG_VERSION").to_string(),
},
review: None,
citation_count: None,
};
let report = state::add_negative_result(
frontier,
nr_kind,
targets,
deposited_by,
conditions,
provenance,
notes,
reason,
)
.unwrap_or_else(|e| fail_return(&e));
if json {
println!(
"{}",
serde_json::to_string_pretty(&report).expect("serialize report")
);
} else {
println!();
println!(
" {}",
format!("VELA · NEGATIVE-RESULT · {}", report.finding_id)
.to_uppercase()
.dimmed()
);
println!(" {}", style::tick_row(60));
println!(" kind: {kind}");
println!(" deposited_by: {deposited_by}");
if let Some(ev) = &report.applied_event_id {
println!(" event: {ev}");
}
println!(
" {} negative_result deposited in {}",
style::ok("ok"),
frontier.display()
);
}
}
fn cmd_negative_results(frontier: &Path, target: Option<&str>, json: bool) {
let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
let filtered: Vec<&crate::bundle::NegativeResult> = project
.negative_results
.iter()
.filter(|nr| {
target
.map(|t| nr.target_findings.iter().any(|f| f == t))
.unwrap_or(true)
})
.collect();
if json {
println!(
"{}",
serde_json::to_string_pretty(&json!({
"ok": true,
"command": "negative_results",
"frontier": frontier.display().to_string(),
"count": filtered.len(),
"negative_results": filtered,
}))
.expect("serialize negative_results")
);
return;
}
if filtered.is_empty() {
println!(" no negative_results in {}", frontier.display());
return;
}
println!();
println!(
" {} ({})",
"VELA · NEGATIVE RESULTS".dimmed(),
filtered.len()
);
println!(" {}", style::tick_row(60));
for nr in &filtered {
let kind_label = match &nr.kind {
crate::bundle::NegativeResultKind::RegisteredTrial {
endpoint, power, ..
} => format!("trial · {endpoint} · power {power:.2}"),
crate::bundle::NegativeResultKind::Exploratory {
reagent, attempts, ..
} => format!("exploratory · {reagent} · {attempts} attempts"),
};
let retracted = if nr.retracted { " [retracted]" } else { "" };
let review = nr
.review_state
.as_ref()
.map(|s| format!(" [{s:?}]"))
.unwrap_or_default();
println!(" {}{}{}", nr.id, retracted, review);
println!(" {kind_label}");
if !nr.target_findings.is_empty() {
println!(" targets: {}", nr.target_findings.join(", "));
}
}
println!();
}
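/// `vela tier set`: change the access tier of a frontier object, recording
/// the acting party and reason as a state event.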
#[allow(clippy::too_many_arguments)]
fn cmd_tier_set(
frontier: &Path,
object_type: &str,
object_id: &str,
tier: &str,
actor: &str,
reason: &str,
json: bool,
) {
let parsed_tier =
crate::access_tier::AccessTier::parse(tier).unwrap_or_else(|e| fail_return(&e));
let report = state::set_tier(frontier, object_type, object_id, parsed_tier, actor, reason)
.unwrap_or_else(|e| fail_return(&e));
if json {
println!(
"{}",
serde_json::to_string_pretty(&report).expect("serialize tier-set report")
);
} else {
println!();
println!(
" {}",
format!("VELA · TIER · {}", object_id)
.to_uppercase()
.dimmed()
);
println!(" {}", style::tick_row(60));
println!(" object_type: {object_type}");
println!(" new_tier: {}", parsed_tier.canonical());
println!(" actor: {actor}");
if let Some(ev) = &report.applied_event_id {
println!(" event: {ev}");
}
println!(" {} tier set in {}", style::ok("ok"), frontier.display());
}
}
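/// `vela trajectory create`: open a research trajectory over the given target
/// findings, recording who opened it and why.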
#[allow(clippy::too_many_arguments)]
fn cmd_trajectory_create(
frontier: &Path,
deposited_by: &str,
reason: &str,
targets: Vec<String>,
notes: &str,
json: bool,
) {
let report = state::create_trajectory(frontier, targets, deposited_by, notes, reason)
.unwrap_or_else(|e| fail_return(&e));
if json {
println!(
"{}",
serde_json::to_string_pretty(&report).expect("serialize trajectory report")
);
} else {
println!();
println!(
" {}",
format!("VELA · TRAJECTORY · {}", report.finding_id)
.to_uppercase()
.dimmed()
);
println!(" {}", style::tick_row(60));
println!(" deposited_by: {deposited_by}");
if let Some(ev) = &report.applied_event_id {
println!(" event: {ev}");
}
println!(
" {} trajectory opened in {}",
style::ok("ok"),
frontier.display()
);
}
}
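/// `vela trajectory step`: append a step (hypothesis, tried, ruled_out,
/// observed, or refined) to an existing trajectory.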
#[allow(clippy::too_many_arguments)]
fn cmd_trajectory_step(
frontier: &Path,
trajectory_id: &str,
kind: &str,
description: &str,
actor: &str,
reason: &str,
references: Vec<String>,
json: bool,
) {
let parsed_kind = match kind {
"hypothesis" => crate::bundle::TrajectoryStepKind::Hypothesis,
"tried" => crate::bundle::TrajectoryStepKind::Tried,
"ruled_out" => crate::bundle::TrajectoryStepKind::RuledOut,
"observed" => crate::bundle::TrajectoryStepKind::Observed,
"refined" => crate::bundle::TrajectoryStepKind::Refined,
other => fail_return(&format!(
"--kind must be one of hypothesis|tried|ruled_out|observed|refined, got '{other}'"
)),
};
let report = state::append_trajectory_step(
frontier,
trajectory_id,
parsed_kind,
description,
actor,
references,
reason,
)
.unwrap_or_else(|e| fail_return(&e));
if json {
println!(
"{}",
serde_json::to_string_pretty(&report).expect("serialize step report")
);
} else {
println!();
println!(
" {}",
format!("VELA · STEP · {}", report.finding_id)
.to_uppercase()
.dimmed()
);
println!(" {}", style::tick_row(60));
println!(" trajectory: {trajectory_id}");
println!(" kind: {kind}");
println!(" actor: {actor}");
println!(
" {} step appended in {}",
style::ok("ok"),
frontier.display()
);
}
}
fn cmd_trajectories(frontier: &Path, target: Option<&str>, json: bool) {
let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
let filtered: Vec<&crate::bundle::Trajectory> = project
.trajectories
.iter()
.filter(|t| {
target
.map(|tg| t.target_findings.iter().any(|f| f == tg))
.unwrap_or(true)
})
.collect();
if json {
println!(
"{}",
serde_json::to_string_pretty(&json!({
"ok": true,
"command": "trajectories",
"frontier": frontier.display().to_string(),
"count": filtered.len(),
"trajectories": filtered,
}))
.expect("serialize trajectories")
);
return;
}
if filtered.is_empty() {
println!(" no trajectories in {}", frontier.display());
return;
}
println!();
println!(" {} ({})", "VELA · TRAJECTORIES".dimmed(), filtered.len());
println!(" {}", style::tick_row(60));
for t in &filtered {
let retracted = if t.retracted { " [retracted]" } else { "" };
let review = t
.review_state
.as_ref()
.map(|s| format!(" [{s:?}]"))
.unwrap_or_default();
println!(" {}{}{}", t.id, retracted, review);
println!(
" {} step(s){}",
t.steps.len(),
if t.target_findings.is_empty() {
String::new()
} else {
format!(" · targets: {}", t.target_findings.join(", "))
}
);
for step in &t.steps {
let label = match step.kind {
crate::bundle::TrajectoryStepKind::Hypothesis => "hypothesis",
crate::bundle::TrajectoryStepKind::Tried => "tried",
crate::bundle::TrajectoryStepKind::RuledOut => "ruled_out",
crate::bundle::TrajectoryStepKind::Observed => "observed",
crate::bundle::TrajectoryStepKind::Refined => "refined",
};
let preview: String = step.description.chars().take(80).collect();
println!(" [{label}] {preview}");
}
}
println!();
}
fn cmd_datasets(frontier: &Path, json: bool) {
let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
if json {
println!(
"{}",
serde_json::to_string_pretty(&json!({
"ok": true,
"command": "datasets",
"frontier": frontier.display().to_string(),
"count": project.datasets.len(),
"datasets": project.datasets,
}))
.expect("serialize datasets")
);
return;
}
println!();
println!(
" {}",
format!("VELA · DATASETS · {}", frontier.display())
.to_uppercase()
.dimmed()
);
println!(" {}", style::tick_row(60));
if project.datasets.is_empty() {
println!(" (no datasets registered)");
return;
}
for ds in &project.datasets {
let v = ds
.version
.as_deref()
.map(|s| format!("@{s}"))
.unwrap_or_default();
println!(" · {} {}{}", ds.id.dimmed(), ds.name, v);
if let Some(u) = &ds.url {
println!(" url: {}", truncate(u, 80));
}
println!(" hash: {}", truncate(&ds.content_hash, 80));
}
}
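/// `vela code add`: register a code artifact by language, path, and content
/// hash. A start line without an end line collapses to a single-line range;
/// an artifact whose id already exists is skipped.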
#[allow(clippy::too_many_arguments)]
fn cmd_code_add(
frontier: &Path,
language: &str,
repo_url: Option<&str>,
commit: Option<&str>,
path: &str,
content_hash: &str,
line_start: Option<u32>,
line_end: Option<u32>,
entry_point: Option<&str>,
json: bool,
) {
let mut project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
let line_range = match (line_start, line_end) {
(Some(a), Some(b)) => Some((a, b)),
(Some(a), None) => Some((a, a)),
_ => None,
};
let artifact = crate::bundle::CodeArtifact::new(
language.to_string(),
repo_url.map(|s| s.to_string()),
commit.map(|s| s.to_string()),
path.to_string(),
line_range,
content_hash.to_string(),
entry_point.map(|s| s.to_string()),
);
if project.code_artifacts.iter().any(|c| c.id == artifact.id) {
if json {
println!(
"{}",
serde_json::to_string_pretty(&json!({
"ok": false,
"command": "code.add",
"reason": "artifact_already_exists",
"id": artifact.id,
}))
.expect("serialize")
);
} else {
println!(
"{} code artifact {} already exists in {}; skipping.",
style::warn("code"),
artifact.id,
frontier.display()
);
}
return;
}
let new_id = artifact.id.clone();
project.code_artifacts.push(artifact);
repo::save_to_path(frontier, &project).unwrap_or_else(|e| fail_return(&e));
if json {
println!(
"{}",
serde_json::to_string_pretty(&json!({
"ok": true,
"command": "code.add",
"id": new_id,
"language": language,
"path": path,
"frontier": frontier.display().to_string(),
}))
.expect("failed to serialize code.add result")
);
} else {
println!();
println!(
" {}",
format!("VELA · CODE · {}", new_id).to_uppercase().dimmed()
);
println!(" {}", style::tick_row(60));
println!(" language: {language}");
if let Some(r) = repo_url {
println!(" repo: {r}");
}
if let Some(c) = commit {
println!(" commit: {c}");
}
println!(" path: {path}");
if let Some((a, b)) = line_range {
println!(" lines: {a}-{b}");
}
println!(" content_hash: {content_hash}");
println!();
println!(
" {} code artifact recorded in {}",
style::ok("ok"),
frontier.display()
);
}
}
fn cmd_code_artifacts(frontier: &Path, json: bool) {
let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
if json {
println!(
"{}",
serde_json::to_string_pretty(&json!({
"ok": true,
"command": "code-artifacts",
"frontier": frontier.display().to_string(),
"count": project.code_artifacts.len(),
"code_artifacts": project.code_artifacts,
}))
.expect("serialize code-artifacts")
);
return;
}
println!();
println!(
" {}",
format!("VELA · CODE · {}", frontier.display())
.to_uppercase()
.dimmed()
);
println!(" {}", style::tick_row(60));
if project.code_artifacts.is_empty() {
println!(" (no code artifacts registered)");
return;
}
for c in &project.code_artifacts {
let lr = c
.line_range
.map(|(a, b)| format!(":{a}-{b}"))
.unwrap_or_default();
println!(" · {} {} {}{}", c.id.dimmed(), c.language, c.path, lr);
if let Some(r) = &c.repo_url {
println!(" repo: {}", truncate(r, 80));
}
if let Some(g) = &c.git_commit {
println!(" commit: {g}");
}
}
}
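/// Hash raw bytes into the canonical `sha256:<hex>` content-hash string.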
fn sha256_for_bytes(bytes: &[u8]) -> String {
format!("sha256:{}", hex::encode(Sha256::digest(bytes)))
}
fn sha256_hex_part(content_hash: &str) -> &str {
content_hash.strip_prefix("sha256:").unwrap_or(content_hash)
}
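/// Store artifact bytes under `.vela/artifact-blobs/sha256/<hex>` when the
/// frontier lives inside a vela repo and return the repo-relative locator.
/// Returns `None` outside a repo so callers can fall back to a pointer.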
fn artifact_blob_locator(frontier: &Path, content_hash: &str, bytes: &[u8]) -> Option<String> {
let Ok(repo::VelaSource::VelaRepo(root)) = repo::detect(frontier) else {
return None;
};
let hex = sha256_hex_part(content_hash);
let rel = format!(".vela/artifact-blobs/sha256/{hex}");
let path = root.join(&rel);
if let Some(parent) = path.parent() {
std::fs::create_dir_all(parent).unwrap_or_else(|e| {
fail(&format!(
"Failed to create artifact blob directory {}: {e}",
parent.display()
))
});
}
if !path.is_file() {
std::fs::write(&path, bytes)
.unwrap_or_else(|e| fail(&format!("Failed to write artifact blob: {e}")));
}
Some(rel)
}
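/// Parse repeated `--metadata key=value` pairs into a sorted map. Keys are
/// trimmed and must be non-empty; values are stored as trimmed strings.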
fn parse_metadata_pairs(pairs: Vec<String>) -> BTreeMap<String, Value> {
let mut out = BTreeMap::new();
for pair in pairs {
let Some((key, value)) = pair.split_once('=') else {
fail(&format!("--metadata must be key=value, got {pair:?}"));
};
let key = key.trim();
if key.is_empty() {
fail("--metadata key must be non-empty");
}
out.insert(key.to_string(), Value::String(value.trim().to_string()));
}
out
}
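/// Map an artifact kind onto the provenance `source_type` recorded for it.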
fn artifact_source_type(kind: &str) -> &'static str {
match kind {
"clinical_trial_record" | "protocol" => "clinical_trial",
"dataset" => "data_release",
"model_output" => "model_output",
"registry_record" => "database_record",
"lab_file" => "lab_notebook",
_ => "database_record",
}
}
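/// Build the provenance record for a deposited artifact, stamped with the
/// current time and this binary's version.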
fn artifact_provenance(
kind: &str,
title: &str,
url: Option<&str>,
doi: Option<&str>,
license: Option<&str>,
) -> crate::bundle::Provenance {
crate::bundle::Provenance {
source_type: artifact_source_type(kind).to_string(),
doi: doi.map(str::to_string),
pmid: None,
pmc: None,
openalex_id: None,
url: url.map(str::to_string),
title: title.to_string(),
authors: Vec::new(),
year: None,
journal: None,
license: license.map(str::to_string),
publisher: None,
funders: Vec::new(),
extraction: crate::bundle::Extraction {
method: "artifact_deposit".to_string(),
model: None,
model_version: None,
extracted_at: chrono::Utc::now().to_rfc3339(),
extractor_version: env!("CARGO_PKG_VERSION").to_string(),
},
review: None,
citation_count: None,
}
}
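/// `vela artifact add`: deposit an artifact into the frontier. With `--file`,
/// the bytes are hashed (and checked against `--content-hash` when both are
/// given) and stored as a repo-local blob where possible, falling back to the
/// literal file path; without a file, `--content-hash` is required and a URL
/// is recorded as a remote pointer.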
#[allow(clippy::too_many_arguments)]
fn cmd_artifact_add(
frontier: &Path,
kind: &str,
name: &str,
file: Option<&Path>,
url: Option<&str>,
content_hash: Option<&str>,
media_type: Option<&str>,
license: Option<&str>,
source_title: Option<&str>,
source_url: Option<&str>,
doi: Option<&str>,
target: Vec<String>,
metadata: Vec<String>,
access_tier: &str,
deposited_by: &str,
reason: &str,
json_out: bool,
) {
let tier =
crate::access_tier::AccessTier::parse(access_tier).unwrap_or_else(|e| fail_return(&e));
let mut size_bytes = None;
let mut storage_mode = "pointer".to_string();
let mut locator = url.map(str::to_string);
let mut computed_hash = content_hash.map(str::to_string);
if let Some(path) = file {
let bytes = std::fs::read(path)
.unwrap_or_else(|e| fail(&format!("Failed to read artifact file: {e}")));
let actual_hash = sha256_for_bytes(&bytes);
if let Some(expected) = content_hash {
let expected_hex = sha256_hex_part(expected);
let actual_hex = sha256_hex_part(&actual_hash);
if !expected_hex.eq_ignore_ascii_case(actual_hex) {
fail(&format!(
"--content-hash does not match file bytes: expected {expected}, got {actual_hash}"
));
}
}
size_bytes = Some(bytes.len() as u64);
computed_hash = Some(actual_hash.clone());
if let Some(rel) = artifact_blob_locator(frontier, &actual_hash, &bytes) {
storage_mode = "local_blob".to_string();
locator = Some(rel);
} else {
storage_mode = "local_file".to_string();
locator = Some(path.display().to_string());
}
}
let Some(content_hash) = computed_hash else {
fail("Provide --content-hash unless --file is present.");
};
let content_hash_for_print = content_hash.clone();
if file.is_none() && url.is_some() {
storage_mode = "remote".to_string();
}
let source_url_effective = source_url.or(url);
let source_title = source_title.unwrap_or(name);
let provenance = artifact_provenance(kind, source_title, source_url_effective, doi, license);
let metadata = parse_metadata_pairs(metadata);
let artifact = crate::bundle::Artifact::new(
kind.to_string(),
name.to_string(),
content_hash,
size_bytes,
media_type.map(str::to_string),
storage_mode,
locator,
source_url_effective.map(str::to_string),
license.map(str::to_string),
target,
provenance,
metadata,
tier,
)
.unwrap_or_else(|e| fail_return(&e));
let artifact_id = artifact.id.clone();
let report = state::add_artifact(frontier, artifact, deposited_by, reason)
.unwrap_or_else(|e| fail_return(&e));
if json_out {
println!(
"{}",
serde_json::to_string_pretty(&json!({
"ok": true,
"command": "artifact.add",
"id": artifact_id,
"frontier": frontier.display().to_string(),
"event": report.applied_event_id,
}))
.expect("serialize artifact.add")
);
} else {
println!();
println!(
" {}",
format!("VELA · ARTIFACT · {}", artifact_id)
.to_uppercase()
.dimmed()
);
println!(" {}", style::tick_row(60));
println!(" kind: {kind}");
println!(" name: {name}");
println!(" hash: {content_hash_for_print}");
println!(
" {} artifact recorded in {}",
style::ok("ok"),
frontier.display()
);
}
}
fn cmd_artifacts(frontier: &Path, target: Option<&str>, json_out: bool) {
let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
let filtered: Vec<&crate::bundle::Artifact> = project
.artifacts
.iter()
.filter(|artifact| {
target
.map(|t| artifact.target_findings.iter().any(|f| f == t))
.unwrap_or(true)
})
.collect();
if json_out {
println!(
"{}",
serde_json::to_string_pretty(&json!({
"ok": true,
"command": "artifacts",
"frontier": frontier.display().to_string(),
"count": filtered.len(),
"artifacts": filtered,
}))
.expect("serialize artifacts")
);
return;
}
println!();
println!(
" {}",
format!("VELA · ARTIFACTS · {}", frontier.display())
.to_uppercase()
.dimmed()
);
println!(" {}", style::tick_row(60));
if filtered.is_empty() {
println!(" (no artifacts registered)");
return;
}
for artifact in filtered {
println!(
" · {} {} · {}",
artifact.id.dimmed(),
artifact.kind,
artifact.name
);
if let Some(locator) = &artifact.locator {
println!(" locator: {}", truncate(locator, 88));
}
if !artifact.target_findings.is_empty() {
println!(" targets: {}", artifact.target_findings.join(", "));
}
}
}
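/// `vela artifact audit`: verify the recorded artifacts (counts, kinds, and
/// local blob integrity) and exit non-zero when any issue is found.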
fn cmd_artifact_audit(frontier: &Path, json_out: bool) {
let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
let audit = crate::artifact_audit::audit_artifacts(frontier, &project);
if json_out {
println!(
"{}",
serde_json::to_string_pretty(&audit).expect("serialize artifact audit")
);
if !audit.ok {
std::process::exit(1);
}
return;
}
println!();
println!(
" {}",
format!("VELA · ARTIFACT AUDIT · {}", frontier.display())
.to_uppercase()
.dimmed()
);
println!(" {}", style::tick_row(60));
println!(" artifacts: {}", audit.artifact_count);
println!(" checked local blobs: {}", audit.checked_local_blobs);
println!(" local blob bytes: {}", audit.local_blob_bytes);
if !audit.by_kind.is_empty() {
let kinds = audit
.by_kind
.iter()
.map(|(kind, count)| format!("{kind}:{count}"))
.collect::<Vec<_>>()
.join(", ");
println!(" kinds: {kinds}");
}
if audit.ok {
println!(" {} artifact audit passed.", style::ok("ok"));
return;
}
for issue in &audit.issues {
println!(
" {} {} {}: {}",
style::lost("invalid"),
issue.id,
issue.field,
issue.message
);
}
std::process::exit(1);
}
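/// `vela decision-brief`: render the decision-brief projection: each question
/// with its short answer, caveat, supporting/tension/gap findings, linked
/// artifacts, and what would change the answer.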
fn cmd_decision_brief(frontier: &Path, json_out: bool) {
let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
let report = decision::load_decision_brief(frontier, &project);
if json_out {
println!(
"{}",
serde_json::to_string_pretty(&report).expect("serialize decision brief report")
);
if !report.ok {
std::process::exit(1);
}
return;
}
println!();
println!(
" {}",
format!("VELA · DECISION BRIEF · {}", project.project.name)
.to_uppercase()
.dimmed()
);
println!(" {}", style::tick_row(60));
if !report.ok {
print_projection_issues(&report.issues, report.error.as_deref());
std::process::exit(1);
}
let brief = report
.projection
.as_ref()
.expect("ok decision report carries projection");
for question in &brief.questions {
println!(" · {} · {}", question.id.dimmed(), question.title);
println!(" answer: {}", wrap_line(&question.short_answer, 82));
println!(" caveat: {}", wrap_line(&question.caveat, 82));
println!(" support: {}", question.supporting_findings.join(", "));
if !question.tension_findings.is_empty() {
println!(" tensions: {}", question.tension_findings.join(", "));
}
if !question.gap_findings.is_empty() {
println!(" gaps: {}", question.gap_findings.join(", "));
}
if !question.artifact_ids.is_empty() {
println!(" artifacts: {}", question.artifact_ids.join(", "));
}
println!(
" would change: {}",
wrap_line(&question.what_would_change_this_answer, 82)
);
}
}
fn cmd_trial_summary(frontier: &Path, json_out: bool) {
let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
let report = decision::load_trial_outcomes(frontier, &project);
if json_out {
println!(
"{}",
serde_json::to_string_pretty(&report).expect("serialize trial summary report")
);
if !report.ok {
std::process::exit(1);
}
return;
}
println!();
println!(
" {}",
format!("VELA · TRIAL SUMMARY · {}", project.project.name)
.to_uppercase()
.dimmed()
);
println!(" {}", style::tick_row(60));
if !report.ok {
print_projection_issues(&report.issues, report.error.as_deref());
std::process::exit(1);
}
let outcomes = report
.projection
.as_ref()
.expect("ok trial report carries projection");
for row in &outcomes.rows {
println!(" · {} · {} ({})", row.id.dimmed(), row.program, row.drug);
println!(" population: {}", wrap_line(&row.population, 82));
println!(" endpoint: {}", wrap_line(&row.primary_endpoint, 82));
println!(" cognition: {}", wrap_line(&row.cognitive_result, 82));
println!(" biomarker: {}", wrap_line(&row.biomarker_result, 82));
println!(" risk: {}", wrap_line(&row.aria_or_safety_result, 82));
println!(" status: {}", wrap_line(&row.regulatory_status, 82));
if !row.finding_ids.is_empty() {
println!(" findings: {}", row.finding_ids.join(", "));
}
if !row.artifact_ids.is_empty() {
println!(" artifacts: {}", row.artifact_ids.join(", "));
}
}
}
fn cmd_source_verification(frontier: &Path, json_out: bool) {
let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
let report = decision::load_source_verification(frontier, &project);
if json_out {
println!(
"{}",
serde_json::to_string_pretty(&report).expect("serialize source verification report")
);
if !report.ok {
std::process::exit(1);
}
return;
}
println!();
println!(
" {}",
format!("VELA · SOURCE VERIFICATION · {}", project.project.name)
.to_uppercase()
.dimmed()
);
println!(" {}", style::tick_row(60));
if !report.ok {
print_projection_issues(&report.issues, report.error.as_deref());
std::process::exit(1);
}
let verification = report
.projection
.as_ref()
.expect("ok source verification report carries projection");
println!(" verified_at: {}", verification.verified_at);
for source in &verification.sources {
println!(" · {} · {}", source.id.dimmed(), source.title);
println!(" agency: {}", source.agency);
println!(" url: {}", truncate(&source.url, 88));
println!(" status: {}", wrap_line(&source.current_status, 82));
}
}
fn cmd_source_ingest_plan(frontier: &Path, json_out: bool) {
let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
let report = decision::load_source_ingest_plan(frontier, &project);
if json_out {
println!(
"{}",
serde_json::to_string_pretty(&report).expect("serialize source ingest plan report")
);
if !report.ok {
std::process::exit(1);
}
return;
}
println!();
println!(
" {}",
format!("VELA · SOURCE INGEST PLAN · {}", project.project.name)
.to_uppercase()
.dimmed()
);
println!(" {}", style::tick_row(60));
if !report.ok {
print_projection_issues(&report.issues, report.error.as_deref());
std::process::exit(1);
}
let plan = report
.projection
.as_ref()
.expect("ok source ingest plan report carries projection");
println!(" verified_at: {}", plan.verified_at);
println!(" entries: {}", plan.entries.len());
for entry in &plan.entries {
println!(
" · {} · {} · {} · {}",
entry.id.dimmed(),
entry.category,
entry.priority,
entry.ingest_status
);
println!(" name: {}", wrap_line(&entry.name, 82));
println!(" locator: {}", truncate(&entry.locator, 88));
println!(" use: {}", wrap_line(&entry.target_use, 82));
if let Some(id) = &entry.current_frontier_artifact_id {
println!(" artifact: {id}");
}
if !entry.target_findings.is_empty() {
println!(" findings: {}", entry.target_findings.join(", "));
}
}
}
fn print_projection_issues(issues: &[decision::ProjectionIssue], error: Option<&str>) {
if let Some(error) = error {
println!(" {} {error}", style::lost("unavailable"));
}
for issue in issues {
println!(
" {} {}: {}",
style::lost("invalid"),
issue.path,
issue.message
);
}
}
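/// Greedy word-wrap to at most `max_chars` characters per line, indenting
/// continuation lines to sit under the label column of the text renderers.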
fn wrap_line(text: &str, max_chars: usize) -> String {
if text.chars().count() <= max_chars {
return text.to_string();
}
let mut out = String::new();
let mut line_len = 0usize;
for word in text.split_whitespace() {
let word_len = word.chars().count();
if line_len > 0 && line_len + 1 + word_len > max_chars {
out.push('\n');
out.push_str(" ");
out.push_str(word);
line_len = word_len;
} else {
if line_len > 0 {
out.push(' ');
line_len += 1;
}
out.push_str(word);
line_len += word_len;
}
}
out
}
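// Helpers for pulling fields out of ClinicalTrials.gov v2 study JSON by pointer.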
fn clinical_str<'a>(study: &'a Value, pointer: &str) -> Option<&'a str> {
study.pointer(pointer).and_then(Value::as_str)
}
fn clinical_string_array(study: &Value, pointer: &str) -> Vec<String> {
study
.pointer(pointer)
.and_then(Value::as_array)
.map(|items| {
items
.iter()
.filter_map(Value::as_str)
.map(str::to_string)
.collect()
})
.unwrap_or_default()
}
fn clinical_named_array(study: &Value, pointer: &str, field: &str) -> Vec<String> {
study
.pointer(pointer)
.and_then(Value::as_array)
.map(|items| {
items
.iter()
.filter_map(|item| item.get(field).and_then(Value::as_str))
.map(str::to_string)
.collect()
})
.unwrap_or_default()
}
fn insert_string_vec_metadata(
metadata: &mut BTreeMap<String, Value>,
key: &str,
values: Vec<String>,
) {
if values.is_empty() {
return;
}
metadata.insert(
key.to_string(),
Value::Array(values.into_iter().map(Value::String).collect()),
);
}
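/// `vela clinical-trial import`: fetch a study record from the
/// ClinicalTrials.gov v2 API (or read it from a local JSON file), canonicalize
/// and hash the JSON, then deposit it as a `clinical_trial_record` artifact
/// carrying status, phase, condition, intervention, and primary-outcome metadata.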
async fn cmd_clinical_trial_import(
frontier: &Path,
nct_id: &str,
input_json: Option<&Path>,
target: Vec<String>,
deposited_by: &str,
reason: &str,
license: &str,
json_out: bool,
) {
let api_url = format!("https://clinicaltrials.gov/api/v2/studies/{nct_id}");
let raw = if let Some(path) = input_json {
std::fs::read_to_string(path)
.unwrap_or_else(|e| fail(&format!("Failed to read ClinicalTrials.gov JSON: {e}")))
} else {
let response = reqwest::get(&api_url).await.unwrap_or_else(|e| {
fail(&format!(
"Failed to fetch ClinicalTrials.gov record {nct_id}: {e}"
))
});
let response = response.error_for_status().unwrap_or_else(|e| {
fail(&format!(
"Failed to fetch ClinicalTrials.gov record {nct_id}: {e}"
))
});
response.text().await.unwrap_or_else(|e| {
fail(&format!(
"Failed to read ClinicalTrials.gov record {nct_id}: {e}"
))
})
};
let study: Value = serde_json::from_str(&raw)
.unwrap_or_else(|e| fail(&format!("Failed to parse ClinicalTrials.gov JSON: {e}")));
let canonical_bytes = crate::canonical::to_canonical_bytes(&study)
.unwrap_or_else(|e| fail(&format!("Failed to canonicalize trial JSON: {e}")));
let content_hash = sha256_for_bytes(&canonical_bytes);
let locator = artifact_blob_locator(frontier, &content_hash, &canonical_bytes)
.unwrap_or_else(|| api_url.clone());
let storage_mode = if locator.starts_with(".vela/") {
"local_blob"
} else {
"remote"
};
let parsed_nct = clinical_str(&study, "/protocolSection/identificationModule/nctId")
.unwrap_or(nct_id)
.to_string();
let title = clinical_str(&study, "/protocolSection/identificationModule/briefTitle")
.or_else(|| {
clinical_str(
&study,
"/protocolSection/identificationModule/officialTitle",
)
})
.unwrap_or(nct_id);
let public_url = format!("https://clinicaltrials.gov/study/{parsed_nct}");
let mut metadata = BTreeMap::new();
metadata.insert("nct_id".to_string(), Value::String(parsed_nct.clone()));
metadata.insert(
"source_api".to_string(),
Value::String("clinicaltrials.gov-v2".to_string()),
);
metadata.insert(
"retrieved_at".to_string(),
Value::String(chrono::Utc::now().to_rfc3339()),
);
for (key, pointer) in [
(
"overall_status",
"/protocolSection/statusModule/overallStatus",
),
(
"start_date",
"/protocolSection/statusModule/startDateStruct/date",
),
(
"completion_date",
"/protocolSection/statusModule/completionDateStruct/date",
),
] {
if let Some(value) = clinical_str(&study, pointer) {
metadata.insert(key.to_string(), Value::String(value.to_string()));
}
}
insert_string_vec_metadata(
&mut metadata,
"phases",
clinical_string_array(&study, "/protocolSection/designModule/phases"),
);
insert_string_vec_metadata(
&mut metadata,
"conditions",
clinical_string_array(&study, "/protocolSection/conditionsModule/conditions"),
);
insert_string_vec_metadata(
&mut metadata,
"interventions",
clinical_named_array(
&study,
"/protocolSection/armsInterventionsModule/interventions",
"name",
),
);
insert_string_vec_metadata(
&mut metadata,
"primary_outcomes",
clinical_named_array(
&study,
"/protocolSection/outcomesModule/primaryOutcomes",
"measure",
),
);
if let Some(has_results) = study.get("hasResults").and_then(Value::as_bool) {
metadata.insert("has_results".to_string(), Value::Bool(has_results));
}
let provenance = artifact_provenance(
"clinical_trial_record",
title,
Some(&public_url),
None,
Some(license),
);
let artifact = crate::bundle::Artifact::new(
"clinical_trial_record",
title.to_string(),
content_hash,
Some(canonical_bytes.len() as u64),
Some("application/json".to_string()),
storage_mode.to_string(),
Some(locator),
Some(public_url.clone()),
Some(license.to_string()),
target,
provenance,
metadata,
crate::access_tier::AccessTier::Public,
)
.unwrap_or_else(|e| fail_return(&e));
let artifact_id = artifact.id.clone();
let report = state::add_artifact(frontier, artifact, deposited_by, reason)
.unwrap_or_else(|e| fail_return(&e));
if json_out {
println!(
"{}",
serde_json::to_string_pretty(&json!({
"ok": true,
"command": "clinical-trial-import",
"nct_id": parsed_nct,
"id": artifact_id,
"frontier": frontier.display().to_string(),
"event": report.applied_event_id,
"source_url": public_url,
}))
.expect("serialize clinical-trial-import")
);
} else {
println!();
println!(
" {}",
format!("VELA · CLINICAL TRIAL · {}", artifact_id)
.to_uppercase()
.dimmed()
);
println!(" {}", style::tick_row(60));
println!(" nct_id: {parsed_nct}");
println!(" title: {}", truncate(title, 96));
println!(" source: {public_url}");
println!(
" {} trial record imported into {}",
style::ok("ok"),
frontier.display()
);
}
}
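/// `vela replicate`: record a replication attempt against a `vf_` finding.
/// Unless `--no-cascade` is passed, the outcome is propagated to dependent
/// findings and the resulting review events are appended before saving.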
#[allow(clippy::too_many_arguments)]
fn cmd_replicate(
frontier: &Path,
target: &str,
outcome: &str,
attempted_by: &str,
conditions_text: &str,
source_title: &str,
doi: Option<&str>,
pmid: Option<&str>,
sample_size: Option<&str>,
note: &str,
previous_attempt: Option<&str>,
no_cascade: bool,
json: bool,
) {
if !crate::bundle::VALID_REPLICATION_OUTCOMES.contains(&outcome) {
fail(&format!(
"invalid outcome '{outcome}'; valid: {:?}",
crate::bundle::VALID_REPLICATION_OUTCOMES
));
}
if !target.starts_with("vf_") {
fail(&format!("target '{target}' is not a vf_ finding id"));
}
let mut project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
if !project.findings.iter().any(|f| f.id == target) {
fail(&format!(
"target finding '{target}' not present in frontier '{}'",
frontier.display()
));
}
let lower = conditions_text.to_lowercase();
let conditions = crate::bundle::Conditions {
text: conditions_text.to_string(),
species_verified: Vec::new(),
species_unverified: Vec::new(),
in_vitro: lower.contains("in vitro") || lower.contains("ipsc"),
in_vivo: lower.contains("in vivo") || lower.contains("mouse") || lower.contains("rat"),
human_data: lower.contains("human")
|| lower.contains("clinical")
|| lower.contains("patient"),
clinical_trial: lower.contains("clinical trial") || lower.contains("phase "),
concentration_range: None,
duration: None,
age_group: None,
cell_type: None,
};
let evidence = crate::bundle::Evidence {
evidence_type: "experimental".to_string(),
model_system: String::new(),
species: None,
method: "replication_attempt".to_string(),
sample_size: sample_size.map(|s| s.to_string()),
effect_size: None,
p_value: None,
replicated: outcome == "replicated",
replication_count: None,
evidence_spans: Vec::new(),
};
let provenance = crate::bundle::Provenance {
source_type: "published_paper".to_string(),
doi: doi.map(|s| s.to_string()),
pmid: pmid.map(|s| s.to_string()),
pmc: None,
openalex_id: None,
url: None,
title: source_title.to_string(),
authors: Vec::new(),
year: None,
journal: None,
license: None,
publisher: None,
funders: Vec::new(),
extraction: crate::bundle::Extraction {
method: "manual_curation".to_string(),
model: None,
model_version: None,
extracted_at: chrono::Utc::now().to_rfc3339(),
extractor_version: env!("CARGO_PKG_VERSION").to_string(),
},
review: None,
citation_count: None,
};
let mut rep = crate::bundle::Replication::new(
target.to_string(),
attempted_by.to_string(),
outcome.to_string(),
evidence,
conditions,
provenance,
note.to_string(),
);
rep.previous_attempt = previous_attempt.map(|s| s.to_string());
if project.replications.iter().any(|r| r.id == rep.id) {
if json {
println!(
"{}",
serde_json::to_string_pretty(&json!({
"ok": false,
"command": "replicate",
"reason": "replication_already_exists",
"id": rep.id,
}))
.expect("serialize")
);
} else {
println!(
"{} replication {} already exists in {}; skipping.",
style::warn("replicate"),
rep.id,
frontier.display()
);
}
return;
}
let new_id = rep.id.clone();
project.replications.push(rep);
let cascade_result = if no_cascade {
None
} else {
let result = propagate::propagate_correction(
&mut project,
target,
propagate::PropagationAction::ReplicationOutcome {
outcome: outcome.to_string(),
vrep_id: new_id.clone(),
},
);
project.review_events.extend(result.events.clone());
project::recompute_stats(&mut project);
Some(result)
};
repo::save_to_path(frontier, &project).unwrap_or_else(|e| fail_return(&e));
if json {
let cascade_json = cascade_result.as_ref().map(|r| {
json!({
"affected": r.affected,
"events": r.events.len(),
})
});
println!(
"{}",
serde_json::to_string_pretty(&json!({
"ok": true,
"command": "replicate",
"id": new_id,
"target": target,
"outcome": outcome,
"attempted_by": attempted_by,
"cascade": cascade_json,
"frontier": frontier.display().to_string(),
}))
.expect("failed to serialize replicate result")
);
} else {
println!();
println!(
" {}",
format!("VELA · REPLICATE · {}", new_id)
.to_uppercase()
.dimmed()
);
println!(" {}", style::tick_row(60));
println!(" target: {target}");
println!(" outcome: {outcome}");
println!(" attempted by: {attempted_by}");
println!(" conditions: {conditions_text}");
println!(" source: {source_title}");
if let Some(d) = doi {
println!(" doi: {d}");
}
println!();
println!(
" {} replication recorded in {}",
style::ok("ok"),
frontier.display()
);
if let Some(result) = cascade_result {
println!(
" {} cascade: {} dependent(s) flagged, {} review event(s) recorded",
style::ok("ok"),
result.affected,
result.events.len()
);
} else {
println!(" {} cascade skipped (--no-cascade)", style::warn("info"));
}
}
}
fn cmd_replications(frontier: &Path, target: Option<&str>, json: bool) {
let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
let filtered: Vec<&crate::bundle::Replication> = project
.replications
.iter()
.filter(|r| target.is_none_or(|t| r.target_finding == t))
.collect();
if json {
let payload = json!({
"ok": true,
"command": "replications",
"frontier": frontier.display().to_string(),
"filter_target": target,
"count": filtered.len(),
"replications": filtered,
});
println!(
"{}",
serde_json::to_string_pretty(&payload).expect("failed to serialize replications list")
);
return;
}
println!();
let header = match target {
Some(t) => format!("VELA · REPLICATIONS · {t}"),
None => format!("VELA · REPLICATIONS · {}", frontier.display()),
};
println!(" {}", header.to_uppercase().dimmed());
println!(" {}", style::tick_row(60));
if filtered.is_empty() {
println!(" (no replications recorded)");
return;
}
for rep in &filtered {
let outcome_chip = match rep.outcome.as_str() {
"replicated" => style::ok(&rep.outcome),
"failed" => style::lost(&rep.outcome),
"partial" => style::warn(&rep.outcome),
_ => rep.outcome.clone().normal().to_string(),
};
println!(
" · {} {} by {}",
rep.id.dimmed(),
outcome_chip,
rep.attempted_by
);
println!(" target: {}", rep.target_finding);
if !rep.conditions.text.is_empty() {
println!(" conditions: {}", truncate(&rep.conditions.text, 80));
}
if !rep.provenance.title.is_empty() {
println!(" source: {}", truncate(&rep.provenance.title, 80));
}
}
}
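/// `vela ingest`: dispatch an input to the matching pipeline. doi:/pmid:/nct:
/// URIs fetch metadata only; files route by extension (.pdf → scout,
/// .md/.markdown → compile-notes, .csv/.tsv → compile-data, .json →
/// artifact-to-state); directories run every applicable handler in sequence,
/// falling back to compile-code when no known extensions are present.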
async fn cmd_ingest(
path: &str,
frontier: &Path,
backend: Option<&str>,
actor: Option<&str>,
dry_run: bool,
json: bool,
) {
let lowered = path.trim().to_lowercase();
if lowered.starts_with("doi:") || lowered.starts_with("pmid:") || lowered.starts_with("nct:") {
cmd_source_fetch(path.trim(), None, None, false, json).await;
if !json {
eprintln!();
eprintln!(
" vela ingest · note: doi:/pmid:/nct: URIs only fetch metadata; no frontier state was written."
);
eprintln!(
" next: turn this paper into a proposal with `vela finding add {} --assertion '...' --author 'reviewer:you' --apply`",
frontier.display()
);
}
return;
}
let p = std::path::PathBuf::from(path);
if !p.exists() {
fail(&format!(
"ingest: path '{path}' does not exist (and is not a doi:/pmid:/nct: URI)"
));
}
let ext = p
.extension()
.and_then(|s| s.to_str())
.map(|s| s.to_ascii_lowercase());
if p.is_file() {
match ext.as_deref() {
Some("pdf") => {
cmd_scout(&p, frontier, backend, dry_run, json).await;
}
Some("md") | Some("markdown") => {
cmd_compile_notes(&p, frontier, backend, None, None, dry_run, json).await;
}
Some("csv") | Some("tsv") => {
cmd_compile_data(&p, frontier, backend, None, dry_run, json).await;
}
Some("json") => {
let actor_id = actor.unwrap_or("agent:vela-ingest-bot");
cmd_artifact_to_state(frontier, &p, actor_id, false, json);
}
other => {
fail(&format!(
"ingest: unsupported file type '{}' (expected .pdf, .md, .csv, .tsv, .json, or a doi:/pmid:/nct: URI)",
other.unwrap_or("(none)")
));
}
}
return;
}
if p.is_dir() {
let mut pdf_count = 0usize;
let mut md_count = 0usize;
let mut data_count = 0usize;
let mut json_count = 0usize;
let mut unhandled_exts: std::collections::BTreeSet<String> =
std::collections::BTreeSet::new();
if let Ok(entries) = std::fs::read_dir(&p) {
for entry in entries.flatten() {
let path = entry.path();
if !path.is_file() {
continue;
}
if let Some(name) = entry.file_name().to_str()
&& let Some(dot) = name.rfind('.')
{
let ext = name[dot + 1..].to_ascii_lowercase();
match ext.as_str() {
"pdf" => pdf_count += 1,
"md" | "markdown" => md_count += 1,
"csv" | "tsv" => data_count += 1,
"json" => json_count += 1,
other => {
if !name.starts_with('.') {
unhandled_exts.insert(other.to_string());
}
}
}
}
}
}
let dispatched_types = (pdf_count > 0) as usize
+ (md_count > 0) as usize
+ (data_count > 0) as usize
+ (json_count > 0) as usize;
if dispatched_types == 0 {
cmd_compile_code(&p, frontier, backend, None, dry_run, json).await;
return;
}
if dispatched_types > 1 {
eprintln!(
" vela ingest · folder has multiple handlable types; running each in sequence"
);
eprintln!(
" pdf:{pdf_count} md:{md_count} csv/tsv:{data_count} json:{json_count}"
);
}
if pdf_count > 0 {
cmd_scout(&p, frontier, backend, dry_run, json).await;
}
if md_count > 0 {
cmd_compile_notes(&p, frontier, backend, None, None, dry_run, json).await;
}
if data_count > 0 {
cmd_compile_data(&p, frontier, backend, None, dry_run, json).await;
}
if json_count > 0 {
let actor_id = actor.unwrap_or("agent:vela-ingest-bot");
if let Ok(entries) = std::fs::read_dir(&p) {
for entry in entries.flatten() {
let path = entry.path();
if path.is_file()
&& path
.extension()
.and_then(|s| s.to_str())
.map(|s| s.eq_ignore_ascii_case("json"))
.unwrap_or(false)
{
cmd_artifact_to_state(frontier, &path, actor_id, false, json);
}
}
}
}
if !unhandled_exts.is_empty() {
let kinds: Vec<String> = unhandled_exts.into_iter().collect();
eprintln!(
" vela ingest · skipped {} file extension(s) with no handler: {}",
kinds.len(),
kinds.join(", ")
);
}
return;
}
fail(&format!(
"ingest: path '{path}' is neither a file nor a directory"
));
}
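/// Dispatches to the datasets handler the CLI binary registers in the
/// `DATASETS_HANDLER` static (a `OnceLock`); with no handler wired in
/// (library use), it prints an error and exits. The review/tensions/
/// experiments/code/notes/scout commands below follow the same pattern.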
#[allow(clippy::too_many_arguments)]
async fn cmd_compile_data(
root: &Path,
frontier: &Path,
backend: Option<&str>,
sample_rows: Option<usize>,
dry_run: bool,
json_out: bool,
) {
match DATASETS_HANDLER.get() {
Some(handler) => {
handler(
root.to_path_buf(),
frontier.to_path_buf(),
backend.map(String::from),
sample_rows,
dry_run,
json_out,
)
.await;
}
None => {
eprintln!(
"{} `vela compile-data` requires the vela CLI binary; the library is unwired without a registered datasets handler.",
style::err_prefix()
);
std::process::exit(1);
}
}
}
async fn cmd_review_pending(
frontier: &Path,
backend: Option<&str>,
max_proposals: Option<usize>,
batch_size: usize,
dry_run: bool,
json_out: bool,
) {
match REVIEWER_HANDLER.get() {
Some(handler) => {
handler(
frontier.to_path_buf(),
backend.map(String::from),
max_proposals,
batch_size,
dry_run,
json_out,
)
.await;
}
None => {
eprintln!(
"{} `vela review-pending` requires the vela CLI binary; the library is unwired without a registered reviewer handler.",
style::err_prefix()
);
std::process::exit(1);
}
}
}
async fn cmd_find_tensions(
frontier: &Path,
backend: Option<&str>,
max_findings: Option<usize>,
dry_run: bool,
json_out: bool,
) {
match TENSIONS_HANDLER.get() {
Some(handler) => {
handler(
frontier.to_path_buf(),
backend.map(String::from),
max_findings,
dry_run,
json_out,
)
.await;
}
None => {
eprintln!(
"{} `vela find-tensions` requires the vela CLI binary; the library is unwired without a registered tensions handler.",
style::err_prefix()
);
std::process::exit(1);
}
}
}
async fn cmd_plan_experiments(
frontier: &Path,
backend: Option<&str>,
max_findings: Option<usize>,
dry_run: bool,
json_out: bool,
) {
match EXPERIMENTS_HANDLER.get() {
Some(handler) => {
handler(
frontier.to_path_buf(),
backend.map(String::from),
max_findings,
dry_run,
json_out,
)
.await;
}
None => {
eprintln!(
"{} `vela plan-experiments` requires the vela CLI binary; the library is unwired without a registered experiments handler.",
style::err_prefix()
);
std::process::exit(1);
}
}
}
async fn cmd_compile_code(
root: &Path,
frontier: &Path,
backend: Option<&str>,
max_files: Option<usize>,
dry_run: bool,
json_out: bool,
) {
match CODE_HANDLER.get() {
Some(handler) => {
handler(
root.to_path_buf(),
frontier.to_path_buf(),
backend.map(String::from),
max_files,
dry_run,
json_out,
)
.await;
}
None => {
eprintln!(
"{} `vela compile-code` requires the vela CLI binary; the library is unwired without a registered code handler.",
style::err_prefix()
);
std::process::exit(1);
}
}
}
async fn cmd_compile_notes(
vault: &Path,
frontier: &Path,
backend: Option<&str>,
max_files: Option<usize>,
max_items_per_category: Option<usize>,
dry_run: bool,
json_out: bool,
) {
match NOTES_HANDLER.get() {
Some(handler) => {
handler(
vault.to_path_buf(),
frontier.to_path_buf(),
backend.map(String::from),
max_files,
max_items_per_category,
dry_run,
json_out,
)
.await;
}
None => {
eprintln!(
"{} `vela compile-notes` requires the vela CLI binary; the library is unwired without a registered notes handler.",
style::err_prefix()
);
std::process::exit(1);
}
}
}
async fn cmd_scout(
folder: &Path,
frontier: &Path,
backend: Option<&str>,
dry_run: bool,
json_out: bool,
) {
match SCOUT_HANDLER.get() {
Some(handler) => {
handler(
folder.to_path_buf(),
frontier.to_path_buf(),
backend.map(String::from),
dry_run,
json_out,
)
.await;
}
None => {
eprintln!(
"{} `vela scout` requires the vela CLI binary; the library is unwired without a registered scout handler.",
style::err_prefix()
);
std::process::exit(1);
}
}
}
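/// Runs the requested validation passes; with no pass flags set, everything
/// runs. `--json` delegates to `check_json_payload` and exits non-zero when
/// the payload is not ok, while `--strict` additionally fails on review-queue
/// entries and a non-ready proof state. `--fix` is accepted but currently a
/// no-op.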
#[allow(clippy::too_many_arguments)]
fn cmd_check(
source: Option<&Path>,
schema: bool,
stats: bool,
conformance_flag: bool,
conformance_dir: &Path,
all: bool,
schema_only: bool,
strict: bool,
fix: bool,
json_output: bool,
) {
if json_output {
let Some(src) = source else {
fail("--json requires a frontier source");
};
let payload = check_json_payload(src, schema_only, strict);
println!(
"{}",
serde_json::to_string_pretty(&payload).expect("failed to serialize check report")
);
if payload.get("ok").and_then(Value::as_bool) != Some(true) {
std::process::exit(1);
}
return;
}
let run_all = all || (!schema && !stats && !conformance_flag && !schema_only);
if run_all || schema || schema_only {
let Some(src) = source else {
fail("check requires a frontier source");
};
validate::run(src);
}
if !schema_only && (run_all || stats) {
let Some(src) = source else {
fail("--stats requires a frontier source");
};
let frontier = load_frontier_or_fail(src);
let report = lint::lint(&frontier, None, None);
lint::print_report(&report);
let replay_report = events::replay_report(&frontier);
println!("event replay: {}", replay_report.status);
if !replay_report.conflicts.is_empty() {
for conflict in &replay_report.conflicts {
println!(" - {conflict}");
}
}
if let Ok(signature_report) = sign::verify_frontier_data(&frontier, None)
&& signature_report.signed > 0
{
println!(
"Signatures: {} valid / {} invalid / {} unsigned",
signature_report.valid, signature_report.invalid, signature_report.unsigned
);
}
let signal_report = signals::analyze(&frontier, &[]);
print_signal_summary(&signal_report, strict);
if !replay_report.ok
|| (strict
&& (!signal_report.review_queue.is_empty()
|| signal_report.proof_readiness.status != "ready"))
{
std::process::exit(1);
}
}
if run_all || conformance_flag {
if conformance_flag || conformance_dir.is_dir() {
conformance::run(conformance_dir);
} else {
eprintln!(
" conformance: skipped ({} not present; pass --conformance-dir <path> to point at the source repo's tests/conformance)",
conformance_dir.display()
);
}
}
let _ = fix;
}
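/// Builds the machine-readable `vela check` report: schema validation,
/// methodology and frontier-graph lints, event replay, state integrity,
/// derivable-but-unmaterialized source/evidence/condition records, reviewer
/// identity checks, and signal analysis, folded into one payload with a
/// repair plan.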
fn check_json_payload(src: &Path, schema_only: bool, strict: bool) -> Value {
let report = validate::validate(src);
let loaded = repo::load_from_path(src).ok();
let (method_report, graph_report) = if schema_only {
(None, None)
} else if let Some(frontier) = loaded.as_ref() {
(
Some(lint::lint(frontier, None, None)),
Some(lint::lint_frontier(frontier)),
)
} else {
(None, None)
};
let source_hash = hash_path(src).unwrap_or_else(|_| "unavailable".to_string());
let mut diagnostics = Vec::new();
diagnostics.extend(report.errors.iter().map(|e| {
json!({
"severity": "error",
"rule_id": "schema",
"finding_id": null,
"file": &e.file,
"field_path": null,
"message": &e.error,
"suggestion": schema_error_suggestion(&e.error),
"fixable": schema_error_fix(&e.error),
"normalize_action": schema_error_action(&e.error),
})
}));
for (check_id, lint_report) in [
("methodology", method_report.as_ref()),
("frontier_graph", graph_report.as_ref()),
] {
if let Some(lint_report) = lint_report {
diagnostics.extend(lint_report.diagnostics.iter().map(|d| {
json!({
"severity": d.severity.to_string(),
"rule_id": &d.rule_id,
"check": check_id,
"finding_id": &d.finding_id,
"field_path": null,
"message": &d.message,
"suggestion": &d.suggestion,
"fixable": false,
"normalize_action": null,
})
}));
}
}
let method_errors = method_report.as_ref().map_or(0, |r| r.errors);
let method_warnings = method_report.as_ref().map_or(0, |r| r.warnings);
let method_infos = method_report.as_ref().map_or(0, |r| r.infos);
let graph_errors = graph_report.as_ref().map_or(0, |r| r.errors);
let graph_warnings = graph_report.as_ref().map_or(0, |r| r.warnings);
let graph_infos = graph_report.as_ref().map_or(0, |r| r.infos);
let replay_report = loaded.as_ref().map(events::replay_report);
let state_integrity_report = if schema_only {
loaded.as_ref().map(state_integrity::analyze)
} else {
state_integrity::analyze_path(src).ok()
};
if let Some(replay) = replay_report.as_ref()
&& !replay.ok
{
diagnostics.extend(replay.conflicts.iter().map(|conflict| {
json!({
"severity": "error",
"rule_id": "event_replay",
"check": "events",
"finding_id": null,
"field_path": null,
"message": conflict,
"suggestion": "Inspect canonical state events and repair the frontier event log before proof export.",
"fixable": false,
"normalize_action": null,
})
}));
}
let event_errors = replay_report
.as_ref()
.map_or(0, |replay| usize::from(!replay.ok));
let state_integrity_errors = state_integrity_report
.as_ref()
.map_or(0, |report| report.structural_errors.len());
let (source_registry, evidence_atoms, conditions, proposal_summary, proof_state) = loaded
.as_ref()
.map(|frontier| {
(
sources::source_summary(frontier),
sources::evidence_summary(frontier),
sources::condition_summary(frontier),
proposals::summary(frontier),
proposals::proof_state_json(&frontier.proof_state),
)
})
.unwrap_or_else(|| {
(
sources::SourceRegistrySummary::default(),
sources::EvidenceAtomSummary::default(),
sources::ConditionSummary::default(),
proposals::ProposalSummary::default(),
Value::Null,
)
});
let signature_report = loaded
.as_ref()
.and_then(|frontier| sign::verify_frontier_data(frontier, None).ok());
if let Some(frontier) = loaded.as_ref()
&& !schema_only
{
let projection = sources::derive_projection(frontier);
let existing_sources = frontier
.sources
.iter()
.map(|source| source.id.as_str())
.collect::<std::collections::BTreeSet<_>>();
let existing_atoms = frontier
.evidence_atoms
.iter()
.map(|atom| atom.id.as_str())
.collect::<std::collections::BTreeSet<_>>();
let existing_conditions = frontier
.condition_records
.iter()
.map(|record| record.id.as_str())
.collect::<std::collections::BTreeSet<_>>();
for source in projection
.sources
.iter()
.filter(|source| !existing_sources.contains(source.id.as_str()))
{
diagnostics.push(json!({
"severity": "warning",
"rule_id": "missing_source_record",
"check": "source_registry",
"finding_id": source.finding_ids.first(),
"field_path": "sources",
"message": format!("Source record {} is derivable but not materialized in frontier state.", source.id),
"suggestion": "Run `vela normalize` to materialize source records before proof export.",
"fixable": true,
"normalize_action": "materialize_source_record",
}));
}
for atom in projection
.evidence_atoms
.iter()
.filter(|atom| !existing_atoms.contains(atom.id.as_str()))
{
diagnostics.push(json!({
"severity": "warning",
"rule_id": "missing_evidence_atom",
"check": "evidence_atoms",
"finding_id": atom.finding_id,
"field_path": "evidence_atoms",
"message": format!("Evidence atom {} is derivable but not materialized in frontier state.", atom.id),
"suggestion": "Run `vela normalize` to materialize evidence atoms before proof export.",
"fixable": true,
"normalize_action": "materialize_evidence_atom",
}));
}
for atom in projection
.evidence_atoms
.iter()
.filter(|atom| atom.locator.is_none())
{
diagnostics.push(json!({
"severity": "warning",
"rule_id": "missing_evidence_locator",
"check": "evidence_atoms",
"finding_id": atom.finding_id,
"field_path": "evidence_atoms[].locator",
"message": format!("Evidence atom {} has no source locator.", atom.id),
"suggestion": "Add or verify evidence spans, table rows, pages, sections, or run locators.",
"fixable": false,
"normalize_action": null,
}));
}
for condition in projection
.condition_records
.iter()
.filter(|condition| !existing_conditions.contains(condition.id.as_str()))
{
diagnostics.push(json!({
"severity": "warning",
"rule_id": "condition_record_missing",
"check": "conditions",
"finding_id": condition.finding_id,
"field_path": "condition_records",
"message": format!("Condition record {} is derivable but not materialized in frontier state.", condition.id),
"suggestion": "Run `vela normalize` to materialize condition boundaries before proof export.",
"fixable": true,
"normalize_action": "materialize_condition_record",
}));
}
for proposal in frontier.proposals.iter().filter(|proposal| {
matches!(proposal.status.as_str(), "accepted" | "applied")
&& proposal
.reviewed_by
.as_deref()
.is_none_or(proposals::is_placeholder_reviewer)
}) {
diagnostics.push(json!({
"severity": "error",
"rule_id": "reviewer_identity_missing",
"check": "proposals",
"finding_id": proposal.target.id,
"field_path": "proposals[].reviewed_by",
"message": format!("Accepted or applied proposal {} uses a missing or placeholder reviewer identity.", proposal.id),
"suggestion": "Accept the proposal with a stable named reviewer id before strict proof use.",
"fixable": false,
"normalize_action": null,
}));
}
}
let signal_report = loaded
.as_ref()
.map(|frontier| signals::analyze(frontier, &diagnostics))
.unwrap_or_else(empty_signal_report);
let errors =
report.errors.len() + method_errors + graph_errors + event_errors + state_integrity_errors;
let warnings = method_warnings + graph_warnings + signal_report.proof_readiness.warnings;
let infos = method_infos + graph_infos;
let strict_blockers = signal_report
.signals
.iter()
.filter(|signal| signal.blocks.iter().any(|block| block == "strict_check"))
.count();
let fixable = diagnostics
.iter()
.filter(|d| d.get("fixable").and_then(Value::as_bool).unwrap_or(false))
.count();
let ok = errors == 0 && (!strict || (warnings == 0 && strict_blockers == 0));
json!({
"ok": ok,
"command": "check",
"schema_version": project::VELA_SCHEMA_VERSION,
"source": {
"path": src.display().to_string(),
"hash": format!("sha256:{source_hash}"),
},
"summary": {
"status": if ok { "pass" } else { "fail" },
"checked_findings": report.total_files,
"valid_findings": report.valid,
"invalid_findings": report.invalid,
"errors": errors,
"warnings": warnings,
"info": infos,
"fixable": fixable,
"strict": strict,
"schema_only": schema_only,
},
"checks": [
{
"id": "schema",
"status": if report.invalid == 0 { "pass" } else { "fail" },
"checked": report.total_files,
"failed": report.invalid,
"errors": report.errors.iter().map(|e| json!({
"file": e.file,
"message": e.error,
})).collect::<Vec<_>>(),
},
{
"id": "methodology",
"status": if method_errors == 0 { "pass" } else { "fail" },
"checked": method_report.as_ref().map_or(0, |r| r.findings_checked),
"failed": method_errors,
"warnings": method_warnings,
"info": method_infos,
"skipped": schema_only,
},
{
"id": "frontier_graph",
"status": if graph_errors == 0 { "pass" } else { "fail" },
"checked": graph_report.as_ref().map_or(0, |r| r.findings_checked),
"failed": graph_errors,
"warnings": graph_warnings,
"info": graph_infos,
"skipped": schema_only,
},
{
"id": "signals",
"status": if strict_blockers == 0 { "pass" } else { "fail" },
"checked": signal_report.signals.len(),
"failed": strict_blockers,
"warnings": signal_report.proof_readiness.warnings,
"skipped": loaded.is_none(),
"blockers": signal_report.signals.iter()
.filter(|s| s.blocks.iter().any(|b| b == "strict_check"))
.map(|s| json!({
"id": s.id,
"kind": s.kind,
"severity": s.severity,
"reason": s.reason,
}))
.collect::<Vec<_>>(),
},
{
"id": "events",
"status": if replay_report.as_ref().is_none_or(|replay| replay.ok) { "pass" } else { "fail" },
"checked": replay_report.as_ref().map_or(0, |replay| replay.event_log.count),
"failed": event_errors,
"skipped": schema_only || loaded.is_none(),
},
{
"id": "state_integrity",
"status": if state_integrity_report.as_ref().is_none_or(|report| report.status != "fail") { "pass" } else { "fail" },
"checked": state_integrity_report.as_ref().map_or(0, |report| report.summary.get("events").copied().unwrap_or_default()),
"failed": state_integrity_errors,
"skipped": schema_only || loaded.is_none(),
}
],
"event_log": replay_report.as_ref().map(|replay| &replay.event_log),
"replay": replay_report,
"state_integrity": state_integrity_report,
"source_registry": source_registry,
"evidence_atoms": evidence_atoms,
"conditions": conditions,
"proposals": proposal_summary,
"proof_state": proof_state,
"signatures": signature_report,
"diagnostics": diagnostics,
"signals": signal_report.signals,
"review_queue": signal_report.review_queue,
"proof_readiness": signal_report.proof_readiness,
"repair_plan": build_repair_plan(&diagnostics),
})
}
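/// Normalizes a frontier: fixes entity types and names, recomputes
/// confidence, optionally rewrites finding IDs to their content addresses
/// (updating links and `previous_version`), resyncs provenance, and
/// materializes derivable source/evidence/condition records. Refuses to
/// write over a frontier that already has substantive canonical events.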
#[allow(clippy::too_many_arguments)]
fn cmd_normalize(
source: &Path,
out: Option<&Path>,
write: bool,
dry_run: bool,
rewrite_ids: bool,
id_map: Option<&Path>,
resync_provenance: bool,
json_output: bool,
) {
if write && out.is_some() {
fail("Use either --write or --out, not both.");
}
if dry_run && (write || out.is_some()) {
fail("--dry-run cannot be combined with --write or --out.");
}
if id_map.is_some() && !rewrite_ids {
fail("--id-map requires --rewrite-ids.");
}
let detected = repo::detect(source).unwrap_or_else(|e| {
eprintln!("{e}");
std::process::exit(1);
});
if matches!(detected, repo::VelaSource::PacketDir(_)) {
fail(
"Cannot normalize a proof packet directory. Export a new packet from frontier state instead.",
);
}
let mut frontier = repo::load(&detected).unwrap_or_else(|e| fail_return(&e));
let has_substantive_events = frontier
.events
.iter()
.any(|event| event.kind != "frontier.created");
if has_substantive_events && (write || out.is_some()) {
fail(
"Refusing to normalize a frontier with canonical events. Normalize before proposal-backed writes, or create a new reviewed transition for the intended change.",
);
}
let source_hash = hash_path(source).unwrap_or_else(|_| "unavailable".to_string());
let before_stats = serde_json::to_value(&frontier.stats).unwrap_or(Value::Null);
let (entity_type_fixes, entity_name_fixes) =
normalize::normalize_findings(&mut frontier.findings);
let confidence_updates =
bundle::recompute_all_confidence(&mut frontier.findings, &frontier.replications);
let provenance_resync_count = if resync_provenance {
sources::resync_provenance_from_sources(&mut frontier)
} else {
0
};
let before_source_count = frontier.sources.len();
let before_evidence_atom_count = frontier.evidence_atoms.len();
let before_condition_record_count = frontier.condition_records.len();
let mut id_rewrites = Vec::new();
if rewrite_ids {
let mut id_map_values = std::collections::BTreeMap::<String, String>::new();
for finding in &frontier.findings {
let expected =
bundle::FindingBundle::content_address(&finding.assertion, &finding.provenance);
if expected != finding.id {
id_map_values.insert(finding.id.clone(), expected);
}
}
let new_ids = id_map_values
.values()
.map(String::as_str)
.collect::<std::collections::HashSet<_>>();
if new_ids.len() != id_map_values.len() {
fail("Refusing to rewrite IDs because two findings map to the same content address.");
}
for finding in &mut frontier.findings {
if let Some(new_id) = id_map_values.get(&finding.id) {
id_rewrites.push(json!({"old": finding.id, "new": new_id}));
finding.previous_version = Some(finding.id.clone());
finding.id = new_id.clone();
}
}
for finding in &mut frontier.findings {
for link in &mut finding.links {
if let Some(new_target) = id_map_values.get(&link.target) {
link.target = new_target.clone();
}
}
}
if let Some(path) = id_map {
std::fs::write(
path,
serde_json::to_string_pretty(&id_map_values)
.expect("failed to serialize normalize id map"),
)
.unwrap_or_else(|e| fail(&format!("Failed to write {}: {e}", path.display())));
}
}
sources::materialize_project(&mut frontier);
let source_records_materialized = frontier.sources.len().saturating_sub(before_source_count);
let evidence_atoms_materialized = frontier
.evidence_atoms
.len()
.saturating_sub(before_evidence_atom_count);
let condition_records_materialized = frontier
.condition_records
.len()
.saturating_sub(before_condition_record_count);
let after_stats = serde_json::to_value(&frontier.stats).unwrap_or(Value::Null);
let id_rewrite_count = id_rewrites.len();
let wrote_to = if write {
repo::save(&detected, &frontier).unwrap_or_else(|e| fail(&e));
Some(source.display().to_string())
} else if let Some(out_path) = out {
repo::save_to_path(out_path, &frontier).unwrap_or_else(|e| fail(&e));
Some(out_path.display().to_string())
} else {
None
};
let wrote = wrote_to.is_some();
let planned_changes = entity_type_fixes
+ entity_name_fixes
+ confidence_updates
+ id_rewrite_count
+ source_records_materialized
+ evidence_atoms_materialized
+ condition_records_materialized
+ provenance_resync_count;
let payload = json!({
"ok": true,
"command": "normalize",
"schema_version": project::VELA_SCHEMA_VERSION,
"source": {
"path": source.display().to_string(),
"hash": format!("sha256:{source_hash}"),
},
"dry_run": wrote_to.is_none(),
"wrote_to": wrote_to,
"summary": {
"planned": planned_changes,
"safe": planned_changes,
"unsafe": 0,
"applied": if wrote { planned_changes } else { 0 },
},
"changes": {
"entity_type_fixes": entity_type_fixes,
"entity_name_fixes": entity_name_fixes,
"confidence_updates": confidence_updates,
"id_rewrites": id_rewrite_count,
"source_records_materialized": source_records_materialized,
"evidence_atoms_materialized": evidence_atoms_materialized,
"condition_records_materialized": condition_records_materialized,
"provenance_resyncs": provenance_resync_count,
"stats_changed": before_stats != after_stats,
},
"id_rewrites": id_rewrites,
"repair_plan": if wrote { Vec::<Value>::new() } else {
vec![json!({
"action": "apply_normalization",
"command": "vela normalize <frontier> --out frontier.normalized.json"
})]
},
});
if json_output {
println!(
"{}",
serde_json::to_string_pretty(&payload).expect("failed to serialize normalize report")
);
} else if let Some(path) = payload.get("wrote_to").and_then(Value::as_str) {
println!("{} normalized frontier written to {path}", style::ok("ok"));
println!(
" entity type fixes: {}, entity name fixes: {}, confidence updates: {}, id rewrites: {}",
entity_type_fixes, entity_name_fixes, confidence_updates, id_rewrite_count
);
} else {
println!("normalize dry run for {}", source.display());
println!(
" would apply entity type fixes: {}, entity name fixes: {}, confidence updates: {}, id rewrites: {}",
entity_type_fixes, entity_name_fixes, confidence_updates, id_rewrite_count
);
}
}
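/// Exports a proof packet for the (currently sole) `bbb-alzheimer` template,
/// optionally runs a gold benchmark suite into the packet, validates the
/// packet, and records the export in the frontier's proof state, persisting
/// it only when `--record-proof-state` is set.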
fn cmd_proof(
frontier: &Path,
out: &Path,
template: &str,
gold: Option<&Path>,
record_proof_state: bool,
json_output: bool,
) {
if template != "bbb-alzheimer" {
fail(&format!(
"Unsupported proof template '{template}'. Supported: bbb-alzheimer"
));
}
let mut loaded = load_frontier_or_fail(frontier);
let source_hash = hash_path_or_fail(frontier);
let export_record = export::export_packet_with_source(&loaded, Some(frontier), out)
.unwrap_or_else(|e| fail(&e));
let benchmark_summary = gold.map(|gold_path| {
let summary = benchmark::run_suite(gold_path).unwrap_or_else(|e| {
fail(&format!(
"Failed to run proof benchmark '{}': {e}",
gold_path.display()
))
});
append_packet_json_file(out, "benchmark-summary.json", &summary).unwrap_or_else(|e| {
fail(&format!("Failed to write benchmark summary: {e}"));
});
if summary.get("ok").and_then(Value::as_bool) != Some(true) {
fail(&format!(
"Proof benchmark failed for {}",
gold_path.display()
));
}
summary
});
let validation_summary = packet::validate(out).unwrap_or_else(|e| {
fail(&format!("Proof packet validation failed: {e}"));
});
proposals::record_proof_export(
&mut loaded,
proposals::ProofPacketRecord {
generated_at: export_record.generated_at.clone(),
snapshot_hash: export_record.snapshot_hash.clone(),
event_log_hash: export_record.event_log_hash.clone(),
packet_manifest_hash: export_record.packet_manifest_hash.clone(),
},
);
project::recompute_stats(&mut loaded);
if record_proof_state {
repo::save_to_path(frontier, &loaded).unwrap_or_else(|e| fail(&e));
}
let signal_report = signals::analyze(&loaded, &[]);
if json_output {
let payload = json!({
"ok": true,
"command": "proof",
"schema_version": project::VELA_SCHEMA_VERSION,
"recorded_proof_state": record_proof_state,
"frontier": {
"name": &loaded.project.name,
"source": frontier.display().to_string(),
"hash": format!("sha256:{source_hash}"),
},
"template": template,
"gold": gold.map(|p| p.display().to_string()),
"benchmark": benchmark_summary,
"output": out.display().to_string(),
"packet": {
"manifest_path": out.join("manifest.json").display().to_string(),
},
"validation": {
"status": "ok",
"summary": validation_summary,
},
"proposals": proposals::summary(&loaded),
"proof_state": loaded.proof_state,
"signals": signal_report.signals,
"review_queue": signal_report.review_queue,
"proof_readiness": signal_report.proof_readiness,
"trace_path": out.join("proof-trace.json").display().to_string(),
});
println!(
"{}",
serde_json::to_string_pretty(&payload).expect("failed to serialize proof response")
);
} else {
println!("vela proof");
println!(" source: {}", frontier.display());
println!(" template: {template}");
println!(" output: {}", out.display());
println!(" trace: {}", out.join("proof-trace.json").display());
println!(
" proof state: {}",
if record_proof_state {
"recorded"
} else {
"not recorded"
}
);
println!();
println!("{validation_summary}");
}
}
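/// One-screen frontier summary: pending proposals by kind, causal-audit
/// verdicts, replication outcomes, and federation sync/conflict history
/// derived from the canonical event log.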
fn cmd_status(path: &Path, json: bool) {
let project = repo::load_from_path(path).unwrap_or_else(|e| fail_return(&e));
let mut pending_total = 0usize;
let mut pending_by_kind: std::collections::BTreeMap<String, usize> =
std::collections::BTreeMap::new();
for p in &project.proposals {
if p.status == "pending_review" {
pending_total += 1;
*pending_by_kind.entry(p.kind.clone()).or_insert(0) += 1;
}
}
let audit = crate::causal_reasoning::audit_frontier(&project);
let audit_summary = crate::causal_reasoning::summarize_audit(&audit);
let mut last_sync: Option<&crate::events::StateEvent> = None;
let mut last_conflict: Option<&crate::events::StateEvent> = None;
let mut total_conflicts = 0usize;
for e in &project.events {
match e.kind.as_str() {
"frontier.synced_with_peer" => {
if last_sync
.map(|prev| e.timestamp > prev.timestamp)
.unwrap_or(true)
{
last_sync = Some(e);
}
}
"frontier.conflict_detected" => {
total_conflicts += 1;
if last_conflict
.map(|prev| e.timestamp > prev.timestamp)
.unwrap_or(true)
{
last_conflict = Some(e);
}
}
_ => {}
}
}
let mut targets_with_success = std::collections::HashSet::new();
let mut failed_replications = 0usize;
for r in &project.replications {
if r.outcome == "replicated" {
targets_with_success.insert(r.target_finding.clone());
} else if r.outcome == "failed" {
failed_replications += 1;
}
}
if json {
println!(
"{}",
serde_json::to_string_pretty(&json!({
"ok": true,
"command": "status",
"frontier": frontier_label(&project),
"vfr_id": project.frontier_id(),
"findings": project.findings.len(),
"events": project.events.len(),
"actors": project.actors.len(),
"peers": project.peers.len(),
"inbox": {
"pending_total": pending_total,
"pending_by_kind": pending_by_kind,
},
"causal_audit": {
"identified": audit_summary.identified,
"conditional": audit_summary.conditional,
"underidentified": audit_summary.underidentified,
"underdetermined": audit_summary.underdetermined,
},
"replications": {
"total": project.replications.len(),
"findings_with_success": targets_with_success.len(),
"failed": failed_replications,
},
"federation": {
"peers": project.peers.len(),
"last_sync": last_sync.map(|e| e.timestamp.clone()),
"last_conflict": last_conflict.map(|e| e.timestamp.clone()),
"total_conflicts": total_conflicts,
},
}))
.expect("serialize status")
);
return;
}
println!();
println!(
" {}",
format!("VELA · STATUS · {}", path.display())
.to_uppercase()
.dimmed()
);
println!(" {}", style::tick_row(60));
println!();
println!(" frontier: {}", frontier_label(&project));
println!(" vfr_id: {}", project.frontier_id());
println!(
" findings: {} events: {} peers: {} actors: {}",
project.findings.len(),
project.events.len(),
project.peers.len(),
project.actors.len(),
);
println!();
if pending_total > 0 {
println!(
" {} {pending_total} pending proposals",
style::warn("inbox")
);
for (k, n) in &pending_by_kind {
println!(" · {n:>3} {k}");
}
} else {
println!(" {} inbox clean", style::ok("ok"));
}
println!();
if audit_summary.underidentified > 0 || audit_summary.conditional > 0 {
let chip = if audit_summary.underidentified > 0 {
style::lost("audit")
} else {
style::warn("audit")
};
println!(
" {} identified {} · conditional {} · underidentified {} · underdetermined {}",
chip,
audit_summary.identified,
audit_summary.conditional,
audit_summary.underidentified,
audit_summary.underdetermined,
);
if audit_summary.underidentified > 0 {
println!(
" next: vela causal audit {} --problems-only",
path.display()
);
}
} else if audit_summary.underdetermined == 0 {
println!(
" {} causal audit: all {} identified",
style::ok("ok"),
audit_summary.identified
);
} else {
println!(
" {} causal audit: {} identified, {} ungraded",
style::warn("audit"),
audit_summary.identified,
audit_summary.underdetermined,
);
}
println!();
if !project.replications.is_empty() {
println!(
" {} {} records · {} findings replicated · {} failed",
style::ok("replications"),
project.replications.len(),
targets_with_success.len(),
failed_replications,
);
}
if project.peers.is_empty() {
println!(
" {} no federation peers registered",
style::warn("federation")
);
} else {
let last = last_sync
.map(|e| fmt_timestamp(&e.timestamp))
.unwrap_or_else(|| "never".to_string());
let chip = if total_conflicts > 0 {
style::warn("federation")
} else {
style::ok("federation")
};
println!(
" {} {} peer(s) · last sync {} · {} conflict events",
chip,
project.peers.len(),
last,
total_conflicts,
);
}
println!();
}
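/// Prints the newest events, optionally substring-filtered by kind, sorted
/// by timestamp descending and truncated to `limit`.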
fn cmd_log(path: &Path, limit: usize, kind_filter: Option<&str>, json: bool) {
let project = repo::load_from_path(path).unwrap_or_else(|e| fail_return(&e));
let mut events: Vec<&crate::events::StateEvent> = project
.events
.iter()
.filter(|e| match kind_filter {
Some(k) => e.kind.contains(k),
None => true,
})
.collect();
events.sort_by(|a, b| b.timestamp.cmp(&a.timestamp));
events.truncate(limit);
if json {
let payload: Vec<_> = events
.iter()
.map(|e| {
json!({
"id": e.id,
"kind": e.kind,
"actor": e.actor.id,
"target": &e.target.id,
"target_type": &e.target.r#type,
"timestamp": e.timestamp,
"reason": e.reason,
})
})
.collect();
println!(
"{}",
serde_json::to_string_pretty(&json!({
"ok": true,
"command": "log",
"events": payload,
}))
.expect("serialize log")
);
return;
}
println!();
println!(
" {}",
format!("VELA · LOG · {} (latest {})", path.display(), events.len())
.to_uppercase()
.dimmed()
);
println!(" {}", style::tick_row(60));
if events.is_empty() {
println!(" (no events)");
return;
}
for e in &events {
let when = fmt_timestamp(&e.timestamp);
// Truncate on char boundaries so multibyte target IDs cannot panic the slice.
let target_short = if e.target.id.chars().count() > 22 {
let prefix: String = e.target.id.chars().take(21).collect();
format!("{prefix}…")
} else {
e.target.id.clone()
};
let reason: String = e.reason.chars().take(70).collect();
println!(
" {:<19} {:<32} {:<24} {}",
when, e.kind, target_short, reason
);
}
println!();
}
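/// Triage view of pending proposals. Reviewer-agent `finding.note` proposals
/// are mined for plausibility / evidence / scope / duplicate-risk scores,
/// which rank the queue via the weighted composite below.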
fn cmd_inbox(path: &Path, kind_filter: Option<&str>, limit: usize, json: bool) {
let project = repo::load_from_path(path).unwrap_or_else(|e| fail_return(&e));
let mut score_map: std::collections::HashMap<String, (f64, f64, f64, f64)> =
std::collections::HashMap::new();
for p in &project.proposals {
if p.kind != "finding.note" {
continue;
}
if p.actor.id != "agent:reviewer-agent" {
continue;
}
let reason = &p.reason;
let Some(target) = reason.split_whitespace().find(|s| s.starts_with("vpr_")) else {
continue;
};
let text = p.payload.get("text").and_then(|v| v.as_str()).unwrap_or("");
let extract = |k: &str| -> f64 {
let pat = format!("{k} ");
text.find(&pat)
.and_then(|idx| text[idx + pat.len()..].split_whitespace().next())
.and_then(|t| t.parse::<f64>().ok())
.unwrap_or(0.0)
};
score_map.insert(
target.to_string(),
(
extract("plausibility"),
extract("evidence"),
extract("scope"),
extract("duplicate-risk"),
),
);
}
let mut pending: Vec<&crate::proposals::StateProposal> = project
.proposals
.iter()
.filter(|p| {
p.status == "pending_review"
&& match kind_filter {
Some(k) => p.kind.contains(k),
None => true,
}
})
.collect();
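// Composite ranking: reward plausibility (0.4), evidence (0.3), and scope
// (0.2); penalize duplicate risk (0.3). Unscored proposals (None) sort last.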
pending.sort_by(|a, b| {
let sa = score_map
.get(&a.id)
.map(|(p, e, s, d)| 0.4 * p + 0.3 * e + 0.2 * s - 0.3 * d);
let sb = score_map
.get(&b.id)
.map(|(p, e, s, d)| 0.4 * p + 0.3 * e + 0.2 * s - 0.3 * d);
sb.partial_cmp(&sa).unwrap_or(std::cmp::Ordering::Equal)
});
pending.truncate(limit);
if json {
let payload: Vec<_> = pending
.iter()
.map(|p| {
let assertion_text = p
.payload
.get("finding")
.and_then(|f| f.get("assertion"))
.and_then(|a| a.get("text"))
.and_then(|t| t.as_str());
let assertion_type = p
.payload
.get("finding")
.and_then(|f| f.get("assertion"))
.and_then(|a| a.get("type"))
.and_then(|t| t.as_str());
let composite = score_map
.get(&p.id)
.map(|(pl, e, s, d)| 0.4 * pl + 0.3 * e + 0.2 * s - 0.3 * d);
json!({
"proposal_id": p.id,
"kind": p.kind,
"actor": p.actor,
"reason": p.reason,
"assertion_text": assertion_text,
"assertion_type": assertion_type,
"reviewer_composite": composite,
})
})
.collect();
println!(
"{}",
serde_json::to_string_pretty(&json!({
"ok": true,
"command": "inbox",
"shown": pending.len(),
"proposals": payload,
}))
.expect("serialize inbox")
);
return;
}
println!();
println!(
" {}",
format!(
"VELA · INBOX · {} ({} pending shown)",
path.display(),
pending.len()
)
.to_uppercase()
.dimmed()
);
println!(" {}", style::tick_row(60));
if pending.is_empty() {
println!(" (inbox clean)");
return;
}
for p in &pending {
let assertion_text = p
.payload
.get("finding")
.and_then(|f| f.get("assertion"))
.and_then(|a| a.get("text"))
.and_then(|t| t.as_str())
.unwrap_or("");
let assertion_type = p
.payload
.get("finding")
.and_then(|f| f.get("assertion"))
.and_then(|a| a.get("type"))
.and_then(|t| t.as_str())
.unwrap_or("");
let composite = score_map
.get(&p.id)
.map(|(pl, e, s, d)| 0.4 * pl + 0.3 * e + 0.2 * s - 0.3 * d);
let score_str = composite
.map(|c| format!("[{:.2}]", c))
.unwrap_or_else(|| "[—] ".to_string());
// Char-boundary-safe truncation (byte slicing can panic on multibyte kinds).
let kind_short = if p.kind.chars().count() > 12 {
let prefix: String = p.kind.chars().take(11).collect();
format!("{prefix}…")
} else {
p.kind.clone()
};
let summary: String = if !assertion_text.is_empty() {
assertion_text.chars().take(80).collect()
} else {
p.reason.chars().take(80).collect()
};
println!(
" {} {} {:<13} {:<18} {}",
score_str, p.id, kind_short, assertion_type, summary
);
}
println!();
}
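/// Q&A entry point: an empty question opens an interactive `ask>` loop, a
/// non-empty one is answered once. Routing is keyword-based (see `answer`).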
fn cmd_ask(path: &Path, question: &str, json: bool) {
let project = repo::load_from_path(path).unwrap_or_else(|e| fail_return(&e));
if question.trim().is_empty() {
use std::io::{BufRead, Write};
println!();
println!(
" {}",
format!("VELA · ASK · {}", path.display())
.to_uppercase()
.dimmed()
);
println!(" {}", style::tick_row(60));
println!(" Ask a question. Type `exit` to quit.");
println!(" Examples:");
println!(" · what's pending?");
println!(" · what's underidentified?");
println!(" · how many findings?");
println!(" · what changed recently?");
println!(" · who has what calibration?");
println!();
let stdin = std::io::stdin();
let mut stdout = std::io::stdout();
loop {
print!(" ask> ");
stdout.flush().ok();
let mut line = String::new();
if stdin.lock().read_line(&mut line).is_err() {
break;
}
let q = line.trim();
if q.is_empty() {
continue;
}
if matches!(q, "exit" | "quit" | "q") {
break;
}
answer(&project, q, false);
}
return;
}
answer(&project, question, json);
}
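/// Keyword router for `vela ask`: matches the lowercased question against
/// pending / audit / recent / count / calibration / federation topics and
/// prints the matching summary, falling back to a hint for unknown questions.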
fn answer(project: &crate::project::Project, q: &str, json: bool) {
let lower = q.to_lowercase();
if lower.contains("pending")
|| lower.contains("inbox")
|| lower.contains("queue")
|| lower.contains("to review")
{
let pending: Vec<&crate::proposals::StateProposal> = project
.proposals
.iter()
.filter(|p| p.status == "pending_review")
.collect();
let mut by_kind: std::collections::BTreeMap<String, usize> = Default::default();
for p in &pending {
*by_kind.entry(p.kind.clone()).or_insert(0) += 1;
}
if json {
println!(
"{}",
serde_json::to_string_pretty(&json!({
"answer": "pending",
"total": pending.len(),
"by_kind": by_kind,
}))
.unwrap()
);
} else {
println!(" {} pending proposals.", pending.len());
for (k, n) in &by_kind {
println!(" · {n:>3} {k}");
}
if pending.is_empty() {
println!(" Inbox is clean.");
} else {
println!(" Run `vela inbox <frontier>` to triage.");
}
}
return;
}
if lower.contains("underident")
|| lower.contains("audit")
|| lower.contains("identif")
|| lower.contains("causal")
{
let entries = crate::causal_reasoning::audit_frontier(project);
let summary = crate::causal_reasoning::summarize_audit(&entries);
if json {
println!(
"{}",
serde_json::to_string_pretty(&json!({
"answer": "audit",
"summary": {
"identified": summary.identified,
"conditional": summary.conditional,
"underidentified": summary.underidentified,
"underdetermined": summary.underdetermined,
},
}))
.unwrap()
);
} else {
println!(
" Causal audit: {} identified · {} conditional · {} underidentified · {} underdetermined.",
summary.identified,
summary.conditional,
summary.underidentified,
summary.underdetermined,
);
if summary.underidentified > 0 {
println!(
" The {} underidentified findings are concrete review items:",
summary.underidentified
);
for e in entries
.iter()
.filter(|e| {
matches!(
e.verdict,
crate::causal_reasoning::Identifiability::Underidentified
)
})
.take(8)
{
let txt: String = e.assertion_text.chars().take(70).collect();
println!(" · {} {}", e.finding_id, txt);
}
}
}
return;
}
if lower.contains("recent")
|| lower.contains("changed")
|| lower.contains("latest")
|| lower.contains("happen")
{
let mut events: Vec<&crate::events::StateEvent> = project.events.iter().collect();
events.sort_by(|a, b| b.timestamp.cmp(&a.timestamp));
events.truncate(8);
if json {
println!(
"{}",
serde_json::to_string_pretty(&json!({
"answer": "recent_events",
"events": events.iter().map(|e| json!({
"id": e.id, "kind": e.kind, "timestamp": e.timestamp,
"actor": e.actor.id, "target": e.target.id,
})).collect::<Vec<_>>(),
}))
.unwrap()
);
} else {
println!(" Most recent {} events:", events.len());
for e in &events {
let when = fmt_timestamp(&e.timestamp);
println!(" · {when} {:<28} {}", e.kind, e.target.id);
}
}
return;
}
if lower.starts_with("how many") || lower.contains("count") || lower.contains("total") {
let n = project.findings.len();
let evs = project.events.len();
let peers = project.peers.len();
let actors = project.actors.len();
if json {
println!(
"{}",
serde_json::to_string_pretty(&json!({
"answer": "counts",
"findings": n,
"events": evs,
"peers": peers,
"actors": actors,
"replications": project.replications.len(),
"predictions": project.predictions.len(),
}))
.unwrap()
);
} else {
println!(" {n} findings · {evs} events · {actors} actors · {peers} peers.");
println!(
" {} replications · {} predictions · {} datasets · {} code artifacts.",
project.replications.len(),
project.predictions.len(),
project.datasets.len(),
project.code_artifacts.len(),
);
}
return;
}
if lower.contains("calibration") || lower.contains("brier") || lower.contains("predict") {
let records =
crate::calibration::calibration_records(&project.predictions, &project.resolutions);
if json {
println!("{}", serde_json::to_string_pretty(&records).unwrap());
} else if records.is_empty() {
println!(" No predictions yet. The calibration ledger is empty.");
} else {
println!(" Calibration over {} actor(s):", records.len());
for r in &records {
let brier = r
.brier_score
.map(|b| format!("{:.3}", b))
.unwrap_or_else(|| "—".into());
println!(
" · {:<28} predictions {} · resolved {} · expired {} · Brier {}",
r.actor, r.n_predictions, r.n_resolved, r.n_expired, brier
);
}
}
return;
}
if lower.contains("peer")
|| lower.contains("federat")
|| lower.contains("sync")
|| lower.contains("conflict")
{
let mut total_conflicts = 0usize;
for e in &project.events {
if e.kind == "frontier.conflict_detected" {
total_conflicts += 1;
}
}
if json {
println!(
"{}",
serde_json::to_string_pretty(&json!({
"answer": "federation",
"peers": project.peers.iter().map(|p| &p.id).collect::<Vec<_>>(),
"total_conflicts": total_conflicts,
}))
.unwrap()
);
} else {
println!(" {} peer(s) registered:", project.peers.len());
for p in &project.peers {
println!(" · {:<24} {}", p.id, p.url);
}
println!(" {total_conflicts} conflict events on the canonical log.");
}
return;
}
if json {
println!(
"{}",
serde_json::to_string_pretty(&json!({
"answer": "unknown_question",
"question": q,
"hint": "Try: pending, audit, recent, how many, calibration, peers."
}))
.unwrap()
);
} else {
println!(" Don't know how to route that question yet.");
println!(" Try: pending · audit · recent · how many · calibration · peers");
}
}
fn frontier_label(p: &crate::project::Project) -> String {
if p.project.name.trim().is_empty() {
"(unnamed)".to_string()
} else {
p.project.name.clone()
}
}
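/// Renders an RFC 3339 timestamp as `MM-DD HH:MM`, falling back to the first
/// 16 characters of the raw string when parsing fails.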
fn fmt_timestamp(ts: &str) -> String {
chrono::DateTime::parse_from_rfc3339(ts)
.map(|dt| dt.format("%m-%d %H:%M").to_string())
.unwrap_or_else(|_| ts.chars().take(16).collect())
}
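/// Plain-text statistics dump for a frontier: counts, proof state, and a
/// per-category breakdown sorted by descending count. No JSON mode.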
fn cmd_stats(path: &Path) {
let frontier = load_frontier_or_fail(path);
let s = &frontier.stats;
println!();
println!(" {}", "FRONTIER · V0.36.0".dimmed());
println!(" {}", frontier.project.name.bold());
println!(" {}", style::tick_row(60));
println!(" id: {}", frontier.frontier_id());
println!(" compiled: {}", frontier.project.compiled_at);
println!(" papers: {}", frontier.project.papers_processed);
println!(" findings: {}", s.findings);
println!(" links: {}", s.links);
println!(" replicated: {}", s.replicated);
println!(" avg confidence: {}", s.avg_confidence);
println!(" gaps: {}", s.gaps);
println!(" contested: {}", s.contested);
println!(" reviewed: {}", s.human_reviewed);
println!(" proposals: {}", s.proposal_count);
println!(
" recorded proof: {}",
frontier.proof_state.latest_packet.status
);
if frontier.proof_state.latest_packet.status != "never_exported" {
println!(
" proof note: recorded frontier metadata; packet files are checked by `vela packet validate`"
);
}
if !s.categories.is_empty() {
println!();
println!(" {}", "categories".dimmed());
let mut categories = s.categories.iter().collect::<Vec<_>>();
categories.sort_by(|a, b| b.1.cmp(a.1));
for (category, count) in categories {
println!(" {category}: {}", count);
}
}
println!();
println!(" {}", style::tick_row(60));
println!();
}
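/// Dispatch for the `vela proposals` subcommands (list, show, preview,
/// import, validate, export, accept, reject), each with a JSON payload and a
/// human-readable fallback.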
fn cmd_proposals(action: ProposalAction) {
match action {
ProposalAction::List {
frontier,
status,
json,
} => {
let frontier_state =
repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
let proposals_list = proposals::list(&frontier_state, status.as_deref());
let payload = json!({
"ok": true,
"command": "proposals.list",
"frontier": frontier_state.project.name,
"status_filter": status,
"summary": proposals::summary(&frontier_state),
"proposals": proposals_list,
});
if json {
println!(
"{}",
serde_json::to_string_pretty(&payload)
.expect("failed to serialize proposals list")
);
} else {
println!("vela proposals list");
println!(" frontier: {}", frontier_state.project.name);
println!(
" proposals: {}",
payload["proposals"].as_array().map_or(0, Vec::len)
);
}
}
ProposalAction::Show {
frontier,
proposal_id,
json,
} => {
let frontier_state =
repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
let proposal =
proposals::show(&frontier_state, &proposal_id).unwrap_or_else(|e| fail_return(&e));
let payload = json!({
"ok": true,
"command": "proposals.show",
"frontier": frontier_state.project.name,
"proposal": proposal,
});
if json {
println!(
"{}",
serde_json::to_string_pretty(&payload)
.expect("failed to serialize proposal show")
);
} else {
println!("vela proposals show");
println!(" frontier: {}", frontier_state.project.name);
println!(" proposal: {}", proposal_id);
println!(" kind: {}", proposal.kind);
println!(" status: {}", proposal.status);
}
}
ProposalAction::Preview {
frontier,
proposal_id,
reviewer,
json,
} => {
let preview = proposals::preview_at_path(&frontier, &proposal_id, &reviewer)
.unwrap_or_else(|e| fail_return(&e));
let payload = json!({
"ok": true,
"command": "proposals.preview",
"frontier": frontier.display().to_string(),
"preview": preview,
});
if json {
println!(
"{}",
serde_json::to_string_pretty(&payload)
.expect("failed to serialize proposal preview")
);
} else {
println!("vela proposals preview");
println!(" proposal: {}", proposal_id);
println!(" kind: {}", preview.kind);
println!(
" findings: {} -> {}",
preview.findings_before, preview.findings_after
);
println!(
" artifacts: {} -> {}",
preview.artifacts_before, preview.artifacts_after
);
println!(
" events: {} -> {}",
preview.events_before, preview.events_after
);
if !preview.changed_findings.is_empty() {
println!(
" findings changed: {}",
preview.changed_findings.join(", ")
);
}
if !preview.changed_artifacts.is_empty() {
println!(
" artifacts changed: {}",
preview.changed_artifacts.join(", ")
);
}
if !preview.event_kinds.is_empty() {
println!(" event kinds: {}", preview.event_kinds.join(", "));
}
println!(" event: {}", preview.applied_event_id);
}
}
ProposalAction::Import {
frontier,
source,
json,
} => {
let report =
proposals::import_from_path(&frontier, &source).unwrap_or_else(|e| fail_return(&e));
let payload = json!({
"ok": true,
"command": "proposals.import",
"frontier": frontier.display().to_string(),
"source": source.display().to_string(),
"summary": {
"imported": report.imported,
"applied": report.applied,
"rejected": report.rejected,
"duplicates": report.duplicates,
},
});
if json {
println!(
"{}",
serde_json::to_string_pretty(&payload)
.expect("failed to serialize proposal import")
);
} else {
println!(
"Imported {} proposals into {}",
report.imported, report.wrote_to
);
}
}
ProposalAction::Validate { source, json } => {
let report = proposals::validate_source(&source).unwrap_or_else(|e| fail_return(&e));
let payload = json!({
"ok": report.ok,
"command": "proposals.validate",
"source": source.display().to_string(),
"summary": {
"checked": report.checked,
"valid": report.valid,
"invalid": report.invalid,
},
"proposal_ids": report.proposal_ids,
"errors": report.errors,
});
if json {
println!(
"{}",
serde_json::to_string_pretty(&payload)
.expect("failed to serialize proposal validation")
);
} else if report.ok {
println!("{} validated {} proposals", style::ok("ok"), report.valid);
} else {
println!(
"{} validated {} proposals, {} invalid",
style::lost("lost"),
report.valid,
report.invalid
);
for error in &report.errors {
println!(" · {error}");
}
std::process::exit(1);
}
}
ProposalAction::Export {
frontier,
output,
status,
json,
} => {
let count = proposals::export_to_path(&frontier, &output, status.as_deref())
.unwrap_or_else(|e| fail_return(&e));
let payload = json!({
"ok": true,
"command": "proposals.export",
"frontier": frontier.display().to_string(),
"output": output.display().to_string(),
"status": status,
"exported": count,
});
if json {
println!(
"{}",
serde_json::to_string_pretty(&payload)
.expect("failed to serialize proposal export")
);
} else {
println!("sealed · {count} proposals · {}", output.display());
}
}
ProposalAction::Accept {
frontier,
proposal_id,
reviewer,
reason,
json,
} => {
let event_id = proposals::accept_at_path(&frontier, &proposal_id, &reviewer, &reason)
.unwrap_or_else(|e| fail_return(&e));
let payload = json!({
"ok": true,
"command": "proposals.accept",
"frontier": frontier.display().to_string(),
"proposal_id": proposal_id,
"reviewer": reviewer,
"applied_event_id": event_id,
});
if json {
println!(
"{}",
serde_json::to_string_pretty(&payload)
.expect("failed to serialize proposal accept")
);
} else {
println!(
"{} accepted and applied proposal {}",
style::ok("ok"),
proposal_id
);
println!(" event: {}", event_id);
}
}
ProposalAction::Reject {
frontier,
proposal_id,
reviewer,
reason,
json,
} => {
proposals::reject_at_path(&frontier, &proposal_id, &reviewer, &reason)
.unwrap_or_else(|e| fail_return(&e));
let payload = json!({
"ok": true,
"command": "proposals.reject",
"frontier": frontier.display().to_string(),
"proposal_id": proposal_id,
"reviewer": reviewer,
"status": "rejected",
});
if json {
println!(
"{}",
serde_json::to_string_pretty(&payload)
.expect("failed to serialize proposal reject")
);
} else {
println!(
"{} rejected proposal {}",
style::warn("rejected"),
proposal_id
);
}
}
}
}
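/// Imports a bridge packet into frontier state and reports the resulting
/// artifact / finding / gap proposal counts, applied artifact events, and
/// truth proposals left pending review.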
fn cmd_artifact_to_state(
frontier: &Path,
packet: &Path,
actor: &str,
apply_artifacts: bool,
json: bool,
) {
let report =
crate::artifact_to_state::import_packet_at_path(frontier, packet, actor, apply_artifacts)
.unwrap_or_else(|e| fail_return(&e));
if json {
println!(
"{}",
serde_json::to_string_pretty(&report)
.expect("failed to serialize artifact-to-state report")
);
} else {
println!("vela artifact-to-state");
println!(" packet: {}", report.packet_id);
println!(" frontier: {}", report.frontier);
println!(" artifact proposals: {}", report.artifact_proposals);
println!(" finding proposals: {}", report.finding_proposals);
println!(" gap proposals: {}", report.gap_proposals);
println!(
" applied artifact events: {}",
report.applied_artifact_events
);
println!(
" pending truth proposals: {}",
report.pending_truth_proposals
);
}
}
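/// `vela bridge-kit` subcommands: `validate` checks packet structure and
/// exits non-zero on any invalid packet; `verify-provenance` resolves the
/// packet's identifiers over the network and fails on unresolved ones.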
async fn cmd_bridge_kit(action: BridgeKitAction) {
match action {
BridgeKitAction::Validate { source, json } => {
let report = crate::artifact_to_state::validate_bridge_kit_path(&source);
if json {
println!(
"{}",
serde_json::to_string_pretty(&report)
.expect("failed to serialize bridge-kit validation report")
);
} else {
println!("vela bridge-kit validate");
println!(" source: {}", report.source);
println!(" packets: {}", report.packet_count);
println!(" valid: {}", report.valid_packet_count);
println!(" invalid: {}", report.invalid_packet_count);
for packet in &report.packets {
if packet.ok {
println!(
" ok: {} · {} artifacts · {} claims · {} needs",
packet
.packet_id
.as_deref()
.unwrap_or("packet id unavailable"),
packet.artifact_count,
packet.candidate_claim_count,
packet.open_need_count
);
} else {
println!(" invalid: {} · {}", packet.path, packet.errors.join("; "));
}
}
for error in &report.errors {
println!(" error: {error}");
}
}
if !report.ok {
std::process::exit(1);
}
}
BridgeKitAction::VerifyProvenance { packet, json } => {
let report = verify_packet_provenance(&packet).await;
if json {
println!(
"{}",
serde_json::to_string_pretty(&report)
.expect("failed to serialize provenance verification report")
);
} else {
println!("vela bridge-kit verify-provenance");
println!(" packet: {}", report.packet);
println!(" identifiers: {}", report.identifiers.len());
println!(" resolved: {}", report.resolved_count);
println!(" unresolved: {}", report.unresolved_count);
println!(" skipped: {}", report.skipped_count);
for entry in &report.identifiers {
let status = match entry.status.as_str() {
"resolved" => "ok ",
"unresolved" => "FAIL",
"skipped" => "skip",
_ => "? ",
};
println!(
" {} {} ({})",
status,
entry.identifier,
entry.note.as_deref().unwrap_or(entry.kind.as_str())
);
}
}
if report.unresolved_count > 0 {
std::process::exit(1);
}
}
}
}
#[derive(Debug, Clone, Serialize)]
struct ProvenanceVerificationReport {
command: String,
packet: String,
identifiers: Vec<ProvenanceVerificationEntry>,
resolved_count: usize,
unresolved_count: usize,
skipped_count: usize,
}
#[derive(Debug, Clone, Serialize)]
struct ProvenanceVerificationEntry {
identifier: String,
kind: String,
status: String,
#[serde(skip_serializing_if = "Option::is_none")]
note: Option<String>,
}
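/// Collects `doi:`/`pmid:` identifiers from a packet's artifact locators and
/// claim source refs, then resolves each against Crossref or NCBI eutils
/// (15-second timeout). Transport failures count as skipped rather than
/// unresolved, so offline runs do not hard-fail.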
async fn verify_packet_provenance(packet_path: &Path) -> ProvenanceVerificationReport {
use crate::artifact_to_state::ArtifactPacket;
let raw = std::fs::read_to_string(packet_path)
.unwrap_or_else(|e| fail_return(&format!("read packet: {e}")));
let parsed: ArtifactPacket =
serde_json::from_str(&raw).unwrap_or_else(|e| fail_return(&format!("parse packet: {e}")));
let packet = parsed
.validate()
.unwrap_or_else(|e| fail_return(&format!("validate packet: {e}")));
let mut candidates: std::collections::BTreeSet<String> = std::collections::BTreeSet::new();
for artifact in &packet.artifacts {
if let Some(ident) = extract_identifier(&artifact.locator) {
candidates.insert(ident);
}
}
for claim in &packet.candidate_claims {
for source_ref in &claim.source_refs {
if let Some(ident) = extract_identifier(source_ref) {
candidates.insert(ident);
}
}
}
let client = reqwest::Client::builder()
.user_agent("vela/0.108 (+https://github.com/vela-science/vela)")
.timeout(std::time::Duration::from_secs(15))
.build()
.unwrap_or_else(|e| fail_return(&format!("build http client: {e}")));
let mut entries: Vec<ProvenanceVerificationEntry> = Vec::new();
let mut resolved = 0usize;
let mut unresolved = 0usize;
let mut skipped = 0usize;
for candidate in &candidates {
let entry = if let Some(doi) = candidate.strip_prefix("doi:") {
verify_doi(&client, doi).await
} else if let Some(pmid) = candidate.strip_prefix("pmid:") {
verify_pmid(&client, pmid).await
} else {
ProvenanceVerificationEntry {
identifier: candidate.clone(),
kind: "unknown".to_string(),
status: "skipped".to_string(),
note: Some("no recognized identifier prefix".to_string()),
}
};
match entry.status.as_str() {
"resolved" => resolved += 1,
"unresolved" => unresolved += 1,
_ => skipped += 1,
}
entries.push(entry);
}
ProvenanceVerificationReport {
command: "bridge-kit.verify-provenance".to_string(),
packet: packet_path.display().to_string(),
identifiers: entries,
resolved_count: resolved,
unresolved_count: unresolved,
skipped_count: skipped,
}
}
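/// Normalizes a locator string to a `doi:`/`pmid:` identifier, e.g.
/// `https://doi.org/10.1000/xyz` → `doi:10.1000/xyz`. Bare strings that look
/// like DOIs (start with `10.`, contain `/`, no spaces) are accepted too;
/// anything else yields `None`.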
fn extract_identifier(s: &str) -> Option<String> {
let trimmed = s.trim();
if trimmed.is_empty() {
return None;
}
if trimmed.starts_with("doi:") || trimmed.starts_with("pmid:") {
return Some(trimmed.to_string());
}
for prefix in ["https://doi.org/", "http://doi.org/", "https://dx.doi.org/"] {
if let Some(rest) = trimmed.strip_prefix(prefix) {
return Some(format!("doi:{rest}"));
}
}
for prefix in [
"https://pubmed.ncbi.nlm.nih.gov/",
"http://pubmed.ncbi.nlm.nih.gov/",
] {
if let Some(rest) = trimmed.strip_prefix(prefix) {
let pmid = rest.trim_end_matches('/');
return Some(format!("pmid:{pmid}"));
}
}
if trimmed.starts_with("10.") && trimmed.contains('/') && !trimmed.contains(' ') {
return Some(format!("doi:{trimmed}"));
}
None
}
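/// Resolves a DOI against the Crossref works API: an HTTP success means
/// resolved, any other status means unresolved, and a transport error is
/// recorded as skipped.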
async fn verify_doi(client: &reqwest::Client, doi: &str) -> ProvenanceVerificationEntry {
let url = format!("https://api.crossref.org/works/{doi}");
match client.get(&url).send().await {
Ok(resp) if resp.status().is_success() => ProvenanceVerificationEntry {
identifier: format!("doi:{doi}"),
kind: "doi".to_string(),
status: "resolved".to_string(),
note: None,
},
Ok(resp) => ProvenanceVerificationEntry {
identifier: format!("doi:{doi}"),
kind: "doi".to_string(),
status: "unresolved".to_string(),
note: Some(format!("crossref returned {}", resp.status())),
},
Err(e) => ProvenanceVerificationEntry {
identifier: format!("doi:{doi}"),
kind: "doi".to_string(),
status: "skipped".to_string(),
note: Some(format!("crossref unreachable: {e}")),
},
}
}
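/// Resolves a PMID via NCBI eutils esummary; the PMID counts as resolved
/// only when the response carries a non-empty `uids` array.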
async fn verify_pmid(client: &reqwest::Client, pmid: &str) -> ProvenanceVerificationEntry {
let url = format!(
"https://eutils.ncbi.nlm.nih.gov/entrez/eutils/esummary.fcgi?db=pubmed&id={pmid}&retmode=json"
);
match client.get(&url).send().await {
Ok(resp) if resp.status().is_success() => {
let body: serde_json::Value = resp.json().await.unwrap_or(serde_json::Value::Null);
let result = body.get("result");
let uids = result
.and_then(|r| r.get("uids"))
.and_then(|u| u.as_array());
let resolved = uids.is_some_and(|a| !a.is_empty());
if resolved {
ProvenanceVerificationEntry {
identifier: format!("pmid:{pmid}"),
kind: "pmid".to_string(),
status: "resolved".to_string(),
note: None,
}
} else {
ProvenanceVerificationEntry {
identifier: format!("pmid:{pmid}"),
kind: "pmid".to_string(),
status: "unresolved".to_string(),
note: Some("eutils returned empty uids".to_string()),
}
}
}
Ok(resp) => ProvenanceVerificationEntry {
identifier: format!("pmid:{pmid}"),
kind: "pmid".to_string(),
status: "unresolved".to_string(),
note: Some(format!("eutils returned {}", resp.status())),
},
Err(e) => ProvenanceVerificationEntry {
identifier: format!("pmid:{pmid}"),
kind: "pmid".to_string(),
status: "skipped".to_string(),
note: Some(format!("eutils unreachable: {e}")),
},
}
}
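/// `vela source-adapter run`: executes the named source adapter against the
/// frontier (async, since adapters fetch remotely) and reports record-level
/// fetch/change/failure counts plus any proposals and applied events.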
async fn cmd_source_adapter(action: SourceAdapterAction) {
match action {
SourceAdapterAction::Run {
frontier,
adapter,
actor,
entries,
priority,
include_excluded,
allow_partial,
dry_run,
input_dir,
apply_artifacts,
json,
} => {
let report = crate::source_adapters::run(
&frontier,
crate::source_adapters::SourceAdapterRunOptions {
adapter,
actor,
entries,
priority,
include_excluded,
allow_partial,
dry_run,
input_dir,
apply_artifacts,
},
)
.await
.unwrap_or_else(|e| fail_return(&e));
if json {
println!(
"{}",
serde_json::to_string_pretty(&report)
.expect("failed to serialize source adapter report")
);
} else {
println!("vela source-adapter run");
println!(" adapter: {}", report.adapter);
println!(" run: {}", report.run_id);
println!(" frontier: {}", report.frontier);
println!(" selected entries: {}", report.selected_entries);
println!(" fetched records: {}", report.fetched_records);
println!(" changed records: {}", report.changed_records);
println!(" unchanged records: {}", report.unchanged_records);
println!(" failed records: {}", report.failed_records.len());
if let Some(packet_id) = report.packet_id {
println!(" packet: {packet_id}");
}
println!(" artifact proposals: {}", report.artifact_proposals);
println!(" review note proposals: {}", report.review_note_proposals);
println!(" applied events: {}", report.applied_event_ids.len());
}
}
}
}
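/// `vela runtime-adapter run`: like the source-adapter path but synchronous
/// and input-driven, emitting artifact / finding / gap / review-note
/// proposals and counting what was auto-applied vs. left pending review.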
fn cmd_runtime_adapter(action: RuntimeAdapterAction) {
match action {
RuntimeAdapterAction::Run {
frontier,
adapter,
input,
actor,
dry_run,
apply_artifacts,
json,
} => {
let report = crate::runtime_adapters::run(
&frontier,
crate::runtime_adapters::RuntimeAdapterRunOptions {
adapter,
input,
actor,
dry_run,
apply_artifacts,
},
)
.unwrap_or_else(|e| fail_return(&e));
if json {
println!(
"{}",
serde_json::to_string_pretty(&report)
.expect("failed to serialize runtime adapter report")
);
} else {
println!("vela runtime-adapter run");
println!(" adapter: {}", report.adapter);
println!(" run: {}", report.run_id);
println!(" frontier: {}", report.frontier);
if let Some(packet_id) = report.packet_id {
println!(" packet: {packet_id}");
}
println!(" artifact proposals: {}", report.artifact_proposals);
println!(" finding proposals: {}", report.finding_proposals);
println!(" gap proposals: {}", report.gap_proposals);
println!(" review note proposals: {}", report.review_note_proposals);
println!(
" applied artifact events: {}",
report.applied_artifact_events
);
println!(
" pending truth proposals: {}",
report.pending_truth_proposals
);
}
}
}
}
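/// `vela sign` subcommands: Ed25519 keypair generation, signing every
/// finding in a frontier, verifying signatures, and setting a per-finding
/// signature threshold that gates `jointly_accepted`. E.g. (this invocation
/// is confirmed by the scaffold's next-steps hint below):
///   vela sign generate-keypair --out keys
/// The remaining subcommands follow the field names above under clap's
/// default kebab-case flag mapping.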
fn cmd_sign(action: SignAction) {
match action {
SignAction::GenerateKeypair { out, json } => {
let public_key = sign::generate_keypair(&out).unwrap_or_else(|e| fail_return(&e));
let payload = json!({
"ok": true,
"command": "sign.generate-keypair",
"output_dir": out.display().to_string(),
"public_key": public_key,
});
if json {
println!(
"{}",
serde_json::to_string_pretty(&payload)
.expect("failed to serialize sign.generate-keypair")
);
} else {
println!("{} keypair · {}", style::ok("generated"), out.display());
println!(" public key: {public_key}");
}
}
SignAction::Apply {
frontier,
private_key,
json,
} => {
let count =
sign::sign_frontier(&frontier, &private_key).unwrap_or_else(|e| fail_return(&e));
let payload = json!({
"ok": true,
"command": "sign.apply",
"frontier": frontier.display().to_string(),
"private_key": private_key.display().to_string(),
"signed": count,
});
if json {
println!(
"{}",
serde_json::to_string_pretty(&payload).expect("failed to serialize sign.apply")
);
} else {
println!(
"{} {count} findings in {}",
style::ok("signed"),
frontier.display()
);
}
}
SignAction::Verify {
frontier,
public_key,
json,
} => {
let report = sign::verify_frontier(&frontier, public_key.as_deref())
.unwrap_or_else(|e| fail_return(&e));
if json {
println!(
"{}",
serde_json::to_string_pretty(&report).expect("failed to serialize sign.verify")
);
} else {
println!();
println!(
" {}",
format!("VELA · SIGN · VERIFY · {}", frontier.display())
.to_uppercase()
.dimmed()
);
println!(" {}", style::tick_row(60));
println!(" total findings: {}", report.total_findings);
println!(" signed: {}", report.signed);
println!(" unsigned: {}", report.unsigned);
println!(" valid: {}", report.valid);
println!(" invalid: {}", report.invalid);
if report.findings_with_threshold > 0 {
println!(" with threshold: {}", report.findings_with_threshold);
println!(" jointly accepted: {}", report.jointly_accepted);
}
}
}
SignAction::ThresholdSet {
frontier,
finding_id,
to,
json,
} => {
if to == 0 {
fail("--to must be >= 1");
}
let mut project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
let Some(idx) = project.findings.iter().position(|f| f.id == finding_id) else {
fail(&format!("finding '{finding_id}' not present in frontier"));
};
project.findings[idx].flags.signature_threshold = Some(to);
sign::refresh_jointly_accepted(&mut project);
let met = project.findings[idx].flags.jointly_accepted;
repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));
if json {
println!(
"{}",
serde_json::to_string_pretty(&json!({
"ok": true,
"command": "sign.threshold-set",
"finding_id": finding_id,
"threshold": to,
"jointly_accepted": met,
"frontier": frontier.display().to_string(),
}))
.expect("failed to serialize sign.threshold-set")
);
} else {
println!(
"{} signature_threshold={to} on {finding_id} ({})",
style::ok("set"),
if met {
"jointly accepted"
} else {
"awaiting signatures"
}
);
}
}
}
}
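/// Actor registry management. `add` validates the pubkey (64 hex chars,
/// i.e. a 32-byte Ed25519 key), optionally normalizes an ORCID and access
/// clearance, and refuses duplicate actor ids; `list` dumps the registry.
/// E.g. (as suggested in the scaffold's next steps):
///   vela actor add <frontier> reviewer:you --pubkey "$(cat keys/public.key)"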
fn cmd_actor(action: ActorAction) {
match action {
ActorAction::Add {
frontier,
id,
pubkey,
tier,
orcid,
clearance,
json,
} => {
let trimmed = pubkey.trim();
if trimmed.len() != 64 || hex::decode(trimmed).is_err() {
fail("Public key must be 64 hex characters (32-byte Ed25519 pubkey).");
}
let orcid_normalized = orcid
.as_deref()
.map(|s| sign::validate_orcid(s).unwrap_or_else(|e| fail_return(&e)));
let clearance: Option<crate::access_tier::AccessTier> = clearance.as_deref().map(|s| {
crate::access_tier::AccessTier::parse(s).unwrap_or_else(|e| fail_return(&e))
});
let mut project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
if project.actors.iter().any(|actor| actor.id == id) {
fail(&format!(
"Actor '{id}' already registered in this frontier."
));
}
project.actors.push(sign::ActorRecord {
id: id.clone(),
public_key: trimmed.to_string(),
algorithm: "ed25519".to_string(),
created_at: chrono::Utc::now().to_rfc3339(),
tier: tier.clone(),
orcid: orcid_normalized.clone(),
access_clearance: clearance,
});
repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));
let payload = json!({
"ok": true,
"command": "actor.add",
"frontier": frontier.display().to_string(),
"actor_id": id,
"public_key": trimmed,
"tier": tier,
"orcid": orcid_normalized,
"registered_count": project.actors.len(),
});
if json {
println!(
"{}",
serde_json::to_string_pretty(&payload).expect("failed to serialize actor.add")
);
} else {
let tier_suffix = tier
.as_deref()
.map_or_else(String::new, |t| format!(" tier={t}"));
println!(
"{} actor {} (pubkey {}{tier_suffix})",
style::ok("registered"),
id,
&trimmed[..16]
);
}
}
ActorAction::List { frontier, json } => {
let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
if json {
let payload = json!({
"ok": true,
"command": "actor.list",
"frontier": frontier.display().to_string(),
"actors": project.actors,
});
println!(
"{}",
serde_json::to_string_pretty(&payload).expect("failed to serialize actor.list")
);
} else {
println!();
println!(
" {}",
format!("VELA · ACTOR · LIST · {}", frontier.display())
.to_uppercase()
.dimmed()
);
println!(" {}", style::tick_row(60));
if project.actors.is_empty() {
println!(" (no actors registered)");
} else {
for actor in &project.actors {
println!(
" {:<28} {}… registered {}",
actor.id,
&actor.public_key[..16],
actor.created_at
);
}
}
}
}
}
}
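/// Causal-reasoning subcommands over the frontier's link graph:
///   audit          - per-finding identifiability verdicts
///                    (identified / conditional / underidentified / underdetermined)
///   effect         - back-door / front-door identification of source -> target
///   graph          - parent/child adjacency dump
///   counterfactual - twin-network propagation of do(X := v) onto a target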
fn cmd_causal(action: CausalAction) {
use crate::causal_reasoning;
match action {
CausalAction::Audit {
frontier,
problems_only,
json,
} => {
let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
let mut entries = causal_reasoning::audit_frontier(&project);
if problems_only {
entries.retain(|e| e.verdict.needs_reviewer_attention());
}
let summary = causal_reasoning::summarize_audit(&entries);
if json {
println!(
"{}",
serde_json::to_string_pretty(&json!({
"ok": true,
"command": "causal.audit",
"frontier": frontier.display().to_string(),
"summary": summary,
"entries": entries,
}))
.expect("serialize causal.audit")
);
return;
}
println!();
println!(
" {}",
format!("VELA · CAUSAL · AUDIT · {}", frontier.display())
.to_uppercase()
.dimmed()
);
println!(" {}", style::tick_row(60));
println!(
" total: {} identified: {} conditional: {} underidentified: {} underdetermined: {}",
summary.total,
summary.identified,
summary.conditional,
summary.underidentified,
summary.underdetermined,
);
if entries.is_empty() {
println!(" (no entries to report)");
return;
}
for e in &entries {
let chip = match e.verdict {
crate::causal_reasoning::Identifiability::Identified => style::ok("identified"),
crate::causal_reasoning::Identifiability::Conditional => {
style::warn("conditional")
}
crate::causal_reasoning::Identifiability::Underidentified => {
style::lost("underidentified")
}
crate::causal_reasoning::Identifiability::Underdetermined => {
style::warn("underdetermined")
}
};
let claim = e
.causal_claim
.map_or("none".to_string(), |c| format!("{c:?}").to_lowercase());
let grade = e
.causal_evidence_grade
.map_or("none".to_string(), |g| format!("{g:?}").to_lowercase());
println!();
println!(" {chip} {} ({}/{})", e.finding_id, claim, grade);
let assertion_short: String = e.assertion_text.chars().take(78).collect();
println!(" {assertion_short}");
println!(" {} {}", style::ok("why:"), e.rationale);
if e.verdict.needs_reviewer_attention()
|| matches!(
e.verdict,
crate::causal_reasoning::Identifiability::Underdetermined
)
{
println!(" {} {}", style::ok("fix:"), e.remediation);
}
}
}
CausalAction::Effect {
frontier,
source,
on: target,
json,
} => {
use crate::causal_graph::{CausalEffectVerdict, identify_effect};
let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
let verdict = identify_effect(&project, &source, &target);
if json {
println!(
"{}",
serde_json::to_string_pretty(&json!({
"ok": true,
"command": "causal.effect",
"frontier": frontier.display().to_string(),
"source": source,
"target": target,
"verdict": verdict,
}))
.expect("serialize causal.effect")
);
return;
}
println!();
println!(
" {}",
format!("VELA · CAUSAL · EFFECT · {} → {}", source, target)
.to_uppercase()
.dimmed()
);
println!(" {}", style::tick_row(60));
match verdict {
CausalEffectVerdict::Identified {
adjustment_set,
back_door_paths_considered,
} => {
if adjustment_set.is_empty() {
println!(
" {} no back-door adjustment needed",
style::ok("identified")
);
} else {
println!(" {} identified by adjusting on:", style::ok("identified"));
for z in &adjustment_set {
println!(" · {z}");
}
}
println!(
" back-door paths considered: {}",
back_door_paths_considered
);
}
CausalEffectVerdict::IdentifiedByFrontDoor { mediator_set } => {
println!(
" {} identified via front-door criterion (Pearl 1995 §3.3)",
style::ok("identified")
);
println!(" mediators that intercept all directed paths:");
for m in &mediator_set {
println!(" · {m}");
}
println!(
" applies when source-target confounders are unobserved but the mediator chain is."
);
}
CausalEffectVerdict::NoCausalPath { reason } => {
println!(" {} no causal path: {reason}", style::warn("no_path"));
}
CausalEffectVerdict::Underidentified {
unblocked_back_door_paths,
candidates_tried,
} => {
println!(
" {} no observational adjustment set found ({} candidates tried)",
style::lost("underidentified"),
candidates_tried
);
println!(" open back-door paths:");
for path in unblocked_back_door_paths.iter().take(5) {
println!(" · {}", path.join(" — "));
}
println!(
" remediation: either intervene experimentally on {source}, or extend the link graph to make a confounder observable."
);
}
CausalEffectVerdict::UnknownNode { which } => {
fail(&which);
}
}
println!();
}
CausalAction::Graph {
frontier,
node,
json,
} => {
use crate::causal_graph::CausalGraph;
let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
let graph = CausalGraph::from_project(&project);
let nodes: Vec<&str> = if let Some(n) = node.as_deref() {
if !graph.contains(n) {
fail(&format!("node not in frontier: {n}"));
}
vec![n]
} else {
project.findings.iter().map(|f| f.id.as_str()).collect()
};
if json {
let payload: Vec<_> = nodes
.iter()
.map(|n| {
let parents: Vec<&str> = graph.parents_of(n).collect();
let children: Vec<&str> = graph.children_of(n).collect();
json!({
"node": n,
"parents": parents,
"children": children,
})
})
.collect();
println!(
"{}",
serde_json::to_string_pretty(&json!({
"ok": true,
"command": "causal.graph",
"node_count": graph.node_count(),
"edge_count": graph.edge_count(),
"nodes": payload,
}))
.expect("serialize causal.graph")
);
return;
}
println!();
println!(
" {}",
format!("VELA · CAUSAL · GRAPH · {}", frontier.display())
.to_uppercase()
.dimmed()
);
println!(" {}", style::tick_row(60));
println!(
" {} nodes · {} edges",
graph.node_count(),
graph.edge_count()
);
println!();
for n in &nodes {
let parents: Vec<&str> = graph.parents_of(n).collect();
let children: Vec<&str> = graph.children_of(n).collect();
if parents.is_empty() && children.is_empty() && nodes.len() > 1 {
continue;
}
println!(" {n}");
if !parents.is_empty() {
println!(" parents: {}", parents.join(", "));
}
if !children.is_empty() {
println!(" children: {}", children.join(", "));
}
}
}
CausalAction::Counterfactual {
frontier,
intervene_on,
set_to,
target,
json,
} => {
use crate::counterfactual::{
CounterfactualQuery, CounterfactualVerdict, answer_counterfactual,
};
let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
let query = CounterfactualQuery {
intervene_on: intervene_on.clone(),
set_to,
target: target.clone(),
};
let verdict = answer_counterfactual(&project, &query);
if json {
println!(
"{}",
serde_json::to_string_pretty(&json!({
"ok": true,
"command": "causal.counterfactual",
"frontier": frontier.display().to_string(),
"query": query,
"verdict": verdict,
}))
.expect("serialize causal.counterfactual")
);
return;
}
println!();
println!(
" {}",
format!(
"VELA · CAUSAL · COUNTERFACTUAL · do({intervene_on} := {set_to:.3}) → {target}"
)
.to_uppercase()
.dimmed()
);
println!(" {}", style::tick_row(72));
match verdict {
CounterfactualVerdict::Resolved {
factual,
counterfactual,
delta,
paths_used,
} => {
println!(
" {} factual: {factual:.3} counterfactual: {counterfactual:.3} delta: {delta:+.3}",
style::ok("resolved")
);
println!(
" twin-network propagation through {} causal path(s):",
paths_used.len()
);
for p in paths_used.iter().take(5) {
println!(" · {}", p.join(" → "));
}
println!(
" reading: \"if {intervene_on}'s confidence had been {set_to:.3} \
instead of factual, {target}'s confidence would shift by {delta:+.3}.\""
);
}
CounterfactualVerdict::MechanismUnspecified { unspecified_edges } => {
println!(
" {} causal path exists but {} edge(s) lack a mechanism annotation",
style::warn("mechanism_unspecified"),
unspecified_edges.len()
);
for (parent, child) in unspecified_edges.iter().take(8) {
println!(" · {parent} → {child}");
}
println!(
" remediation: annotate one of the link mechanisms (linear / monotonic / threshold / saturating)."
);
}
CounterfactualVerdict::NoCausalPath { factual } => {
println!(
" {} no directed path from {intervene_on} to {target}; counterfactual = factual = {factual:.3}",
style::warn("no_path")
);
}
CounterfactualVerdict::UnknownNode { which } => {
fail(&format!("node not in frontier: {which}"));
}
CounterfactualVerdict::InvalidIntervention { reason } => {
fail(&reason);
}
}
println!();
}
}
}
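/// Cross-frontier bridge management. Bridges are materialized as one JSON
/// file per bridge under `<frontier>/.vela/bridges/`, so reviewer judgments
/// survive re-derivation: `derive` preserves the previous status and
/// timestamp for any bridge id whose status is no longer plain `Derived`.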
fn cmd_bridges(action: BridgesAction) {
use crate::bridge::{Bridge, BridgeStatus, derive_bridges};
use std::collections::HashMap;
fn bridges_dir(frontier: &Path) -> PathBuf {
frontier.join(".vela/bridges")
}
fn load_bridge(frontier: &Path, id: &str) -> Result<Bridge, String> {
let path = bridges_dir(frontier).join(format!("{id}.json"));
if !path.is_file() {
return Err(format!("bridge not found: {id}"));
}
let data = std::fs::read_to_string(&path).map_err(|e| format!("read {id}: {e}"))?;
serde_json::from_str(&data).map_err(|e| format!("parse {id}: {e}"))
}
fn save_bridge(frontier: &Path, b: &Bridge) -> Result<(), String> {
let dir = bridges_dir(frontier);
std::fs::create_dir_all(&dir).map_err(|e| format!("mkdir bridges/: {e}"))?;
let path = dir.join(format!("{}.json", b.id));
let data = serde_json::to_string_pretty(b).map_err(|e| format!("serialize bridge: {e}"))?;
std::fs::write(&path, format!("{data}\n")).map_err(|e| format!("write bridge: {e}"))
}
fn default_reviewer_id() -> String {
std::env::var("VELA_REVIEWER_ID").unwrap_or_else(|_| "reviewer:will-blair".to_string())
}
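/// Appends a `bridge.reviewed` event under `.vela/events/`, validating the
/// bridge id against the current bridge set first; the optional note is
/// only attached when non-empty.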
fn emit_bridge_reviewed_event(
frontier: &Path,
bridge_id: &str,
status: &str,
reviewer_id: &str,
note: Option<&str>,
) -> Result<(), String> {
let mut payload = serde_json::json!({
"bridge_id": bridge_id,
"status": status,
});
if let Some(n) = note
&& !n.trim().is_empty()
{
payload["note"] = serde_json::Value::String(n.to_string());
}
let known_ids: Vec<String> = list_bridges(frontier)
.unwrap_or_default()
.into_iter()
.map(|b| b.id)
.collect();
crate::events::validate_bridge_reviewed_against_state(&payload, &known_ids)?;
let event = crate::events::new_bridge_reviewed_event(
bridge_id,
reviewer_id,
"human",
&format!("Bridge {status} by {reviewer_id}"),
payload,
Vec::new(),
);
let events_dir = frontier.join(".vela/events");
std::fs::create_dir_all(&events_dir).map_err(|e| format!("mkdir .vela/events: {e}"))?;
let event_path = events_dir.join(format!("{}.json", event.id));
let data =
serde_json::to_string_pretty(&event).map_err(|e| format!("serialize event: {e}"))?;
std::fs::write(&event_path, format!("{data}\n")).map_err(|e| format!("write event: {e}"))
}
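/// Loads every `*.json` bridge under `.vela/bridges/`, sorted by
/// descending finding-ref count, then by entity name.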
fn list_bridges(frontier: &Path) -> Result<Vec<Bridge>, String> {
let dir = bridges_dir(frontier);
if !dir.is_dir() {
return Ok(Vec::new());
}
let mut out = Vec::new();
for entry in std::fs::read_dir(&dir).map_err(|e| format!("read bridges/: {e}"))? {
let entry = entry.map_err(|e| format!("read entry: {e}"))?;
let path = entry.path();
if path.extension().and_then(|s| s.to_str()) != Some("json") {
continue;
}
let data = std::fs::read_to_string(&path).map_err(|e| format!("read {path:?}: {e}"))?;
let b: Bridge =
serde_json::from_str(&data).map_err(|e| format!("parse {path:?}: {e}"))?;
out.push(b);
}
out.sort_by(|a, b| {
b.finding_refs
.len()
.cmp(&a.finding_refs.len())
.then(a.entity_name.cmp(&b.entity_name))
});
Ok(out)
}
match action {
BridgesAction::Derive {
frontier_a,
label_a,
frontier_b,
label_b,
json,
} => {
let a = repo::load_from_path(&frontier_a).unwrap_or_else(|e| fail_return(&e));
let b = repo::load_from_path(&frontier_b).unwrap_or_else(|e| fail_return(&e));
let now = chrono::Utc::now().to_rfc3339();
let new_bridges =
derive_bridges(&[(label_a.as_str(), &a), (label_b.as_str(), &b)], &now);
let existing = list_bridges(&frontier_a).unwrap_or_default();
let existing_by_id: HashMap<String, Bridge> =
existing.iter().map(|b| (b.id.clone(), b.clone())).collect();
let mut written = 0;
let mut preserved = 0;
let mut new_ids = Vec::new();
for mut bridge in new_bridges {
if let Some(prev) = existing_by_id.get(&bridge.id)
&& prev.status != BridgeStatus::Derived
{
bridge.status = prev.status;
bridge.derived_at = prev.derived_at.clone();
preserved += 1;
}
save_bridge(&frontier_a, &bridge).unwrap_or_else(|e| fail_return(&e));
new_ids.push(bridge.id.clone());
written += 1;
}
if json {
println!(
"{}",
serde_json::to_string_pretty(&json!({
"ok": true,
"command": "bridges.derive",
"frontier_a": frontier_a.display().to_string(),
"frontier_b": frontier_b.display().to_string(),
"bridges_written": written,
"reviewer_judgments_preserved": preserved,
"ids": new_ids,
}))
.expect("serialize bridges.derive")
);
return;
}
println!();
println!(
" {}",
format!("VELA · BRIDGES · DERIVE · {} ↔ {}", label_a, label_b)
.to_uppercase()
.dimmed()
);
println!(" {}", style::tick_row(60));
println!(" {} {} bridge(s) materialized", style::ok("ok"), written);
if preserved > 0 {
println!(
" {} {} reviewer judgment(s) preserved",
style::ok("kept"),
preserved
);
}
for id in new_ids.iter().take(10) {
println!(" · {id}");
}
if new_ids.len() > 10 {
println!(" … and {} more", new_ids.len() - 10);
}
println!();
}
BridgesAction::List {
frontier,
status,
json,
} => {
let mut bridges = list_bridges(&frontier).unwrap_or_else(|e| fail_return(&e));
if let Some(s) = status.as_deref() {
let want = match s.to_lowercase().as_str() {
"derived" => BridgeStatus::Derived,
"confirmed" => BridgeStatus::Confirmed,
"refuted" => BridgeStatus::Refuted,
other => fail_return(&format!(
"unknown bridge status '{other}' (try derived|confirmed|refuted)"
)),
};
bridges.retain(|b| b.status == want);
}
if json {
println!(
"{}",
serde_json::to_string_pretty(&json!({
"ok": true,
"command": "bridges.list",
"frontier": frontier.display().to_string(),
"count": bridges.len(),
"bridges": bridges,
}))
.expect("serialize bridges.list")
);
return;
}
println!();
println!(
" {}",
format!("VELA · BRIDGES · LIST · {}", frontier.display())
.to_uppercase()
.dimmed()
);
println!(" {}", style::tick_row(60));
println!(" {} bridge(s)", bridges.len());
for b in &bridges {
let chip = match b.status {
BridgeStatus::Derived => style::warn("derived"),
BridgeStatus::Confirmed => style::ok("confirmed"),
BridgeStatus::Refuted => style::lost("refuted"),
};
println!();
println!(
" {chip} {} {} ↔ findings:{}",
b.id,
b.entity_name,
b.finding_refs.len()
);
println!(" frontiers: {}", b.frontiers.join(", "));
if let Some(t) = &b.tension {
println!(" tension: {t}");
}
}
println!();
}
BridgesAction::Show {
frontier,
bridge_id,
json,
} => {
let b = load_bridge(&frontier, &bridge_id).unwrap_or_else(|e| fail_return(&e));
if json {
println!("{}", serde_json::to_string_pretty(&b).expect("serialize"));
return;
}
println!();
println!(
" {}",
format!("VELA · BRIDGES · SHOW · {}", b.id)
.to_uppercase()
.dimmed()
);
println!(" {}", style::tick_row(60));
println!(" entity: {}", b.entity_name);
println!(" status: {:?}", b.status);
println!(" frontiers: {}", b.frontiers.join(", "));
if !b.frontier_ids.is_empty() {
println!(" frontier_ids: {}", b.frontier_ids.join(", "));
}
if let Some(t) = &b.tension {
println!(" tension: {t}");
}
println!(" derived_at: {}", b.derived_at);
println!(" finding refs ({}):", b.finding_refs.len());
for r in &b.finding_refs {
let dir = r.direction.as_deref().unwrap_or("—");
let truncated: String = r.assertion_text.chars().take(72).collect();
println!(
" · [{}] {} (conf={:.2}, dir={})",
r.frontier, r.finding_id, r.confidence, dir
);
println!(" {truncated}");
}
println!();
}
BridgesAction::Confirm {
frontier,
bridge_id,
reviewer,
note,
json,
} => {
let mut b = load_bridge(&frontier, &bridge_id).unwrap_or_else(|e| fail_return(&e));
let reviewer_id = reviewer.unwrap_or_else(default_reviewer_id);
b.status = BridgeStatus::Confirmed;
save_bridge(&frontier, &b).unwrap_or_else(|e| fail_return(&e));
let _ = emit_bridge_reviewed_event(
&frontier,
&bridge_id,
"confirmed",
&reviewer_id,
note.as_deref(),
);
if json {
println!("{}", serde_json::to_string_pretty(&b).expect("serialize"));
return;
}
println!();
println!(" {} {} now confirmed", style::ok("confirmed"), b.id);
println!();
}
BridgesAction::Refute {
frontier,
bridge_id,
reviewer,
note,
json,
} => {
let mut b = load_bridge(&frontier, &bridge_id).unwrap_or_else(|e| fail_return(&e));
let reviewer_id = reviewer.unwrap_or_else(default_reviewer_id);
b.status = BridgeStatus::Refuted;
save_bridge(&frontier, &b).unwrap_or_else(|e| fail_return(&e));
let _ = emit_bridge_reviewed_event(
&frontier,
&bridge_id,
"refuted",
&reviewer_id,
note.as_deref(),
);
if json {
println!("{}", serde_json::to_string_pretty(&b).expect("serialize"));
return;
}
println!();
println!(" {} {} now refuted", style::lost("refuted"), b.id);
println!();
}
}
}
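/// Peer federation: a registry of peer hubs plus `sync`, which either
/// fetches a manifest directly or discovers the peer's entry via its hub,
/// verifying the entry signature and locator along the way. Broken
/// locators and unverified entries are recorded as conflicts rather than
/// silently dropped.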
fn cmd_federation(action: FederationAction) {
use crate::federation::PeerHub;
match action {
FederationAction::PeerAdd {
frontier,
id,
url,
pubkey,
note,
json,
} => {
let peer = PeerHub {
id: id.clone(),
url: url.clone(),
public_key: pubkey.trim().to_string(),
added_at: chrono::Utc::now().to_rfc3339(),
note: note.clone(),
};
peer.validate().unwrap_or_else(|e| fail_return(&e));
let mut project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
if project.peers.iter().any(|p| p.id == id) {
fail(&format!("peer '{id}' already in registry"));
}
project.peers.push(peer.clone());
repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));
if json {
println!(
"{}",
serde_json::to_string_pretty(&json!({
"ok": true,
"command": "federation.peer-add",
"frontier": frontier.display().to_string(),
"peer": peer,
"registered_count": project.peers.len(),
}))
.expect("serialize federation.peer-add")
);
} else {
println!(
"{} peer {} (pubkey {}…) at {}",
style::ok("registered"),
id,
&peer.public_key[..16],
peer.url
);
}
}
FederationAction::PeerList { frontier, json } => {
let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
if json {
println!(
"{}",
serde_json::to_string_pretty(&json!({
"ok": true,
"command": "federation.peer-list",
"frontier": frontier.display().to_string(),
"peers": project.peers,
}))
.expect("serialize federation.peer-list")
);
} else {
println!();
println!(
" {}",
format!("VELA · FEDERATION · PEERS · {}", frontier.display())
.to_uppercase()
.dimmed()
);
println!(" {}", style::tick_row(60));
if project.peers.is_empty() {
println!(" (no peers registered)");
} else {
for p in &project.peers {
let note_suffix = if p.note.is_empty() {
String::new()
} else {
format!(" · {}", p.note)
};
println!(
" {:<24} {} {}…{note_suffix}",
p.id,
p.url,
&p.public_key[..16]
);
}
}
}
}
FederationAction::Sync {
frontier,
peer_id,
url,
via_hub,
vfr_id,
allow_cross_vfr,
dry_run,
json,
} => {
use crate::federation::{self, DiscoveryResult};
let mut project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
let Some(peer) = project.peers.iter().find(|p| p.id == peer_id).cloned() else {
fail(&format!(
"peer '{peer_id}' not in registry; run `vela federation peer add` first"
));
};
let local_frontier_id = project.frontier_id();
if via_hub
&& let Some(target) = vfr_id.as_deref()
&& target != local_frontier_id
&& !allow_cross_vfr
{
fail(&format!(
"cross-vfr sync refused: --vfr-id {target} differs from local frontier_id {local_frontier_id}. \
Pass --allow-cross-vfr to opt in (every peer-side finding will be recorded as a \
missing_locally conflict). Or omit --vfr-id to default to the local frontier id."
));
}
#[derive(Debug)]
enum SyncOutcome {
Resolved(crate::project::Project, String),
BrokenLocator(String, String, u16),
UnverifiedEntry(String, String),
EntryNotFound(String, u16),
}
let outcome = if via_hub {
let target_vfr = vfr_id.clone().unwrap_or_else(|| local_frontier_id.clone());
match federation::discover_peer_frontier(
&peer.url,
&target_vfr,
Some(&peer.public_key),
) {
DiscoveryResult::Resolved(p) => {
let src =
format!("{}/entries/{}", peer.url.trim_end_matches('/'), target_vfr);
SyncOutcome::Resolved(p, src)
}
DiscoveryResult::BrokenLocator {
vfr_id,
locator,
status,
} => SyncOutcome::BrokenLocator(vfr_id, locator, status),
DiscoveryResult::UnverifiedEntry { vfr_id, reason } => {
SyncOutcome::UnverifiedEntry(vfr_id, reason)
}
DiscoveryResult::EntryNotFound { vfr_id, status } => {
SyncOutcome::EntryNotFound(vfr_id, status)
}
DiscoveryResult::Unreachable { url, error } => {
fail(&format!("peer hub unreachable ({url}): {error}"));
}
}
} else {
let resolved_url = url.unwrap_or_else(|| {
let base = peer.url.trim_end_matches('/');
format!("{base}/manifest/{local_frontier_id}.json")
});
match federation::fetch_peer_frontier(&resolved_url) {
Ok(p) => SyncOutcome::Resolved(p, resolved_url),
Err(e) => fail(&format!("direct fetch failed: {e}")),
}
};
let peer_source: String;
let peer_state = match outcome {
SyncOutcome::Resolved(p, src) => {
if !json {
println!(" · resolved via {src}");
}
peer_source = src;
p
}
SyncOutcome::BrokenLocator(vfr, locator, status) => {
if dry_run {
if json {
println!(
"{}",
serde_json::to_string_pretty(&json!({
"ok": true,
"command": "federation.sync",
"dry_run": true,
"outcome": "broken_locator",
"vfr_id": vfr,
"locator": locator,
"http_status": status,
}))
.expect("serialize")
);
} else {
println!(
"{} dry-run: peer entry resolved but locator dead",
style::warn("broken_locator")
);
println!(" vfr_id: {vfr}");
println!(" locator: {locator} (HTTP {status})");
}
return;
}
let report = federation::record_locator_failure(
&mut project,
&peer_id,
&vfr,
&locator,
status,
);
repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));
if json {
println!(
"{}",
serde_json::to_string_pretty(&json!({
"ok": true,
"command": "federation.sync",
"outcome": "broken_locator",
"report": report,
}))
.expect("serialize")
);
} else {
println!(
"{} sync recorded broken-locator conflict against {peer_id}",
style::warn("broken_locator")
);
println!(" vfr_id: {vfr}");
println!(" locator: {locator} (HTTP {status})");
println!(" events appended: {}", report.events_appended);
}
return;
}
SyncOutcome::UnverifiedEntry(vfr, reason) => {
if dry_run {
if json {
println!(
"{}",
serde_json::to_string_pretty(&json!({
"ok": true,
"command": "federation.sync",
"dry_run": true,
"outcome": "unverified_peer_entry",
"vfr_id": vfr,
"reason": reason,
}))
.expect("serialize")
);
} else {
println!(
"{} dry-run: peer entry signature did not verify",
style::lost("unverified_peer_entry")
);
println!(" vfr_id: {vfr}");
println!(" reason: {reason}");
}
return;
}
let report =
federation::record_unverified_entry(&mut project, &peer_id, &vfr, &reason);
repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));
if json {
println!(
"{}",
serde_json::to_string_pretty(&json!({
"ok": true,
"command": "federation.sync",
"outcome": "unverified_peer_entry",
"report": report,
}))
.expect("serialize")
);
} else {
println!(
"{} sync halted; peer's registry entry signature did not verify",
style::lost("unverified_peer_entry")
);
println!(" vfr_id: {vfr}");
println!(" reason: {reason}");
}
return;
}
SyncOutcome::EntryNotFound(vfr, status) => {
if json {
println!(
"{}",
serde_json::to_string_pretty(&json!({
"ok": false,
"command": "federation.sync",
"outcome": "entry_not_found",
"vfr_id": vfr,
"http_status": status,
}))
.expect("serialize")
);
} else {
println!(
"{} peer's hub does not publish vfr_id {vfr} (HTTP {status})",
style::warn("entry_not_found")
);
}
return;
}
};
if dry_run {
let conflicts = federation::diff_frontiers(&project, &peer_state);
if json {
println!(
"{}",
serde_json::to_string_pretty(&json!({
"ok": true,
"command": "federation.sync",
"dry_run": true,
"peer_id": peer_id,
"peer_source": peer_source,
"conflicts": conflicts,
}))
.expect("serialize federation.sync (dry-run)")
);
} else {
println!(
"{} dry-run vs {peer_id} ({}): {} conflict(s)",
style::ok("ok"),
peer_source,
conflicts.len()
);
for c in &conflicts {
println!(" · {} {} {}", c.kind.as_str(), c.finding_id, c.detail);
}
}
return;
}
let report = federation::sync_with_peer(&mut project, &peer_id, &peer_state);
repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));
if json {
println!(
"{}",
serde_json::to_string_pretty(&json!({
"ok": true,
"command": "federation.sync",
"peer_id": peer_id,
"peer_source": peer_source,
"report": report,
}))
.expect("serialize federation.sync")
);
} else {
println!(
"{} synced with {} ({})",
style::ok("ok"),
peer_id,
peer_source
);
println!(
" our: {}",
&report.our_snapshot_hash[..16.min(report.our_snapshot_hash.len())]
);
println!(
" peer: {}",
&report.peer_snapshot_hash[..16.min(report.peer_snapshot_hash.len())]
);
println!(
" conflicts: {} events appended: {}",
report.conflicts.len(),
report.events_appended
);
for c in &report.conflicts {
println!(" · {} {} {}", c.kind.as_str(), c.finding_id, c.detail);
}
}
}
FederationAction::PushResolution {
frontier,
conflict_event_id,
to,
key,
vfr_id,
json,
} => {
cmd_federation_push_resolution(frontier, conflict_event_id, to, key, vfr_id, json);
}
FederationAction::PeerRemove { frontier, id, json } => {
let mut project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
let before = project.peers.len();
project.peers.retain(|p| p.id != id);
if project.peers.len() == before {
fail(&format!("peer '{id}' not found in registry"));
}
repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));
if json {
println!(
"{}",
serde_json::to_string_pretty(&json!({
"ok": true,
"command": "federation.peer-remove",
"frontier": frontier.display().to_string(),
"removed": id,
"remaining": project.peers.len(),
}))
.expect("serialize federation.peer-remove")
);
} else {
println!(
"{} peer {} ({} remaining)",
style::ok("removed"),
id,
project.peers.len()
);
}
}
}
}
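/// Pushes a locally recorded `frontier.conflict_resolved` event to a peer
/// hub. The event is re-signed with the resolving actor's private key
/// (looked up under ~/.config/vela/keys by default) and POSTed with
/// `X-Vela-Signer-Pubkey` / `X-Vela-Signature` headers; the loaded pubkey
/// must match the actor's registered key or the push is refused locally.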
fn cmd_federation_push_resolution(
frontier: PathBuf,
conflict_event_id: String,
to: String,
key: Option<PathBuf>,
vfr_id: Option<String>,
json: bool,
) {
use crate::canonical;
use crate::sign;
let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
let Some(peer) = project.peers.iter().find(|p| p.id == to).cloned() else {
fail(&format!(
"peer '{to}' not in registry; run `vela federation peer-add` first"
));
};
let Some(resolution) = project
.events
.iter()
.find(|e| {
e.kind == "frontier.conflict_resolved"
&& e.payload.get("conflict_event_id").and_then(|v| v.as_str())
== Some(conflict_event_id.as_str())
})
.cloned()
else {
fail(&format!(
"no frontier.conflict_resolved event paired with conflict {conflict_event_id} in {}",
frontier.display()
));
};
let actor_id = resolution.actor.id.clone();
let Some(actor) = project.actors.iter().find(|a| a.id == actor_id) else {
fail(&format!(
"resolution event's actor.id ({actor_id}) is not in the frontier's actor registry; \
register the reviewer with `vela actor add` before pushing"
));
};
let key_path = key.unwrap_or_else(|| {
let home = std::env::var("HOME").unwrap_or_default();
let base = PathBuf::from(home)
.join(".config")
.join("vela")
.join("keys");
let safe_id = actor.id.replace([':', '/'], "_");
let by_actor = base.join(format!("{safe_id}.key"));
if by_actor.exists() {
by_actor
} else {
base.join("private.key")
}
});
let signing_key = sign::load_signing_key_from_path(&key_path).unwrap_or_else(|e| {
fail_return(&format!(
"load private key from {}: {e}",
key_path.display()
))
});
let pubkey_hex = sign::pubkey_hex(&signing_key);
if !pubkey_hex.eq_ignore_ascii_case(&actor.public_key) {
fail(&format!(
"private key at {} does not match actor {}'s registered public key. \
Loaded pubkey {}, expected {}.",
key_path.display(),
actor.id,
&pubkey_hex[..16],
&actor.public_key[..16]
));
}
let signature_hex = sign::sign_event(&resolution, &signing_key)
.unwrap_or_else(|e| fail_return(&format!("sign event: {e}")));
let mut body = resolution.clone();
body.signature = None;
let body_value =
serde_json::to_value(&body).unwrap_or_else(|e| fail_return(&format!("serialize: {e}")));
let _canonical_check = canonical::to_canonical_bytes(&body_value)
.unwrap_or_else(|e| fail_return(&format!("canonicalize: {e}")));
let target_vfr = vfr_id.unwrap_or_else(|| project.frontier_id());
let url = format!(
"{}/entries/{}/events",
peer.url.trim_end_matches('/'),
target_vfr
);
let url_owned = url.clone();
let pubkey_owned = pubkey_hex.clone();
let signature_owned = signature_hex.clone();
let body_owned = body_value.clone();
let response: Result<(u16, String), String> = std::thread::spawn(move || {
let client = reqwest::blocking::Client::new();
let resp = client
.post(&url_owned)
.header("X-Vela-Signer-Pubkey", &pubkey_owned)
.header("X-Vela-Signature", &signature_owned)
.json(&body_owned)
.send()
.map_err(|e| format!("HTTP POST {url_owned}: {e}"))?;
let status = resp.status().as_u16();
let text = resp.text().unwrap_or_default();
Ok((status, text))
})
.join()
.map_err(|_| "push thread panicked".to_string())
.unwrap_or_else(|e| fail_return(&e));
let (status, text) = response.unwrap_or_else(|e| fail_return(&e));
let parsed: serde_json::Value =
serde_json::from_str(&text).unwrap_or_else(|_| json!({ "raw": text }));
let accepted = matches!(status, 200..=202);
if json {
println!(
"{}",
serde_json::to_string_pretty(&json!({
"ok": accepted,
"command": "federation.push-resolution",
"frontier": frontier.display().to_string(),
"peer_id": to,
"url": url,
"conflict_event_id": conflict_event_id,
"event_id": resolution.id,
"actor_id": actor.id,
"http_status": status,
"response": parsed,
}))
.expect("serialize federation.push-resolution")
);
} else if accepted {
println!(
"{} resolution {} pushed to {} (HTTP {})",
style::ok("ok"),
&resolution.id[..16.min(resolution.id.len())],
to,
status
);
println!(" url: {url}");
println!(" signer: {} (actor {})", &pubkey_hex[..16], actor.id);
} else {
println!("{} push refused (HTTP {})", style::lost("rejected"), status);
println!(" url: {url}");
println!(" response: {text}");
std::process::exit(1);
}
}
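/// Offline review queue: `list` and `clear` are bookkeeping; `sign` walks
/// the queue, asks for per-action confirmation unless --yes-to-all, signs
/// and applies each action, and writes back only the actions that failed
/// or were declined.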
fn cmd_queue(action: QueueAction) {
use crate::queue;
match action {
QueueAction::List { queue_file, json } => {
let path = queue_file.unwrap_or_else(queue::default_queue_path);
let q = queue::load(&path).unwrap_or_else(|e| fail_return(&e));
if json {
let payload = json!({
"ok": true,
"command": "queue.list",
"queue_file": path.display().to_string(),
"schema": q.schema,
"actions": q.actions,
});
println!(
"{}",
serde_json::to_string_pretty(&payload).expect("failed to serialize queue.list")
);
} else {
println!();
println!(
" {}",
format!("VELA · QUEUE · LIST · {}", path.display())
.to_uppercase()
.dimmed()
);
println!(" {}", style::tick_row(60));
if q.actions.is_empty() {
println!(" (queue is empty)");
} else {
for (idx, action) in q.actions.iter().enumerate() {
println!(
" [{idx}] {} → {} queued {}",
action.kind,
action.frontier.display(),
action.queued_at
);
}
}
}
}
QueueAction::Clear { queue_file, json } => {
let path = queue_file.unwrap_or_else(queue::default_queue_path);
let dropped = queue::clear(&path).unwrap_or_else(|e| fail_return(&e));
if json {
let payload = json!({
"ok": true,
"command": "queue.clear",
"queue_file": path.display().to_string(),
"dropped": dropped,
});
println!(
"{}",
serde_json::to_string_pretty(&payload)
.expect("failed to serialize queue.clear")
);
} else {
println!("{} dropped {dropped} queued action(s)", style::ok("ok"));
}
}
QueueAction::Sign {
actor,
key,
queue_file,
yes_to_all,
json,
} => {
let path = queue_file.unwrap_or_else(queue::default_queue_path);
let q = queue::load(&path).unwrap_or_else(|e| fail_return(&e));
if q.actions.is_empty() {
if json {
println!("{}", json!({"ok": true, "signed": 0, "remaining": 0}));
} else {
println!("{} queue is empty", style::ok("ok"));
}
return;
}
let key_hex = std::fs::read_to_string(&key)
.map(|s| s.trim().to_string())
.unwrap_or_else(|e| fail_return(&format!("read key {}: {e}", key.display())));
let signing_key = parse_signing_key(&key_hex);
let mut signed_count = 0usize;
let mut remaining = Vec::new();
for action in q.actions.iter() {
if !yes_to_all && !confirm_action(action) {
remaining.push(action.clone());
continue;
}
match sign_and_apply(&signing_key, &actor, action) {
Ok(report) => {
signed_count += 1;
if !json {
println!(
"{} {} on {} → {}",
style::ok("signed"),
action.kind,
action.frontier.display(),
report
);
}
}
Err(error) => {
remaining.push(action.clone());
if !json {
eprintln!(
"{} {} on {}: {error}",
style::warn("failed"),
action.kind,
action.frontier.display()
);
}
}
}
}
queue::replace_actions(&path, remaining.clone()).unwrap_or_else(|e| fail_return(&e));
if json {
let payload = json!({
"ok": true,
"command": "queue.sign",
"signed": signed_count,
"remaining": remaining.len(),
});
println!(
"{}",
serde_json::to_string_pretty(&payload).expect("failed to serialize queue.sign")
);
} else {
println!(
"{} signed {signed_count} action(s); {} remaining in queue",
style::ok("ok"),
remaining.len()
);
}
}
}
}
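/// Decodes a hex-encoded 32-byte Ed25519 seed into a signing key, failing
/// with a user-facing error on bad hex or wrong length.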
fn parse_signing_key(hex_str: &str) -> ed25519_dalek::SigningKey {
let bytes = hex::decode(hex_str)
.unwrap_or_else(|e| fail_return(&format!("invalid private-key hex: {e}")));
let key_bytes: [u8; 32] = bytes
.try_into()
.unwrap_or_else(|_| fail_return("private key must be 32 bytes"));
ed25519_dalek::SigningKey::from_bytes(&key_bytes)
}
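/// Interactive y/N prompt for a queued action; anything but "y"/"yes"
/// (including EOF or a read error) declines.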
fn confirm_action(action: &crate::queue::QueuedAction) -> bool {
use std::io::{self, BufRead, Write};
let mut stdout = io::stdout().lock();
// Prompt without a trailing newline so the reply is typed on the same
// line; the explicit flush below makes the prompt visible before stdin blocks.
let _ = write!(
stdout,
" sign {} on {}? [y/N] ",
action.kind,
action.frontier.display()
);
let _ = stdout.flush();
drop(stdout);
let stdin = io::stdin();
let mut line = String::new();
if stdin.lock().read_line(&mut line).is_err() {
return false;
}
matches!(line.trim().to_lowercase().as_str(), "y" | "yes")
}
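/// Translates a queued action into a signed proposal or review decision.
/// The queued kind strings map onto proposal kinds as follows:
///   propose_review            -> finding.review
///   propose_note              -> finding.note
///   propose_revise_confidence -> finding.confidence_revise
///   propose_retract           -> finding.retract
/// accept_proposal / reject_proposal sign a canonical decision preimage
/// and then call the corresponding proposals::*_at_path entry point.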
fn sign_and_apply(
signing_key: &ed25519_dalek::SigningKey,
actor: &str,
action: &crate::queue::QueuedAction,
) -> Result<String, String> {
use crate::events::StateTarget;
use crate::proposals;
let args = &action.args;
match action.kind.as_str() {
"propose_review" | "propose_note" | "propose_revise_confidence" | "propose_retract" => {
let kind = match action.kind.as_str() {
"propose_review" => "finding.review",
"propose_note" => "finding.note",
"propose_revise_confidence" => "finding.confidence_revise",
"propose_retract" => "finding.retract",
_ => unreachable!(),
};
let target_id = args
.get("target_finding_id")
.and_then(Value::as_str)
.ok_or("target_finding_id missing")?;
let reason = args
.get("reason")
.and_then(Value::as_str)
.ok_or("reason missing")?;
let payload = match action.kind.as_str() {
"propose_review" => {
let status = args
.get("status")
.and_then(Value::as_str)
.ok_or("status missing")?;
json!({"status": status})
}
"propose_note" => {
let text = args
.get("text")
.and_then(Value::as_str)
.ok_or("text missing")?;
json!({"text": text})
}
"propose_revise_confidence" => {
let new_score = args
.get("new_score")
.and_then(Value::as_f64)
.ok_or("new_score missing")?;
json!({"new_score": new_score})
}
"propose_retract" => json!({}),
_ => unreachable!(),
};
let created_at = args
.get("created_at")
.and_then(Value::as_str)
.map(String::from)
.unwrap_or_else(|| chrono::Utc::now().to_rfc3339());
let mut proposal = proposals::new_proposal(
kind,
StateTarget {
r#type: "finding".to_string(),
id: target_id.to_string(),
},
actor,
"human",
reason,
payload,
Vec::new(),
Vec::new(),
);
proposal.created_at = created_at;
proposal.id = proposals::proposal_id(&proposal);
let _signature = crate::sign::sign_proposal(&proposal, signing_key)?;
let result = proposals::create_or_apply(&action.frontier, proposal, false)
.map_err(|e| format!("create_or_apply: {e}"))?;
Ok(format!("proposal {}", result.proposal_id))
}
"accept_proposal" | "reject_proposal" => {
let proposal_id = args
.get("proposal_id")
.and_then(Value::as_str)
.ok_or("proposal_id missing")?;
let reason = args
.get("reason")
.and_then(Value::as_str)
.ok_or("reason missing")?;
let timestamp = args
.get("timestamp")
.and_then(Value::as_str)
.map(String::from)
.unwrap_or_else(|| chrono::Utc::now().to_rfc3339());
let preimage = json!({
"action": if action.kind == "accept_proposal" { "accept" } else { "reject" },
"proposal_id": proposal_id,
"reviewer_id": actor,
"reason": reason,
"timestamp": timestamp,
});
let bytes = crate::canonical::to_canonical_bytes(&preimage)?;
use ed25519_dalek::Signer;
let _signature = hex::encode(signing_key.sign(&bytes).to_bytes());
if action.kind == "accept_proposal" {
let event_id =
crate::proposals::accept_at_path(&action.frontier, proposal_id, actor, reason)
.map_err(|e| format!("accept_at_path: {e}"))?;
Ok(format!("event {event_id}"))
} else {
crate::proposals::reject_at_path(&action.frontier, proposal_id, actor, reason)
.map_err(|e| format!("reject_at_path: {e}"))?;
Ok(format!("rejected {proposal_id}"))
}
}
other => Err(format!("unsupported queued action kind '{other}'")),
}
}
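/// Entity resolution: `resolve` resolves findings' entity mentions against
/// the bundled catalog (pass --force to redo already-resolved entries) and
/// reports what stayed unresolved; `list` dumps the bundled catalog.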
fn cmd_entity(action: EntityAction) {
use crate::entity_resolve;
match action {
EntityAction::Resolve {
frontier,
force,
json,
} => {
let mut p = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
let report = entity_resolve::resolve_frontier(&mut p, force);
repo::save_to_path(&frontier, &p).unwrap_or_else(|e| fail_return(&e));
if json {
println!(
"{}",
serde_json::to_string_pretty(&serde_json::json!({
"ok": true,
"command": "entity.resolve",
"frontier_path": frontier.display().to_string(),
"report": report,
}))
.expect("serialize")
);
} else {
println!(
"{} resolved {} of {} entities ({} already, {} unresolved) across {} findings",
style::ok("entity"),
report.resolved,
report.total_entities,
report.already_resolved,
report.unresolved_count,
report.findings_touched,
);
let unresolved_summary: std::collections::BTreeSet<&str> = report
.per_finding
.iter()
.flat_map(|f| f.unresolved.iter().map(String::as_str))
.collect();
if !unresolved_summary.is_empty() {
let take: Vec<&str> = unresolved_summary.iter().copied().take(8).collect();
println!(
" unresolved (first {}): {}",
take.len(),
take.join(", ")
);
}
}
}
EntityAction::List { json } => {
let entries: Vec<serde_json::Value> = entity_resolve::iter_bundled()
.map(|(name, etype, source, id)| {
serde_json::json!({
"canonical_name": name,
"entity_type": etype,
"source": source,
"id": id,
})
})
.collect();
if json {
println!(
"{}",
serde_json::to_string_pretty(&serde_json::json!({
"ok": true,
"command": "entity.list",
"count": entries.len(),
"entries": entries,
}))
.expect("serialize")
);
} else {
println!("{} {} bundled entries", style::ok("entity"), entries.len());
for e in &entries {
println!(
" {:32} {:18} {} {}",
e["canonical_name"].as_str().unwrap_or("?"),
e["entity_type"].as_str().unwrap_or("?"),
e["source"].as_str().unwrap_or("?"),
e["id"].as_str().unwrap_or("?"),
);
}
}
}
}
}
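/// `vela link add`: attaches a typed link from one finding to another.
/// Targets parse as either local `vf_<hex>` or cross-frontier
/// `vf_<hex>@vfr_<hex>`; cross-frontier targets require a declared dep,
/// and unless --no-check-target is passed the dep's HTTP locator is
/// fetched to warn about superseded or missing targets (best-effort:
/// network failures skip the check rather than blocking the link).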
fn cmd_link(action: LinkAction) {
use crate::bundle::{Link, LinkRef};
match action {
LinkAction::Add {
frontier,
from,
to,
r#type,
note,
inferred_by,
no_check_target,
json,
} => {
validate_enum_arg("--type", &r#type, bundle::VALID_LINK_TYPES);
if !["compiler", "reviewer", "author"].contains(&inferred_by.as_str()) {
fail(&format!(
"invalid --inferred-by '{inferred_by}'. Valid: compiler, reviewer, author"
));
}
let parsed = LinkRef::parse(&to).unwrap_or_else(|e| {
fail(&format!(
"invalid --to '{to}': {e}. Expected vf_<hex> or vf_<hex>@vfr_<hex>"
))
});
let mut p = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
let source_idx = p
.findings
.iter()
.position(|f| f.id == from)
.unwrap_or_else(|| {
fail_return(&format!("--from finding '{from}' not in frontier"))
});
if let LinkRef::Local { vf_id } = &parsed
&& !p.findings.iter().any(|f| &f.id == vf_id)
{
fail(&format!(
"local --to target '{vf_id}' not in frontier; add the target finding first"
));
}
if let LinkRef::Cross { vfr_id, .. } = &parsed
&& p.dep_for_vfr(vfr_id).is_none()
{
fail(&format!(
"cross-frontier --to references vfr_id '{vfr_id}' but no matching dep is declared. Run `vela frontier add-dep {vfr_id} --locator <url> --snapshot <hash>` first."
));
}
let mut target_warning: Option<String> = None;
if let LinkRef::Cross {
vfr_id: target_vfr,
vf_id: target_vf,
} = &parsed
&& !no_check_target
&& let Some(dep) = p.dep_for_vfr(target_vfr)
&& let Some(locator) = dep.locator.as_deref()
&& (locator.starts_with("http://") || locator.starts_with("https://"))
{
let client = reqwest::blocking::Client::builder()
.timeout(std::time::Duration::from_secs(15))
.build()
.ok();
if let Some(client) = client
&& let Ok(resp) = client.get(locator).send()
&& resp.status().is_success()
&& let Ok(dep_project) = resp.json::<crate::project::Project>()
{
if let Some(target_finding) =
dep_project.findings.iter().find(|f| &f.id == target_vf)
{
if target_finding.flags.superseded {
target_warning = Some(format!(
"warn · cross-frontier target '{target_vf}' in '{target_vfr}' has flags.superseded = true. \
You may be linking to outdated wording. Pull --transitive and inspect the supersedes chain to find the current finding. \
Use --no-check-target to skip this check."
));
}
} else {
target_warning = Some(format!(
"warn · cross-frontier target '{target_vf}' not found in dep '{target_vfr}' (fetched from {locator}). \
The target may have been removed or never existed in the pinned snapshot."
));
}
}
}
let now = chrono::Utc::now().to_rfc3339_opts(chrono::SecondsFormat::Secs, true);
let link = Link {
target: to.clone(),
link_type: r#type.clone(),
note: note.clone(),
inferred_by: inferred_by.clone(),
created_at: now,
mechanism: None,
};
p.findings[source_idx].links.push(link);
project::recompute_stats(&mut p);
repo::save_to_path(&frontier, &p).unwrap_or_else(|e| fail_return(&e));
let payload = json!({
"ok": true,
"command": "link.add",
"frontier": frontier.display().to_string(),
"from": from,
"to": to,
"type": r#type,
"cross_frontier": parsed.is_cross_frontier(),
});
if json {
let mut p2 = payload.clone();
if let Some(w) = &target_warning
&& let serde_json::Value::Object(m) = &mut p2
{
m.insert(
"target_warning".to_string(),
serde_json::Value::String(w.clone()),
);
}
println!(
"{}",
serde_json::to_string_pretty(&p2).expect("failed to serialize link.add")
);
} else {
println!(
"{} {} --[{}]--> {}{}",
style::ok("link"),
from,
r#type,
to,
if parsed.is_cross_frontier() {
" (cross-frontier)"
} else {
""
}
);
if let Some(w) = target_warning {
println!(" {w}");
}
}
}
}
}
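/// Frontier lifecycle: `new` scaffolds an empty project file,
/// `materialize` builds the on-disk frontier repo, and the *-dep
/// subcommands manage pinned cross-frontier dependencies. `remove-dep`
/// refuses to drop a dep while any finding still links into it;
/// `refresh-deps` re-pins snapshots from a hub's `/entries/<vfr_id>`
/// endpoint.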
fn cmd_frontier(action: FrontierAction) {
use crate::project::ProjectDependency;
use crate::repo;
match action {
FrontierAction::New {
path,
name,
description,
force,
json,
} => {
if path.exists() && !force {
fail(&format!(
"{} already exists; pass --force to overwrite",
path.display()
));
}
let now = chrono::Utc::now().to_rfc3339_opts(chrono::SecondsFormat::Secs, true);
let project = project::Project {
vela_version: project::VELA_SCHEMA_VERSION.to_string(),
schema: project::VELA_SCHEMA_URL.to_string(),
frontier_id: None,
project: project::ProjectMeta {
name: name.clone(),
description: description.clone(),
compiled_at: now,
compiler: project::VELA_COMPILER_VERSION.to_string(),
papers_processed: 0,
errors: 0,
dependencies: Vec::new(),
},
stats: project::ProjectStats::default(),
findings: Vec::new(),
sources: Vec::new(),
evidence_atoms: Vec::new(),
condition_records: Vec::new(),
review_events: Vec::new(),
confidence_updates: Vec::new(),
events: Vec::new(),
proposals: Vec::new(),
proof_state: proposals::ProofState::default(),
signatures: Vec::new(),
actors: Vec::new(),
replications: Vec::new(),
datasets: Vec::new(),
code_artifacts: Vec::new(),
artifacts: Vec::new(),
predictions: Vec::new(),
resolutions: Vec::new(),
peers: Vec::new(),
negative_results: Vec::new(),
trajectories: Vec::new(),
};
repo::save_to_path(&path, &project).unwrap_or_else(|e| fail_return(&e));
let payload = json!({
"ok": true,
"command": "frontier.new",
"path": path.display().to_string(),
"name": name,
"schema": project::VELA_SCHEMA_URL,
"vela_version": env!("CARGO_PKG_VERSION"),
"next_steps": [
"vela finding add <path> --assertion '...' --author 'reviewer:you' --apply",
"vela sign generate-keypair --out keys",
"vela actor add <path> reviewer:you --pubkey \"$(cat keys/public.key)\"",
"vela registry publish <path> --owner reviewer:you --key keys/private.key --locator <url> --to https://vela-hub.fly.dev",
],
});
if json {
println!(
"{}",
serde_json::to_string_pretty(&payload)
.expect("failed to serialize frontier.new")
);
} else {
println!(
"{} scaffolded frontier '{name}' at {}",
style::ok("frontier"),
path.display()
);
println!(" next steps:");
println!(
" 1. vela finding add {} --assertion '...' --author 'reviewer:you' --apply",
path.display()
);
println!(" 2. vela sign generate-keypair --out keys");
println!(
" 3. vela actor add {} reviewer:you --pubkey \"$(cat keys/public.key)\"",
path.display()
);
println!(
" 4. vela registry publish {} --owner reviewer:you --key keys/private.key --locator <url> --to https://vela-hub.fly.dev",
path.display()
);
}
}
FrontierAction::Materialize { frontier, json } => {
let payload = frontier_repo::materialize(&frontier).unwrap_or_else(|e| fail_return(&e));
if json {
println!(
"{}",
serde_json::to_string_pretty(&payload)
.expect("failed to serialize frontier materialize")
);
} else {
println!(
"{} materialized frontier repo at {}",
style::ok("frontier"),
frontier.display()
);
}
}
FrontierAction::AddDep {
frontier,
vfr_id,
locator,
snapshot,
name,
json,
} => {
let mut p = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
if p.project
.dependencies
.iter()
.any(|d| d.vfr_id.as_deref() == Some(&vfr_id))
{
fail(&format!(
"cross-frontier dependency '{vfr_id}' already declared; remove it first via `vela frontier remove-dep`"
));
}
let dep = ProjectDependency {
name: name.unwrap_or_else(|| vfr_id.clone()),
source: "vela.hub".into(),
version: None,
pinned_hash: None,
vfr_id: Some(vfr_id.clone()),
locator: Some(locator.clone()),
pinned_snapshot_hash: Some(snapshot.clone()),
};
p.project.dependencies.push(dep);
repo::save_to_path(&frontier, &p).unwrap_or_else(|e| fail_return(&e));
let payload = json!({
"ok": true,
"command": "frontier.add-dep",
"frontier": frontier.display().to_string(),
"vfr_id": vfr_id,
"locator": locator,
"pinned_snapshot_hash": snapshot,
"declared_count": p.project.dependencies.len(),
});
if json {
println!(
"{}",
serde_json::to_string_pretty(&payload)
.expect("failed to serialize frontier.add-dep")
);
} else {
println!(
"{} declared cross-frontier dep {vfr_id}",
style::ok("frontier")
);
println!(" locator: {locator}");
println!(" snapshot: {snapshot}");
}
}
FrontierAction::ListDeps { frontier, json } => {
let p = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
let deps: Vec<&ProjectDependency> = p.project.dependencies.iter().collect();
if json {
let payload = json!({
"ok": true,
"command": "frontier.list-deps",
"frontier": frontier.display().to_string(),
"count": deps.len(),
"dependencies": deps,
});
println!(
"{}",
serde_json::to_string_pretty(&payload)
.expect("failed to serialize frontier.list-deps")
);
} else {
println!();
println!(
" {}",
format!("VELA · FRONTIER · LIST-DEPS · {}", frontier.display())
.to_uppercase()
.dimmed()
);
println!(" {}", style::tick_row(60));
if deps.is_empty() {
println!(" (no dependencies declared)");
} else {
for d in &deps {
let kind = if d.is_cross_frontier() {
"cross-frontier"
} else {
"compile-time"
};
println!(" · {} [{kind}]", d.name);
if let Some(v) = &d.vfr_id {
println!(" vfr_id: {v}");
}
if let Some(l) = &d.locator {
println!(" locator: {l}");
}
if let Some(s) = &d.pinned_snapshot_hash {
println!(" snapshot: {s}");
}
}
}
}
}
FrontierAction::RemoveDep {
frontier,
vfr_id,
json,
} => {
let mut p = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
for f in &p.findings {
for l in &f.links {
if let Ok(crate::bundle::LinkRef::Cross { vfr_id: ref v, .. }) =
crate::bundle::LinkRef::parse(&l.target)
&& v == &vfr_id
{
fail(&format!(
"cannot remove dep '{vfr_id}': finding {} still links to it via {}",
f.id, l.target
));
}
}
}
let before = p.project.dependencies.len();
p.project
.dependencies
.retain(|d| d.vfr_id.as_deref() != Some(&vfr_id));
let removed = before - p.project.dependencies.len();
if removed == 0 {
fail(&format!("no cross-frontier dependency '{vfr_id}' found"));
}
repo::save_to_path(&frontier, &p).unwrap_or_else(|e| fail_return(&e));
let payload = json!({
"ok": true,
"command": "frontier.remove-dep",
"frontier": frontier.display().to_string(),
"vfr_id": vfr_id,
"removed": removed,
});
if json {
println!(
"{}",
serde_json::to_string_pretty(&payload)
.expect("failed to serialize frontier.remove-dep")
);
} else {
println!(
"{} removed cross-frontier dep {vfr_id}",
style::ok("frontier")
);
}
}
FrontierAction::RefreshDeps {
frontier,
from,
dry_run,
json,
} => {
let mut p = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
let cross_deps: Vec<String> = p
.project
.dependencies
.iter()
.filter_map(|d| d.vfr_id.clone())
.collect();
if cross_deps.is_empty() {
if json {
println!(
"{}",
serde_json::to_string_pretty(&json!({
"ok": true,
"command": "frontier.refresh-deps",
"frontier": frontier.display().to_string(),
"from": from,
"dry_run": dry_run,
"deps": [],
"summary": { "total": 0, "refreshed": 0, "unchanged": 0, "missing": 0, "unreachable": 0 },
})).expect("serialize")
);
} else {
println!(
"{} no cross-frontier deps declared in {}",
style::ok("frontier"),
frontier.display()
);
}
return;
}
let client = reqwest::blocking::Client::builder()
.timeout(std::time::Duration::from_secs(20))
.build()
.unwrap_or_else(|e| fail_return(&format!("http client init failed: {e}")));
let base = from.trim_end_matches('/');
#[derive(serde::Deserialize)]
struct HubEntry {
latest_snapshot_hash: String,
}
let mut per_dep: Vec<serde_json::Value> = Vec::new();
let (mut refreshed, mut unchanged, mut missing, mut unreachable) =
(0u32, 0u32, 0u32, 0u32);
for vfr in &cross_deps {
let url = format!("{base}/entries/{vfr}");
let resp = client.get(&url).send();
let outcome = match resp {
Ok(r) if r.status().as_u16() == 404 => {
missing += 1;
json!({ "vfr_id": vfr, "status": "missing", "url": url })
}
Ok(r) if !r.status().is_success() => {
unreachable += 1;
json!({ "vfr_id": vfr, "status": "unreachable", "http_status": r.status().as_u16() })
}
Err(e) => {
unreachable += 1;
json!({ "vfr_id": vfr, "status": "unreachable", "error": e.to_string() })
}
Ok(r) => match r.json::<HubEntry>() {
Err(e) => {
unreachable += 1;
json!({ "vfr_id": vfr, "status": "unreachable", "error": format!("invalid hub response: {e}") })
}
Ok(entry) => {
match p
.project
.dependencies
.iter()
.position(|d| d.vfr_id.as_deref() == Some(vfr.as_str()))
{
None => {
unreachable += 1;
json!({ "vfr_id": vfr, "status": "unreachable", "error": "dep disappeared mid-scan" })
}
Some(idx) => {
let local_pin =
p.project.dependencies[idx].pinned_snapshot_hash.clone();
let new_pin = entry.latest_snapshot_hash;
if local_pin.as_deref() == Some(new_pin.as_str()) {
unchanged += 1;
json!({ "vfr_id": vfr, "status": "unchanged", "snapshot": new_pin })
} else {
if !dry_run {
p.project.dependencies[idx].pinned_snapshot_hash =
Some(new_pin.clone());
}
refreshed += 1;
json!({
"vfr_id": vfr,
"status": "refreshed",
"old_snapshot": local_pin,
"new_snapshot": new_pin,
})
}
}
}
}
},
};
per_dep.push(outcome);
}
if !dry_run && refreshed > 0 {
repo::save_to_path(&frontier, &p).unwrap_or_else(|e| fail_return(&e));
}
let payload = json!({
"ok": true,
"command": "frontier.refresh-deps",
"frontier": frontier.display().to_string(),
"from": from,
"dry_run": dry_run,
"deps": per_dep,
"summary": {
"total": cross_deps.len(),
"refreshed": refreshed,
"unchanged": unchanged,
"missing": missing,
"unreachable": unreachable,
},
});
if json {
println!(
"{}",
serde_json::to_string_pretty(&payload)
.expect("failed to serialize frontier.refresh-deps")
);
} else {
let mode = if dry_run { " (dry-run)" } else { "" };
println!(
"{} refresh-deps{mode} · {} total · {refreshed} refreshed · {unchanged} unchanged · {missing} missing · {unreachable} unreachable",
style::ok("frontier"),
cross_deps.len()
);
for d in &per_dep {
let vfr = d["vfr_id"].as_str().unwrap_or("?");
let status = d["status"].as_str().unwrap_or("?");
match status {
"refreshed" => println!(
" {vfr} refreshed {} → {}",
d["old_snapshot"]
.as_str()
.unwrap_or("(none)")
.chars()
.take(16)
.collect::<String>(),
d["new_snapshot"]
.as_str()
.unwrap_or("?")
.chars()
.take(16)
.collect::<String>(),
),
"unchanged" => println!(" {vfr} unchanged"),
"missing" => println!(" {vfr} missing on hub"),
_ => println!(" {vfr} unreachable"),
}
}
}
}
FrontierAction::Diff {
frontier,
since,
week,
json,
} => cmd_frontier_diff(&frontier, since.as_deref(), week.as_deref(), json),
}
}
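/// Handle `vela repo` subcommands: `status` and `doctor` reports for a frontier repo.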
fn cmd_repo(action: RepoAction) {
match action {
RepoAction::Status { frontier, json } => {
let payload = frontier_repo::repo_status(&frontier).unwrap_or_else(|e| fail_return(&e));
if json {
println!(
"{}",
serde_json::to_string_pretty(&payload)
.expect("failed to serialize repo status")
);
} else {
let summary = payload.get("summary").unwrap_or(&Value::Null);
let freshness = payload.get("freshness").unwrap_or(&Value::Null);
println!("vela repo status");
println!(" frontier: {}", frontier.display());
println!(
" events: {}",
summary
.get("accepted_events")
.and_then(Value::as_u64)
.unwrap_or_default()
);
println!(
" open proposals: {}",
summary
.get("open_proposals")
.and_then(Value::as_u64)
.unwrap_or_default()
);
println!(
" state: {}",
freshness
.get("materialized_state")
.and_then(Value::as_str)
.unwrap_or("unknown")
);
println!(
" proof: {}",
freshness
.get("proof")
.and_then(Value::as_str)
.unwrap_or("unknown")
);
}
}
RepoAction::Doctor { frontier, json } => {
let payload = frontier_repo::repo_doctor(&frontier).unwrap_or_else(|e| fail_return(&e));
if json {
println!(
"{}",
serde_json::to_string_pretty(&payload)
.expect("failed to serialize repo doctor")
);
} else {
let ok = payload.get("ok").and_then(Value::as_bool) == Some(true);
let issues = payload
.get("issues")
.and_then(Value::as_array)
.map_or(0, Vec::len);
println!("vela repo doctor");
println!(" frontier: {}", frontier.display());
println!(" status: {}", if ok { "ok" } else { "needs attention" });
println!(" issues: {issues}");
}
}
}
}
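/// Verify a frontier's proof packet, printing issues and exiting non-zero on failure.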
fn cmd_proof_verify(frontier: &Path, json_output: bool) {
let payload = frontier_repo::proof_verify(frontier).unwrap_or_else(|e| fail_return(&e));
if json_output {
println!(
"{}",
serde_json::to_string_pretty(&payload).expect("failed to serialize proof verify")
);
if payload.get("ok").and_then(Value::as_bool) != Some(true) {
std::process::exit(1);
}
} else {
let ok = payload.get("ok").and_then(Value::as_bool) == Some(true);
println!("vela proof verify");
println!(" frontier: {}", frontier.display());
println!(" status: {}", if ok { "ok" } else { "failed" });
if let Some(issues) = payload.get("issues").and_then(Value::as_array) {
for issue in issues {
if let Some(message) = issue.get("message").and_then(Value::as_str) {
println!(" issue: {message}");
}
}
}
if !ok {
std::process::exit(1);
}
}
}
fn cmd_proof_explain(frontier: &Path) {
let text = frontier_repo::proof_explain(frontier).unwrap_or_else(|e| fail_return(&e));
print!("{text}");
}
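/// Report findings added or updated within a window: an ISO week by default, or
/// everything since an explicit RFC 3339 `--since` timestamp.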
fn cmd_frontier_diff(frontier: &Path, since: Option<&str>, week: Option<&str>, json: bool) {
let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
let now = chrono::Utc::now();
let (window_start, window_end, week_label): (
chrono::DateTime<chrono::Utc>,
chrono::DateTime<chrono::Utc>,
Option<String>,
) = if let Some(s) = since {
let parsed = chrono::DateTime::parse_from_rfc3339(s)
.map(|d| d.with_timezone(&chrono::Utc))
.unwrap_or_else(|e| fail_return(&format!("invalid --since timestamp '{s}': {e}")));
(parsed, now, None)
} else {
let key = week
.map(str::to_owned)
.unwrap_or_else(|| iso_week_key_for(now.date_naive()));
let (start, end) = iso_week_bounds(&key)
.unwrap_or_else(|e| fail_return(&format!("invalid --week '{key}': {e}")));
(start, end, Some(key))
};
let mut added: Vec<&crate::bundle::FindingBundle> = Vec::new();
let mut updated: Vec<&crate::bundle::FindingBundle> = Vec::new();
let mut new_contradictions: Vec<&crate::bundle::FindingBundle> = Vec::new();
let mut cumulative: usize = 0;
for f in &project.findings {
let created = chrono::DateTime::parse_from_rfc3339(&f.created)
.map(|d| d.with_timezone(&chrono::Utc))
.ok();
let updated_ts = f
.updated
.as_deref()
.and_then(|u| chrono::DateTime::parse_from_rfc3339(u).ok())
.map(|d| d.with_timezone(&chrono::Utc));
if let Some(c) = created
&& c < window_end
{
cumulative += 1;
}
if let Some(c) = created
&& c >= window_start
&& c < window_end
{
added.push(f);
let is_tension = f.flags.contested || f.assertion.assertion_type == "tension";
if is_tension {
new_contradictions.push(f);
}
continue;
}
if let Some(u) = updated_ts
&& u >= window_start
&& u < window_end
{
updated.push(f);
}
}
let summary_for = |list: &[&crate::bundle::FindingBundle]| -> Vec<serde_json::Value> {
list.iter()
.map(|f| {
json!({
"id": f.id,
"assertion": f.assertion.text,
"evidence_type": f.evidence.evidence_type,
"confidence": f.confidence.score,
"doi": f.provenance.doi,
"pmid": f.provenance.pmid,
})
})
.collect()
};
let payload = json!({
"ok": true,
"command": "frontier.diff",
"frontier": frontier.display().to_string(),
"frontier_id": project.frontier_id,
"window": {
"start": window_start.to_rfc3339_opts(chrono::SecondsFormat::Secs, true),
"end": window_end.to_rfc3339_opts(chrono::SecondsFormat::Secs, true),
"iso_week": week_label,
},
"totals": {
"added": added.len(),
"updated": updated.len(),
"new_contradictions": new_contradictions.len(),
"cumulative_claims": cumulative,
},
"added": summary_for(&added),
"updated": summary_for(&updated),
"new_contradictions": summary_for(&new_contradictions),
});
if json {
println!(
"{}",
serde_json::to_string_pretty(&payload).expect("failed to serialize frontier.diff")
);
return;
}
let label = week_label
.clone()
.unwrap_or_else(|| format!("since {}", window_start.format("%Y-%m-%d %H:%M UTC")));
println!();
println!(
" {}",
format!("VELA · FRONTIER · DIFF · {label}")
.to_uppercase()
.dimmed()
);
println!(" {}", style::tick_row(60));
println!(
" range: {} → {}",
window_start.format("%Y-%m-%d %H:%M"),
window_end.format("%Y-%m-%d %H:%M")
);
println!(" added: {}", added.len());
println!(" updated: {}", updated.len());
println!(" contradictions: {}", new_contradictions.len());
println!(" cumulative: {cumulative}");
if added.is_empty() && updated.is_empty() {
println!();
println!(" (quiet window — no findings added or updated)");
} else {
println!();
println!(" added:");
for f in &added {
println!(
" · {} {}",
f.id.dimmed(),
truncate(&f.assertion.text, 88)
);
}
if !updated.is_empty() {
println!();
println!(" updated:");
for f in &updated {
println!(
" · {} {}",
f.id.dimmed(),
truncate(&f.assertion.text, 88)
);
}
}
}
}
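/// Truncate `s` to at most `n` characters, appending `…` when text is cut.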
fn truncate(s: &str, n: usize) -> String {
if s.chars().count() <= n {
s.to_string()
} else {
let mut out: String = s.chars().take(n.saturating_sub(1)).collect();
out.push('…');
out
}
}
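/// Format a date as its ISO week key, e.g. `2024-W05`.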
fn iso_week_key_for(d: chrono::NaiveDate) -> String {
use chrono::Datelike;
let iso = d.iso_week();
format!("{:04}-W{:02}", iso.year(), iso.week())
}
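/// Resolve a `YYYY-Www` key to the UTC span from that week's Monday 00:00 to the
/// following Monday 00:00.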
fn iso_week_bounds(
key: &str,
) -> Result<(chrono::DateTime<chrono::Utc>, chrono::DateTime<chrono::Utc>), String> {
let (year_str, week_str) = key
.split_once("-W")
.ok_or_else(|| format!("expected YYYY-Www, got '{key}'"))?;
let year: i32 = year_str
.parse()
.map_err(|e| format!("bad year in '{key}': {e}"))?;
let week: u32 = week_str
.parse()
.map_err(|e| format!("bad week in '{key}': {e}"))?;
let monday = chrono::NaiveDate::from_isoywd_opt(year, week, chrono::Weekday::Mon)
.ok_or_else(|| format!("invalid ISO week: {key}"))?;
let next_monday = monday + chrono::Duration::days(7);
let start = monday.and_hms_opt(0, 0, 0).expect("00:00 valid").and_utc();
let end = next_monday
.and_hms_opt(0, 0, 0)
.expect("00:00 valid")
.and_utc();
Ok((start, end))
}
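/// Handle `vela registry` subcommands: depends-on, mirror, list, publish, and pull.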
fn cmd_registry(action: RegistryAction) {
use crate::registry;
let default_registry = || -> PathBuf {
let home = std::env::var("HOME").unwrap_or_else(|_| ".".to_string());
PathBuf::from(home)
.join(".vela")
.join("registry")
.join("entries.json")
};
match action {
RegistryAction::DependsOn { vfr_id, from, json } => {
let base = from.trim_end_matches('/');
let url = format!("{base}/entries/{vfr_id}/depends-on");
let client = reqwest::blocking::Client::builder()
.timeout(std::time::Duration::from_secs(30))
.build()
.unwrap_or_else(|e| fail_return(&format!("http client init: {e}")));
let resp = client
.get(&url)
.send()
.unwrap_or_else(|e| fail_return(&format!("GET {url}: {e}")));
if !resp.status().is_success() {
fail(&format!("GET {url}: HTTP {}", resp.status()));
}
let body: serde_json::Value = resp
.json()
.unwrap_or_else(|e| fail_return(&format!("parse response: {e}")));
if json {
println!(
"{}",
serde_json::to_string_pretty(&body).expect("serialize")
);
} else {
let dependents = body
.get("dependents")
.and_then(|v| v.as_array())
.cloned()
.unwrap_or_default();
let count = dependents.len();
println!(
"{} {count} {} on {vfr_id}",
style::ok("registry"),
if count == 1 {
"frontier depends"
} else {
"frontiers depend"
},
);
for e in &dependents {
let v = e.get("vfr_id").and_then(|v| v.as_str()).unwrap_or("?");
let n = e.get("name").and_then(|v| v.as_str()).unwrap_or("?");
let o = e
.get("owner_actor_id")
.and_then(|v| v.as_str())
.unwrap_or("?");
println!(" {v} {n} ({o})");
}
}
}
RegistryAction::Mirror {
vfr_id,
from,
to,
json,
} => {
let src_base = from.trim_end_matches('/');
let dst_base = to.trim_end_matches('/');
let src_url = format!("{src_base}/entries/{vfr_id}");
let dst_url = format!("{dst_base}/entries");
let client = reqwest::blocking::Client::builder()
.timeout(std::time::Duration::from_secs(30))
.build()
.unwrap_or_else(|e| fail_return(&format!("http client init: {e}")));
let entry: serde_json::Value = client
.get(&src_url)
.send()
.unwrap_or_else(|e| fail_return(&format!("GET {src_url}: {e}")))
.error_for_status()
.unwrap_or_else(|e| fail_return(&format!("GET {src_url}: {e}")))
.json()
.unwrap_or_else(|e| fail_return(&format!("parse {src_url}: {e}")));
let resp = client
.post(&dst_url)
.header("content-type", "application/json")
.body(
serde_json::to_vec(&entry)
.unwrap_or_else(|e| fail_return(&format!("serialize: {e}"))),
)
.send()
.unwrap_or_else(|e| fail_return(&format!("POST {dst_url}: {e}")));
let status = resp.status();
if !status.is_success() {
let body = resp.text().unwrap_or_default();
fail(&format!(
"POST {dst_url}: HTTP {status}: {}",
body.chars().take(300).collect::<String>()
));
}
let body: serde_json::Value = resp
.json()
.unwrap_or_else(|e| fail_return(&format!("parse POST response: {e}")));
let duplicate = body
.get("duplicate")
.and_then(serde_json::Value::as_bool)
.unwrap_or(false);
let payload = json!({
"ok": true,
"command": "registry.mirror",
"vfr_id": vfr_id,
"from": src_base,
"to": dst_base,
"duplicate_on_destination": duplicate,
"destination_response": body,
});
if json {
println!(
"{}",
serde_json::to_string_pretty(&payload).expect("serialize")
);
} else {
println!(
"{} mirrored {vfr_id} from {src_base} → {dst_base}{}",
style::ok("registry"),
if duplicate {
" (duplicate; signature already known)"
} else {
" (fresh insert)"
}
);
}
}
RegistryAction::List { from, json } => {
let (label, registry_data) = match &from {
Some(loc) if loc.starts_with("http") => (
loc.clone(),
registry::load_any(loc).unwrap_or_else(|e| fail_return(&e)),
),
Some(loc) => {
let p = registry::resolve_local(loc).unwrap_or_else(|e| fail_return(&e));
(
p.display().to_string(),
registry::load_local(&p).unwrap_or_else(|e| fail_return(&e)),
)
}
None => {
let p = default_registry();
(
p.display().to_string(),
registry::load_local(&p).unwrap_or_else(|e| fail_return(&e)),
)
}
};
let r = registry_data;
let path_label = label;
if json {
let payload = json!({
"ok": true,
"command": "registry.list",
"registry": path_label,
"entry_count": r.entries.len(),
"entries": r.entries,
});
println!(
"{}",
serde_json::to_string_pretty(&payload)
.expect("failed to serialize registry.list")
);
} else {
println!();
println!(
" {}",
format!("VELA · REGISTRY · LIST · {}", path_label)
.to_uppercase()
.dimmed()
);
println!(" {}", style::tick_row(60));
if r.entries.is_empty() {
println!(" (registry is empty)");
} else {
for entry in &r.entries {
println!(
" {} {} ({}) by {} published {}",
entry.vfr_id,
entry.name,
entry.network_locator,
entry.owner_actor_id,
entry.signed_publish_at
);
}
}
}
}
RegistryAction::Publish {
frontier,
owner,
key,
locator,
to,
json,
} => {
let key_hex = std::fs::read_to_string(&key)
.map(|s| s.trim().to_string())
.unwrap_or_else(|e| fail_return(&format!("read key {}: {e}", key.display())));
let signing_key = parse_signing_key(&key_hex);
let derived = hex::encode(signing_key.verifying_key().to_bytes());
let mut frontier_data =
repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
let pubkey = match frontier_data.actors.iter().find(|actor| actor.id == owner) {
Some(actor) => actor.public_key.clone(),
None => {
eprintln!(
" vela registry publish · auto-registering actor {owner} (derived pubkey {})",
&derived[..16]
);
frontier_data.actors.push(sign::ActorRecord {
id: owner.clone(),
public_key: derived.clone(),
algorithm: "ed25519".to_string(),
created_at: chrono::Utc::now().to_rfc3339(),
tier: None,
orcid: None,
access_clearance: None,
});
repo::save_to_path(&frontier, &frontier_data)
.unwrap_or_else(|e| fail_return(&format!("save actor: {e}")));
derived.clone()
}
};
let snapshot_hash = events::snapshot_hash(&frontier_data);
let event_log_hash = events::event_log_hash(&frontier_data.events);
let vfr_id = frontier_data.frontier_id();
let name = frontier_data.project.name.clone();
if derived != pubkey {
fail(&format!(
"private key does not match registered pubkey for owner '{owner}'"
));
}
let to_is_remote = matches!(
to.as_deref(),
Some(loc) if loc.starts_with("http://") || loc.starts_with("https://")
);
let resolved_locator = match locator {
Some(l) => l,
None => {
if to_is_remote {
let hub = to.as_deref().unwrap().trim_end_matches('/');
let hub_root = hub.trim_end_matches("/entries");
format!("{hub_root}/entries/{vfr_id}/snapshot")
} else {
fail_return(
"--locator is required for local publishes; pass e.g. \
--locator file:///path/to/frontier.json or an HTTPS URL.",
)
}
}
};
let mut entry = registry::RegistryEntry {
schema: registry::ENTRY_SCHEMA.to_string(),
vfr_id: vfr_id.clone(),
name: name.clone(),
owner_actor_id: owner.clone(),
owner_pubkey: pubkey,
latest_snapshot_hash: snapshot_hash,
latest_event_log_hash: event_log_hash,
network_locator: resolved_locator,
signed_publish_at: chrono::Utc::now().to_rfc3339(),
signature: String::new(),
};
entry.signature =
registry::sign_entry(&entry, &signing_key).unwrap_or_else(|e| fail_return(&e));
let (registry_label, duplicate) = if to_is_remote {
let hub_url = to.clone().unwrap();
let resp = registry::publish_remote(&entry, &hub_url, Some(&frontier_data))
.unwrap_or_else(|e| fail_return(&e));
(hub_url, resp.duplicate)
} else {
let registry_path = match &to {
Some(loc) => registry::resolve_local(loc).unwrap_or_else(|e| fail_return(&e)),
None => default_registry(),
};
registry::publish_entry(&registry_path, entry.clone())
.unwrap_or_else(|e| fail_return(&e));
(registry_path.display().to_string(), false)
};
let payload = json!({
"ok": true,
"command": "registry.publish",
"registry": registry_label,
"vfr_id": vfr_id,
"name": name,
"owner": owner,
"snapshot_hash": entry.latest_snapshot_hash,
"event_log_hash": entry.latest_event_log_hash,
"signed_publish_at": entry.signed_publish_at,
"signature": entry.signature,
"duplicate": duplicate,
});
if json {
println!(
"{}",
serde_json::to_string_pretty(&payload)
.expect("failed to serialize registry.publish")
);
} else {
let dup_suffix = if duplicate { " (duplicate, no-op)" } else { "" };
println!(
"{} published {vfr_id} → {}{}",
style::ok("registry"),
registry_label,
dup_suffix
);
println!(" snapshot: {}", entry.latest_snapshot_hash);
println!(" event_log: {}", entry.latest_event_log_hash);
println!(" signature: {}…", &entry.signature[..16]);
}
}
RegistryAction::Pull {
vfr_id,
from,
out,
transitive,
depth,
json,
} => {
let (registry_label, registry_data) = match &from {
Some(loc) if loc.starts_with("http") => (
loc.clone(),
registry::load_any(loc).unwrap_or_else(|e| fail_return(&e)),
),
Some(loc) => {
let p = registry::resolve_local(loc).unwrap_or_else(|e| fail_return(&e));
(
p.display().to_string(),
registry::load_local(&p).unwrap_or_else(|e| fail_return(&e)),
)
}
None => {
let p = default_registry();
(
p.display().to_string(),
registry::load_local(&p).unwrap_or_else(|e| fail_return(&e)),
)
}
};
let entry = registry::find_latest(&registry_data, &vfr_id)
.unwrap_or_else(|| fail_return(&format!("{vfr_id} not found in registry")));
if transitive {
let result = registry::pull_transitive(&registry_data, &vfr_id, &out, depth)
.unwrap_or_else(|e| fail_return(&format!("transitive pull failed: {e}")));
let dep_paths_json: serde_json::Value = serde_json::Value::Object(
result
.deps
.iter()
.map(|(k, v)| (k.clone(), serde_json::json!(v.display().to_string())))
.collect(),
);
let payload = json!({
"ok": true,
"command": "registry.pull",
"registry": registry_label,
"vfr_id": vfr_id,
"transitive": true,
"depth": depth,
"out_dir": out.display().to_string(),
"primary": result.primary_path.display().to_string(),
"verified": result.verified,
"deps": dep_paths_json,
});
if json {
println!(
"{}",
serde_json::to_string_pretty(&payload)
.expect("failed to serialize registry.pull")
);
} else {
println!(
"{} pulled {vfr_id} (transitive) → {}",
style::ok("registry"),
out.display()
);
println!(" verified {} frontier(s):", result.verified.len());
for v in &result.verified {
println!(" · {v}");
}
println!(" every cross-frontier dependency's pinned snapshot hash matched");
}
return;
}
registry::fetch_frontier_to_prefer_event_hub(&entry, from.as_deref(), &out)
.unwrap_or_else(|e| fail_return(&format!("fetch frontier: {e}")));
registry::verify_pull(&entry, &out).unwrap_or_else(|e| {
let _ = std::fs::remove_file(&out);
fail_return(&format!("pull verification failed: {e}"))
});
let payload = json!({
"ok": true,
"command": "registry.pull",
"registry": registry_label,
"vfr_id": vfr_id,
"out": out.display().to_string(),
"snapshot_hash": entry.latest_snapshot_hash,
"event_log_hash": entry.latest_event_log_hash,
"verified": true,
});
if json {
println!(
"{}",
serde_json::to_string_pretty(&payload)
.expect("failed to serialize registry.pull")
);
} else {
println!(
"{} pulled {vfr_id} → {}",
style::ok("registry"),
out.display()
);
println!(" verified snapshot+event_log hashes match registry; signature ok");
}
}
}
}
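/// Print the `stats` payload (project metadata, stats, proposal summary, proof
/// state) as pretty JSON.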
fn print_stats_json(path: &Path) {
let frontier = load_frontier_or_fail(path);
let source_hash = hash_path_or_fail(path);
let payload = json!({
"ok": true,
"command": "stats",
"schema_version": project::VELA_SCHEMA_VERSION,
"frontier": {
"name": &frontier.project.name,
"description": &frontier.project.description,
"source": path.display().to_string(),
"hash": format!("sha256:{source_hash}"),
"compiled_at": &frontier.project.compiled_at,
"compiler": &frontier.project.compiler,
"papers_processed": frontier.project.papers_processed,
"errors": frontier.project.errors,
},
"stats": frontier.stats,
"proposals": proposals::summary(&frontier),
"proof_state": frontier.proof_state,
});
println!(
"{}",
serde_json::to_string_pretty(&payload).expect("failed to serialize stats")
);
}
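/// Search a single frontier, or every frontier under a directory via `--all`, with
/// optional entity and assertion-type filters.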
fn cmd_search(
source: Option<&Path>,
query: &str,
entity: Option<&str>,
assertion_type: Option<&str>,
all: Option<&Path>,
limit: usize,
json_output: bool,
) {
if let Some(dir) = all {
search::run_all(dir, query, entity, assertion_type, limit);
return;
}
let Some(src) = source else {
fail("Provide --source <frontier> or --all <directory>.");
};
if json_output {
let results = search::search(src, query, entity, assertion_type, limit);
let loaded = load_frontier_or_fail(src);
let source_hash = hash_path_or_fail(src);
let payload = json!({
"ok": true,
"command": "search",
"schema_version": project::VELA_SCHEMA_VERSION,
"query": query,
"frontier": {
"name": &loaded.project.name,
"source": src.display().to_string(),
"hash": format!("sha256:{source_hash}"),
},
"filters": {
"entity": entity,
"assertion_type": assertion_type,
"limit": limit,
},
"count": results.len(),
"results": results.iter().map(|result| json!({
"id": &result.id,
"score": result.score,
"assertion": &result.assertion,
"assertion_type": &result.assertion_type,
"confidence": result.confidence,
"entities": &result.entities,
"doi": &result.doi,
})).collect::<Vec<_>>()
});
println!(
"{}",
serde_json::to_string_pretty(&payload).expect("failed to serialize search results")
);
} else {
search::run(src, query, entity, assertion_type, limit);
}
}
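/// Analyze tension pairs between findings and print them, or emit the full JSON payload.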
fn cmd_tensions(source: &Path, both_high: bool, cross_domain: bool, top: usize, json_output: bool) {
let frontier = load_frontier_or_fail(source);
let result = tensions::analyze(&frontier, both_high, cross_domain, top);
if json_output {
let source_hash = hash_path_or_fail(source);
let payload = json!({
"ok": true,
"command": "tensions",
"schema_version": project::VELA_SCHEMA_VERSION,
"frontier": {
"name": &frontier.project.name,
"source": source.display().to_string(),
"hash": format!("sha256:{source_hash}"),
},
"filters": {
"both_high": both_high,
"cross_domain": cross_domain,
"top": top,
},
"count": result.len(),
"tensions": result.iter().map(|t| json!({
"score": t.score,
"resolved": t.resolved,
"superseding_id": &t.superseding_id,
"finding_a": {
"id": &t.finding_a.id,
"assertion": &t.finding_a.assertion,
"confidence": t.finding_a.confidence,
"assertion_type": &t.finding_a.assertion_type,
"citation_count": t.finding_a.citation_count,
"contradicts_count": t.finding_a.contradicts_count,
},
"finding_b": {
"id": &t.finding_b.id,
"assertion": &t.finding_b.assertion,
"confidence": t.finding_b.confidence,
"assertion_type": &t.finding_b.assertion_type,
"citation_count": t.finding_b.citation_count,
"contradicts_count": t.finding_b.contradicts_count,
}
})).collect::<Vec<_>>()
});
println!(
"{}",
serde_json::to_string_pretty(&payload).expect("failed to serialize tensions")
);
} else {
tensions::print_tensions(&result);
}
}
fn cmd_gaps(action: GapsAction) {
match action {
GapsAction::Rank {
frontier,
top,
domain,
json,
} => cmd_gap_rank(&frontier, top, domain.as_deref(), json),
}
}
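/// Rank findings flagged as gaps or negative space by inbound links plus confidence.
/// The output is explicitly labeled as review leads, not experiment targets.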
fn cmd_gap_rank(frontier_path: &Path, top: usize, domain: Option<&str>, json_output: bool) {
let frontier = load_frontier_or_fail(frontier_path);
let mut ranked = frontier
.findings
.iter()
.filter(|finding| finding.flags.gap || finding.flags.negative_space)
.filter(|finding| {
domain.is_none_or(|domain| {
finding
.assertion
.text
.to_lowercase()
.contains(&domain.to_lowercase())
|| finding
.assertion
.entities
.iter()
.any(|entity| entity.name.to_lowercase().contains(&domain.to_lowercase()))
})
})
.map(|finding| {
let dependency_count = frontier
.findings
.iter()
.flat_map(|candidate| candidate.links.iter())
.filter(|link| link.target == finding.id)
.count();
let score = dependency_count as f64 + finding.confidence.score;
json!({
"id": &finding.id,
"kind": "candidate_gap_review_lead",
"assertion": &finding.assertion.text,
"score": score,
"dependency_count": dependency_count,
"confidence": finding.confidence.score,
"evidence_type": &finding.evidence.evidence_type,
"entities": finding.assertion.entities.iter().map(|e| &e.name).collect::<Vec<_>>(),
"recommended_action": "Review source scope and missing evidence before treating this as an experiment target.",
"caveats": ["Candidate gap rankings are review leads, not guaranteed underexplored areas or experiment targets."],
})
})
.collect::<Vec<_>>();
ranked.sort_by(|a, b| {
b.get("score")
.and_then(Value::as_f64)
.partial_cmp(&a.get("score").and_then(Value::as_f64))
.unwrap_or(std::cmp::Ordering::Equal)
});
ranked.truncate(top);
if json_output {
let source_hash = hash_path_or_fail(frontier_path);
let payload = json!({
"ok": true,
"command": "gaps rank",
"schema_version": project::VELA_SCHEMA_VERSION,
"frontier": {
"name": &frontier.project.name,
"source": frontier_path.display().to_string(),
"hash": format!("sha256:{source_hash}"),
},
"filters": {
"top": top,
"domain": domain,
},
"count": ranked.len(),
"ranking_label": "candidate gap review leads",
"caveats": ["These rankings are navigation signals over flagged findings, not scientific conclusions."],
"review_leads": ranked.clone(),
"gaps": ranked,
});
println!(
"{}",
serde_json::to_string_pretty(&payload).expect("failed to serialize gap ranking")
);
} else {
println!();
println!(" {}", "CANDIDATE GAP REVIEW LEADS".dimmed());
println!(" {}", style::tick_row(60));
println!(" review source scope; these are not guaranteed experiment targets.");
println!();
for (idx, gap) in ranked.iter().enumerate() {
println!(
" {}. [{}] score={} {}",
idx + 1,
gap["id"].as_str().unwrap_or("?"),
gap["score"].as_f64().unwrap_or(0.0),
gap["assertion"].as_str().unwrap_or("")
);
}
}
}
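/// Detect entities that bridge two or more frontiers, optionally running rough
/// PubMed prior-art checks on the top candidates.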
async fn cmd_bridge(inputs: &[PathBuf], check_novelty: bool, top_n: usize) {
if inputs.len() < 2 {
fail("need at least 2 frontier files for bridge detection.");
}
println!();
println!(" {}", "VELA · BRIDGE · V0.36.0".dimmed());
println!(" {}", style::tick_row(60));
println!(" loading {} frontiers...", inputs.len());
let mut named_projects = Vec::<(String, project::Project)>::new();
let mut total_findings = 0;
for path in inputs {
let frontier = load_frontier_or_fail(path);
let name = path
.file_stem()
.unwrap_or_default()
.to_string_lossy()
.to_string();
println!(" {} · {} findings", name, frontier.stats.findings);
total_findings += frontier.stats.findings;
named_projects.push((name, frontier));
}
let refs = named_projects
.iter()
.map(|(name, frontier)| (name.as_str(), frontier))
.collect::<Vec<_>>();
let mut bridges = bridge::detect_bridges(&refs);
if check_novelty && !bridges.is_empty() {
let client = Client::new();
let check_count = bridges.len().min(top_n);
println!(" running rough PubMed prior-art checks for top {check_count} bridges...");
for bridge_item in bridges.iter_mut().take(check_count) {
let query = bridge::novelty_query(&bridge_item.entity_name, bridge_item);
match bridge::check_novelty(&client, &query).await {
Ok(count) => bridge_item.pubmed_count = Some(count),
Err(e) => eprintln!(
" {} prior-art check failed for {}: {e}",
style::err_prefix(),
bridge_item.entity_name
),
}
tokio::time::sleep(std::time::Duration::from_millis(350)).await;
}
}
print!("{}", bridge::format_report(&bridges, total_findings));
}
struct BenchArgs {
frontier: Option<PathBuf>,
gold: Option<PathBuf>,
entity_gold: Option<PathBuf>,
link_gold: Option<PathBuf>,
suite: Option<PathBuf>,
suite_ready: bool,
min_f1: Option<f64>,
min_precision: Option<f64>,
min_recall: Option<f64>,
no_thresholds: bool,
json: bool,
}
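/// Run the agent state-update benchmark for a candidate against a gold file,
/// optionally writing the JSON report; exits non-zero if the benchmark fails.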
fn cmd_agent_bench(
gold: &Path,
candidate: &Path,
sources: Option<&Path>,
threshold: Option<f64>,
report_path: Option<&Path>,
json_out: bool,
) {
let input = crate::agent_bench::BenchInput {
gold_path: gold.to_path_buf(),
candidate_path: candidate.to_path_buf(),
sources: sources.map(Path::to_path_buf),
threshold: threshold.unwrap_or(0.0),
};
let report = match crate::agent_bench::run(input) {
Ok(r) => r,
Err(e) => {
eprintln!("{} bench failed: {e}", style::err_prefix());
std::process::exit(1);
}
};
let json = serde_json::to_string_pretty(&report).unwrap_or_default();
if let Some(path) = report_path
&& let Err(e) = std::fs::write(path, &json)
{
eprintln!(
"{} failed to write report to {}: {e}",
style::err_prefix(),
path.display()
);
}
if json_out {
println!("{json}");
} else {
println!();
println!(" {}", "VELA · BENCH · AGENT STATE-UPDATE".dimmed());
println!(" {}", style::tick_row(60));
print!("{}", crate::agent_bench::render_pretty(&report));
println!();
}
if !report.pass {
std::process::exit(1);
}
}
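/// Dispatch `vela bench`: suite readiness, full suite runs, or single-gold
/// comparisons for findings, entities, or links.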
fn cmd_bench(args: BenchArgs) {
if args.suite_ready {
let suite_path = args
.suite
.unwrap_or_else(|| PathBuf::from("benchmarks/suites/bbb-core.json"));
let payload =
benchmark::suite_ready_report(&suite_path).unwrap_or_else(|e| fail_return(&e));
println!(
"{}",
serde_json::to_string_pretty(&payload).expect("failed to serialize suite-ready report")
);
if payload.get("ok").and_then(Value::as_bool) != Some(true) {
std::process::exit(1);
}
return;
}
if let Some(suite_path) = args.suite {
let payload = benchmark::run_suite(&suite_path).unwrap_or_else(|e| fail_return(&e));
if args.json {
println!(
"{}",
serde_json::to_string_pretty(&payload)
.expect("failed to serialize benchmark suite")
);
} else {
let ok = payload.get("ok").and_then(Value::as_bool) == Some(true);
let metrics = payload.get("metrics").unwrap_or(&Value::Null);
println!();
println!(" {}", "VELA · BENCH · SUITE".dimmed());
println!(" {}", style::tick_row(60));
println!(" suite: {}", suite_path.display());
println!(
" status: {}",
if ok {
style::ok("pass")
} else {
style::lost("fail")
}
);
println!(
" tasks: {}/{} passed",
metrics
.get("tasks_passed")
.and_then(Value::as_u64)
.unwrap_or(0),
metrics
.get("tasks_total")
.and_then(Value::as_u64)
.unwrap_or(0)
);
}
if payload.get("ok").and_then(Value::as_bool) != Some(true) {
std::process::exit(1);
}
return;
}
let frontier = args
.frontier
.unwrap_or_else(|| PathBuf::from("frontiers/bbb-alzheimer.json"));
let thresholds = benchmark::BenchmarkThresholds {
min_f1: if args.no_thresholds {
None
} else {
args.min_f1.or(Some(0.05))
},
min_precision: if args.no_thresholds {
None
} else {
args.min_precision
},
min_recall: if args.no_thresholds {
None
} else {
args.min_recall
},
..Default::default()
};
if let Some(path) = args.link_gold {
print_benchmark_or_exit(benchmark::task_envelope(
&frontier,
None,
benchmark::BenchmarkMode::Link,
Some(&path),
&thresholds,
None,
));
} else if let Some(path) = args.entity_gold {
print_benchmark_or_exit(benchmark::task_envelope(
&frontier,
None,
benchmark::BenchmarkMode::Entity,
Some(&path),
&thresholds,
None,
));
} else if let Some(path) = args.gold {
if args.json {
print_benchmark_or_exit(benchmark::task_envelope(
&frontier,
None,
benchmark::BenchmarkMode::Finding,
Some(&path),
&thresholds,
None,
));
} else {
benchmark::run(&frontier, &path, false);
}
} else {
fail("Provide --suite, --gold, --entity-gold, or --link-gold.");
}
}
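/// Print a benchmark payload as pretty JSON and exit non-zero unless it reports ok.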
fn print_benchmark_or_exit(result: Result<Value, String>) {
let payload = result.unwrap_or_else(|e| fail_return(&e));
println!(
"{}",
serde_json::to_string_pretty(&payload).expect("failed to serialize benchmark report")
);
if payload.get("ok").and_then(Value::as_bool) != Some(true) {
std::process::exit(1);
}
}
fn cmd_packet(action: PacketAction) {
let (result, json_output) = match action {
PacketAction::Inspect { path, json } => (packet::inspect(&path), json),
PacketAction::Validate { path, json } => (packet::validate(&path), json),
};
match result {
Ok(output) if json_output => {
println!(
"{}",
serde_json::to_string_pretty(&json!({
"ok": true,
"command": "packet",
"result": output,
}))
.expect("failed to serialize packet response")
);
}
Ok(output) => println!("{output}"),
Err(e) => fail(&e),
}
}
fn cmd_verify(path: &Path, json_output: bool) {
let result = packet::validate(path);
match result {
Ok(output) if json_output => {
println!(
"{}",
serde_json::to_string_pretty(&json!({
"ok": true,
"command": "verify",
"result": output,
}))
.expect("failed to serialize verify response")
);
}
Ok(output) => {
println!("{output}");
println!(
"\nverify: ok\n every file in the manifest matched its claimed sha256.\n pull this packet on another machine, run the same command, see the same line."
);
}
Err(e) => fail(&e),
}
}
fn cmd_init(path: &Path, name: &str, template: &str, initialize_git: bool, json_output: bool) {
if path.join(".vela").exists() {
fail(&format!(
"already initialized: {} exists",
path.join(".vela").display()
));
}
let payload = frontier_repo::initialize(
path,
frontier_repo::InitOptions {
name,
template,
initialize_git,
},
)
.unwrap_or_else(|e| fail_return(&e));
if json_output {
println!(
"{}",
serde_json::to_string_pretty(&payload).expect("failed to serialize init report")
);
} else {
println!(
"{} initialized frontier repository in {}",
style::ok("ok"),
path.display()
);
}
}
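/// Bootstrap a working repo end to end by shelling out to the current executable:
/// init, generate a keypair, register the reviewer actor, and add a first finding.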
fn cmd_quickstart(
path: &Path,
name: &str,
reviewer: &str,
assertion: Option<&str>,
keys_out: Option<&Path>,
json_output: bool,
) {
use std::process::Command;
if path.join(".vela").exists() {
fail(&format!(
"already initialized: {} exists",
path.join(".vela").display()
));
}
let exe = std::env::current_exe()
.unwrap_or_else(|e| fail_return(&format!("cannot locate current executable: {e}")));
let keys_dir = keys_out
.map(Path::to_path_buf)
.unwrap_or_else(|| path.join("keys"));
let assertion_text =
assertion.unwrap_or("Quickstart placeholder claim. Replace with your real assertion.");
let run_step = |label: &str, args: &[&str]| -> std::process::Output {
let out = Command::new(&exe)
.args(args)
.output()
.unwrap_or_else(|e| fail_return(&format!("{label}: failed to spawn: {e}")));
if !out.status.success() {
let stderr = String::from_utf8_lossy(&out.stderr);
fail(&format!("{label} failed:\n{stderr}"));
}
out
};
run_step(
"init",
&[
"init",
path.to_string_lossy().as_ref(),
"--name",
name,
"--no-git",
"--json",
],
);
let keys_out_str = keys_dir.to_string_lossy().into_owned();
let keypair_out = run_step(
"sign.generate-keypair",
&[
"sign",
"generate-keypair",
"--out",
keys_out_str.as_ref(),
"--json",
],
);
let keypair_json: serde_json::Value = serde_json::from_slice(&keypair_out.stdout)
.unwrap_or_else(|e| fail_return(&format!("sign.generate-keypair: bad json: {e}")));
let public_key = keypair_json
.get("public_key")
.and_then(|v| v.as_str())
.unwrap_or_else(|| fail_return("sign.generate-keypair: missing public_key in output"))
.to_string();
run_step(
"actor.add",
&[
"actor",
"add",
path.to_string_lossy().as_ref(),
reviewer,
"--pubkey",
public_key.as_str(),
"--json",
],
);
let finding_out = run_step(
"finding.add",
&[
"finding",
"add",
path.to_string_lossy().as_ref(),
"--assertion",
assertion_text,
"--author",
reviewer,
"--apply",
"--json",
],
);
let finding_json: serde_json::Value = serde_json::from_slice(&finding_out.stdout)
.unwrap_or_else(|e| fail_return(&format!("finding.add: bad json: {e}")));
let finding_id = finding_json
.get("finding_id")
.and_then(|v| v.as_str())
.map(str::to_string);
if json_output {
let payload = json!({
"ok": true,
"command": "quickstart",
"frontier": path.display().to_string(),
"name": name,
"reviewer": reviewer,
"public_key": public_key,
"keys_dir": keys_dir.display().to_string(),
"finding_id": finding_id,
"next_steps": [
format!("vela serve {}", path.display()),
format!(
"vela ingest <paper.pdf|doi:...> --frontier {}",
path.display()
),
format!("vela log {}", path.display()),
],
});
println!(
"{}",
serde_json::to_string_pretty(&payload).expect("failed to serialize quickstart")
);
return;
}
println!();
println!(
" {}",
format!("VELA · QUICKSTART · {}", path.display())
.to_uppercase()
.dimmed()
);
println!(" {}", style::tick_row(60));
println!(" frontier: {}", path.display());
println!(" name: {name}");
println!(" reviewer: {reviewer}");
println!(" keys: {}", keys_dir.display());
println!(" pubkey: {}…", &public_key[..16]);
if let Some(id) = finding_id.as_deref() {
println!(" finding: {id}");
}
println!();
println!(" {}", style::ok("done"));
println!(" next:");
println!(" vela serve {}", path.display());
println!(
" vela ingest <paper.pdf|doi:10.xxx|pmid:xxx> --frontier {}",
path.display()
);
println!(" vela log {}", path.display());
println!();
}
fn cmd_lock(path: &Path, check: bool, json_output: bool) {
if check {
cmd_lock_check(path, json_output);
return;
}
let payload = crate::frontier_repo::materialize(path).unwrap_or_else(|e| fail_return(&e));
if json_output {
println!(
"{}",
serde_json::to_string_pretty(&json!({
"ok": true,
"command": "lock",
"path": path.display().to_string(),
"snapshot_hash": payload.get("snapshot_hash"),
"event_log_hash": payload.get("event_log_hash"),
"proposal_state_hash": payload.get("proposal_state_hash"),
}))
.expect("failed to serialize lock report")
);
return;
}
println!();
println!(
" {}",
format!("VELA · LOCK · {}", path.display())
.to_uppercase()
.dimmed()
);
println!(" {}", style::tick_row(60));
println!(
" snapshot_hash: {}",
payload
.get("snapshot_hash")
.and_then(|v| v.as_str())
.unwrap_or("?")
);
println!(
" event_log_hash: {}",
payload
.get("event_log_hash")
.and_then(|v| v.as_str())
.unwrap_or("?")
);
println!(
" proposal_state_hash: {}",
payload
.get("proposal_state_hash")
.and_then(|v| v.as_str())
.unwrap_or("?")
);
println!();
println!(" {}", style::ok("locked"));
}
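/// Compare the on-disk snapshot and event-log hashes against `vela.lock`,
/// reporting drift and exiting non-zero when they diverge.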
fn cmd_lock_check(path: &Path, json_output: bool) {
use crate::frontier_repo::read_lock;
let lock = read_lock(path).unwrap_or_else(|e| fail_return(&e));
let Some(lock) = lock else {
fail("lock --check: no vela.lock found at path");
};
let project = repo::load_from_path(path).unwrap_or_else(|e| fail_return(&e));
let current_snapshot = format!("sha256:{}", crate::events::snapshot_hash(&project));
let current_event_log = format!("sha256:{}", crate::events::event_log_hash(&project.events));
let mut drift: Vec<String> = Vec::new();
if lock.snapshot_hash != current_snapshot {
drift.push(format!(
"snapshot_hash: lock={} current={}",
lock.snapshot_hash, current_snapshot
));
}
if lock.event_log_hash != current_event_log {
drift.push(format!(
"event_log_hash: lock={} current={}",
lock.event_log_hash, current_event_log
));
}
let ok = drift.is_empty();
if json_output {
println!(
"{}",
serde_json::to_string_pretty(&json!({
"ok": ok,
"command": "lock.check",
"path": path.display().to_string(),
"drift": drift,
"lock_snapshot_hash": lock.snapshot_hash,
"current_snapshot_hash": current_snapshot,
"lock_event_log_hash": lock.event_log_hash,
"current_event_log_hash": current_event_log,
"dependency_count": lock.dependencies.len(),
}))
.expect("failed to serialize lock check report")
);
} else {
println!();
println!(
" {}",
format!("VELA · LOCK · CHECK · {}", path.display())
.to_uppercase()
.dimmed()
);
println!(" {}", style::tick_row(60));
if ok {
println!(" snapshot_hash: {}", lock.snapshot_hash);
println!(" event_log_hash: {}", lock.event_log_hash);
println!(" dependencies pinned: {}", lock.dependencies.len());
println!();
println!(" {} on-disk state matches vela.lock", style::ok("ok"));
} else {
println!(" {} drift detected:", style::err_prefix());
for d in &drift {
println!(" - {d}");
}
}
}
if !ok {
std::process::exit(1);
}
}
fn cmd_doc(path: &Path, out: Option<&Path>, json_output: bool) {
let project = repo::load_from_path(path).unwrap_or_else(|e| fail_return(&e));
let out_dir = out
.map(Path::to_path_buf)
.unwrap_or_else(|| path.join("doc"));
let report =
crate::doc_render::write_site(&project, &out_dir).unwrap_or_else(|e| fail_return(&e));
if json_output {
println!(
"{}",
serde_json::to_string_pretty(&report).expect("failed to serialize doc report")
);
return;
}
println!();
println!(
" {}",
format!("VELA · DOC · {}", path.display())
.to_uppercase()
.dimmed()
);
println!(" {}", style::tick_row(60));
println!(" frontier_id: {}", report.frontier_id);
println!(" out: {}", report.out);
println!(" files written: {}", report.files_written);
println!(" findings: {}", report.findings_documented);
println!(" events: {}", report.events_documented);
println!();
println!(
" {} open {}/index.html in a browser",
style::ok("ok"),
report.out
);
}
fn cmd_import(frontier_path: &Path, into: Option<&Path>) {
let frontier = repo::load_from_path(frontier_path).unwrap_or_else(|e| fail_return(&e));
let target = into
.map(Path::to_path_buf)
.unwrap_or_else(|| PathBuf::from(frontier.project.name.replace(' ', "-").to_lowercase()));
repo::init_repo(&target, &frontier).unwrap_or_else(|e| fail(&e));
println!(
"{} {} findings · {}",
style::ok("imported"),
frontier.findings.len(),
target.display()
);
}
fn cmd_locator_repair(
path: &Path,
atom_id: &str,
locator_override: Option<&str>,
reviewer: &str,
reason: &str,
apply: bool,
json_output: bool,
) {
let report = state::repair_evidence_atom_locator(
path,
atom_id,
locator_override,
reviewer,
reason,
apply,
)
.unwrap_or_else(|e| fail_return(&e));
print_state_report(&report, json_output);
}
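/// Fetch source metadata, consulting an on-disk cache keyed by the SHA-256 of the
/// normalized identifier unless `refresh` is set.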
async fn cmd_source_fetch(
identifier: &str,
cache_root: Option<&Path>,
out_path: Option<&Path>,
refresh: bool,
_json_output: bool,
) {
use sha2::{Digest, Sha256};
let normalized = normalize_source_identifier(identifier);
let cache_path = cache_root.map(|root| {
let hash = format!("{:x}", Sha256::digest(normalized.as_bytes()));
root.join("sources")
.join("cache")
.join(format!("{hash}.json"))
});
if !refresh
&& let Some(p) = cache_path.as_ref()
&& p.is_file()
{
let body = std::fs::read_to_string(p)
.unwrap_or_else(|e| fail_return(&format!("read cache {}: {e}", p.display())));
emit_source_fetch_result(&body, out_path);
return;
}
let result = fetch_source_metadata(&normalized).await;
let json = match result {
Ok(value) => serde_json::to_string_pretty(&value)
.unwrap_or_else(|e| fail_return(&format!("serialize fetched record: {e}"))),
Err(e) => fail_return(&format!("source-fetch '{identifier}': {e}")),
};
if let Some(p) = cache_path.as_ref() {
if let Some(parent) = p.parent() {
std::fs::create_dir_all(parent)
.unwrap_or_else(|e| fail_return(&format!("mkdir {}: {e}", parent.display())));
}
std::fs::write(p, &json)
.unwrap_or_else(|e| fail_return(&format!("write cache {}: {e}", p.display())));
}
emit_source_fetch_result(&json, out_path);
}
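/// Write a fetched record to `out_path` (creating parent dirs) or print it to stdout.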
fn emit_source_fetch_result(body: &str, out_path: Option<&Path>) {
if let Some(p) = out_path {
if let Some(parent) = p.parent() {
let _ = std::fs::create_dir_all(parent);
}
std::fs::write(p, body)
.unwrap_or_else(|e| fail_return(&format!("write {}: {e}", p.display())));
} else {
println!("{body}");
}
}
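/// Normalize a raw identifier: bare DOIs gain a `doi:` prefix, NCT numbers `nct:`,
/// all-digit strings `pmid:`; already-prefixed identifiers pass through unchanged.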
fn normalize_source_identifier(raw: &str) -> String {
let trimmed = raw.trim();
if trimmed.starts_with("doi:")
|| trimmed.starts_with("pmid:")
|| trimmed.starts_with("nct:")
|| trimmed.starts_with("pmc:")
{
return trimmed.to_string();
}
if trimmed.starts_with("10.") {
return format!("doi:{trimmed}");
}
if trimmed.starts_with("NCT") || trimmed.starts_with("nct") {
return format!(
"nct:{}",
trimmed
.to_uppercase()
.trim_start_matches("NCT")
.to_string()
.split_at(0)
.0
);
}
if trimmed.chars().all(|c| c.is_ascii_digit()) {
return format!("pmid:{trimmed}");
}
trimmed.to_string()
}
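/// Fetch metadata for a normalized identifier: Crossref for DOIs (backfilling a
/// missing abstract from PubMed), PubMed for PMIDs, ClinicalTrials.gov for NCT ids.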
async fn fetch_source_metadata(normalized: &str) -> Result<Value, String> {
let client = Client::builder()
.user_agent("vela/0.66.0 (vela-source-fetch; +https://vela-science.fly.dev)")
.timeout(std::time::Duration::from_secs(30))
.build()
.map_err(|e| format!("client build: {e}"))?;
if let Some(rest) = normalized.strip_prefix("doi:") {
let mut record = fetch_via_crossref(&client, rest).await?;
let crossref_abstract = record
.get("abstract")
.and_then(|v| v.as_str())
.unwrap_or("");
if crossref_abstract.is_empty()
&& let Some(pmid) = resolve_doi_to_pmid(&client, rest).await
&& let Ok(pubmed_record) = fetch_via_pubmed(&client, &pmid).await
{
let pubmed_abstract = pubmed_record
.get("abstract")
.and_then(|v| v.as_str())
.unwrap_or("")
.to_string();
if !pubmed_abstract.is_empty()
&& let Some(obj) = record.as_object_mut()
{
obj.insert("abstract".to_string(), Value::String(pubmed_abstract));
obj.insert(
"abstract_source".to_string(),
Value::String(format!("pubmed:{pmid}")),
);
}
}
return Ok(record);
}
if let Some(rest) = normalized.strip_prefix("pmid:") {
return fetch_via_pubmed(&client, rest).await;
}
if let Some(rest) = normalized.strip_prefix("nct:") {
return fetch_via_ctgov(&client, rest).await;
}
Err(format!(
"unsupported source identifier '{normalized}'. Use doi:, pmid:, or nct: prefix."
))
}
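/// Resolve a DOI to its PMID via PubMed esearch, returning None unless exactly one
/// match is found.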
async fn resolve_doi_to_pmid(client: &Client, doi: &str) -> Option<String> {
let url = format!(
"https://eutils.ncbi.nlm.nih.gov/entrez/eutils/esearch.fcgi?db=pubmed&term={}[doi]&retmode=json",
urlencoding::encode(doi)
);
let resp = client.get(&url).send().await.ok()?;
if !resp.status().is_success() {
return None;
}
let body: Value = resp.json().await.ok()?;
let id_list = body.pointer("/esearchresult/idlist")?.as_array()?;
if id_list.len() != 1 {
return None;
}
id_list.first()?.as_str().map(|s| s.to_string())
}
async fn fetch_via_crossref(client: &Client, doi: &str) -> Result<Value, String> {
let url = format!("https://api.crossref.org/works/{doi}");
let resp = client
.get(&url)
.send()
.await
.map_err(|e| format!("crossref get: {e}"))?;
if !resp.status().is_success() {
return Err(format!("crossref returned {}", resp.status()));
}
let body: Value = resp
.json()
.await
.map_err(|e| format!("crossref json: {e}"))?;
let work = body.get("message").cloned().unwrap_or(Value::Null);
let title = work
.get("title")
.and_then(|v| v.as_array())
.and_then(|a| a.first())
.and_then(|v| v.as_str())
.unwrap_or("")
.to_string();
let abstract_html = work
.get("abstract")
.and_then(|v| v.as_str())
.unwrap_or("")
.to_string();
let abstract_text = strip_jats_tags(&abstract_html);
let year = work
.get("issued")
.and_then(|v| v.get("date-parts"))
.and_then(|v| v.as_array())
.and_then(|a| a.first())
.and_then(|v| v.as_array())
.and_then(|a| a.first())
.and_then(|v| v.as_i64());
let journal = work
.get("container-title")
.and_then(|v| v.as_array())
.and_then(|a| a.first())
.and_then(|v| v.as_str())
.unwrap_or("")
.to_string();
let authors = work
.get("author")
.and_then(|v| v.as_array())
.map(|arr| {
arr.iter()
.filter_map(|a| {
let given = a.get("given").and_then(|v| v.as_str()).unwrap_or("");
let family = a.get("family").and_then(|v| v.as_str()).unwrap_or("");
let combined = format!("{given} {family}").trim().to_string();
if combined.is_empty() {
None
} else {
Some(combined)
}
})
.collect::<Vec<_>>()
})
.unwrap_or_default();
Ok(json!({
"schema": "vela.source_fetch.v0.1",
"identifier": format!("doi:{doi}"),
"source": "crossref",
"title": title,
"abstract": abstract_text,
"year": year,
"journal": journal,
"authors": authors,
"retrieved_at": chrono::Utc::now().to_rfc3339(),
}))
}
async fn fetch_via_pubmed(client: &Client, pmid: &str) -> Result<Value, String> {
let url = format!(
"https://eutils.ncbi.nlm.nih.gov/entrez/eutils/efetch.fcgi?db=pubmed&id={pmid}&retmode=xml"
);
let resp = client
.get(&url)
.send()
.await
.map_err(|e| format!("pubmed get: {e}"))?;
if !resp.status().is_success() {
return Err(format!("pubmed returned {}", resp.status()));
}
let xml = resp.text().await.map_err(|e| format!("pubmed text: {e}"))?;
let title = extract_xml_text(&xml, "<ArticleTitle>", "</ArticleTitle>");
let abstract_text = extract_xml_text(&xml, "<AbstractText>", "</AbstractText>");
let year = extract_xml_text(&xml, "<Year>", "</Year>")
.parse::<i64>()
.ok();
let journal = extract_xml_text(&xml, "<Title>", "</Title>");
Ok(json!({
"schema": "vela.source_fetch.v0.1",
"identifier": format!("pmid:{pmid}"),
"source": "pubmed",
"title": title,
"abstract": abstract_text,
"year": year,
"journal": journal,
"authors": Vec::<String>::new(),
"retrieved_at": chrono::Utc::now().to_rfc3339(),
}))
}
async fn fetch_via_ctgov(client: &Client, nct: &str) -> Result<Value, String> {
let nct_clean = nct.trim();
let nct_id = if nct_clean.starts_with("NCT") || nct_clean.starts_with("nct") {
nct_clean.to_uppercase()
} else {
format!("NCT{nct_clean}")
};
let url = format!("https://clinicaltrials.gov/api/v2/studies/{nct_id}");
let resp = client
.get(&url)
.send()
.await
.map_err(|e| format!("ctgov get: {e}"))?;
if !resp.status().is_success() {
return Err(format!("ctgov returned {}", resp.status()));
}
let body: Value = resp.json().await.map_err(|e| format!("ctgov json: {e}"))?;
let title = body
.pointer("/protocolSection/identificationModule/briefTitle")
.and_then(|v| v.as_str())
.unwrap_or("")
.to_string();
let abstract_text = body
.pointer("/protocolSection/descriptionModule/briefSummary")
.and_then(|v| v.as_str())
.unwrap_or("")
.to_string();
let phase = body
.pointer("/protocolSection/designModule/phases")
.and_then(|v| v.as_array())
.and_then(|a| a.first())
.and_then(|v| v.as_str())
.unwrap_or("")
.to_string();
Ok(json!({
"schema": "vela.source_fetch.v0.1",
"identifier": format!("nct:{nct_id}"),
"source": "clinicaltrials.gov",
"title": title,
"abstract": abstract_text,
"year": Value::Null,
"journal": phase,
"authors": Vec::<String>::new(),
"retrieved_at": chrono::Utc::now().to_rfc3339(),
}))
}
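/// Naive XML scan: return the text between the first `open` tag and the next `close` tag.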
fn extract_xml_text(xml: &str, open: &str, close: &str) -> String {
if let Some(start) = xml.find(open) {
let after = &xml[start + open.len()..];
if let Some(end) = after.find(close) {
return after[..end].trim().to_string();
}
}
String::new()
}
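/// Strip JATS/XML tags from Crossref abstract markup and collapse runs of whitespace.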
fn strip_jats_tags(html: &str) -> String {
let mut out = String::with_capacity(html.len());
let mut in_tag = false;
for c in html.chars() {
match c {
'<' => in_tag = true,
'>' => in_tag = false,
_ if !in_tag => out.push(c),
_ => {}
}
}
out.split_whitespace().collect::<Vec<_>>().join(" ")
}
fn cmd_span_repair(
path: &Path,
finding_id: &str,
section: &str,
text: &str,
reviewer: &str,
reason: &str,
apply: bool,
json_output: bool,
) {
let report =
state::repair_finding_span(path, finding_id, section, text, reviewer, reason, apply)
.unwrap_or_else(|e| fail_return(&e));
print_state_report(&report, json_output);
}
#[allow(clippy::too_many_arguments)]
fn cmd_entity_resolve(
path: &Path,
finding_id: &str,
entity_name: &str,
source: &str,
id: &str,
confidence: f64,
matched_name: Option<&str>,
resolution_method: &str,
reviewer: &str,
reason: &str,
apply: bool,
json_output: bool,
) {
let report = state::resolve_finding_entity(
path,
finding_id,
entity_name,
source,
id,
confidence,
matched_name,
resolution_method,
reviewer,
reason,
apply,
)
.unwrap_or_else(|e| fail_return(&e));
print_state_report(&report, json_output);
}
fn cmd_propagate(
path: &Path,
retract: Option<String>,
reduce_confidence: Option<String>,
to: Option<f64>,
output: Option<&Path>,
) {
let mut frontier = load_frontier_or_fail(path);
let (finding_id, action, label) = if let Some(id) = retract {
(id, propagate::PropagationAction::Retracted, "retraction")
} else if let Some(id) = reduce_confidence {
let score = to.unwrap_or_else(|| fail_return("--reduce-confidence requires --to <score>"));
if !(0.0..=1.0).contains(&score) {
fail("--to must be between 0.0 and 1.0");
}
(
id,
propagate::PropagationAction::ConfidenceReduced { new_score: score },
"confidence reduction",
)
} else {
fail("specify --retract <id> or --reduce-confidence <id> --to <score>");
};
if !frontier.findings.iter().any(|f| f.id == finding_id) {
fail(&format!("finding not found: {finding_id}"));
}
let result = propagate::propagate_correction(&mut frontier, &finding_id, action);
frontier.review_events.extend(result.events.clone());
project::recompute_stats(&mut frontier);
propagate::print_result(&result, label, &finding_id);
let out = output.unwrap_or(path);
repo::save_to_path(out, &frontier).expect("Failed to save frontier");
println!(" output: {}", out.display());
}
fn cmd_mcp_setup(source: Option<&Path>, frontiers: Option<&Path>) {
let source_desc = source
.map(|p| p.display().to_string())
.or_else(|| frontiers.map(|p| p.display().to_string()))
.unwrap_or_else(|| "frontier.json".to_string());
let args = if let Some(path) = source {
format!(r#""serve", "{}""#, path.display())
} else if let Some(path) = frontiers {
format!(r#""serve", "--frontiers", "{}""#, path.display())
} else {
r#""serve", "frontier.json""#.to_string()
};
println!(
r#"Add this MCP server configuration to your client:
{{
"mcpServers": {{
"vela": {{
"command": "vela",
"args": [{args}]
}}
}}
}}
Source: {source_desc}"#
);
}
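/// Parse comma-separated, colon-delimited entity pairs, warning on and skipping
/// malformed entries.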
fn parse_entities(input: &str) -> Vec<(String, String)> {
if input.trim().is_empty() {
return Vec::new();
}
input
.split(',')
.filter_map(|pair| {
let parts = pair.trim().splitn(2, ':').collect::<Vec<_>>();
if parts.len() == 2 {
Some((parts[0].trim().to_string(), parts[1].trim().to_string()))
} else {
eprintln!(
"{} skipping malformed entity '{}'",
style::warn("warn"),
pair.trim()
);
None
}
})
.collect()
}
fn parse_evidence_spans(inputs: &[String]) -> Vec<Value> {
inputs
.iter()
.filter_map(|input| {
let trimmed = input.trim();
if trimmed.is_empty() {
return None;
}
if trimmed.starts_with('{') {
match serde_json::from_str::<Value>(trimmed) {
Ok(value @ Value::Object(_)) => return Some(value),
Ok(_) | Err(_) => {
eprintln!(
"{} evidence span JSON should be an object; storing as text",
style::warn("warn")
);
}
}
}
Some(json!({
"section": "curator_source",
"text": trimmed,
}))
})
.collect()
}
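/// Content-hash a path: SHA-256 over a file's bytes, or over a directory's sorted
/// relative paths and file contents for a deterministic digest.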
fn hash_path(path: &Path) -> Result<String, String> {
let mut hasher = Sha256::new();
if path.is_file() {
let bytes = std::fs::read(path)
.map_err(|e| format!("Failed to read {} for hashing: {e}", path.display()))?;
hasher.update(&bytes);
} else if path.is_dir() {
let mut files = Vec::new();
collect_hash_files(path, path, &mut files)?;
files.sort();
for rel in files {
hasher.update(rel.to_string_lossy().as_bytes());
let bytes = std::fs::read(path.join(&rel))
.map_err(|e| format!("Failed to read {} for hashing: {e}", rel.display()))?;
hasher.update(bytes);
}
} else {
return Err(format!("Cannot hash missing path {}", path.display()));
}
Ok(format!("{:x}", hasher.finalize()))
}
fn load_frontier_or_fail(path: &Path) -> project::Project {
repo::load_from_path(path).unwrap_or_else(|e| {
fail_return(&format!(
"Failed to load frontier '{}': {e}",
path.display()
))
})
}
fn hash_path_or_fail(path: &Path) -> String {
hash_path(path).unwrap_or_else(|e| {
fail_return(&format!(
"Failed to hash frontier '{}': {e}",
path.display()
))
})
}
fn collect_hash_files(root: &Path, dir: &Path, files: &mut Vec<PathBuf>) -> Result<(), String> {
for entry in
std::fs::read_dir(dir).map_err(|e| format!("Failed to read {}: {e}", dir.display()))?
{
let entry = entry.map_err(|e| format!("Failed to read directory entry: {e}"))?;
let path = entry.path();
if path.is_dir() {
collect_hash_files(root, &path, files)?;
} else if path.is_file() {
files.push(
path.strip_prefix(root)
.map_err(|e| e.to_string())?
.to_path_buf(),
);
}
}
Ok(())
}
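// Schema-error triage: `schema_error_action` maps a validator message onto
// the deterministic `vela normalize` repair that fixes it, or `None` when a
// human needs to correct the field. The string matching below must stay in
// sync with the validator's error wording.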
fn schema_error_suggestion(error: &str) -> &'static str {
if schema_error_action(error).is_some() {
"Run `vela normalize` to repair deterministic frontier state."
} else {
"Inspect and correct the referenced frontier field."
}
}
fn schema_error_fix(error: &str) -> bool {
schema_error_action(error).is_some()
}
fn schema_error_action(error: &str) -> Option<&'static str> {
if error.contains("stats.findings")
|| error.contains("stats.links")
|| error.contains("Invalid compiler")
|| error.contains("Invalid vela_version")
|| error.contains("Invalid schema")
{
Some("normalize_metadata_and_stats")
} else if error.contains("does not match content-address") {
Some("rewrite_ids")
} else {
None
}
}
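/// Aggregates per-diagnostic `normalize_action` tags into a deduplicated
/// repair plan: one entry per action with an occurrence count and the exact
/// command to run. `<frontier>` in the suggested commands is a placeholder
/// for the user's path, not a literal argument.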
fn build_repair_plan(diagnostics: &[Value]) -> Vec<Value> {
    let mut actions = BTreeMap::<String, usize>::new();
for diagnostic in diagnostics {
if let Some(action) = diagnostic.get("normalize_action").and_then(Value::as_str) {
*actions.entry(action.to_string()).or_default() += 1;
}
}
actions
.into_iter()
.map(|(action, count)| {
let command = if action == "rewrite_ids" {
"vela normalize <frontier> --write --rewrite-ids --id-map id-map.json"
} else {
"vela normalize <frontier> --write"
};
json!({
"action": action,
"count": count,
"command": command,
})
})
.collect()
}
fn cmd_integrity(frontier: &Path, json: bool) {
let report = state_integrity::analyze_path(frontier).unwrap_or_else(|e| fail_return(&e));
if json {
println!(
"{}",
serde_json::to_string_pretty(&report).expect("failed to serialize integrity report")
);
} else {
println!("vela integrity");
println!(" frontier: {}", frontier.display());
println!(" status: {}", report.status);
println!(" proof freshness: {}", report.proof_freshness);
println!(" structural errors: {}", report.structural_errors.len());
for error in report.structural_errors.iter().take(8) {
println!(" - {}: {}", error.rule_id, error.message);
}
}
}
fn cmd_impact(frontier: &Path, finding_id: &str, depth: Option<usize>, json: bool) {
let report =
impact::analyze_path(frontier, finding_id, depth).unwrap_or_else(|e| fail_return(&e));
if json {
println!(
"{}",
serde_json::to_string_pretty(&report).expect("failed to serialize impact report")
);
} else {
println!("vela impact");
println!(" finding: {}", report.target.id);
println!(" frontier: {}", report.frontier.vfr_id);
println!(" direct dependents: {}", report.summary.direct_dependents);
println!(" downstream: {}", report.summary.total_downstream);
println!(" open proposals: {}", report.summary.open_proposals);
println!(" accepted events: {}", report.summary.accepted_events);
println!(" proof: {}", report.summary.proof_status);
}
}
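/// Summarizes the discord assignment for a frontier: a per-kind histogram,
/// the frontier support (findings carrying any discord), and up to 50
/// affected rows, optionally filtered to a single kind.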
fn cmd_discord(frontier: &Path, json: bool, kind_filter: Option<&str>) {
use crate::discord::DiscordKind;
use crate::discord_compute::compute_discord_assignment;
let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
let assignment = compute_discord_assignment(&project);
let support = assignment.frontier_support();
let mut rows: Vec<(String, Vec<String>)> = Vec::new();
for context in support.iter() {
let set = assignment.get(context);
let kinds: Vec<String> = set.iter().map(|k| k.as_str().to_string()).collect();
if let Some(filter) = kind_filter
&& !kinds.iter().any(|k| k == filter)
{
continue;
}
rows.push((context.clone(), kinds));
}
    let mut histogram: BTreeMap<&'static str, usize> = BTreeMap::new();
for kind in DiscordKind::ALL {
let count = assignment
.iter()
.filter(|(_, set)| set.contains(*kind))
.count();
if count > 0 {
histogram.insert(kind.as_str(), count);
}
}
let total_findings = project.findings.len();
let frontier_id = project
.frontier_id
.clone()
.unwrap_or_else(|| String::from("<unknown>"));
if json {
let row_value = |row: &(String, Vec<String>)| {
serde_json::json!({
"finding_id": row.0,
"discord_kinds": row.1,
})
};
let report = serde_json::json!({
"frontier_id": frontier_id,
"total_findings": total_findings,
"frontier_support_size": support.len(),
"filtered_row_count": rows.len(),
"filter_kind": kind_filter,
"histogram": histogram,
"rows": rows.iter().map(row_value).collect::<Vec<_>>(),
});
println!(
"{}",
serde_json::to_string_pretty(&report).expect("serialize discord report")
);
return;
}
println!("vela discord");
println!(" frontier: {frontier_id}");
println!(" total findings: {total_findings}");
println!(
" frontier support (any discord): {} of {}",
support.len(),
total_findings
);
if let Some(k) = kind_filter {
println!(" filter: kind = {k}");
}
println!();
if histogram.is_empty() {
println!(" no discord detected.");
} else {
println!(" discord histogram:");
for (k, n) in &histogram {
println!(" {n:>4} {k}");
}
}
if !rows.is_empty() {
println!();
println!(" findings with discord (showing up to 50):");
for (fid, kinds) in rows.iter().take(50) {
println!(" {fid} · {}", kinds.join(", "));
}
if rows.len() > 50 {
println!(" ... and {} more", rows.len() - 50);
}
}
}
fn empty_signal_report() -> signals::SignalReport {
signals::SignalReport {
schema: "vela.signals.v0".to_string(),
frontier: "unavailable".to_string(),
signals: Vec::new(),
review_queue: Vec::new(),
proof_readiness: signals::ProofReadiness {
status: "unavailable".to_string(),
blockers: 0,
warnings: 0,
caveats: vec!["Frontier could not be loaded for signal analysis.".to_string()],
},
}
}
fn print_signal_summary(report: &signals::SignalReport, strict: bool) {
println!();
println!(" {}", "SIGNALS".dimmed());
println!(" {}", style::tick_row(60));
println!(" total signals: {}", report.signals.len());
println!(" proof readiness: {}", report.proof_readiness.status);
if !report.review_queue.is_empty() {
println!(" review queue: {} items", report.review_queue.len());
}
if strict && report.proof_readiness.status != "ready" {
println!(
" {} proof readiness has blocking signals.",
style::lost("strict check failed")
);
}
}
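/// Writes `value` as pretty-printed JSON at `relative_path` inside the
/// packet, then records its `path`/`sha256`/`bytes` entry in both
/// `manifest.json` (`included_files`) and `packet.lock.json` (`files`),
/// replacing any stale entry for the same path.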
fn append_packet_json_file(
packet_dir: &Path,
relative_path: &str,
value: &Value,
) -> Result<(), String> {
let content = serde_json::to_vec_pretty(value)
.map_err(|e| format!("Failed to serialize packet JSON file: {e}"))?;
let path = packet_dir.join(relative_path);
if let Some(parent) = path.parent() {
std::fs::create_dir_all(parent)
.map_err(|e| format!("Failed to create {}: {e}", parent.display()))?;
}
std::fs::write(&path, &content)
.map_err(|e| format!("Failed to write {}: {e}", path.display()))?;
let entry = json!({
"path": relative_path,
"sha256": hex::encode(Sha256::digest(&content)),
"bytes": content.len(),
});
for manifest_name in ["manifest.json", "packet.lock.json"] {
let manifest_path = packet_dir.join(manifest_name);
let data = std::fs::read_to_string(&manifest_path)
.map_err(|e| format!("Failed to read {}: {e}", manifest_path.display()))?;
let mut manifest: Value = serde_json::from_str(&data)
.map_err(|e| format!("Failed to parse {}: {e}", manifest_path.display()))?;
let array_key = if manifest_name == "manifest.json" {
"included_files"
} else {
"files"
};
let files = manifest
.get_mut(array_key)
.and_then(Value::as_array_mut)
.ok_or_else(|| format!("{} missing {array_key} array", manifest_path.display()))?;
files.retain(|file| {
file.get("path")
.and_then(Value::as_str)
.is_none_or(|path| path != relative_path)
});
files.push(entry.clone());
std::fs::write(
&manifest_path,
serde_json::to_vec_pretty(&manifest)
.map_err(|e| format!("Failed to serialize {}: {e}", manifest_path.display()))?,
)
.map_err(|e| format!("Failed to write {}: {e}", manifest_path.display()))?;
}
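    // The loop above rewrote packet.lock.json, so its hash entry in
    // manifest.json is now stale; re-read the lock file and replace that
    // single entry so the manifest stays self-consistent.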
let lock_path = packet_dir.join("packet.lock.json");
let lock_content = std::fs::read(&lock_path)
.map_err(|e| format!("Failed to read {}: {e}", lock_path.display()))?;
let lock_entry = json!({
"path": "packet.lock.json",
"sha256": hex::encode(Sha256::digest(&lock_content)),
"bytes": lock_content.len(),
});
let manifest_path = packet_dir.join("manifest.json");
let data = std::fs::read_to_string(&manifest_path)
.map_err(|e| format!("Failed to read {}: {e}", manifest_path.display()))?;
let mut manifest: Value = serde_json::from_str(&data)
.map_err(|e| format!("Failed to parse {}: {e}", manifest_path.display()))?;
let files = manifest
.get_mut("included_files")
.and_then(Value::as_array_mut)
.ok_or_else(|| format!("{} missing included_files array", manifest_path.display()))?;
files.retain(|file| {
file.get("path")
.and_then(Value::as_str)
.is_none_or(|path| path != "packet.lock.json")
});
files.push(lock_entry);
std::fs::write(
&manifest_path,
serde_json::to_vec_pretty(&manifest)
.map_err(|e| format!("Failed to serialize {}: {e}", manifest_path.display()))?,
)
.map_err(|e| format!("Failed to write {}: {e}", manifest_path.display()))?;
Ok(())
}
fn print_tool_check_report(report: &Value) {
let summary = report.get("summary").unwrap_or(&Value::Null);
let frontier = report.get("frontier").unwrap_or(&Value::Null);
println!();
println!(" {}", "VELA · SERVE · CHECK-TOOLS".dimmed());
println!(" {}", style::tick_row(60));
    println!(
        " frontier: {}",
        frontier
            .get("name")
            .and_then(Value::as_str)
            .unwrap_or("unknown")
    );
    println!(
        " findings: {}",
        frontier
            .get("findings")
            .and_then(Value::as_u64)
            .unwrap_or_default()
    );
    println!(
        " checks: {} passed, {} failed",
        summary
            .get("passed")
            .and_then(Value::as_u64)
            .unwrap_or_default(),
        summary
            .get("failed")
            .and_then(Value::as_u64)
            .unwrap_or_default()
    );
    if let Some(tools) = report.get("tools").and_then(Value::as_array) {
        let names = tools
            .iter()
            .filter_map(Value::as_str)
            .collect::<Vec<_>>()
            .join(", ");
        println!(" tools: {names}");
    }
if let Some(checks) = report.get("checks").and_then(Value::as_array) {
for check in checks {
let status = if check.get("ok").and_then(Value::as_bool) == Some(true) {
style::ok("ok")
} else {
style::lost("lost")
};
println!(
" {} {}",
status,
check
.get("tool")
.and_then(Value::as_str)
.unwrap_or("unknown")
);
}
}
}
fn print_state_report(report: &state::StateCommandReport, json_output: bool) {
if json_output {
println!(
"{}",
serde_json::to_string_pretty(report).expect("failed to serialize state command report")
);
} else {
println!("{}", report.message);
println!(" frontier: {}", report.frontier);
println!(" finding: {}", report.finding_id);
println!(" proposal: {}", report.proposal_id);
println!(" status: {}", report.proposal_status);
if let Some(event_id) = &report.applied_event_id {
println!(" event: {}", event_id);
}
println!(" wrote: {}", report.wrote_to);
}
}
fn print_history(payload: &Value) {
let finding = payload.get("finding").unwrap_or(&Value::Null);
println!("vela history");
println!(
" finding: {}",
finding
.get("id")
.and_then(Value::as_str)
.unwrap_or("unknown")
);
println!(
" assertion: {}",
finding
.get("assertion")
.and_then(Value::as_str)
.unwrap_or("")
);
println!(
" confidence: {:.3}",
finding
.get("confidence")
.and_then(Value::as_f64)
.unwrap_or_default()
);
let reviews = payload
.get("review_events")
.and_then(Value::as_array)
.map_or(0, Vec::len);
let updates = payload
.get("confidence_updates")
.and_then(Value::as_array)
.map_or(0, Vec::len);
let annotations = finding
.get("annotations")
.and_then(Value::as_array)
.map_or(0, Vec::len);
let sources = payload
.get("sources")
.and_then(Value::as_array)
.map_or(0, Vec::len);
let atoms = payload
.get("evidence_atoms")
.and_then(Value::as_array)
.map_or(0, Vec::len);
let conditions = payload
.get("condition_records")
.and_then(Value::as_array)
.map_or(0, Vec::len);
let proposals = payload
.get("proposals")
.and_then(Value::as_array)
.map_or(0, Vec::len);
let events = payload
.get("events")
.and_then(Value::as_array)
.map_or(0, Vec::len);
println!(" review events: {reviews}");
println!(" confidence updates: {updates}");
println!(" annotations: {annotations}");
println!(" sources: {sources}");
println!(" evidence atoms: {atoms}");
println!(" condition records: {conditions}");
println!(" proposals: {proposals}");
println!(" canonical events: {events}");
if let Some(status) = payload
.get("proof_state")
.and_then(|value| value.get("latest_packet"))
.and_then(|value| value.get("status"))
.and_then(Value::as_str)
{
println!(" proof state: {status}");
}
if let Some(events) = payload.get("review_events").and_then(Value::as_array) {
for event in events.iter().take(8) {
println!(
" - {} {} {}",
event
.get("reviewed_at")
.and_then(Value::as_str)
.unwrap_or(""),
event.get("id").and_then(Value::as_str).unwrap_or(""),
event.get("reason").and_then(Value::as_str).unwrap_or("")
);
}
}
}
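/// Serialized record of one proof run: the invoking command, a content hash
/// of the source, which artifacts were checked, and the resulting status and
/// caveats. `trace_path` records where the trace itself was written.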
#[derive(Debug, Serialize)]
pub struct ProofTrace {
pub trace_version: String,
pub command: Vec<String>,
pub source: String,
pub source_hash: String,
pub schema_version: String,
pub checked_artifacts: Vec<String>,
pub benchmark: Option<Value>,
pub packet_manifest: String,
pub packet_validation: String,
pub caveats: Vec<String>,
pub status: String,
pub trace_path: String,
}
const SCIENCE_SUBCOMMANDS: &[&str] = &[
"compile-notes",
"compile-code",
"compile-data",
"review-pending",
"find-tensions",
"plan-experiments",
"scout",
"check",
"normalize",
"integrity",
"impact",
"discord",
"quickstart",
"proof",
"repo",
"serve",
"stats",
"search",
"tensions",
"gaps",
"bridge",
"export",
"packet",
"bench",
"conformance",
"version",
"sign",
"actor",
"frontier",
"queue",
"registry",
"init",
"import",
"lock",
"doc",
"diff",
"proposals",
"finding",
"link",
"entity",
"review",
"note",
"caveat",
"revise",
"reject",
"history",
"import-events",
"retract",
"propagate",
"replicate",
"replications",
"dataset-add",
"datasets",
"code-add",
"code-artifacts",
"artifact-add",
"artifact-to-state",
"bridge-kit",
"source-adapter",
"runtime-adapter",
"artifacts",
"artifact-audit",
"decision-brief",
"trial-summary",
"source-verification",
"source-ingest-plan",
"clinical-trial-import",
"negative-result-add",
"negative-results",
"trajectory-create",
"trajectory-step",
"trajectories",
"tier-set",
"locator-repair",
"span-repair",
"entity-resolve",
"entity-add",
"source-fetch",
"predict",
"resolve",
"predictions",
"predictions-expire",
"calibration",
"consensus",
"federation",
"causal",
"status",
"log",
"inbox",
"ask",
"bridges",
"workbench",
"verify",
"ingest",
"propose",
"accept",
"attest",
"lineage",
"carina",
"atlas",
"constellation",
];
pub fn is_science_subcommand(name: &str) -> bool {
SCIENCE_SUBCOMMANDS.contains(&name)
}
fn print_strict_help() {
println!(
r#"Vela {}
Version control for scientific state.
Usage:
vela <COMMAND>
Core flow (v0.74):
init Initialize a split frontier repo
ingest Ingest a paper, dataset, or Carina packet (dispatches by file type)
propose Create a finding.review proposal
diff Preview a `vpr_*` proposal, or compare two frontier files
accept Apply a proposal under reviewer authority
attest Sign findings under your private key
log Recent canonical state events
lineage State-transition replay for one finding
serve Local Workbench (findings, evidence, diff, lineage)
Read-only inspection:
check Validate a frontier, repo, or proof packet
integrity Check accepted frontier state integrity
impact Report downstream finding impact
normalize Apply deterministic frontier-state repairs
proof Export and validate a proof packet
repo Inspect split frontier repository status and shape
stats Show frontier statistics
search Search findings
tensions List candidate contradictions and tensions
gaps Inspect and rank candidate gap review leads
bridge Find candidate cross-domain connections
Advanced (proposal-creation, agent inboxes, federation):
scout Run Literature Scout against a folder of PDFs (writes proposals)
compile-notes Run Notes Compiler against a Markdown vault (writes proposals)
compile-code Run Code & Notebook Analyst against a research repo (writes proposals)
compile-data Run Datasets agent against a folder of CSV/TSV data (writes proposals)
review-pending Run Reviewer Agent: score every pending proposal (writes notes)
find-tensions Run Contradiction Finder: surface real contradictions among findings
plan-experiments Run Experiment Planner: propose experiments for open questions / hypotheses
export Export frontier artifacts
packet Inspect or validate proof packets
bench Run deterministic benchmark gates
conformance Run protocol conformance vectors
sign Optional signing and signature verification
runtime-adapter
Normalize external runtime exports into reviewable proposals
version Show version information
import Import frontier.json into a .vela repo
proposals Inspect, validate, export, import, accept, or reject write proposals
artifact-to-state
Import a Carina artifact packet as reviewable proposals
bridge-kit
Validate Carina artifact packets before importing runtime output
source-adapter
Run reviewed source adapters into artifact-to-state proposals
finding Add or manage finding bundles as frontier state
link Add typed links between findings (incl. cross-frontier vf_at-vfr targets)
entity Resolve unresolved entities against a bundled common-entity table (v0.19)
frontier Scaffold (`new`), materialize, and manage frontier metadata + deps
actor Register Ed25519 publisher identities in a frontier
registry Publish, list, or pull frontiers (open hub at https://vela-hub.fly.dev)
review Create a review proposal or review interactively
note Add a lightweight note to a finding
caveat Create an explicit caveat proposal
revise Create a confidence revision proposal
reject Create a rejection proposal
history Show state-transition history for one finding (v0.74 alias: `lineage`)
import-events Import review/state events from a packet or JSON file
retract Create a retraction proposal
propagate Simulate impact over declared dependency links
artifact-add Register a content-addressed artifact
artifacts List content-addressed artifacts
artifact-audit Audit artifact locators, hashes, references, and profiles
decision-brief Show the validated decision brief projection
trial-summary Show the validated trial outcome projection
source-verification Show the validated source verification projection
source-ingest-plan Show the validated source ingest plan
clinical-trial-import Import a ClinicalTrials.gov record as an artifact
locator-repair Mechanically repair an evidence atom's missing source locator
span-repair Mechanically repair a finding's missing evidence span
entity-resolve Resolve a finding entity to a canonical id
source-fetch Fetch metadata + abstract for a doi:/pmid:/nct: source
atlas Compose multiple frontiers into a domain-level Atlas (vat_*) (v0.78+)
constellation Compose multiple Atlases into a cross-domain Constellation (vco_*) (v0.82+)
Quick start (the demo):
vela init demo --name "Your bounded question"
vela ingest paper.pdf --frontier demo
vela propose demo <vf_id> --status accepted --reason "..." --reviewer reviewer:you --apply
vela diff <vpr_id> --frontier demo
vela accept demo <vpr_id> --reviewer reviewer:you --reason "applied"
vela serve --path demo
Substrate health:
vela frontier materialize my-frontier --json
vela repo status my-frontier --json
vela proof verify my-frontier --json
vela check my-frontier --strict --json
Monolithic frontier file:
vela frontier new frontier.json --name "Your bounded question"
vela finding add frontier.json --assertion "..." --author "reviewer:demo" --apply
vela check frontier.json --json
FINDING_ID=$(jq -r '.findings[0].id' frontier.json)
vela review frontier.json "$FINDING_ID" --status contested --reason "Mouse-only evidence" --reviewer reviewer:demo --apply
Publish your own frontier (see docs/PUBLISHING.md):
vela frontier new ./frontier.json --name "Your bounded question"
vela finding add ./frontier.json --assertion "..." --author "reviewer:you" --apply
vela sign generate-keypair --out keys
vela actor add ./frontier.json reviewer:you --pubkey "$(cat keys/public.key)"
vela registry publish ./frontier.json --owner reviewer:you --key keys/private.key \
--to https://vela-hub.fly.dev
"#,
env!("CARGO_PKG_VERSION")
);
}
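// The agent entry points below are injected at startup through
// `OnceLock`-backed registration hooks rather than called directly,
// presumably so the heavier agent implementations can live outside this
// module. Each handler is a plain `fn` pointer returning a boxed future;
// `OnceLock::set` rejects every call after the first, so the first
// registration wins. A hypothetical caller might do:
//
//   register_scout_handler(|folder, frontier, backend, dry_run, json| {
//       Box::pin(async move { /* run the scout agent */ })
//   });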
pub type ScoutHandler = fn(
folder: PathBuf,
frontier: PathBuf,
backend: Option<String>,
dry_run: bool,
json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;
static SCOUT_HANDLER: OnceLock<ScoutHandler> = OnceLock::new();
pub fn register_scout_handler(handler: ScoutHandler) {
let _ = SCOUT_HANDLER.set(handler);
}
pub type AtlasInitHandler = fn(
atlases_root: PathBuf,
name: String,
domain: String,
scope_note: Option<String>,
frontiers: Vec<PathBuf>,
json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;
static ATLAS_INIT_HANDLER: OnceLock<AtlasInitHandler> = OnceLock::new();
pub fn register_atlas_init_handler(handler: AtlasInitHandler) {
let _ = ATLAS_INIT_HANDLER.set(handler);
}
pub type AtlasMaterializeHandler =
fn(atlases_root: PathBuf, name: String, json: bool) -> Pin<Box<dyn Future<Output = ()> + Send>>;
static ATLAS_MATERIALIZE_HANDLER: OnceLock<AtlasMaterializeHandler> = OnceLock::new();
pub fn register_atlas_materialize_handler(handler: AtlasMaterializeHandler) {
let _ = ATLAS_MATERIALIZE_HANDLER.set(handler);
}
pub type AtlasServeHandler = fn(
atlases_root: PathBuf,
name: String,
port: u16,
open_browser: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;
static ATLAS_SERVE_HANDLER: OnceLock<AtlasServeHandler> = OnceLock::new();
pub fn register_atlas_serve_handler(handler: AtlasServeHandler) {
let _ = ATLAS_SERVE_HANDLER.set(handler);
}
pub type AtlasUpdateHandler = fn(
atlases_root: PathBuf,
name: String,
add_frontier: Vec<PathBuf>,
remove_vfr_id: Vec<String>,
json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;
static ATLAS_UPDATE_HANDLER: OnceLock<AtlasUpdateHandler> = OnceLock::new();
pub fn register_atlas_update_handler(handler: AtlasUpdateHandler) {
let _ = ATLAS_UPDATE_HANDLER.set(handler);
}
pub type ConstellationInitHandler = fn(
constellations_root: PathBuf,
name: String,
scope_note: Option<String>,
atlases: Vec<PathBuf>,
json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;
static CONSTELLATION_INIT_HANDLER: OnceLock<ConstellationInitHandler> = OnceLock::new();
pub fn register_constellation_init_handler(handler: ConstellationInitHandler) {
let _ = CONSTELLATION_INIT_HANDLER.set(handler);
}
pub type ConstellationMaterializeHandler = fn(
constellations_root: PathBuf,
name: String,
json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;
static CONSTELLATION_MATERIALIZE_HANDLER: OnceLock<ConstellationMaterializeHandler> =
OnceLock::new();
pub fn register_constellation_materialize_handler(handler: ConstellationMaterializeHandler) {
let _ = CONSTELLATION_MATERIALIZE_HANDLER.set(handler);
}
pub type ConstellationServeHandler = fn(
constellations_root: PathBuf,
name: String,
port: u16,
open_browser: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;
static CONSTELLATION_SERVE_HANDLER: OnceLock<ConstellationServeHandler> = OnceLock::new();
pub fn register_constellation_serve_handler(handler: ConstellationServeHandler) {
let _ = CONSTELLATION_SERVE_HANDLER.set(handler);
}
pub type NotesHandler = fn(
vault: PathBuf,
frontier: PathBuf,
backend: Option<String>,
max_files: Option<usize>,
max_items_per_category: Option<usize>,
dry_run: bool,
json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;
static NOTES_HANDLER: OnceLock<NotesHandler> = OnceLock::new();
pub fn register_notes_handler(handler: NotesHandler) {
let _ = NOTES_HANDLER.set(handler);
}
pub type CodeHandler = fn(
root: PathBuf,
frontier: PathBuf,
backend: Option<String>,
max_files: Option<usize>,
dry_run: bool,
json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;
static CODE_HANDLER: OnceLock<CodeHandler> = OnceLock::new();
pub fn register_code_handler(handler: CodeHandler) {
let _ = CODE_HANDLER.set(handler);
}
pub type DatasetsHandler = fn(
root: PathBuf,
frontier: PathBuf,
backend: Option<String>,
sample_rows: Option<usize>,
dry_run: bool,
json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;
static DATASETS_HANDLER: OnceLock<DatasetsHandler> = OnceLock::new();
pub fn register_datasets_handler(handler: DatasetsHandler) {
let _ = DATASETS_HANDLER.set(handler);
}
pub type ReviewerHandler = fn(
frontier: PathBuf,
backend: Option<String>,
max_proposals: Option<usize>,
batch_size: usize,
dry_run: bool,
json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;
static REVIEWER_HANDLER: OnceLock<ReviewerHandler> = OnceLock::new();
pub fn register_reviewer_handler(handler: ReviewerHandler) {
let _ = REVIEWER_HANDLER.set(handler);
}
pub type TensionsHandler = fn(
frontier: PathBuf,
backend: Option<String>,
max_findings: Option<usize>,
dry_run: bool,
json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;
static TENSIONS_HANDLER: OnceLock<TensionsHandler> = OnceLock::new();
pub fn register_tensions_handler(handler: TensionsHandler) {
let _ = TENSIONS_HANDLER.set(handler);
}
pub type ExperimentsHandler = fn(
frontier: PathBuf,
backend: Option<String>,
max_findings: Option<usize>,
dry_run: bool,
json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;
static EXPERIMENTS_HANDLER: OnceLock<ExperimentsHandler> = OnceLock::new();
pub fn register_experiments_handler(handler: ExperimentsHandler) {
let _ = EXPERIMENTS_HANDLER.set(handler);
}
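/// Walks upward from the current working directory and returns the first
/// ancestor (including the cwd itself) that contains a `.vela/` directory.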
fn find_vela_repo() -> Option<PathBuf> {
let mut cur = std::env::current_dir().ok()?;
loop {
if cur.join(".vela").is_dir() {
return Some(cur);
}
if !cur.pop() {
return None;
}
}
}
fn print_session_help() {
println!();
println!(
" Vela {} · Version control for scientific state.",
env!("CARGO_PKG_VERSION")
);
println!();
println!(" USAGE");
println!(" vela Open a session against the nearest .vela/ repo");
println!(" vela <command> Run a specific subcommand");
println!(" vela help advanced Full subcommand list (30+ commands)");
println!();
println!(" CORE FLOW (v0.74)");
println!(" init Initialize a split frontier repo");
println!(" ingest <path> Ingest a paper, dataset, or Carina packet");
println!(" propose Create a finding.review proposal");
println!(" diff <vpr_id> Preview a pending proposal vs current frontier");
println!(" accept <vpr_id> Apply a proposal under reviewer authority");
println!(" attest Sign findings under your private key");
println!(" log Recent canonical state events");
println!(" lineage <vf_id> State-transition replay for one finding");
println!(" serve Local Workbench (find, evidence, diff, lineage)");
println!();
println!(" DAILY ALSO-RANS");
println!(" status One-screen frontier health");
println!(" inbox Pending review proposals");
println!(" review Review a proposal interactively");
println!(" ask <question> Plain-text query against the frontier");
println!();
println!(" REASONING (Pearl 1 → 2 → 3)");
println!(" causal audit Per-finding identifiability");
println!(" causal effect <src> --on <tgt> Pairwise back-door / front-door");
println!(" causal counterfactual <src> --target <tgt> --set-to <0..1>");
println!();
println!(" COMPOSITION");
println!(" bridge <a> <b> Cross-frontier hypotheses");
println!(" consensus <vf> Field consensus over similar claims");
println!();
println!(" PUBLISH");
println!(" registry publish Push a signed manifest to the hub");
println!(" federation peer-add Federate with another hub");
println!();
println!(" In session, type a single letter for a quick verb, or any");
println!(" question in plain text. `q` or `exit` quits.");
println!();
}
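/// One-screen frontier health summary shown on session start and on `s`:
/// identity, finding/event counts, the pending inbox broken down by proposal
/// kind, causal-audit flags, bridge and replication tallies, and the verb
/// menu.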
fn print_session_dashboard(project: &crate::project::Project, repo_path: &Path) {
use crate::causal_reasoning::{audit_frontier, summarize_audit};
let label = frontier_label(project);
let vfr = project.frontier_id();
let vfr_short = vfr.chars().take(16).collect::<String>();
let mut pending = 0usize;
    let mut by_kind: BTreeMap<String, usize> = BTreeMap::new();
for p in &project.proposals {
if p.status == "pending_review" {
pending += 1;
*by_kind.entry(p.kind.clone()).or_insert(0) += 1;
}
}
let audit = audit_frontier(project);
let audit_summary = summarize_audit(&audit);
let bridges_dir = repo_path.join(".vela/bridges");
let mut bridge_total = 0usize;
let mut bridge_confirmed = 0usize;
let mut bridge_derived = 0usize;
if bridges_dir.is_dir()
&& let Ok(entries) = std::fs::read_dir(&bridges_dir)
{
for entry in entries.flatten() {
let path = entry.path();
if path.extension().and_then(|s| s.to_str()) != Some("json") {
continue;
}
bridge_total += 1;
if let Ok(data) = std::fs::read_to_string(&path)
&& let Ok(b) = serde_json::from_str::<crate::bridge::Bridge>(&data)
{
match b.status {
crate::bridge::BridgeStatus::Confirmed => bridge_confirmed += 1,
crate::bridge::BridgeStatus::Derived => bridge_derived += 1,
_ => {}
}
}
}
}
let mut targets_with_success = std::collections::HashSet::new();
let mut failed_replications = 0usize;
for r in &project.replications {
if r.outcome == "replicated" {
targets_with_success.insert(r.target_finding.clone());
} else if r.outcome == "failed" {
failed_replications += 1;
}
}
println!();
let version = crate::project::VELA_COMPILER_VERSION
.strip_prefix("vela/")
.unwrap_or(crate::project::VELA_COMPILER_VERSION);
println!(
" {}",
format!("VELA · {version} · {label}")
.to_uppercase()
.dimmed()
);
println!(" {}", style::tick_row(60));
println!(
" vfr_id {}… repo {}",
vfr_short,
repo_path.display()
);
println!(
" findings {:>4} events {} proposals pending {}",
project.findings.len(),
project.events.len(),
pending
);
if pending > 0 {
let parts: Vec<String> = by_kind.iter().map(|(k, n)| format!("{n} {k}")).collect();
println!(" {} · {}", style::warn("inbox"), parts.join(" "));
}
if audit_summary.underidentified > 0 || audit_summary.conditional > 0 {
println!(
" {} · {} underidentified · {} conditional",
if audit_summary.underidentified > 0 {
style::lost("audit")
} else {
style::warn("audit")
},
audit_summary.underidentified,
audit_summary.conditional,
);
}
if bridge_total > 0 {
println!(
" {} · {} total · {} confirmed · {} awaiting review",
style::ok("bridges"),
bridge_total,
bridge_confirmed,
bridge_derived
);
}
if !project.replications.is_empty() {
println!(
" {} · {} records · {} findings replicated · {} failed",
style::ok("replications"),
project.replications.len(),
targets_with_success.len(),
failed_replications,
);
}
println!();
println!(" type a verb or ask anything:");
println!(" a audit problems i inbox (pending) b bridges");
println!(" g causal graph l log (recent) c counterfactuals");
println!(" s refresh status h help (more verbs) q quit");
println!();
}
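/// Dispatches a single-letter or word session verb. Returns `true` when the
/// input was recognized and handled; `false` lets `run_session` route the
/// input to the free-text `answer` path instead.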
fn run_session_verb(verb: &str, repo_path: &Path) -> bool {
match verb {
"a" | "audit" => {
let action = CausalAction::Audit {
frontier: repo_path.to_path_buf(),
problems_only: true,
json: false,
};
cmd_causal(action);
true
}
"i" | "inbox" => {
let action = ProposalAction::List {
frontier: repo_path.to_path_buf(),
status: Some("pending_review".into()),
json: false,
};
cmd_proposals(action);
true
}
"b" | "bridges" => {
let action = BridgesAction::List {
frontier: repo_path.to_path_buf(),
status: None,
json: false,
};
cmd_bridges(action);
true
}
"g" | "graph" => {
let action = CausalAction::Graph {
frontier: repo_path.to_path_buf(),
node: None,
json: false,
};
cmd_causal(action);
true
}
"l" | "log" => {
cmd_log(repo_path, 10, None, false);
true
}
"c" | "counterfactual" | "counterfactuals" => {
let project = match repo::load_from_path(repo_path) {
Ok(p) => p,
Err(e) => {
eprintln!("{} {e}", style::err_prefix());
return true;
}
};
println!();
println!(" {}", "VELA · COUNTERFACTUAL · LIVE PAIRS".dimmed());
println!(" {}", style::tick_row(60));
let mut pairs = 0usize;
for child in &project.findings {
for link in &child.links {
if !matches!(link.link_type.as_str(), "depends" | "supports") {
continue;
}
if link.mechanism.is_none() {
continue;
}
let parent = link
.target
.split_once(':')
.map_or(link.target.as_str(), |(_, r)| r);
pairs += 1;
if pairs <= 10 {
println!(" · do({parent}) → {}", child.id);
}
}
}
if pairs == 0 {
println!(" no mechanism-annotated edges found.");
println!(" add a mechanism via the link's `mechanism` field; see /counterfactual");
} else {
println!();
println!(" {pairs} live pair(s). Run with:");
println!(" vela causal counterfactual <repo> <src> --target <tgt> --set-to 0.5");
}
println!();
true
}
"s" | "status" | "refresh" => {
match repo::load_from_path(repo_path) {
Ok(p) => print_session_dashboard(&p, repo_path),
Err(e) => eprintln!("{} {e}", style::err_prefix()),
}
true
}
"h" | "help" | "?" => {
print_session_help();
true
}
_ => false,
}
}
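/// Interactive session REPL: locates the nearest `.vela/` repo, prints the
/// dashboard, then reads verbs or free-text questions from stdin until EOF,
/// `q`, or `exit`.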
fn run_session() {
let repo_path = match find_vela_repo() {
Some(p) => p,
None => {
println!();
println!(
" {}",
"VELA · NO FRONTIER FOUND IN CWD OR ANY PARENT".dimmed()
);
println!(" {}", style::tick_row(60));
println!(" Run `vela init` here to create a frontier, or cd into one.");
println!(" Or run `vela help` for the command list.");
println!();
return;
}
};
let project = match repo::load_from_path(&repo_path) {
Ok(p) => p,
Err(e) => {
eprintln!("{} failed to load .vela/ repo: {e}", style::err_prefix());
std::process::exit(1);
}
};
print_session_dashboard(&project, &repo_path);
use std::io::{BufRead, Write};
let stdin = std::io::stdin();
let mut stdout = std::io::stdout();
loop {
print!(" > ");
stdout.flush().ok();
let mut line = String::new();
        match stdin.lock().read_line(&mut line) {
            // `read_line` returns Ok(0) at EOF; without this arm a closed
            // stdin (e.g. piped input) would loop forever on empty reads.
            Ok(0) | Err(_) => break,
            Ok(_) => {}
        }
let input = line.trim();
if input.is_empty() {
continue;
}
if matches!(input, "q" | "quit" | "exit") {
break;
}
if run_session_verb(input, &repo_path) {
continue;
}
let project = match repo::load_from_path(&repo_path) {
Ok(p) => p,
Err(e) => {
eprintln!("{} {e}", style::err_prefix());
continue;
}
};
answer(&project, input, false);
}
}
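/// Top-level entry point. A few cases are dispatched by hand before clap:
/// the bare-invocation session, help/version, the `proof verify|explain`
/// shortcuts, and the allowlist check against `SCIENCE_SUBCOMMANDS`.
/// Everything else falls through to the async `run_command` parser on a
/// fresh tokio runtime.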
pub fn run_from_args() {
style::init();
let args = std::env::args().collect::<Vec<_>>();
match args.get(1).map(String::as_str) {
None => {
run_session();
return;
}
Some("-h" | "--help" | "help") => {
if args.get(2).map(String::as_str) == Some("advanced") {
print_strict_help();
} else {
print_session_help();
}
return;
}
Some("-V" | "--version" | "version") => {
println!("vela {}", env!("CARGO_PKG_VERSION"));
return;
}
Some("proof") if args.get(2).map(String::as_str) == Some("verify") => {
let json = args.iter().any(|arg| arg == "--json");
let frontier = args
.iter()
.skip(3)
.find(|arg| !arg.starts_with('-'))
.map(PathBuf::from)
.unwrap_or_else(|| {
eprintln!(
"{} proof verify requires a frontier repo",
style::err_prefix()
);
std::process::exit(2);
});
cmd_proof_verify(&frontier, json);
return;
}
Some("proof") if args.get(2).map(String::as_str) == Some("explain") => {
let frontier = args
.iter()
.skip(3)
.find(|arg| !arg.starts_with('-'))
.map(PathBuf::from)
.unwrap_or_else(|| {
eprintln!(
"{} proof explain requires a frontier repo",
style::err_prefix()
);
std::process::exit(2);
});
cmd_proof_explain(&frontier);
return;
}
Some(cmd) if !is_science_subcommand(cmd) => {
eprintln!(
"{} unknown or non-release command: {cmd}",
style::err_prefix()
);
eprintln!("run `vela --help` for the strict v0 command surface.");
std::process::exit(2);
}
Some(_) => {}
}
let runtime = tokio::runtime::Runtime::new().expect("failed to create tokio runtime");
runtime.block_on(run_command());
}
fn fail(message: &str) -> ! {
eprintln!("{} {message}", style::err_prefix());
std::process::exit(1);
}
fn validate_enum_arg(flag: &str, value: &str, valid: &[&str]) {
if !valid.contains(&value) {
fail(&format!(
"invalid {flag} '{value}'. Valid: {}",
valid.join(", ")
));
}
}
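/// Like `fail`, but generic in its return type so it fits `unwrap_or_else`
/// closures that must nominally produce a value. It never actually returns;
/// the type parameter exists only to satisfy the compiler.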
fn fail_return<T>(message: &str) -> T {
fail(message)
}