1use crate::{
2 benchmark, bridge, bundle, carina_validate, conformance, decision, diff, events, export,
3 frontier_repo, impact, lint, normalize, packet, project, propagate, proposals, repo, review,
4 search, serve, sign, signals, sources, state, state_integrity, tensions, validate,
5};
6
7use std::collections::BTreeMap;
8use std::future::Future;
9use std::path::{Path, PathBuf};
10use std::pin::Pin;
11use std::sync::OnceLock;
12
13use clap::{Parser, Subcommand};
14use colored::Colorize;
15
16use crate::cli_style as style;
17use reqwest::Client;
18use serde::Serialize;
19use serde_json::{Value, json};
20use sha2::{Digest, Sha256};
21
22#[derive(Parser)]
23#[command(name = "vela", version)]
24#[command(about = "Portable frontier state for science")]
25struct Cli {
26 #[command(subcommand)]
27 command: Commands,
28}
29
30#[derive(Subcommand)]
31enum Commands {
32 Scout {
39 folder: PathBuf,
41 #[arg(long)]
43 frontier: PathBuf,
44 #[arg(short, long)]
46 backend: Option<String>,
47 #[arg(long)]
49 dry_run: bool,
50 #[arg(long)]
52 json: bool,
53 },
54 CompileNotes {
61 vault: PathBuf,
63 #[arg(long)]
65 frontier: PathBuf,
66 #[arg(short, long)]
68 backend: Option<String>,
69 #[arg(long)]
71 max_files: Option<usize>,
72 #[arg(long)]
77 max_items_per_category: Option<usize>,
78 #[arg(long)]
80 dry_run: bool,
81 #[arg(long)]
83 json: bool,
84 },
85 CompileCode {
91 root: PathBuf,
93 #[arg(long)]
95 frontier: PathBuf,
96 #[arg(short, long)]
98 backend: Option<String>,
99 #[arg(long)]
101 max_files: Option<usize>,
102 #[arg(long)]
104 dry_run: bool,
105 #[arg(long)]
107 json: bool,
108 },
109 ReviewPending {
115 #[arg(long)]
116 frontier: PathBuf,
117 #[arg(short, long)]
118 backend: Option<String>,
119 #[arg(long)]
120 max_proposals: Option<usize>,
121 #[arg(long, default_value = "1")]
126 batch_size: usize,
127 #[arg(long)]
128 dry_run: bool,
129 #[arg(long)]
130 json: bool,
131 },
132 FindTensions {
136 #[arg(long)]
137 frontier: PathBuf,
138 #[arg(short, long)]
139 backend: Option<String>,
140 #[arg(long)]
141 max_findings: Option<usize>,
142 #[arg(long)]
143 dry_run: bool,
144 #[arg(long)]
145 json: bool,
146 },
147 PlanExperiments {
151 #[arg(long)]
152 frontier: PathBuf,
153 #[arg(short, long)]
154 backend: Option<String>,
155 #[arg(long)]
156 max_findings: Option<usize>,
157 #[arg(long)]
158 dry_run: bool,
159 #[arg(long)]
160 json: bool,
161 },
162 CompileData {
167 root: PathBuf,
169 #[arg(long)]
171 frontier: PathBuf,
172 #[arg(short, long)]
174 backend: Option<String>,
175 #[arg(long)]
177 sample_rows: Option<usize>,
178 #[arg(long)]
180 dry_run: bool,
181 #[arg(long)]
183 json: bool,
184 },
185 Check {
187 source: Option<PathBuf>,
189 #[arg(long)]
191 schema: bool,
192 #[arg(long)]
194 stats: bool,
195 #[arg(long)]
197 conformance: bool,
198 #[arg(long, default_value = "tests/conformance")]
200 conformance_dir: PathBuf,
201 #[arg(long)]
203 all: bool,
204 #[arg(long)]
206 schema_only: bool,
207 #[arg(long)]
209 strict: bool,
210 #[arg(long)]
212 fix: bool,
213 #[arg(long)]
215 json: bool,
216 },
217 Integrity {
219 frontier: PathBuf,
221 #[arg(long)]
223 json: bool,
224 },
225 Impact {
227 frontier: PathBuf,
229 finding_id: String,
231 #[arg(long)]
233 depth: Option<usize>,
234 #[arg(long)]
236 json: bool,
237 },
238 Discord {
245 frontier: PathBuf,
247 #[arg(long)]
249 json: bool,
250 #[arg(long)]
254 kind: Option<String>,
255 },
256 Normalize {
258 source: PathBuf,
260 #[arg(short, long)]
262 out: Option<PathBuf>,
263 #[arg(long)]
265 write: bool,
266 #[arg(long)]
268 dry_run: bool,
269 #[arg(long)]
271 rewrite_ids: bool,
272 #[arg(long)]
274 id_map: Option<PathBuf>,
275 #[arg(long)]
279 resync_provenance: bool,
280 #[arg(long)]
282 json: bool,
283 },
284 Proof {
286 frontier: PathBuf,
288 #[arg(long, short = 'o', default_value = "proof-packet")]
290 out: PathBuf,
291 #[arg(long, default_value = "bbb-alzheimer")]
293 template: String,
294 #[arg(long)]
296 gold: Option<PathBuf>,
297 #[arg(long)]
299 record_proof_state: bool,
300 #[arg(long)]
302 json: bool,
303 },
304 Repo {
306 #[command(subcommand)]
307 action: RepoAction,
308 },
309 Serve {
311 #[arg(required_unless_present_any = ["frontiers", "setup"])]
313 frontier: Option<PathBuf>,
314 #[arg(long)]
316 frontiers: Option<PathBuf>,
317 #[arg(short, long)]
319 backend: Option<String>,
320 #[arg(long)]
322 http: Option<u16>,
323 #[arg(long)]
325 setup: bool,
326 #[arg(long)]
328 check_tools: bool,
329 #[arg(long)]
331 json: bool,
332 #[arg(long)]
336 workbench: bool,
337 },
338 Status {
342 frontier: PathBuf,
343 #[arg(long)]
345 json: bool,
346 },
347 Log {
350 frontier: PathBuf,
351 #[arg(long, default_value = "20")]
353 limit: usize,
354 #[arg(long)]
356 kind: Option<String>,
357 #[arg(long)]
359 json: bool,
360 },
361 Inbox {
365 frontier: PathBuf,
366 #[arg(long)]
368 kind: Option<String>,
369 #[arg(long, default_value = "30")]
371 limit: usize,
372 #[arg(long)]
374 json: bool,
375 },
376 Ask {
381 frontier: PathBuf,
382 #[arg(trailing_var_arg = true)]
384 question: Vec<String>,
385 #[arg(long)]
387 json: bool,
388 },
389 Stats {
391 frontier: PathBuf,
393 #[arg(long)]
395 json: bool,
396 },
397 Search {
399 query: String,
401 #[arg(long)]
403 source: Option<PathBuf>,
404 #[arg(long)]
406 entity: Option<String>,
407 #[arg(long)]
409 r#type: Option<String>,
410 #[arg(long)]
412 all: Option<PathBuf>,
413 #[arg(long, default_value = "20")]
415 limit: usize,
416 #[arg(long)]
418 json: bool,
419 },
420 Tensions {
422 source: PathBuf,
423 #[arg(long)]
424 both_high: bool,
425 #[arg(long)]
426 cross_domain: bool,
427 #[arg(long, default_value = "20")]
428 top: usize,
429 #[arg(long)]
430 json: bool,
431 },
432 Gaps {
434 #[command(subcommand)]
435 action: GapsAction,
436 },
437 Bridge {
439 #[arg(required = true)]
441 inputs: Vec<PathBuf>,
442 #[arg(long, default_value = "true", action = clap::ArgAction::Set)]
444 novelty: bool,
445 #[arg(long, default_value = "30")]
447 top: usize,
448 },
449 Export {
451 frontier: PathBuf,
452 #[arg(short, long, default_value = "csv")]
453 format: String,
454 #[arg(short, long)]
455 output: Option<PathBuf>,
456 },
457 Packet {
459 #[command(subcommand)]
460 action: PacketAction,
461 },
462 Verify {
469 path: PathBuf,
471 #[arg(long)]
472 json: bool,
473 },
474 Bench {
486 frontier: Option<PathBuf>,
488 #[arg(long)]
490 gold: Option<PathBuf>,
491 #[arg(long)]
495 candidate: Option<PathBuf>,
496 #[arg(long)]
500 sources: Option<PathBuf>,
501 #[arg(long)]
504 threshold: Option<f64>,
505 #[arg(long)]
508 report: Option<PathBuf>,
509 #[arg(long)]
510 entity_gold: Option<PathBuf>,
511 #[arg(long)]
512 link_gold: Option<PathBuf>,
513 #[arg(long)]
514 suite: Option<PathBuf>,
515 #[arg(long)]
516 suite_ready: bool,
517 #[arg(long)]
518 min_f1: Option<f64>,
519 #[arg(long)]
520 min_precision: Option<f64>,
521 #[arg(long)]
522 min_recall: Option<f64>,
523 #[arg(long)]
524 no_thresholds: bool,
525 #[arg(long)]
526 json: bool,
527 },
528 Conformance {
530 #[arg(default_value = "tests/conformance")]
531 dir: PathBuf,
532 },
533 Version,
535 Sign {
537 #[command(subcommand)]
538 action: SignAction,
539 },
540 Actor {
542 #[command(subcommand)]
543 action: ActorAction,
544 },
545 Federation {
550 #[command(subcommand)]
551 action: FederationAction,
552 },
553 Causal {
560 #[command(subcommand)]
561 action: CausalAction,
562 },
563 Frontier {
567 #[command(subcommand)]
568 action: FrontierAction,
569 },
570 Queue {
573 #[command(subcommand)]
574 action: QueueAction,
575 },
576 Registry {
579 #[command(subcommand)]
580 action: RegistryAction,
581 },
582 Init {
584 #[arg(default_value = ".")]
585 path: PathBuf,
586 #[arg(long, default_value = "unnamed")]
587 name: String,
588 #[arg(long, default_value = "default")]
589 template: String,
590 #[arg(long)]
591 no_git: bool,
592 #[arg(long)]
593 json: bool,
594 },
595 Agent {
612 #[command(subcommand)]
613 action: AgentAction,
614 },
615 Quickstart {
616 #[arg(default_value = "demo")]
618 path: PathBuf,
619 #[arg(long, default_value = "Quickstart frontier")]
621 name: String,
622 #[arg(long, default_value = "reviewer:you")]
626 reviewer: String,
627 #[arg(long)]
630 assertion: Option<String>,
631 #[arg(long)]
634 keys_out: Option<PathBuf>,
635 #[arg(long)]
637 json: bool,
638 },
639 Lock {
646 path: PathBuf,
648 #[arg(long)]
651 check: bool,
652 #[arg(long)]
654 json: bool,
655 },
656 Doc {
663 path: PathBuf,
665 #[arg(long)]
667 out: Option<PathBuf>,
668 #[arg(long)]
671 json: bool,
672 },
673 Import {
675 frontier: PathBuf,
676 #[arg(long)]
677 into: Option<PathBuf>,
678 },
679 Diff {
689 target: String,
693 frontier_b: Option<String>,
697 #[arg(long)]
701 frontier: Option<PathBuf>,
702 #[arg(long, default_value = "reviewer:preview")]
704 reviewer: String,
705 #[arg(long)]
711 from: Option<String>,
712 #[arg(long)]
713 json: bool,
714 #[arg(long)]
715 quiet: bool,
716 },
717 Proposals {
719 #[command(subcommand)]
720 action: ProposalAction,
721 },
722 SearchIndex {
727 #[command(subcommand)]
728 action: SearchAction,
729 },
730 ArtifactToState {
732 frontier: PathBuf,
734 packet: PathBuf,
736 #[arg(long)]
738 actor: String,
739 #[arg(long)]
741 apply_artifacts: bool,
742 #[arg(long)]
743 json: bool,
744 },
745 BridgeKit {
747 #[command(subcommand)]
748 action: BridgeKitAction,
749 },
750 SourceAdapter {
752 #[command(subcommand)]
753 action: SourceAdapterAction,
754 },
755 RuntimeAdapter {
757 #[command(subcommand)]
758 action: RuntimeAdapterAction,
759 },
760 Finding {
762 #[command(subcommand)]
763 command: FindingCommands,
764 },
765 Link {
769 #[command(subcommand)]
770 action: LinkAction,
771 },
772 Workbench {
777 #[arg(default_value = ".")]
779 path: PathBuf,
780 #[arg(long, default_value_t = 3850)]
782 port: u16,
783 #[arg(long)]
785 no_open: bool,
786 },
787 Bridges {
793 #[command(subcommand)]
794 action: BridgesAction,
795 },
796 Entity {
801 #[command(subcommand)]
802 action: EntityAction,
803 },
804 Review {
806 frontier: PathBuf,
808 finding_id: String,
810 #[arg(long)]
812 status: Option<String>,
813 #[arg(long)]
815 reason: Option<String>,
816 #[arg(long)]
818 reviewer: String,
819 #[arg(long)]
821 apply: bool,
822 #[arg(long)]
824 json: bool,
825 },
826 Note {
828 frontier: PathBuf,
829 finding_id: String,
830 #[arg(long)]
831 text: String,
832 #[arg(long)]
833 author: String,
834 #[arg(long)]
836 apply: bool,
837 #[arg(long)]
838 json: bool,
839 },
840 Caveat {
842 frontier: PathBuf,
843 finding_id: String,
844 #[arg(long)]
845 text: String,
846 #[arg(long)]
847 author: String,
848 #[arg(long)]
849 apply: bool,
850 #[arg(long)]
851 json: bool,
852 },
853 Revise {
855 frontier: PathBuf,
856 finding_id: String,
857 #[arg(long)]
859 confidence: f64,
860 #[arg(long)]
862 reason: String,
863 #[arg(long)]
865 reviewer: String,
866 #[arg(long)]
867 apply: bool,
868 #[arg(long)]
869 json: bool,
870 },
871 Reject {
873 frontier: PathBuf,
874 finding_id: String,
875 #[arg(long)]
876 reason: String,
877 #[arg(long)]
878 reviewer: String,
879 #[arg(long)]
880 apply: bool,
881 #[arg(long)]
882 json: bool,
883 },
884 History {
886 frontier: PathBuf,
887 finding_id: String,
888 #[arg(long)]
889 json: bool,
890 #[arg(long, value_name = "RFC3339_TIMESTAMP")]
894 as_of: Option<String>,
895 },
896 ImportEvents {
898 source: PathBuf,
899 #[arg(long)]
900 into: PathBuf,
901 #[arg(long)]
902 json: bool,
903 },
904 Retract {
906 source: PathBuf,
907 finding_id: String,
908 #[arg(long)]
909 reason: String,
910 #[arg(long)]
911 reviewer: String,
912 #[arg(long)]
913 apply: bool,
914 #[arg(long)]
915 json: bool,
916 },
917 ProofAdd {
929 frontier: PathBuf,
930 #[arg(long = "target-finding")]
932 target_finding: String,
933 #[arg(long, default_value = "lean4")]
936 tool: String,
937 #[arg(long = "tool-version", default_value = "4.29.1")]
939 tool_version: String,
940 #[arg(long = "script-path")]
942 script_path: PathBuf,
943 #[arg(long, default_value = "Proof script")]
945 name: String,
946 #[arg(long)]
948 reviewer: String,
949 #[arg(long)]
951 reason: String,
952 #[arg(long)]
953 json: bool,
954 },
955 ProofAttestVerification {
963 #[arg(long)]
965 proof_id: String,
966 #[arg(long, default_value = "lean4")]
968 tool: String,
969 #[arg(long = "tool-version", default_value = "4.29.1")]
970 tool_version: String,
971 #[arg(long)]
974 script_locator: String,
975 #[arg(long = "lake-manifest-hash")]
977 lake_manifest_hash: Option<String>,
978 #[arg(long = "verifier-output-hash")]
980 verifier_output_hash: String,
981 #[arg(long, default_value = "verified")]
983 status: String,
984 #[arg(long = "verifier-actor")]
987 verifier_actor: String,
988 #[arg(long)]
990 key: PathBuf,
991 #[arg(long)]
993 out: PathBuf,
994 #[arg(long)]
995 json: bool,
996 },
997 ProofVerifyAttestation {
1001 record: PathBuf,
1003 #[arg(long)]
1004 json: bool,
1005 },
1006 EntityAdd {
1011 frontier: PathBuf,
1012 finding_id: String,
1013 #[arg(long)]
1014 entity: String,
1015 #[arg(long)]
1019 entity_type: String,
1020 #[arg(long)]
1021 reviewer: String,
1022 #[arg(long)]
1023 reason: String,
1024 #[arg(long)]
1025 apply: bool,
1026 #[arg(long)]
1027 json: bool,
1028 },
1029 EntityResolve {
1033 frontier: PathBuf,
1034 finding_id: String,
1035 #[arg(long)]
1036 entity: String,
1037 #[arg(long)]
1038 source: String,
1039 #[arg(long)]
1040 id: String,
1041 #[arg(long)]
1042 confidence: f64,
1043 #[arg(long)]
1044 matched_name: Option<String>,
1045 #[arg(long, default_value = "manual")]
1046 resolution_method: String,
1047 #[arg(long)]
1048 reviewer: String,
1049 #[arg(long)]
1050 reason: String,
1051 #[arg(long)]
1052 apply: bool,
1053 #[arg(long)]
1054 json: bool,
1055 },
1056 SourceFetch {
1064 identifier: String,
1067 #[arg(long)]
1071 cache: Option<PathBuf>,
1072 #[arg(long)]
1074 out: Option<PathBuf>,
1075 #[arg(long)]
1077 refresh: bool,
1078 #[arg(long)]
1079 json: bool,
1080 },
1081 SpanRepair {
1084 frontier: PathBuf,
1085 finding_id: String,
1086 #[arg(long)]
1087 section: String,
1088 #[arg(long)]
1089 text: String,
1090 #[arg(long)]
1091 reviewer: String,
1092 #[arg(long)]
1093 reason: String,
1094 #[arg(long)]
1095 apply: bool,
1096 #[arg(long)]
1097 json: bool,
1098 },
1099 LocatorRepair {
1104 frontier: PathBuf,
1105 atom_id: String,
1106 #[arg(long)]
1109 locator: Option<String>,
1110 #[arg(long)]
1113 reviewer: String,
1114 #[arg(long)]
1116 reason: String,
1117 #[arg(long)]
1119 apply: bool,
1120 #[arg(long)]
1121 json: bool,
1122 },
1123 Propagate {
1125 frontier: PathBuf,
1126 #[arg(long)]
1127 retract: Option<String>,
1128 #[arg(long)]
1129 reduce_confidence: Option<String>,
1130 #[arg(long)]
1131 to: Option<f64>,
1132 #[arg(short, long)]
1133 output: Option<PathBuf>,
1134 },
1135 Replicate {
1144 frontier: PathBuf,
1146 target: String,
1148 #[arg(long)]
1150 outcome: String,
1151 #[arg(long)]
1153 by: String,
1154 #[arg(long)]
1158 conditions: String,
1159 #[arg(long)]
1161 source_title: String,
1162 #[arg(long)]
1164 doi: Option<String>,
1165 #[arg(long)]
1167 pmid: Option<String>,
1168 #[arg(long)]
1170 sample_size: Option<String>,
1171 #[arg(long, default_value = "")]
1174 note: String,
1175 #[arg(long)]
1177 previous_attempt: Option<String>,
1178 #[arg(long, default_value_t = false)]
1185 no_cascade: bool,
1186 #[arg(long)]
1188 json: bool,
1189 },
1190 Replications {
1193 frontier: PathBuf,
1195 #[arg(long)]
1197 target: Option<String>,
1198 #[arg(long)]
1200 json: bool,
1201 },
1202 DatasetAdd {
1209 frontier: PathBuf,
1211 #[arg(long)]
1213 name: String,
1214 #[arg(long)]
1216 version: Option<String>,
1217 #[arg(long)]
1221 content_hash: String,
1222 #[arg(long)]
1224 url: Option<String>,
1225 #[arg(long)]
1227 license: Option<String>,
1228 #[arg(long)]
1230 source_title: String,
1231 #[arg(long)]
1233 doi: Option<String>,
1234 #[arg(long)]
1236 row_count: Option<u64>,
1237 #[arg(long)]
1239 json: bool,
1240 },
1241 Datasets {
1243 frontier: PathBuf,
1244 #[arg(long)]
1245 json: bool,
1246 },
1247 CodeAdd {
1251 frontier: PathBuf,
1253 #[arg(long)]
1255 language: String,
1256 #[arg(long)]
1258 repo_url: Option<String>,
1259 #[arg(long)]
1262 commit: Option<String>,
1263 #[arg(long)]
1265 path: String,
1266 #[arg(long)]
1268 content_hash: String,
1269 #[arg(long)]
1271 line_start: Option<u32>,
1272 #[arg(long)]
1274 line_end: Option<u32>,
1275 #[arg(long)]
1277 entry_point: Option<String>,
1278 #[arg(long)]
1280 json: bool,
1281 },
1282 CodeArtifacts {
1284 frontier: PathBuf,
1285 #[arg(long)]
1286 json: bool,
1287 },
1288 ArtifactAdd {
1293 frontier: PathBuf,
1295 #[arg(long)]
1298 kind: String,
1299 #[arg(long)]
1301 name: String,
1302 #[arg(long)]
1305 file: Option<PathBuf>,
1306 #[arg(long)]
1308 url: Option<String>,
1309 #[arg(long)]
1311 content_hash: Option<String>,
1312 #[arg(long)]
1314 media_type: Option<String>,
1315 #[arg(long)]
1317 license: Option<String>,
1318 #[arg(long)]
1320 source_title: Option<String>,
1321 #[arg(long)]
1323 source_url: Option<String>,
1324 #[arg(long)]
1326 doi: Option<String>,
1327 #[arg(long)]
1329 target: Vec<String>,
1330 #[arg(long)]
1332 metadata: Vec<String>,
1333 #[arg(long, default_value = "public")]
1335 access_tier: String,
1336 #[arg(long, default_value = "reviewer:manual")]
1338 deposited_by: String,
1339 #[arg(long, default_value = "artifact deposit")]
1341 reason: String,
1342 #[arg(long)]
1344 json: bool,
1345 },
1346 Artifacts {
1348 frontier: PathBuf,
1349 #[arg(long)]
1351 target: Option<String>,
1352 #[arg(long)]
1353 json: bool,
1354 },
1355 ArtifactAudit {
1357 frontier: PathBuf,
1358 #[arg(long)]
1360 json: bool,
1361 },
1362 DecisionBrief {
1364 frontier: PathBuf,
1365 #[arg(long)]
1367 json: bool,
1368 },
1369 TrialSummary {
1371 frontier: PathBuf,
1372 #[arg(long)]
1374 json: bool,
1375 },
1376 SourceVerification {
1378 frontier: PathBuf,
1379 #[arg(long)]
1381 json: bool,
1382 },
1383 SourceIngestPlan {
1385 frontier: PathBuf,
1386 #[arg(long)]
1388 json: bool,
1389 },
1390 ClinicalTrialImport {
1393 frontier: PathBuf,
1395 nct_id: String,
1397 #[arg(long)]
1400 input_json: Option<PathBuf>,
1401 #[arg(long)]
1403 target: Vec<String>,
1404 #[arg(long, default_value = "reviewer:manual")]
1406 deposited_by: String,
1407 #[arg(long, default_value = "clinical trial record import")]
1409 reason: String,
1410 #[arg(long, default_value = "ClinicalTrials.gov public record")]
1412 license: String,
1413 #[arg(long)]
1415 json: bool,
1416 },
1417 NegativeResultAdd {
1425 frontier: PathBuf,
1427 #[arg(long)]
1429 kind: String,
1430 #[arg(long)]
1432 deposited_by: String,
1433 #[arg(long)]
1435 reason: String,
1436 #[arg(long)]
1439 conditions_text: String,
1440 #[arg(long, default_value = "")]
1442 notes: String,
1443 #[arg(long)]
1446 target: Vec<String>,
1447 #[arg(long)]
1451 endpoint: Option<String>,
1452 #[arg(long)]
1454 intervention: Option<String>,
1455 #[arg(long)]
1457 comparator: Option<String>,
1458 #[arg(long)]
1460 population: Option<String>,
1461 #[arg(long)]
1463 n_enrolled: Option<u32>,
1464 #[arg(long)]
1466 power: Option<f64>,
1467 #[arg(long)]
1469 ci_lower: Option<f64>,
1470 #[arg(long)]
1472 ci_upper: Option<f64>,
1473 #[arg(long)]
1475 effect_size_threshold: Option<f64>,
1476 #[arg(long)]
1478 registry_id: Option<String>,
1479 #[arg(long)]
1482 reagent: Option<String>,
1483 #[arg(long)]
1485 observation: Option<String>,
1486 #[arg(long)]
1488 attempts: Option<u32>,
1489 #[arg(long)]
1492 source_title: String,
1493 #[arg(long)]
1495 doi: Option<String>,
1496 #[arg(long)]
1498 url: Option<String>,
1499 #[arg(long)]
1501 year: Option<i32>,
1502 #[arg(long)]
1504 json: bool,
1505 },
1506 NegativeResults {
1508 frontier: PathBuf,
1509 #[arg(long)]
1511 target: Option<String>,
1512 #[arg(long)]
1513 json: bool,
1514 },
1515 TrajectoryCreate {
1520 frontier: PathBuf,
1522 #[arg(long)]
1524 deposited_by: String,
1525 #[arg(long)]
1527 reason: String,
1528 #[arg(long)]
1533 target: Vec<String>,
1534 #[arg(long, default_value = "")]
1536 notes: String,
1537 #[arg(long)]
1538 json: bool,
1539 },
1540 TrajectoryStep {
1543 frontier: PathBuf,
1545 trajectory_id: String,
1547 #[arg(long)]
1549 kind: String,
1550 #[arg(long)]
1554 description: String,
1555 #[arg(long)]
1557 actor: String,
1558 #[arg(long)]
1560 reason: String,
1561 #[arg(long)]
1564 reference: Vec<String>,
1565 #[arg(long)]
1566 json: bool,
1567 },
1568 Trajectories {
1570 frontier: PathBuf,
1571 #[arg(long)]
1573 target: Option<String>,
1574 #[arg(long)]
1575 json: bool,
1576 },
1577 TierSet {
1583 frontier: PathBuf,
1585 #[arg(long)]
1587 object_type: String,
1588 #[arg(long)]
1590 object_id: String,
1591 #[arg(long)]
1593 tier: String,
1594 #[arg(long)]
1597 actor: String,
1598 #[arg(long)]
1601 reason: String,
1602 #[arg(long)]
1603 json: bool,
1604 },
1605 Predict {
1612 frontier: PathBuf,
1614 #[arg(long)]
1616 by: String,
1617 #[arg(long)]
1620 claim: String,
1621 #[arg(long)]
1623 criterion: String,
1624 #[arg(long)]
1626 resolves_by: Option<String>,
1627 #[arg(long)]
1629 confidence: f64,
1630 #[arg(long, default_value = "")]
1632 target: String,
1633 #[arg(long, default_value = "affirmed")]
1635 outcome: String,
1636 #[arg(long, default_value = "")]
1638 conditions: String,
1639 #[arg(long)]
1641 json: bool,
1642 },
1643 Resolve {
1648 frontier: PathBuf,
1650 prediction: String,
1652 #[arg(long)]
1654 outcome: String,
1655 #[arg(long)]
1657 matched: bool,
1658 #[arg(long)]
1661 by: String,
1662 #[arg(long, default_value = "1.0")]
1664 confidence: f64,
1665 #[arg(long, default_value = "")]
1667 source_title: String,
1668 #[arg(long)]
1670 doi: Option<String>,
1671 #[arg(long)]
1673 json: bool,
1674 },
1675 Predictions {
1677 frontier: PathBuf,
1678 #[arg(long)]
1680 by: Option<String>,
1681 #[arg(long)]
1683 open: bool,
1684 #[arg(long)]
1686 json: bool,
1687 },
1688 Calibration {
1691 frontier: PathBuf,
1692 #[arg(long)]
1694 actor: Option<String>,
1695 #[arg(long)]
1697 json: bool,
1698 },
1699 PredictionsExpire {
1707 frontier: PathBuf,
1708 #[arg(long)]
1711 now: Option<String>,
1712 #[arg(long)]
1715 dry_run: bool,
1716 #[arg(long)]
1717 json: bool,
1718 },
1719 Consensus {
1728 frontier: PathBuf,
1730 target: String,
1732 #[arg(long, default_value = "composite")]
1735 weighting: String,
1736 #[arg(long)]
1741 causal_claim: Option<String>,
1742 #[arg(long)]
1747 causal_grade_min: Option<String>,
1748 #[arg(long)]
1750 json: bool,
1751 },
1752
1753 Ingest {
1769 path: String,
1772 #[arg(long)]
1775 frontier: PathBuf,
1776 #[arg(short, long)]
1780 backend: Option<String>,
1781 #[arg(long)]
1785 actor: Option<String>,
1786 #[arg(long)]
1788 dry_run: bool,
1789 #[arg(long)]
1790 json: bool,
1791 },
1792
1793 Propose {
1799 frontier: PathBuf,
1800 finding_id: String,
1801 #[arg(long)]
1803 status: String,
1804 #[arg(long)]
1805 reason: String,
1806 #[arg(long)]
1807 reviewer: String,
1808 #[arg(long)]
1811 apply: bool,
1812 #[arg(long)]
1813 json: bool,
1814 },
1815
1816 Accept {
1820 frontier: PathBuf,
1821 proposal_id: String,
1822 #[arg(long)]
1823 reviewer: String,
1824 #[arg(long)]
1825 reason: String,
1826 #[arg(long)]
1827 json: bool,
1828 },
1829
1830 Attest {
1842 frontier: PathBuf,
1844 #[arg(long)]
1848 event: Option<String>,
1849 #[arg(long)]
1852 attester: Option<String>,
1853 #[arg(long)]
1856 scope_note: Option<String>,
1857 #[arg(long)]
1860 proof_id: Option<String>,
1861 #[arg(long)]
1866 signature: Option<String>,
1867 #[arg(long)]
1870 key: Option<PathBuf>,
1871 #[arg(long)]
1872 json: bool,
1873 },
1874
1875 Lineage {
1878 frontier: PathBuf,
1879 finding_id: String,
1880 #[arg(long, value_name = "RFC3339_TIMESTAMP")]
1881 as_of: Option<String>,
1882 #[arg(long)]
1883 json: bool,
1884 },
1885
1886 Carina {
1889 #[command(subcommand)]
1890 action: CarinaAction,
1891 },
1892
1893 Atlas {
1898 #[command(subcommand)]
1899 action: AtlasAction,
1900 },
1901
1902 Constellation {
1908 #[command(subcommand)]
1909 action: ConstellationAction,
1910 },
1911}
1912
1913#[derive(Subcommand)]
1918enum AtlasAction {
1919 Init {
1924 name: String,
1927 #[arg(long, value_delimiter = ',', num_args = 1..)]
1929 frontiers: Vec<PathBuf>,
1930 #[arg(long, default_value = "general")]
1933 domain: String,
1934 #[arg(long)]
1936 scope_note: Option<String>,
1937 #[arg(long, default_value = "atlases")]
1939 atlases_root: PathBuf,
1940 #[arg(long)]
1941 json: bool,
1942 },
1943 Materialize {
1947 name: String,
1949 #[arg(long, default_value = "atlases")]
1950 atlases_root: PathBuf,
1951 #[arg(long)]
1952 json: bool,
1953 },
1954 Serve {
1959 name: String,
1960 #[arg(long, default_value = "atlases")]
1961 atlases_root: PathBuf,
1962 #[arg(long, default_value_t = 3848)]
1963 port: u16,
1964 #[arg(long)]
1965 no_open: bool,
1966 },
1967 Update {
1974 name: String,
1975 #[arg(long, value_delimiter = ',')]
1978 add_frontier: Vec<PathBuf>,
1979 #[arg(long, value_delimiter = ',')]
1982 remove_vfr_id: Vec<String>,
1983 #[arg(long, default_value = "atlases")]
1984 atlases_root: PathBuf,
1985 #[arg(long)]
1986 json: bool,
1987 },
1988}
1989
1990#[derive(Subcommand)]
1994enum ConstellationAction {
1995 Init {
1999 name: String,
2000 #[arg(long, value_delimiter = ',', num_args = 1..)]
2002 atlases: Vec<PathBuf>,
2003 #[arg(long)]
2004 scope_note: Option<String>,
2005 #[arg(long, default_value = "constellations")]
2006 constellations_root: PathBuf,
2007 #[arg(long)]
2008 json: bool,
2009 },
2010 Materialize {
2015 name: String,
2016 #[arg(long, default_value = "constellations")]
2017 constellations_root: PathBuf,
2018 #[arg(long)]
2019 json: bool,
2020 },
2021 Serve {
2025 name: String,
2026 #[arg(long, default_value = "constellations")]
2027 constellations_root: PathBuf,
2028 #[arg(long, default_value_t = 3849)]
2029 port: u16,
2030 #[arg(long)]
2031 no_open: bool,
2032 },
2033}
2034
2035#[derive(Subcommand)]
2039enum CarinaAction {
2040 Validate {
2045 path: PathBuf,
2049 #[arg(long)]
2052 primitive: Option<String>,
2053 #[arg(long)]
2054 json: bool,
2055 },
2056 List {
2058 #[arg(long)]
2059 json: bool,
2060 },
2061 Schema { primitive: String },
2063}
2064
2065#[derive(Subcommand)]
2066enum PacketAction {
2067 Inspect {
2069 path: PathBuf,
2070 #[arg(long)]
2071 json: bool,
2072 },
2073 Validate {
2075 path: PathBuf,
2076 #[arg(long)]
2077 json: bool,
2078 },
2079}
2080
2081#[derive(Subcommand)]
2082enum SignAction {
2083 GenerateKeypair {
2085 #[arg(long, default_value = ".vela/keys")]
2086 out: PathBuf,
2087 #[arg(long)]
2088 json: bool,
2089 },
2090 Apply {
2092 frontier: PathBuf,
2093 #[arg(long)]
2094 private_key: PathBuf,
2095 #[arg(long)]
2096 json: bool,
2097 },
2098 Verify {
2100 frontier: PathBuf,
2101 #[arg(long)]
2102 public_key: Option<PathBuf>,
2103 #[arg(long)]
2104 json: bool,
2105 },
2106 ThresholdSet {
2111 frontier: PathBuf,
2112 finding_id: String,
2114 #[arg(long)]
2116 to: u32,
2117 #[arg(long)]
2118 json: bool,
2119 },
2120}
2121
2122#[derive(Subcommand)]
2123enum ActorAction {
2124 Add {
2126 frontier: PathBuf,
2127 id: String,
2129 #[arg(long)]
2131 pubkey: String,
2132 #[arg(long)]
2136 tier: Option<String>,
2137 #[arg(long)]
2141 orcid: Option<String>,
2142 #[arg(long)]
2147 clearance: Option<String>,
2148 #[arg(long)]
2149 json: bool,
2150 },
2151 List {
2153 frontier: PathBuf,
2154 #[arg(long)]
2155 json: bool,
2156 },
2157 Rotate {
2168 frontier: PathBuf,
2169 #[arg(long)]
2172 id: String,
2173 #[arg(long = "new-id")]
2178 new_id: String,
2179 #[arg(long = "new-pubkey")]
2181 new_pubkey: String,
2182 #[arg(long)]
2185 reason: String,
2186 #[arg(long)]
2187 json: bool,
2188 },
2189}
2190
2191#[derive(Subcommand)]
2198enum AgentAction {
2199 Init {
2203 name: String,
2206 #[arg(long, default_value = "custom")]
2210 framework: String,
2211 #[arg(long)]
2213 out: Option<PathBuf>,
2214 #[arg(long)]
2215 json: bool,
2216 },
2217 List {
2219 #[arg(long, default_value = "agents")]
2221 root: PathBuf,
2222 #[arg(long)]
2223 json: bool,
2224 },
2225}
2226
2227#[derive(Subcommand)]
2228enum CausalAction {
2229 Audit {
2233 frontier: PathBuf,
2234 #[arg(long)]
2237 problems_only: bool,
2238 #[arg(long)]
2239 json: bool,
2240 },
2241 Effect {
2254 frontier: PathBuf,
2255 source: String,
2257 #[arg(long)]
2259 on: String,
2260 #[arg(long)]
2261 json: bool,
2262 },
2263 Graph {
2266 frontier: PathBuf,
2267 #[arg(long)]
2269 node: Option<String>,
2270 #[arg(long)]
2271 json: bool,
2272 },
2273 Counterfactual {
2280 frontier: PathBuf,
2281 intervene_on: String,
2283 #[arg(long)]
2285 set_to: f64,
2286 #[arg(long)]
2288 target: String,
2289 #[arg(long)]
2290 json: bool,
2291 },
2292}
2293
2294#[derive(Subcommand)]
2295enum BridgesAction {
2296 Derive {
2300 frontier_a: PathBuf,
2303 #[arg(long, default_value = "a")]
2305 label_a: String,
2306 frontier_b: PathBuf,
2308 #[arg(long, default_value = "b")]
2310 label_b: String,
2311 #[arg(long)]
2312 json: bool,
2313 },
2314 List {
2316 frontier: PathBuf,
2318 #[arg(long)]
2320 status: Option<String>,
2321 #[arg(long)]
2322 json: bool,
2323 },
2324 Show {
2326 frontier: PathBuf,
2327 bridge_id: String,
2328 #[arg(long)]
2329 json: bool,
2330 },
2331 Confirm {
2336 frontier: PathBuf,
2337 bridge_id: String,
2338 #[arg(long)]
2341 reviewer: Option<String>,
2342 #[arg(long)]
2344 note: Option<String>,
2345 #[arg(long)]
2346 json: bool,
2347 },
2348 Refute {
2351 frontier: PathBuf,
2352 bridge_id: String,
2353 #[arg(long)]
2354 reviewer: Option<String>,
2355 #[arg(long)]
2356 note: Option<String>,
2357 #[arg(long)]
2358 json: bool,
2359 },
2360}
2361
2362#[derive(Subcommand)]
2363enum FederationAction {
2364 PeerAdd {
2368 frontier: PathBuf,
2369 id: String,
2371 #[arg(long)]
2373 url: String,
2374 #[arg(long)]
2376 pubkey: String,
2377 #[arg(long, default_value = "")]
2379 note: String,
2380 #[arg(long)]
2381 json: bool,
2382 },
2383 PeerList {
2385 frontier: PathBuf,
2386 #[arg(long)]
2387 json: bool,
2388 },
2389 PeerRemove {
2393 frontier: PathBuf,
2394 id: String,
2395 #[arg(long)]
2396 json: bool,
2397 },
2398 Sync {
2415 frontier: PathBuf,
2416 peer_id: String,
2418 #[arg(long)]
2420 url: Option<String>,
2421 #[arg(long)]
2425 via_hub: bool,
2426 #[arg(long)]
2429 vfr_id: Option<String>,
2430 #[arg(long)]
2437 allow_cross_vfr: bool,
2438 #[arg(long)]
2440 dry_run: bool,
2441 #[arg(long)]
2442 json: bool,
2443 },
2444 PushResolution {
2457 frontier: PathBuf,
2458 conflict_event_id: String,
2462 #[arg(long = "to")]
2464 to: String,
2465 #[arg(long)]
2469 key: Option<PathBuf>,
2470 #[arg(long)]
2473 vfr_id: Option<String>,
2474 #[arg(long)]
2475 json: bool,
2476 },
2477}
2478
2479#[derive(Subcommand)]
2480enum FrontierAction {
2481 New {
2488 path: PathBuf,
2490 #[arg(long)]
2492 name: String,
2493 #[arg(long, default_value = "")]
2495 description: String,
2496 #[arg(long)]
2498 force: bool,
2499 #[arg(long)]
2500 json: bool,
2501 },
2502 Materialize {
2504 frontier: PathBuf,
2506 #[arg(long)]
2507 json: bool,
2508 },
2509 AddDep {
2513 frontier: PathBuf,
2515 vfr_id: String,
2517 #[arg(long)]
2520 locator: String,
2521 #[arg(long)]
2524 snapshot: String,
2525 #[arg(long)]
2527 name: Option<String>,
2528 #[arg(long)]
2529 json: bool,
2530 },
2531 ListDeps {
2533 frontier: PathBuf,
2534 #[arg(long)]
2535 json: bool,
2536 },
2537 RemoveDep {
2540 frontier: PathBuf,
2541 vfr_id: String,
2542 #[arg(long)]
2543 json: bool,
2544 },
2545 RefreshDeps {
2552 frontier: PathBuf,
2553 #[arg(long, default_value = "https://vela-hub.fly.dev")]
2555 from: String,
2556 #[arg(long)]
2558 dry_run: bool,
2559 #[arg(long)]
2560 json: bool,
2561 },
2562 Diff {
2574 frontier: PathBuf,
2576 #[arg(long)]
2579 since: Option<String>,
2580 #[arg(long)]
2583 week: Option<String>,
2584 #[arg(long)]
2586 json: bool,
2587 },
2588}
2589
2590#[derive(Subcommand)]
2591enum RepoAction {
2592 Status {
2594 frontier: PathBuf,
2596 #[arg(long)]
2598 json: bool,
2599 },
2600 Doctor {
2602 frontier: PathBuf,
2604 #[arg(long)]
2606 json: bool,
2607 },
2608}
2609
2610#[derive(Subcommand)]
2611enum QueueAction {
2612 List {
2614 #[arg(long)]
2615 queue_file: Option<PathBuf>,
2616 #[arg(long)]
2617 json: bool,
2618 },
2619 Sign {
2622 #[arg(long)]
2624 actor: String,
2625 #[arg(long)]
2627 key: PathBuf,
2628 #[arg(long)]
2630 queue_file: Option<PathBuf>,
2631 #[arg(long, alias = "all")]
2637 yes_to_all: bool,
2638 #[arg(long)]
2639 json: bool,
2640 },
2641 Clear {
2643 #[arg(long)]
2644 queue_file: Option<PathBuf>,
2645 #[arg(long)]
2646 json: bool,
2647 },
2648}
2649
2650#[derive(Subcommand)]
2651enum RegistryAction {
2652 List {
2654 #[arg(long)]
2656 from: Option<String>,
2657 #[arg(long)]
2658 json: bool,
2659 },
2660 Publish {
2662 frontier: PathBuf,
2664 #[arg(long)]
2666 owner: String,
2667 #[arg(long)]
2669 key: PathBuf,
2670 #[arg(long)]
2677 locator: Option<String>,
2678 #[arg(long)]
2680 to: Option<String>,
2681 #[arg(long)]
2682 json: bool,
2683 },
2684 DependsOn {
2691 vfr_id: String,
2693 #[arg(long, default_value = "https://vela-hub.fly.dev")]
2695 from: String,
2696 #[arg(long)]
2697 json: bool,
2698 },
2699 Mirror {
2707 vfr_id: String,
2709 #[arg(long)]
2711 from: String,
2712 #[arg(long)]
2714 to: String,
2715 #[arg(long)]
2716 json: bool,
2717 },
2718 WitnessCheck {
2726 vfr_id: String,
2728 #[arg(long, value_delimiter = ',')]
2733 hubs: Vec<String>,
2734 #[arg(long)]
2735 json: bool,
2736 },
2737 Governance {
2744 #[command(subcommand)]
2745 action: GovernanceAction,
2746 },
2747 HubFederation {
2756 #[command(subcommand)]
2757 action: HubFederationAction,
2758 },
2759 Checkpoint {
2769 #[command(subcommand)]
2770 action: CheckpointAction,
2771 },
2772 VerifyAll {
2778 #[arg(long)]
2781 from: Option<PathBuf>,
2782 #[arg(long)]
2783 json: bool,
2784 },
2785 VerifyChain {
2793 frontier: PathBuf,
2796 #[arg(long)]
2800 artifacts: PathBuf,
2801 #[arg(long)]
2802 json: bool,
2803 },
2804 OwnerRotateGoverned {
2818 #[command(subcommand)]
2819 action: OwnerRotateGovernedAction,
2820 },
2821 OwnerRotate {
2831 frontier: PathBuf,
2833 #[arg(long)]
2835 current_owner: String,
2836 #[arg(long)]
2838 new_owner: String,
2839 #[arg(long)]
2841 new_key: PathBuf,
2842 #[arg(long)]
2845 reason: String,
2846 #[arg(long)]
2851 locator: Option<String>,
2852 #[arg(long)]
2855 to: Option<String>,
2856 #[arg(long)]
2857 json: bool,
2858 },
2859 Pull {
2861 vfr_id: String,
2863 #[arg(long)]
2865 from: Option<String>,
2866 #[arg(long)]
2870 out: PathBuf,
2871 #[arg(long)]
2874 transitive: bool,
2875 #[arg(long, default_value = "4")]
2878 depth: usize,
2879 #[arg(long)]
2880 json: bool,
2881 },
2882}
2883
2884#[derive(Subcommand)]
2885enum GovernanceAction {
2886 Init {
2890 frontier: PathBuf,
2892 #[arg(long)]
2894 threshold: u32,
2895 #[arg(long, value_delimiter = ',')]
2897 eligible: Vec<String>,
2898 #[arg(long)]
2904 bootstrap: bool,
2905 #[arg(long)]
2908 owner_epoch: Option<u64>,
2909 #[arg(long)]
2913 current_owner_counts: bool,
2914 #[arg(long, default_value = "168")]
2916 attestation_ttl_hours: u32,
2917 #[arg(long)]
2920 out: Option<PathBuf>,
2921 #[arg(long)]
2922 json: bool,
2923 },
2924 Show {
2926 policy: PathBuf,
2928 #[arg(long)]
2929 json: bool,
2930 },
2931 Validate {
2934 policy: PathBuf,
2936 #[arg(long)]
2937 json: bool,
2938 },
2939}
2940
2941#[derive(Subcommand)]
2942enum SearchAction {
2943 Build {
2945 #[arg(required = true)]
2947 frontiers: Vec<PathBuf>,
2948 #[arg(long)]
2950 out: PathBuf,
2951 #[arg(long)]
2955 include_bootstrap: bool,
2956 #[arg(long)]
2959 include_broken: bool,
2960 #[arg(long)]
2961 json: bool,
2962 },
2963 Query {
2965 #[arg(default_value = "")]
2968 query: String,
2969 #[arg(long)]
2972 index: Option<PathBuf>,
2973 #[arg(long)]
2975 kind: Option<String>,
2976 #[arg(long)]
2978 entity: Option<String>,
2979 #[arg(long)]
2982 status: Option<String>,
2983 #[arg(long)]
2985 frontier_id: Option<String>,
2986 #[arg(long)]
2988 source_id: Option<String>,
2989 #[arg(long)]
2993 chain_status: Option<String>,
2994 #[arg(long)]
2996 limit: Option<usize>,
2997 #[arg(long)]
2998 json: bool,
2999 },
3000}
3001
3002#[derive(Subcommand)]
3003enum HubFederationAction {
3004 Status {
3014 #[arg(long = "source", value_delimiter = ',')]
3015 sources: Vec<String>,
3016 #[arg(long)]
3017 json: bool,
3018 },
3019}
3020
3021#[derive(Subcommand)]
3022enum CheckpointAction {
3023 Create {
3025 #[arg(long)]
3027 from: PathBuf,
3028 #[arg(long)]
3030 hub_id: String,
3031 #[arg(long)]
3033 sequence: u64,
3034 #[arg(long)]
3036 previous: Option<String>,
3037 #[arg(long)]
3039 key: PathBuf,
3040 #[arg(long)]
3042 out: PathBuf,
3043 #[arg(long)]
3044 json: bool,
3045 },
3046 Verify {
3049 checkpoint: PathBuf,
3051 #[arg(long)]
3053 registry: PathBuf,
3054 #[arg(long)]
3055 json: bool,
3056 },
3057}
3058
3059#[derive(Subcommand)]
3060enum OwnerRotateGovernedAction {
3061 Propose {
3068 frontier: PathBuf,
3069 #[arg(long)]
3070 old_owner: String,
3071 #[arg(long)]
3072 new_owner: String,
3073 #[arg(long)]
3077 new_pubkey_hex: String,
3078 #[arg(long)]
3079 target_epoch: u64,
3080 #[arg(long)]
3081 previous_entry_hash: String,
3082 #[arg(long)]
3083 policy: PathBuf,
3084 #[arg(long)]
3085 reason: String,
3086 #[arg(long, default_value = "168")]
3087 ttl_hours: u32,
3088 #[arg(long)]
3089 out: PathBuf,
3090 #[arg(long)]
3091 json: bool,
3092 },
3093 Attest {
3098 #[arg(long)]
3099 proposal: PathBuf,
3100 #[arg(long)]
3101 attester_id: String,
3102 #[arg(long)]
3103 key: PathBuf,
3104 #[arg(long)]
3105 bundle: PathBuf,
3106 #[arg(long)]
3107 json: bool,
3108 },
3109 Apply {
3114 frontier: PathBuf,
3115 #[arg(long)]
3116 proposal: PathBuf,
3117 #[arg(long)]
3118 bundle: PathBuf,
3119 #[arg(long)]
3120 policy: PathBuf,
3121 #[arg(long)]
3124 new_key: PathBuf,
3125 #[arg(long)]
3126 locator: Option<String>,
3127 #[arg(long)]
3128 to: Option<String>,
3129 #[arg(long)]
3130 json: bool,
3131 },
3132}
3133
3134#[derive(Subcommand)]
3135enum GapsAction {
3136 Rank {
3138 frontier: PathBuf,
3139 #[arg(long, default_value = "10")]
3140 top: usize,
3141 #[arg(long)]
3142 domain: Option<String>,
3143 #[arg(long)]
3144 json: bool,
3145 },
3146}
3147
3148#[derive(Subcommand)]
3149enum LinkAction {
3150 Add {
3155 frontier: PathBuf,
3157 #[arg(long)]
3159 from: String,
3160 #[arg(long)]
3162 to: String,
3163 #[arg(long, default_value = "supports")]
3165 r#type: String,
3166 #[arg(long, default_value = "")]
3168 note: String,
3169 #[arg(long, default_value = "reviewer")]
3171 inferred_by: String,
3172 #[arg(long)]
3181 no_check_target: bool,
3182 #[arg(long)]
3183 json: bool,
3184 },
3185}
3186
3187#[derive(Subcommand)]
3188enum EntityAction {
3189 Resolve {
3196 frontier: PathBuf,
3197 #[arg(long)]
3199 force: bool,
3200 #[arg(long)]
3201 json: bool,
3202 },
3203 List {
3205 #[arg(long)]
3206 json: bool,
3207 },
3208}
3209
3210#[derive(Subcommand)]
3211enum FindingCommands {
3212 Add {
3214 frontier: PathBuf,
3216 #[arg(long)]
3218 assertion: String,
3219 #[arg(long, default_value = "mechanism")]
3221 r#type: String,
3222 #[arg(long, default_value = "manual finding")]
3224 source: String,
3225 #[arg(long, default_value = "expert_assertion")]
3227 source_type: String,
3228 #[arg(long)]
3230 author: String,
3231 #[arg(long, default_value = "0.3")]
3233 confidence: f64,
3234 #[arg(long, default_value = "theoretical")]
3236 evidence_type: String,
3237 #[arg(long, default_value = "")]
3239 entities: String,
3240 #[arg(long)]
3242 entities_reviewed: bool,
3243 #[arg(long)]
3245 evidence_span: Vec<String>,
3246 #[arg(long)]
3248 gap: bool,
3249 #[arg(long)]
3251 negative_space: bool,
3252 #[arg(long)]
3254 doi: Option<String>,
3255 #[arg(long)]
3257 pmid: Option<String>,
3258 #[arg(long)]
3260 year: Option<i32>,
3261 #[arg(long)]
3263 journal: Option<String>,
3264 #[arg(long)]
3266 url: Option<String>,
3267 #[arg(long)]
3269 source_authors: Option<String>,
3270 #[arg(long)]
3272 conditions_text: Option<String>,
3273 #[arg(long)]
3275 species: Option<String>,
3276 #[arg(long)]
3278 in_vivo: bool,
3279 #[arg(long)]
3281 in_vitro: bool,
3282 #[arg(long)]
3284 human_data: bool,
3285 #[arg(long)]
3287 clinical_trial: bool,
3288 #[arg(long)]
3290 json: bool,
3291 #[arg(long)]
3293 apply: bool,
3294 },
3295 Supersede {
3302 frontier: PathBuf,
3304 old_id: String,
3306 #[arg(long)]
3308 assertion: String,
3309 #[arg(long, default_value = "mechanism")]
3311 r#type: String,
3312 #[arg(long, default_value = "manual finding")]
3314 source: String,
3315 #[arg(long, default_value = "expert_assertion")]
3317 source_type: String,
3318 #[arg(long)]
3320 author: String,
3321 #[arg(long)]
3323 reason: String,
3324 #[arg(long, default_value = "0.5")]
3326 confidence: f64,
3327 #[arg(long, default_value = "experimental")]
3329 evidence_type: String,
3330 #[arg(long, default_value = "")]
3332 entities: String,
3333 #[arg(long)]
3335 doi: Option<String>,
3336 #[arg(long)]
3338 pmid: Option<String>,
3339 #[arg(long)]
3341 year: Option<i32>,
3342 #[arg(long)]
3344 journal: Option<String>,
3345 #[arg(long)]
3347 url: Option<String>,
3348 #[arg(long)]
3350 source_authors: Option<String>,
3351 #[arg(long)]
3353 conditions_text: Option<String>,
3354 #[arg(long)]
3356 species: Option<String>,
3357 #[arg(long)]
3358 in_vivo: bool,
3359 #[arg(long)]
3360 in_vitro: bool,
3361 #[arg(long)]
3362 human_data: bool,
3363 #[arg(long)]
3364 clinical_trial: bool,
3365 #[arg(long)]
3366 json: bool,
3367 #[arg(long)]
3369 apply: bool,
3370 },
3371 CausalSet {
3377 frontier: PathBuf,
3379 finding_id: String,
3381 #[arg(long)]
3383 claim: String,
3384 #[arg(long)]
3387 grade: Option<String>,
3388 #[arg(long)]
3391 actor: String,
3392 #[arg(long)]
3395 reason: String,
3396 #[arg(long)]
3397 json: bool,
3398 },
3399}
3400
3401#[derive(Subcommand)]
3402enum ProposalAction {
3403 List {
3405 frontier: PathBuf,
3406 #[arg(long)]
3407 status: Option<String>,
3408 #[arg(long)]
3409 json: bool,
3410 },
3411 Show {
3413 frontier: PathBuf,
3414 proposal_id: String,
3415 #[arg(long)]
3416 json: bool,
3417 },
3418 Preview {
3420 frontier: PathBuf,
3421 proposal_id: String,
3422 #[arg(long, default_value = "reviewer:preview")]
3423 reviewer: String,
3424 #[arg(long)]
3425 json: bool,
3426 },
3427 Import {
3429 frontier: PathBuf,
3430 source: PathBuf,
3431 #[arg(long)]
3432 json: bool,
3433 },
3434 Validate {
3436 source: PathBuf,
3437 #[arg(long)]
3438 json: bool,
3439 },
3440 Export {
3442 frontier: PathBuf,
3443 output: PathBuf,
3444 #[arg(long)]
3445 status: Option<String>,
3446 #[arg(long)]
3447 json: bool,
3448 },
3449 Accept {
3451 frontier: PathBuf,
3452 proposal_id: String,
3453 #[arg(long)]
3454 reviewer: String,
3455 #[arg(long)]
3456 reason: String,
3457 #[arg(long)]
3458 json: bool,
3459 },
3460 Reject {
3462 frontier: PathBuf,
3463 proposal_id: String,
3464 #[arg(long)]
3465 reviewer: String,
3466 #[arg(long)]
3467 reason: String,
3468 #[arg(long)]
3469 json: bool,
3470 },
3471}
3472
3473#[derive(Subcommand)]
3474enum SourceAdapterAction {
3475 Run {
3477 frontier: PathBuf,
3479 adapter: String,
3481 #[arg(long)]
3483 actor: String,
3484 #[arg(long = "entry")]
3486 entries: Vec<String>,
3487 #[arg(long)]
3489 priority: Option<String>,
3490 #[arg(long)]
3492 include_excluded: bool,
3493 #[arg(long)]
3495 allow_partial: bool,
3496 #[arg(long)]
3498 dry_run: bool,
3499 #[arg(long)]
3501 input_dir: Option<PathBuf>,
3502 #[arg(long)]
3504 apply_artifacts: bool,
3505 #[arg(long)]
3507 json: bool,
3508 },
3509}
3510
3511#[derive(Subcommand)]
3512enum RuntimeAdapterAction {
3513 Run {
3515 frontier: PathBuf,
3517 adapter: String,
3519 #[arg(long)]
3521 input: PathBuf,
3522 #[arg(long)]
3524 actor: String,
3525 #[arg(long)]
3527 dry_run: bool,
3528 #[arg(long)]
3530 apply_artifacts: bool,
3531 #[arg(long)]
3533 json: bool,
3534 },
3535}
3536
3537#[derive(Subcommand)]
3538enum BridgeKitAction {
3539 Validate {
3541 source: PathBuf,
3543 #[arg(long)]
3545 json: bool,
3546 },
3547 VerifyProvenance {
3554 packet: PathBuf,
3556 #[arg(long)]
3558 json: bool,
3559 #[arg(long = "cross-check")]
3566 cross_check: bool,
3567 },
3568}
3569
3570pub async fn run_command() {
3571 dotenvy::dotenv().ok();
3572
3573 match Cli::parse().command {
3574 Commands::Scout {
3575 folder,
3576 frontier,
3577 backend,
3578 dry_run,
3579 json,
3580 } => {
3581 cmd_scout(&folder, &frontier, backend.as_deref(), dry_run, json).await;
3582 }
3583 Commands::CompileNotes {
3584 vault,
3585 frontier,
3586 backend,
3587 max_files,
3588 max_items_per_category,
3589 dry_run,
3590 json,
3591 } => {
3592 cmd_compile_notes(
3593 &vault,
3594 &frontier,
3595 backend.as_deref(),
3596 max_files,
3597 max_items_per_category,
3598 dry_run,
3599 json,
3600 )
3601 .await;
3602 }
3603 Commands::CompileCode {
3604 root,
3605 frontier,
3606 backend,
3607 max_files,
3608 dry_run,
3609 json,
3610 } => {
3611 cmd_compile_code(
3612 &root,
3613 &frontier,
3614 backend.as_deref(),
3615 max_files,
3616 dry_run,
3617 json,
3618 )
3619 .await;
3620 }
3621 Commands::CompileData {
3622 root,
3623 frontier,
3624 backend,
3625 sample_rows,
3626 dry_run,
3627 json,
3628 } => {
3629 cmd_compile_data(
3630 &root,
3631 &frontier,
3632 backend.as_deref(),
3633 sample_rows,
3634 dry_run,
3635 json,
3636 )
3637 .await;
3638 }
3639 Commands::ReviewPending {
3640 frontier,
3641 backend,
3642 max_proposals,
3643 batch_size,
3644 dry_run,
3645 json,
3646 } => {
3647 cmd_review_pending(
3648 &frontier,
3649 backend.as_deref(),
3650 max_proposals,
3651 batch_size,
3652 dry_run,
3653 json,
3654 )
3655 .await;
3656 }
3657 Commands::FindTensions {
3658 frontier,
3659 backend,
3660 max_findings,
3661 dry_run,
3662 json,
3663 } => {
3664 cmd_find_tensions(&frontier, backend.as_deref(), max_findings, dry_run, json).await;
3665 }
3666 Commands::PlanExperiments {
3667 frontier,
3668 backend,
3669 max_findings,
3670 dry_run,
3671 json,
3672 } => {
3673 cmd_plan_experiments(&frontier, backend.as_deref(), max_findings, dry_run, json).await;
3674 }
3675 Commands::Check {
3676 source,
3677 schema,
3678 stats,
3679 conformance,
3680 conformance_dir,
3681 all,
3682 schema_only,
3683 strict,
3684 fix,
3685 json,
3686 } => cmd_check(
3687 source.as_deref(),
3688 schema,
3689 stats,
3690 conformance,
3691 &conformance_dir,
3692 all,
3693 schema_only,
3694 strict,
3695 fix,
3696 json,
3697 ),
3698 Commands::Integrity { frontier, json } => cmd_integrity(&frontier, json),
3699 Commands::Impact {
3700 frontier,
3701 finding_id,
3702 depth,
3703 json,
3704 } => cmd_impact(&frontier, &finding_id, depth, json),
3705 Commands::Discord {
3706 frontier,
3707 json,
3708 kind,
3709 } => cmd_discord(&frontier, json, kind.as_deref()),
3710 Commands::Normalize {
3711 source,
3712 out,
3713 write,
3714 dry_run,
3715 rewrite_ids,
3716 id_map,
3717 resync_provenance,
3718 json,
3719 } => cmd_normalize(
3720 &source,
3721 out.as_deref(),
3722 write,
3723 dry_run,
3724 rewrite_ids,
3725 id_map.as_deref(),
3726 resync_provenance,
3727 json,
3728 ),
3729 Commands::Proof {
3730 frontier,
3731 out,
3732 template,
3733 gold,
3734 record_proof_state,
3735 json,
3736 } => cmd_proof(
3737 &frontier,
3738 &out,
3739 &template,
3740 gold.as_deref(),
3741 record_proof_state,
3742 json,
3743 ),
3744 Commands::Repo { action } => cmd_repo(action),
3745 Commands::Serve {
3746 frontier,
3747 frontiers,
3748 backend,
3749 http,
3750 setup,
3751 check_tools,
3752 json,
3753 workbench,
3754 } => {
3755 if setup {
3756 cmd_mcp_setup(frontier.as_deref(), frontiers.as_deref());
3757 } else if check_tools {
3758 let source =
3759 serve::ProjectSource::from_args(frontier.as_deref(), frontiers.as_deref());
3760 match serve::check_tools(source) {
3761 Ok(report) => {
3762 if json {
3763 println!(
3764 "{}",
3765 serde_json::to_string_pretty(&report)
3766 .expect("failed to serialize tool check report")
3767 );
3768 } else {
3769 print_tool_check_report(&report);
3770 }
3771 }
3772 Err(e) => fail(&format!("Tool check failed: {e}")),
3773 }
3774 } else {
3775 let source =
3776 serve::ProjectSource::from_args(frontier.as_deref(), frontiers.as_deref());
3777 let resolved_port = if workbench {
3779 Some(http.unwrap_or(3848))
3780 } else {
3781 http
3782 };
3783 if let Some(port) = resolved_port {
3784 serve::run_http(source, backend.as_deref(), port, workbench).await;
3785 } else {
3786 serve::run(source, backend.as_deref()).await;
3787 }
3788 }
3789 }
3790 Commands::Status { frontier, json } => cmd_status(&frontier, json),
3791 Commands::Log {
3792 frontier,
3793 limit,
3794 kind,
3795 json,
3796 } => cmd_log(&frontier, limit, kind.as_deref(), json),
3797 Commands::Inbox {
3798 frontier,
3799 kind,
3800 limit,
3801 json,
3802 } => cmd_inbox(&frontier, kind.as_deref(), limit, json),
3803 Commands::Ask {
3804 frontier,
3805 question,
3806 json,
3807 } => cmd_ask(&frontier, &question.join(" "), json),
3808 Commands::Stats { frontier, json } => {
3809 if json {
3810 print_stats_json(&frontier);
3811 } else {
3812 cmd_stats(&frontier);
3813 }
3814 }
3815 Commands::Search {
3816 source,
3817 query,
3818 entity,
3819 r#type,
3820 all,
3821 limit,
3822 json,
3823 } => cmd_search(
3824 source.as_deref(),
3825 &query,
3826 entity.as_deref(),
3827 r#type.as_deref(),
3828 all.as_deref(),
3829 limit,
3830 json,
3831 ),
3832 Commands::Tensions {
3833 source,
3834 both_high,
3835 cross_domain,
3836 top,
3837 json,
3838 } => cmd_tensions(&source, both_high, cross_domain, top, json),
3839 Commands::Gaps { action } => cmd_gaps(action),
3840 Commands::Bridge {
3841 inputs,
3842 novelty,
3843 top,
3844 } => cmd_bridge(&inputs, novelty, top).await,
3845 Commands::Export {
3846 frontier,
3847 format,
3848 output,
3849 } => export::run(&frontier, &format, output.as_deref()),
3850 Commands::Packet { action } => cmd_packet(action),
3851 Commands::Verify { path, json } => cmd_verify(&path, json),
3852 Commands::Bench {
3853 frontier,
3854 gold,
3855 candidate,
3856 sources,
3857 threshold,
3858 report,
3859 entity_gold,
3860 link_gold,
3861 suite,
3862 suite_ready,
3863 min_f1,
3864 min_precision,
3865 min_recall,
3866 no_thresholds,
3867 json,
3868 } => {
3869 if let Some(cand) = candidate.clone() {
3874 let Some(g) = gold.clone() else {
3875 eprintln!(
3876 "{} `vela bench --candidate <…>` requires `--gold <…>`",
3877 style::err_prefix()
3878 );
3879 std::process::exit(2);
3880 };
3881 cmd_agent_bench(
3882 &g,
3883 &cand,
3884 sources.as_deref(),
3885 threshold,
3886 report.as_deref(),
3887 json,
3888 );
3889 } else {
3890 cmd_bench(BenchArgs {
3891 frontier,
3892 gold,
3893 entity_gold,
3894 link_gold,
3895 suite,
3896 suite_ready,
3897 min_f1,
3898 min_precision,
3899 min_recall,
3900 no_thresholds,
3901 json,
3902 });
3903 }
3904 }
3905 Commands::Conformance { dir } => {
3906 let _ = conformance::run(&dir);
3907 }
3908 Commands::Version => println!("vela 0.36.0"),
3909 Commands::Sign { action } => cmd_sign(action),
3910 Commands::Actor { action } => cmd_actor(action),
3911 Commands::Federation { action } => cmd_federation(action),
3912 Commands::Causal { action } => cmd_causal(action),
3913 Commands::Frontier { action } => cmd_frontier(action),
3914 Commands::Queue { action } => cmd_queue(action),
3915 Commands::Registry { action } => cmd_registry(action),
3916 Commands::Init {
3917 path,
3918 name,
3919 template,
3920 no_git,
3921 json,
3922 } => cmd_init(&path, &name, &template, !no_git, json),
3923 Commands::Quickstart {
3924 path,
3925 name,
3926 reviewer,
3927 assertion,
3928 keys_out,
3929 json,
3930 } => cmd_quickstart(
3931 &path,
3932 &name,
3933 &reviewer,
3934 assertion.as_deref(),
3935 keys_out.as_deref(),
3936 json,
3937 ),
3938 Commands::Agent { action } => cmd_agent(action),
3939 Commands::Lock { path, check, json } => cmd_lock(&path, check, json),
3940 Commands::Doc { path, out, json } => cmd_doc(&path, out.as_deref(), json),
3941 Commands::Import { frontier, into } => cmd_import(&frontier, into.as_deref()),
3942 Commands::Diff {
3943 target,
3944 frontier_b,
3945 frontier,
3946 reviewer,
3947 from,
3948 json,
3949 quiet,
3950 } => {
3951 if target.starts_with("vpr_") {
3956 let frontier_root = frontier
3957 .clone()
3958 .or_else(|| frontier_b.clone().map(std::path::PathBuf::from))
3959 .unwrap_or_else(|| std::path::PathBuf::from("."));
3960 let preview = proposals::preview_at_path(&frontier_root, &target, &reviewer)
3961 .unwrap_or_else(|e| fail_return(&e));
3962 let payload = json!({
3963 "ok": true,
3964 "command": "diff.proposal",
3965 "frontier": frontier_root.display().to_string(),
3966 "proposal_id": target,
3967 "preview": preview,
3968 });
3969 if json {
3970 println!(
3971 "{}",
3972 serde_json::to_string_pretty(&payload)
3973 .expect("failed to serialize diff preview")
3974 );
3975 } else {
3976 println!("vela diff · proposal preview");
3977 println!(" proposal: {}", target);
3978 println!(" kind: {}", preview.kind);
3979 println!(
3980 " findings: {} -> {}",
3981 preview.findings_before, preview.findings_after
3982 );
3983 println!(
3984 " artifacts: {} -> {}",
3985 preview.artifacts_before, preview.artifacts_after
3986 );
3987 println!(
3988 " events: {} -> {}",
3989 preview.events_before, preview.events_after
3990 );
3991 if !preview.changed_findings.is_empty() {
3992 println!(
3993 " findings changed: {}",
3994 preview.changed_findings.join(", ")
3995 );
3996 }
3997 }
3998 } else {
3999 let b_str = frontier_b.unwrap_or_else(|| {
4000 fail_return(
4001 "diff: two-frontier mode needs a second positional (filesystem path or `vfr_*` id); for proposal preview pass a `vpr_*` id",
4002 )
4003 });
4004 let _tmp = if target.starts_with("vfr_") || b_str.starts_with("vfr_") {
4010 Some(
4011 tempfile::Builder::new()
4012 .prefix("vela-diff-")
4013 .tempdir()
4014 .unwrap_or_else(|e| {
4015 fail_return(&format!("tempdir for vfr resolve: {e}"))
4016 }),
4017 )
4018 } else {
4019 None
4020 };
4021 let resolve_side = |side: &str, slot: &str| -> std::path::PathBuf {
4022 if side.starts_with("vfr_") {
4023 let tmp = _tmp.as_ref().expect("tempdir initialized above");
4024 let dest = tmp.path().join(format!("{slot}-{side}.json"));
4025 resolve_vfr_to_path(side, from.as_deref(), &dest)
4026 .unwrap_or_else(|e| fail_return(&e));
4027 dest
4028 } else {
4029 std::path::PathBuf::from(side)
4030 }
4031 };
4032 let frontier_a = resolve_side(&target, "a");
4033 let frontier_b_path = resolve_side(&b_str, "b");
4034 diff::run(&frontier_a, &frontier_b_path, json, quiet);
4035 }
4036 }
4037 Commands::Proposals { action } => cmd_proposals(action),
4038 Commands::SearchIndex { action } => cmd_search_index(action).await,
4039 Commands::ArtifactToState {
4040 frontier,
4041 packet,
4042 actor,
4043 apply_artifacts,
4044 json,
4045 } => cmd_artifact_to_state(&frontier, &packet, &actor, apply_artifacts, json),
4046 Commands::BridgeKit { action } => cmd_bridge_kit(action).await,
4047 Commands::SourceAdapter { action } => cmd_source_adapter(action).await,
4048 Commands::RuntimeAdapter { action } => cmd_runtime_adapter(action),
4049 Commands::Link { action } => cmd_link(action),
4050 Commands::Workbench {
4051 path,
4052 port,
4053 no_open,
4054 } => {
4055 if let Err(e) = crate::workbench::run(path, port, !no_open).await {
4056 fail(&e);
4057 }
4058 }
4059 Commands::Bridges { action } => cmd_bridges(action),
4060 Commands::Entity { action } => cmd_entity(action),
4061 Commands::Finding { command } => match command {
4062 FindingCommands::Add {
4063 frontier,
4064 assertion,
4065 r#type,
4066 source,
4067 source_type,
4068 author,
4069 confidence,
4070 evidence_type,
4071 entities,
4072 entities_reviewed,
4073 evidence_span,
4074 gap,
4075 negative_space,
4076 doi,
4077 pmid,
4078 year,
4079 journal,
4080 url,
4081 source_authors,
4082 conditions_text,
4083 species,
4084 in_vivo,
4085 in_vitro,
4086 human_data,
4087 clinical_trial,
4088 json,
4089 apply,
4090 } => {
4091 validate_enum_arg("--type", &r#type, bundle::VALID_ASSERTION_TYPES);
4092 validate_enum_arg(
4093 "--evidence-type",
4094 &evidence_type,
4095 bundle::VALID_EVIDENCE_TYPES,
4096 );
4097 validate_enum_arg(
4098 "--source-type",
4099 &source_type,
4100 bundle::VALID_PROVENANCE_SOURCE_TYPES,
4101 );
4102 let parsed_entities = parse_entities(&entities);
4103 let parsed_evidence_spans = parse_evidence_spans(&evidence_span);
4104 for (name, etype) in &parsed_entities {
4105 if !bundle::VALID_ENTITY_TYPES.contains(&etype.as_str()) {
4106 fail(&format!(
4107 "invalid entity type '{}' for '{}'. Valid: {}",
4108 etype,
4109 name,
4110 bundle::VALID_ENTITY_TYPES.join(", "),
4111 ));
4112 }
4113 }
4114 let parsed_source_authors = source_authors
4115 .map(|s| {
4116 s.split(';')
4117 .map(|a| a.trim().to_string())
4118 .filter(|a| !a.is_empty())
4119 .collect()
4120 })
4121 .unwrap_or_default();
4122 let parsed_species = species
4123 .map(|s| {
4124 s.split(';')
4125 .map(|a| a.trim().to_string())
4126 .filter(|a| !a.is_empty())
4127 .collect()
4128 })
4129 .unwrap_or_default();
4130 let report = state::add_finding(
4131 &frontier,
4132 state::FindingDraftOptions {
4133 text: assertion,
4134 assertion_type: r#type,
4135 source,
4136 source_type,
4137 author,
4138 confidence,
4139 evidence_type,
4140 entities: parsed_entities,
4141 doi,
4142 pmid,
4143 year,
4144 journal,
4145 url,
4146 source_authors: parsed_source_authors,
4147 conditions_text,
4148 species: parsed_species,
4149 in_vivo,
4150 in_vitro,
4151 human_data,
4152 clinical_trial,
4153 entities_reviewed,
4154 evidence_spans: parsed_evidence_spans,
4155 gap,
4156 negative_space,
4157 },
4158 apply,
4159 )
4160 .unwrap_or_else(|e| fail_return(&e));
4161 print_state_report(&report, json);
4162 }
4163 FindingCommands::Supersede {
4164 frontier,
4165 old_id,
4166 assertion,
4167 r#type,
4168 source,
4169 source_type,
4170 author,
4171 reason,
4172 confidence,
4173 evidence_type,
4174 entities,
4175 doi,
4176 pmid,
4177 year,
4178 journal,
4179 url,
4180 source_authors,
4181 conditions_text,
4182 species,
4183 in_vivo,
4184 in_vitro,
4185 human_data,
4186 clinical_trial,
4187 json,
4188 apply,
4189 } => {
4190 validate_enum_arg("--type", &r#type, bundle::VALID_ASSERTION_TYPES);
4191 validate_enum_arg(
4192 "--evidence-type",
4193 &evidence_type,
4194 bundle::VALID_EVIDENCE_TYPES,
4195 );
4196 validate_enum_arg(
4197 "--source-type",
4198 &source_type,
4199 bundle::VALID_PROVENANCE_SOURCE_TYPES,
4200 );
4201 let parsed_entities = parse_entities(&entities);
4202 for (name, etype) in &parsed_entities {
4203 if !bundle::VALID_ENTITY_TYPES.contains(&etype.as_str()) {
4204 fail(&format!(
4205 "invalid entity type '{}' for '{}'. Valid: {}",
4206 etype,
4207 name,
4208 bundle::VALID_ENTITY_TYPES.join(", "),
4209 ));
4210 }
4211 }
4212 let parsed_source_authors = source_authors
4213 .map(|s| {
4214 s.split(';')
4215 .map(|a| a.trim().to_string())
4216 .filter(|a| !a.is_empty())
4217 .collect()
4218 })
4219 .unwrap_or_default();
4220 let parsed_species = species
4221 .map(|s| {
4222 s.split(';')
4223 .map(|a| a.trim().to_string())
4224 .filter(|a| !a.is_empty())
4225 .collect()
4226 })
4227 .unwrap_or_default();
4228 let report = state::supersede_finding(
4229 &frontier,
4230 &old_id,
4231 &reason,
4232 state::FindingDraftOptions {
4233 text: assertion,
4234 assertion_type: r#type,
4235 source,
4236 source_type,
4237 author,
4238 confidence,
4239 evidence_type,
4240 entities: parsed_entities,
4241 doi,
4242 pmid,
4243 year,
4244 journal,
4245 url,
4246 source_authors: parsed_source_authors,
4247 conditions_text,
4248 species: parsed_species,
4249 in_vivo,
4250 in_vitro,
4251 human_data,
4252 clinical_trial,
4253 entities_reviewed: false,
4254 evidence_spans: Vec::new(),
4255 gap: false,
4256 negative_space: false,
4257 },
4258 apply,
4259 )
4260 .unwrap_or_else(|e| fail_return(&e));
4261 print_state_report(&report, json);
4262 }
4263 FindingCommands::CausalSet {
4264 frontier,
4265 finding_id,
4266 claim,
4267 grade,
4268 actor,
4269 reason,
4270 json,
4271 } => {
4272 if !bundle::VALID_CAUSAL_CLAIMS.contains(&claim.as_str()) {
4273 fail(&format!(
4274 "invalid --claim '{claim}'; valid: {:?}",
4275 bundle::VALID_CAUSAL_CLAIMS
4276 ));
4277 }
4278 if let Some(g) = grade.as_deref()
4279 && !bundle::VALID_CAUSAL_EVIDENCE_GRADES.contains(&g)
4280 {
4281 fail(&format!(
4282 "invalid --grade '{g}'; valid: {:?}",
4283 bundle::VALID_CAUSAL_EVIDENCE_GRADES
4284 ));
4285 }
4286 let report = state::set_causal(
4287 &frontier,
4288 &finding_id,
4289 &claim,
4290 grade.as_deref(),
4291 &actor,
4292 &reason,
4293 )
4294 .unwrap_or_else(|e| fail_return(&e));
4295 print_state_report(&report, json);
4296 }
4297 },
4298 Commands::Review {
4299 frontier,
4300 finding_id,
4301 status,
4302 reason,
4303 reviewer,
4304 apply,
4305 json,
4306 } => {
4307 let status = status.unwrap_or_else(|| fail_return("--status is required for review"));
4308 let reason = reason.unwrap_or_else(|| fail_return("--reason is required for review"));
4309 let report = state::review_finding(
4310 &frontier,
4311 &finding_id,
4312 state::ReviewOptions {
4313 status,
4314 reason,
4315 reviewer,
4316 },
4317 apply,
4318 )
4319 .unwrap_or_else(|e| fail_return(&e));
4320 print_state_report(&report, json);
4321 }
4322 Commands::Note {
4323 frontier,
4324 finding_id,
4325 text,
4326 author,
4327 apply,
4328 json,
4329 } => {
4330 let report = state::add_note(&frontier, &finding_id, &text, &author, apply)
4331 .unwrap_or_else(|e| fail_return(&e));
4332 print_state_report(&report, json);
4333 }
4334 Commands::Caveat {
4335 frontier,
4336 finding_id,
4337 text,
4338 author,
4339 apply,
4340 json,
4341 } => {
4342 let report = state::caveat_finding(&frontier, &finding_id, &text, &author, apply)
4343 .unwrap_or_else(|e| fail_return(&e));
4344 print_state_report(&report, json);
4345 }
4346 Commands::Revise {
4347 frontier,
4348 finding_id,
4349 confidence,
4350 reason,
4351 reviewer,
4352 apply,
4353 json,
4354 } => {
4355 let report = state::revise_confidence(
4356 &frontier,
4357 &finding_id,
4358 state::ReviseOptions {
4359 confidence,
4360 reason,
4361 reviewer,
4362 },
4363 apply,
4364 )
4365 .unwrap_or_else(|e| fail_return(&e));
4366 print_state_report(&report, json);
4367 }
4368 Commands::Reject {
4369 frontier,
4370 finding_id,
4371 reason,
4372 reviewer,
4373 apply,
4374 json,
4375 } => {
4376 let report = state::reject_finding(&frontier, &finding_id, &reviewer, &reason, apply)
4377 .unwrap_or_else(|e| fail_return(&e));
4378 print_state_report(&report, json);
4379 }
4380 Commands::History {
4381 frontier,
4382 finding_id,
4383 json,
4384 as_of,
4385 } => {
4386 let payload = state::history_as_of(&frontier, &finding_id, as_of.as_deref())
4387 .unwrap_or_else(|e| fail_return(&e));
4388 if json {
4389 println!(
4390 "{}",
4391 serde_json::to_string_pretty(&payload)
4392 .expect("failed to serialize history response")
4393 );
4394 } else {
4395 print_history(&payload);
4396 }
4397 }
4398 Commands::ImportEvents { source, into, json } => {
4399 let report =
4400 review::import_review_events(&source, &into).unwrap_or_else(|e| fail_return(&e));
4401 if json {
4402 println!(
4403 "{}",
4404 serde_json::to_string_pretty(&json!({
4405 "ok": true,
4406 "command": "import-events",
4407 "source": report.source,
4408 "target": into.display().to_string(),
4409 "summary": {
4410 "imported": report.imported,
4411 "new": report.new,
4412 "duplicate": report.duplicate,
4413 "canonical_events_imported": report.events_imported,
4414 "canonical_events_new": report.events_new,
4415 "canonical_events_duplicate": report.events_duplicate,
4416 }
4417 }))
4418 .expect("failed to serialize import-events response")
4419 );
4420 } else {
4421 println!("{report}");
4422 }
4423 }
4424 Commands::Retract {
4425 source,
4426 finding_id,
4427 reason,
4428 reviewer,
4429 apply,
4430 json,
4431 } => {
4432 let report = state::retract_finding(&source, &finding_id, &reviewer, &reason, apply)
4433 .unwrap_or_else(|e| fail_return(&e));
4434 print_state_report(&report, json);
4435 }
4436 Commands::LocatorRepair {
4437 frontier,
4438 atom_id,
4439 locator,
4440 reviewer,
4441 reason,
4442 apply,
4443 json,
4444 } => {
4445 cmd_locator_repair(
4446 &frontier,
4447 &atom_id,
4448 locator.as_deref(),
4449 &reviewer,
4450 &reason,
4451 apply,
4452 json,
4453 );
4454 }
4455 Commands::SourceFetch {
4456 identifier,
4457 cache,
4458 out,
4459 refresh,
4460 json,
4461 } => {
4462 cmd_source_fetch(&identifier, cache.as_deref(), out.as_deref(), refresh, json).await;
4463 }
4464 Commands::SpanRepair {
4465 frontier,
4466 finding_id,
4467 section,
4468 text,
4469 reviewer,
4470 reason,
4471 apply,
4472 json,
4473 } => {
4474 cmd_span_repair(
4475 &frontier,
4476 &finding_id,
4477 §ion,
4478 &text,
4479 &reviewer,
4480 &reason,
4481 apply,
4482 json,
4483 );
4484 }
4485 Commands::ProofAdd {
4486 frontier,
4487 target_finding,
4488 tool,
4489 tool_version,
4490 script_path,
4491 name,
4492 reviewer,
4493 reason,
4494 json,
4495 } => {
4496 cmd_proof_add(
4497 &frontier,
4498 &target_finding,
4499 &tool,
4500 &tool_version,
4501 &script_path,
4502 &name,
4503 &reviewer,
4504 &reason,
4505 json,
4506 );
4507 }
4508 Commands::ProofAttestVerification {
4509 proof_id,
4510 tool,
4511 tool_version,
4512 script_locator,
4513 lake_manifest_hash,
4514 verifier_output_hash,
4515 status,
4516 verifier_actor,
4517 key,
4518 out,
4519 json,
4520 } => cmd_proof_attest_verification(
4521 proof_id,
4522 tool,
4523 tool_version,
4524 script_locator,
4525 lake_manifest_hash,
4526 verifier_output_hash,
4527 status,
4528 verifier_actor,
4529 key,
4530 out,
4531 json,
4532 ),
4533 Commands::ProofVerifyAttestation { record, json } => {
4534 cmd_proof_verify_attestation(record, json)
4535 }
4536 Commands::EntityAdd {
4537 frontier,
4538 finding_id,
4539 entity,
4540 entity_type,
4541 reviewer,
4542 reason,
4543 apply,
4544 json,
4545 } => {
4546 let report = state::add_finding_entity(
4547 &frontier,
4548 &finding_id,
4549 &entity,
4550 &entity_type,
4551 &reviewer,
4552 &reason,
4553 apply,
4554 )
4555 .unwrap_or_else(|e| fail_return(&e));
4556 print_state_report(&report, json);
4557 }
4558 Commands::EntityResolve {
4559 frontier,
4560 finding_id,
4561 entity,
4562 source,
4563 id,
4564 confidence,
4565 matched_name,
4566 resolution_method,
4567 reviewer,
4568 reason,
4569 apply,
4570 json,
4571 } => {
4572 cmd_entity_resolve(
4573 &frontier,
4574 &finding_id,
4575 &entity,
4576 &source,
4577 &id,
4578 confidence,
4579 matched_name.as_deref(),
4580 &resolution_method,
4581 &reviewer,
4582 &reason,
4583 apply,
4584 json,
4585 );
4586 }
4587 Commands::Propagate {
4588 frontier,
4589 retract,
4590 reduce_confidence,
4591 to,
4592 output,
4593 } => cmd_propagate(&frontier, retract, reduce_confidence, to, output.as_deref()),
4594 Commands::Replicate {
4595 frontier,
4596 target,
4597 outcome,
4598 by,
4599 conditions,
4600 source_title,
4601 doi,
4602 pmid,
4603 sample_size,
4604 note,
4605 previous_attempt,
4606 no_cascade,
4607 json,
4608 } => cmd_replicate(
4609 &frontier,
4610 &target,
4611 &outcome,
4612 &by,
4613 &conditions,
4614 &source_title,
4615 doi.as_deref(),
4616 pmid.as_deref(),
4617 sample_size.as_deref(),
4618 ¬e,
4619 previous_attempt.as_deref(),
4620 no_cascade,
4621 json,
4622 ),
4623 Commands::Replications {
4624 frontier,
4625 target,
4626 json,
4627 } => cmd_replications(&frontier, target.as_deref(), json),
4628 Commands::DatasetAdd {
4629 frontier,
4630 name,
4631 version,
4632 content_hash,
4633 url,
4634 license,
4635 source_title,
4636 doi,
4637 row_count,
4638 json,
4639 } => cmd_dataset_add(
4640 &frontier,
4641 &name,
4642 version.as_deref(),
4643 &content_hash,
4644 url.as_deref(),
4645 license.as_deref(),
4646 &source_title,
4647 doi.as_deref(),
4648 row_count,
4649 json,
4650 ),
4651 Commands::Datasets { frontier, json } => cmd_datasets(&frontier, json),
4652 Commands::CodeAdd {
4653 frontier,
4654 language,
4655 repo_url,
4656 commit,
4657 path,
4658 content_hash,
4659 line_start,
4660 line_end,
4661 entry_point,
4662 json,
4663 } => cmd_code_add(
4664 &frontier,
4665 &language,
4666 repo_url.as_deref(),
4667 commit.as_deref(),
4668 &path,
4669 &content_hash,
4670 line_start,
4671 line_end,
4672 entry_point.as_deref(),
4673 json,
4674 ),
4675 Commands::CodeArtifacts { frontier, json } => cmd_code_artifacts(&frontier, json),
4676 Commands::ArtifactAdd {
4677 frontier,
4678 kind,
4679 name,
4680 file,
4681 url,
4682 content_hash,
4683 media_type,
4684 license,
4685 source_title,
4686 source_url,
4687 doi,
4688 target,
4689 metadata,
4690 access_tier,
4691 deposited_by,
4692 reason,
4693 json,
4694 } => cmd_artifact_add(
4695 &frontier,
4696 &kind,
4697 &name,
4698 file.as_deref(),
4699 url.as_deref(),
4700 content_hash.as_deref(),
4701 media_type.as_deref(),
4702 license.as_deref(),
4703 source_title.as_deref(),
4704 source_url.as_deref(),
4705 doi.as_deref(),
4706 target,
4707 metadata,
4708 &access_tier,
4709 &deposited_by,
4710 &reason,
4711 json,
4712 ),
4713 Commands::Artifacts {
4714 frontier,
4715 target,
4716 json,
4717 } => cmd_artifacts(&frontier, target.as_deref(), json),
4718 Commands::ArtifactAudit { frontier, json } => cmd_artifact_audit(&frontier, json),
4719 Commands::DecisionBrief { frontier, json } => cmd_decision_brief(&frontier, json),
4720 Commands::TrialSummary { frontier, json } => cmd_trial_summary(&frontier, json),
4721 Commands::SourceVerification { frontier, json } => cmd_source_verification(&frontier, json),
4722 Commands::SourceIngestPlan { frontier, json } => cmd_source_ingest_plan(&frontier, json),
4723 Commands::ClinicalTrialImport {
4724 frontier,
4725 nct_id,
4726 input_json,
4727 target,
4728 deposited_by,
4729 reason,
4730 license,
4731 json,
4732 } => {
4733 cmd_clinical_trial_import(
4734 &frontier,
4735 &nct_id,
4736 input_json.as_deref(),
4737 target,
4738 &deposited_by,
4739 &reason,
4740 &license,
4741 json,
4742 )
4743 .await
4744 }
4745 Commands::NegativeResultAdd {
4746 frontier,
4747 kind,
4748 deposited_by,
4749 reason,
4750 conditions_text,
4751 notes,
4752 target,
4753 endpoint,
4754 intervention,
4755 comparator,
4756 population,
4757 n_enrolled,
4758 power,
4759 ci_lower,
4760 ci_upper,
4761 effect_size_threshold,
4762 registry_id,
4763 reagent,
4764 observation,
4765 attempts,
4766 source_title,
4767 doi,
4768 url,
4769 year,
4770 json,
4771 } => cmd_negative_result_add(
4772 &frontier,
4773 &kind,
4774 &deposited_by,
4775 &reason,
4776 &conditions_text,
4777 ¬es,
4778 target,
4779 endpoint.as_deref(),
4780 intervention.as_deref(),
4781 comparator.as_deref(),
4782 population.as_deref(),
4783 n_enrolled,
4784 power,
4785 ci_lower,
4786 ci_upper,
4787 effect_size_threshold,
4788 registry_id.as_deref(),
4789 reagent.as_deref(),
4790 observation.as_deref(),
4791 attempts,
4792 &source_title,
4793 doi.as_deref(),
4794 url.as_deref(),
4795 year,
4796 json,
4797 ),
4798 Commands::NegativeResults {
4799 frontier,
4800 target,
4801 json,
4802 } => cmd_negative_results(&frontier, target.as_deref(), json),
4803 Commands::TrajectoryCreate {
4804 frontier,
4805 deposited_by,
4806 reason,
4807 target,
4808 notes,
4809 json,
4810 } => cmd_trajectory_create(&frontier, &deposited_by, &reason, target, ¬es, json),
4811 Commands::TrajectoryStep {
4812 frontier,
4813 trajectory_id,
4814 kind,
4815 description,
4816 actor,
4817 reason,
4818 reference,
4819 json,
4820 } => cmd_trajectory_step(
4821 &frontier,
4822 &trajectory_id,
4823 &kind,
4824 &description,
4825 &actor,
4826 &reason,
4827 reference,
4828 json,
4829 ),
4830 Commands::Trajectories {
4831 frontier,
4832 target,
4833 json,
4834 } => cmd_trajectories(&frontier, target.as_deref(), json),
4835 Commands::TierSet {
4836 frontier,
4837 object_type,
4838 object_id,
4839 tier,
4840 actor,
4841 reason,
4842 json,
4843 } => cmd_tier_set(
4844 &frontier,
4845 &object_type,
4846 &object_id,
4847 &tier,
4848 &actor,
4849 &reason,
4850 json,
4851 ),
4852 Commands::Predict {
4853 frontier,
4854 by,
4855 claim,
4856 criterion,
4857 resolves_by,
4858 confidence,
4859 target,
4860 outcome,
4861 conditions,
4862 json,
4863 } => cmd_predict(
4864 &frontier,
4865 &by,
4866 &claim,
4867 &criterion,
4868 resolves_by.as_deref(),
4869 confidence,
4870 &target,
4871 &outcome,
4872 &conditions,
4873 json,
4874 ),
4875 Commands::Resolve {
4876 frontier,
4877 prediction,
4878 outcome,
4879 matched,
4880 by,
4881 confidence,
4882 source_title,
4883 doi,
4884 json,
4885 } => cmd_resolve(
4886 &frontier,
4887 &prediction,
4888 &outcome,
4889 matched,
4890 &by,
4891 confidence,
4892 &source_title,
4893 doi.as_deref(),
4894 json,
4895 ),
4896 Commands::Predictions {
4897 frontier,
4898 by,
4899 open,
4900 json,
4901 } => cmd_predictions(&frontier, by.as_deref(), open, json),
4902 Commands::Calibration {
4903 frontier,
4904 actor,
4905 json,
4906 } => cmd_calibration(&frontier, actor.as_deref(), json),
4907 Commands::PredictionsExpire {
4908 frontier,
4909 now,
4910 dry_run,
4911 json,
4912 } => cmd_predictions_expire(&frontier, now.as_deref(), dry_run, json),
4913 Commands::Consensus {
4914 frontier,
4915 target,
4916 weighting,
4917 causal_claim,
4918 causal_grade_min,
4919 json,
4920 } => cmd_consensus(
4921 &frontier,
4922 &target,
4923 &weighting,
4924 causal_claim.as_deref(),
4925 causal_grade_min.as_deref(),
4926 json,
4927 ),
4928
4929 Commands::Ingest {
4932 path,
4933 frontier,
4934 backend,
4935 actor,
4936 dry_run,
4937 json,
4938 } => {
4939 cmd_ingest(
4940 &path,
4941 &frontier,
4942 backend.as_deref(),
4943 actor.as_deref(),
4944 dry_run,
4945 json,
4946 )
4947 .await
4948 }
4949
4950 Commands::Propose {
4951 frontier,
4952 finding_id,
4953 status,
4954 reason,
4955 reviewer,
4956 apply,
4957 json,
4958 } => {
4959 let options = state::ReviewOptions {
4962 status: status.clone(),
4963 reason: reason.clone(),
4964 reviewer: reviewer.clone(),
4965 };
4966 let report = state::review_finding(&frontier, &finding_id, options, apply)
4967 .unwrap_or_else(|e| fail_return(&e));
4968 print_state_report(&report, json);
4969 }
4970
4971 Commands::Accept {
4972 frontier,
4973 proposal_id,
4974 reviewer,
4975 reason,
4976 json,
4977 } => {
4978 let event_id = proposals::accept_at_path(&frontier, &proposal_id, &reviewer, &reason)
4980 .unwrap_or_else(|e| fail_return(&e));
4981 let payload = json!({
4982 "ok": true,
4983 "command": "accept",
4984 "frontier": frontier.display().to_string(),
4985 "proposal_id": proposal_id,
4986 "reviewer": reviewer,
4987 "applied_event_id": event_id,
4988 });
4989 if json {
4990 println!(
4991 "{}",
4992 serde_json::to_string_pretty(&payload)
4993 .expect("failed to serialize accept response")
4994 );
4995 } else {
4996 println!(
4997 "{} accepted and applied proposal {}",
4998 style::ok("ok"),
4999 proposal_id
5000 );
5001 println!(" event: {}", event_id);
5002 }
5003 }
5004
5005 Commands::Attest {
5006 frontier,
5007 event,
5008 attester,
5009 scope_note,
5010 proof_id,
5011 signature,
5012 key,
5013 json,
5014 } => {
5015 if let Some(target_event_id) = event {
5019 let attester_id = attester.unwrap_or_else(|| {
5020 fail_return("attest: --attester is required in per-event mode")
5021 });
5022 let scope = scope_note.unwrap_or_else(|| {
5023 fail_return("attest: --scope-note is required in per-event mode")
5024 });
5025 let attestation_event_id = state::record_attestation(
5026 &frontier,
5027 &target_event_id,
5028 &attester_id,
5029 &scope,
5030 proof_id.as_deref(),
5031 signature.as_deref(),
5032 )
5033 .unwrap_or_else(|e| fail_return(&e));
5034 if json {
5035 let payload = json!({
5036 "ok": true,
5037 "command": "attest.event",
5038 "frontier": frontier.display().to_string(),
5039 "target_event_id": target_event_id,
5040 "attestation_event_id": attestation_event_id,
5041 "attester_id": attester_id,
5042 });
5043 println!(
5044 "{}",
5045 serde_json::to_string_pretty(&payload)
5046 .expect("failed to serialize attest.event response")
5047 );
5048 } else {
5049 println!(
5050 "{} attested {} by {} ({})",
5051 style::ok("ok"),
5052 target_event_id,
5053 attester_id,
5054 attestation_event_id
5055 );
5056 }
5057 return;
5058 }
5059 let key_path = key.unwrap_or_else(|| {
5061 fail_return(
5062 "attest: --key is required in frontier-wide mode (or pass --event for per-event mode)",
5063 )
5064 });
5065 let count =
5066 sign::sign_frontier(&frontier, &key_path).unwrap_or_else(|e| fail_return(&e));
5067 let payload = json!({
5068 "ok": true,
5069 "command": "attest",
5070 "frontier": frontier.display().to_string(),
5071 "private_key": key_path.display().to_string(),
5072 "signed": count,
5073 });
5074 if json {
5075 println!(
5076 "{}",
5077 serde_json::to_string_pretty(&payload)
5078 .expect("failed to serialize attest response")
5079 );
5080 } else {
5081 println!(
5082 "{} {count} findings in {}",
5083 style::ok("attested"),
5084 frontier.display()
5085 );
5086 }
5087 }
5088
5089 Commands::Lineage {
5090 frontier,
5091 finding_id,
5092 as_of,
5093 json,
5094 } => {
5095 let payload = state::history_as_of(&frontier, &finding_id, as_of.as_deref())
5097 .unwrap_or_else(|e| fail_return(&e));
5098 if json {
5099 println!(
5100 "{}",
5101 serde_json::to_string_pretty(&payload)
5102 .expect("failed to serialize lineage response")
5103 );
5104 } else {
5105 print_history(&payload);
5106 }
5107 }
5108
5109 Commands::Carina { action } => cmd_carina(action),
5110
5111 Commands::Atlas { action } => cmd_atlas(action).await,
5112
5113 Commands::Constellation { action } => cmd_constellation(action).await,
5114 }
5115}
5116
5117async fn cmd_atlas(action: AtlasAction) {
5122 match action {
5123 AtlasAction::Init {
5124 name,
5125 frontiers,
5126 domain,
5127 scope_note,
5128 atlases_root,
5129 json,
5130 } => match ATLAS_INIT_HANDLER.get() {
5131 Some(handler) => {
5132 handler(atlases_root, name, domain, scope_note, frontiers, json).await;
5133 }
5134 None => fail("vela atlas init: handler not registered (built without vela-atlas)"),
5135 },
5136 AtlasAction::Materialize {
5137 name,
5138 atlases_root,
5139 json,
5140 } => match ATLAS_MATERIALIZE_HANDLER.get() {
5141 Some(handler) => handler(atlases_root, name, json).await,
5142 None => fail("vela atlas materialize: handler not registered"),
5143 },
5144 AtlasAction::Serve {
5145 name,
5146 atlases_root,
5147 port,
5148 no_open,
5149 } => {
5150 match ATLAS_SERVE_HANDLER.get() {
5154 Some(handler) => handler(atlases_root, name, port, !no_open).await,
5155 None => fail("vela atlas serve: handler not registered"),
5156 }
5157 }
5158 AtlasAction::Update {
5159 name,
5160 add_frontier,
5161 remove_vfr_id,
5162 atlases_root,
5163 json,
5164 } => match ATLAS_UPDATE_HANDLER.get() {
5165 Some(handler) => {
5166 handler(atlases_root, name, add_frontier, remove_vfr_id, json).await;
5167 }
5168 None => fail("vela atlas update: handler not registered"),
5169 },
5170 }
5171}
5172
5173async fn cmd_constellation(action: ConstellationAction) {
5177 match action {
5178 ConstellationAction::Init {
5179 name,
5180 atlases,
5181 scope_note,
5182 constellations_root,
5183 json,
5184 } => match CONSTELLATION_INIT_HANDLER.get() {
5185 Some(handler) => {
5186 handler(constellations_root, name, scope_note, atlases, json).await;
5187 }
5188 None => fail(
5189 "vela constellation init: handler not registered (built without vela-constellation)",
5190 ),
5191 },
5192 ConstellationAction::Materialize {
5193 name,
5194 constellations_root,
5195 json,
5196 } => match CONSTELLATION_MATERIALIZE_HANDLER.get() {
5197 Some(handler) => handler(constellations_root, name, json).await,
5198 None => fail("vela constellation materialize: handler not registered"),
5199 },
5200 ConstellationAction::Serve {
5201 name,
5202 constellations_root,
5203 port,
5204 no_open,
5205 } => match CONSTELLATION_SERVE_HANDLER.get() {
5206 Some(handler) => handler(constellations_root, name, port, !no_open).await,
5207 None => fail("vela constellation serve: handler not registered"),
5208 },
5209 }
5210}
5211
5212fn cmd_carina(action: CarinaAction) {
5215 match action {
5216 CarinaAction::List { json } => {
5217 if json {
5218 println!(
5219 "{}",
5220 serde_json::to_string_pretty(&json!({
5221 "ok": true,
5222 "command": "carina.list",
5223 "primitives": carina_validate::PRIMITIVE_NAMES,
5224 }))
5225 .expect("failed to serialize carina.list")
5226 );
5227 } else {
5228 println!("Carina primitives bundled with this build:");
5229 for name in carina_validate::PRIMITIVE_NAMES {
5230 println!(" · {name}");
5231 }
5232 }
5233 }
5234 CarinaAction::Schema { primitive } => match carina_validate::schema_text(&primitive) {
5235 Some(text) => print!("{text}"),
5236 None => fail(&format!("carina: unknown primitive '{primitive}'")),
5237 },
5238 CarinaAction::Validate {
5239 path,
5240 primitive,
5241 json,
5242 } => {
5243 let text = std::fs::read_to_string(&path)
5244 .unwrap_or_else(|e| fail_return(&format!("read {}: {e}", path.display())));
5245 let value: Value = serde_json::from_str(&text)
5246 .unwrap_or_else(|e| fail_return(&format!("parse {}: {e}", path.display())));
5247 type CarinaValidateOutcome = Result<Option<&'static str>, Vec<String>>;
5253 let mut report: Vec<(String, CarinaValidateOutcome)> = Vec::new();
5254 if value.get("primitives").and_then(Value::as_object).is_some() && primitive.is_none() {
5255 let primitives = value.get("primitives").and_then(Value::as_object).unwrap();
5256 for (key, child) in primitives {
5257 let outcome = carina_validate::validate(key, child)
5258 .map(|()| carina_validate::detect_primitive(child));
5259 report.push((key.clone(), outcome));
5260 }
5261 } else {
5262 let outcome = match primitive.as_deref() {
5263 Some(name) => carina_validate::validate(name, &value).map(|()| {
5264 carina_validate::PRIMITIVE_NAMES
5265 .iter()
5266 .copied()
5267 .find(|p| *p == name)
5268 }),
5269 None => carina_validate::validate_auto(&value).map(Some),
5270 };
5271 let label = primitive.clone().unwrap_or_else(|| "<auto>".to_string());
5272 report.push((label, outcome));
5273 }
5274
5275 let total = report.len();
5276 let pass = report.iter().filter(|(_, r)| r.is_ok()).count();
5277 let fail = total - pass;
5278
5279 if json {
5280 let entries: Vec<Value> = report
5281 .iter()
5282 .map(|(label, r)| match r {
5283 Ok(name) => json!({
5284 "key": label,
5285 "primitive": name,
5286 "ok": true,
5287 }),
5288 Err(errs) => json!({
5289 "key": label,
5290 "ok": false,
5291 "errors": errs,
5292 }),
5293 })
5294 .collect();
5295 println!(
5296 "{}",
5297 serde_json::to_string_pretty(&json!({
5298 "ok": fail == 0,
5299 "command": "carina.validate",
5300 "file": path.display().to_string(),
5301 "total": total,
5302 "passed": pass,
5303 "failed": fail,
5304 "entries": entries,
5305 }))
5306 .expect("failed to serialize carina.validate")
5307 );
5308 } else {
5309 for (label, r) in &report {
5310 match r {
5311 Ok(Some(name)) => println!(" {} {label} (as {name})", style::ok("ok")),
5312 Ok(None) => println!(" {} {label}", style::ok("ok")),
5313 Err(errs) => {
5314 println!(" {} {label}", style::lost("fail"));
5315 for e in errs {
5316 println!(" {e}");
5317 }
5318 }
5319 }
5320 }
5321 println!();
5322 if fail == 0 {
5323 println!("{} {pass}/{total} valid", style::ok("carina.validate"));
5324 } else {
5325 println!(
5326 "{} {pass}/{total} valid · {fail} failed",
5327 style::lost("carina.validate")
5328 );
5329 }
5330 }
5331
5332 if fail > 0 {
5333 std::process::exit(1);
5334 }
5335 }
5336 }
5337}
5338
5339#[allow(clippy::too_many_arguments)]
5350fn cmd_proof_add(
5351 frontier: &Path,
5352 target_finding: &str,
5353 tool: &str,
5354 tool_version: &str,
5355 script_path: &Path,
5356 name: &str,
5357 reviewer: &str,
5358 reason: &str,
5359 json_output: bool,
5360) {
5361 use std::collections::BTreeMap;
5362
5363 if !target_finding.starts_with("vf_") {
5365 fail(&format!(
5366 "--target-finding must be a vf_* finding id; got `{target_finding}`"
5367 ));
5368 }
5369 let valid_tools = [
5371 "lean4", "coq", "isabelle", "agda", "metamath", "rocq", "other",
5372 ];
5373 if !valid_tools.contains(&tool) {
5374 fail(&format!(
5375 "--tool `{tool}` not in {valid_tools:?}; see embedded/carina-schemas/proof.schema.json"
5376 ));
5377 }
5378
5379 let script_bytes = std::fs::read(script_path)
5381 .unwrap_or_else(|e| fail_return(&format!("read {}: {e}", script_path.display())));
5382 let script_hash_hex = hex::encode(Sha256::digest(&script_bytes));
5383 let script_locator = format!("sha256:{script_hash_hex}");
5384
5385 let vpf_preimage = format!("{script_locator}|{tool}|{tool_version}|{target_finding}");
5389 let vpf_id = format!(
5390 "vpf_{}",
5391 &hex::encode(Sha256::digest(vpf_preimage.as_bytes()))[..16]
5392 );
5393
5394 let verified_at = chrono::Utc::now().to_rfc3339();
5397 let proof_primitive = json!({
5398 "schema": "carina.proof.v0.3",
5399 "id": vpf_id,
5400 "tool": tool,
5401 "tool_version": tool_version,
5402 "script_locator": script_locator,
5403 "verifier_output_hash": format!("sha256:{}", "0".repeat(64)),
5407 "verified_at": verified_at,
5408 "target_finding_id": target_finding,
5409 });
5410 if let Err(errs) = carina_validate::validate("proof", &proof_primitive) {
5411 fail(&format!(
5412 "constructed Proof primitive does not validate against proof.schema.json:\n - {}",
5413 errs.join("\n - ")
5414 ));
5415 }
5416
5417 let mut metadata: BTreeMap<String, Value> = BTreeMap::new();
5419 metadata.insert(
5420 "carina_kind".to_string(),
5421 Value::String("proof_script".to_string()),
5422 );
5423 metadata.insert(
5424 "carina_proof_tool".to_string(),
5425 Value::String(tool.to_string()),
5426 );
5427 metadata.insert(
5428 "carina_proof_tool_version".to_string(),
5429 Value::String(tool_version.to_string()),
5430 );
5431 metadata.insert("carina_proof_id".to_string(), Value::String(vpf_id.clone()));
5432 metadata.insert(
5433 "carina_proof_target_finding".to_string(),
5434 Value::String(target_finding.to_string()),
5435 );
5436
5437 let media_type = match tool {
5438 "lean4" | "rocq" => Some("text/x-lean".to_string()),
5439 "coq" => Some("text/x-coq".to_string()),
5440 "isabelle" => Some("text/x-isabelle".to_string()),
5441 "agda" => Some("text/x-agda".to_string()),
5442 "metamath" => Some("text/x-metamath".to_string()),
5443 _ => None,
5444 };
5445
5446 let provenance = crate::bundle::Provenance {
5447 source_type: "code_repository".to_string(),
5448 doi: None,
5449 pmid: None,
5450 pmc: None,
5451 openalex_id: None,
5452 url: None,
5453 title: format!("Proof script for {target_finding} ({tool} {tool_version})"),
5454 authors: Vec::new(),
5455 year: None,
5456 journal: None,
5457 license: Some("Apache-2.0 OR MIT".to_string()),
5458 publisher: None,
5459 funders: Vec::new(),
5460 extraction: crate::bundle::Extraction::default(),
5461 review: None,
5462 citation_count: None,
5463 };
5464
5465 let artifact_id = crate::bundle::Artifact::content_address(
5466 "source_file",
5467 name,
5468 &format!("sha256:{script_hash_hex}"),
5469 None,
5470 Some(&script_path.display().to_string()),
5471 );
5472
5473 let artifact = crate::bundle::Artifact {
5474 id: artifact_id.clone(),
5475 kind: "source_file".to_string(),
5476 name: name.to_string(),
5477 content_hash: format!("sha256:{script_hash_hex}"),
5478 size_bytes: Some(script_bytes.len() as u64),
5479 media_type,
5480 storage_mode: "pointer".to_string(),
5481 locator: Some(script_path.display().to_string()),
5482 source_url: None,
5483 license: Some("Apache-2.0 OR MIT".to_string()),
5484 target_findings: vec![target_finding.to_string()],
5485 source_id: None,
5486 provenance,
5487 metadata,
5488 review_state: None,
5489 retracted: false,
5490 access_tier: crate::access_tier::AccessTier::default(),
5491 created: verified_at.clone(),
5492 };
5493
5494 let report = state::add_artifact(frontier, artifact, reviewer, reason)
5498 .unwrap_or_else(|e| fail_return(&e));
5499
5500 let payload = json!({
5502 "ok": true,
5503 "command": "proof-add",
5504 "frontier": frontier.display().to_string(),
5505 "target_finding": target_finding,
5506 "tool": tool,
5507 "tool_version": tool_version,
5508 "script_path": script_path.display().to_string(),
5509 "script_locator": script_locator,
5510 "size_bytes": script_bytes.len(),
5511 "vpf_id": vpf_id,
5512 "va_id": artifact_id,
5513 "applied_event_id": report.applied_event_id,
5514 "verified_at": verified_at,
5515 "reviewer": reviewer,
5516 });
5517
5518 if json_output {
5519 println!(
5520 "{}",
5521 serde_json::to_string_pretty(&payload).expect("failed to serialize proof-add response")
5522 );
5523 } else {
5524 println!(
5525 "{} proof artifact deposited for {target_finding}",
5526 style::ok("ok")
5527 );
5528 println!(" vpf_id: {vpf_id}");
5529 println!(" va_id: {artifact_id}");
5530 println!(" locator: {script_locator}");
5531 println!(" tool: {tool} {tool_version}");
5532 if let Some(eid) = &report.applied_event_id {
5533 println!(" event: {eid}");
5534 }
5535 }
5536}
5537
5538fn cmd_consensus(
5541 frontier: &Path,
5542 target: &str,
5543 weighting_str: &str,
5544 causal_claim: Option<&str>,
5545 causal_grade_min: Option<&str>,
5546 json: bool,
5547) {
5548 use crate::bundle::{CausalClaim, CausalEvidenceGrade};
5549
5550 if !target.starts_with("vf_") {
5551 fail(&format!("target `{target}` is not a vf_ finding id"));
5552 }
5553 let scheme =
5554 crate::aggregate::WeightingScheme::parse(weighting_str).unwrap_or_else(|e| fail_return(&e));
5555
5556 let parsed_claim = match causal_claim {
5557 None => None,
5558 Some("correlation") => Some(CausalClaim::Correlation),
5559 Some("mediation") => Some(CausalClaim::Mediation),
5560 Some("intervention") => Some(CausalClaim::Intervention),
5561 Some(other) => fail_return(&format!(
5562 "invalid --causal-claim '{other}'; valid: correlation | mediation | intervention"
5563 )),
5564 };
5565 let parsed_grade = match causal_grade_min {
5566 None => None,
5567 Some("theoretical") => Some(CausalEvidenceGrade::Theoretical),
5568 Some("observational") => Some(CausalEvidenceGrade::Observational),
5569 Some("quasi_experimental") => Some(CausalEvidenceGrade::QuasiExperimental),
5570 Some("rct") => Some(CausalEvidenceGrade::Rct),
5571 Some(other) => fail_return(&format!(
5572 "invalid --causal-grade-min '{other}'; valid: theoretical | observational | quasi_experimental | rct"
5573 )),
5574 };
5575 let filter = crate::aggregate::AggregateFilter {
5576 causal_claim: parsed_claim,
5577 causal_grade_min: parsed_grade,
5578 };
5579 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
5580
5581 let result = crate::aggregate::consensus_for_with_filter(&project, target, scheme, &filter)
5582 .unwrap_or_else(|| fail_return(&format!("target `{target}` not in frontier")));
5583
5584 if json {
5585 println!(
5586 "{}",
5587 serde_json::to_string_pretty(&result).expect("serialize consensus")
5588 );
5589 return;
5590 }
5591
5592 println!();
5593 println!(
5594 " {}",
5595 format!(
5596 "VELA · CONSENSUS · {} ({})",
5597 result.target, result.weighting
5598 )
5599 .to_uppercase()
5600 .dimmed()
5601 );
5602 println!(" {}", style::tick_row(60));
5603 println!(
5604 " target: {}",
5605 truncate(&result.target_assertion, 80)
5606 );
5607 println!(" similar findings: {}", result.n_findings);
5608 println!(
5609 " consensus: {:.3} ({:.3} – {:.3} 95% credible)",
5610 result.consensus_confidence, result.credible_interval_lo, result.credible_interval_hi
5611 );
5612 println!();
5613 println!(" constituents (sorted by weight):");
5614 let mut sorted = result.constituents.clone();
5615 sorted.sort_by(|a, b| {
5616 b.weight
5617 .partial_cmp(&a.weight)
5618 .unwrap_or(std::cmp::Ordering::Equal)
5619 });
5620 for c in sorted.iter().take(10) {
5621 let repls = if c.n_replications > 0 {
5622 format!(" ({}r {}f)", c.n_replicated, c.n_failed_replications)
5623 } else {
5624 String::new()
5625 };
5626 println!(
5627 " · w={:.2} raw={:.2} adj={:.2}{}",
5628 c.weight, c.raw_score, c.adjusted_score, repls
5629 );
5630 println!(" {}", truncate(&c.assertion_text, 88));
5631 }
5632 if result.constituents.len() > 10 {
5633 println!(" ... ({} more)", result.constituents.len() - 10);
5634 }
5635}
5636
5637fn parse_expected_outcome(s: &str) -> Result<crate::bundle::ExpectedOutcome, String> {
5643 let trimmed = s.trim();
5644 if trimmed.eq_ignore_ascii_case("affirmed") {
5645 return Ok(crate::bundle::ExpectedOutcome::Affirmed);
5646 }
5647 if trimmed.eq_ignore_ascii_case("falsified") {
5648 return Ok(crate::bundle::ExpectedOutcome::Falsified);
5649 }
5650 if let Some(rest) = trimmed.strip_prefix("cat:") {
5651 return Ok(crate::bundle::ExpectedOutcome::Categorical {
5652 value: rest.to_string(),
5653 });
5654 }
5655 if let Some(rest) = trimmed.strip_prefix("quant:") {
5656 let (vt, units) = rest.split_once(' ').unwrap_or((rest, ""));
5657 let (val_s, tol_s) = vt
5658 .split_once('±')
5659 .or_else(|| vt.split_once("+/-"))
5660 .ok_or_else(|| format!("expected `quant:VALUE±TOL UNITS`, got `quant:{rest}`"))?;
5661 let value: f64 = val_s
5662 .parse()
5663 .map_err(|e| format!("bad quant value `{val_s}`: {e}"))?;
5664 let tolerance: f64 = tol_s
5665 .parse()
5666 .map_err(|e| format!("bad quant tolerance `{tol_s}`: {e}"))?;
5667 return Ok(crate::bundle::ExpectedOutcome::Quantitative {
5668 value,
5669 tolerance,
5670 units: units.to_string(),
5671 });
5672 }
5673 Err(format!(
5674 "unknown outcome `{s}`; expected one of: affirmed | falsified | quant:V±T units | cat:label"
5675 ))
5676}
5677
5678#[allow(clippy::too_many_arguments)]
5680fn cmd_predict(
5681 frontier: &Path,
5682 by: &str,
5683 claim: &str,
5684 criterion: &str,
5685 resolves_by: Option<&str>,
5686 confidence: f64,
5687 target_csv: &str,
5688 outcome: &str,
5689 conditions_text: &str,
5690 json: bool,
5691) {
5692 if !(0.0..=1.0).contains(&confidence) {
5693 fail(&format!("confidence must be in [0, 1]; got {confidence}"));
5694 }
5695 let expected = parse_expected_outcome(outcome).unwrap_or_else(|e| fail_return(&e));
5696
5697 let mut project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
5698
5699 let targets: Vec<String> = target_csv
5700 .split(',')
5701 .map(|s| s.trim().to_string())
5702 .filter(|s| !s.is_empty())
5703 .collect();
5704 for t in &targets {
5705 if !t.starts_with("vf_") {
5706 fail(&format!("target `{t}` is not a vf_ id"));
5707 }
5708 if !project.findings.iter().any(|f| f.id == *t) {
5709 fail(&format!("target `{t}` not present in frontier"));
5710 }
5711 }
5712
5713 let lower = conditions_text.to_lowercase();
5714 let conditions = crate::bundle::Conditions {
5715 text: conditions_text.to_string(),
5716 species_verified: Vec::new(),
5717 species_unverified: Vec::new(),
5718 in_vitro: lower.contains("in vitro"),
5719 in_vivo: lower.contains("in vivo"),
5720 human_data: lower.contains("human") || lower.contains("clinical"),
5721 clinical_trial: lower.contains("clinical trial") || lower.contains("phase "),
5722 concentration_range: None,
5723 duration: None,
5724 age_group: None,
5725 cell_type: None,
5726 };
5727
5728 let prediction = crate::bundle::Prediction::new(
5729 claim.to_string(),
5730 targets,
5731 None,
5732 resolves_by.map(|s| s.to_string()),
5733 criterion.to_string(),
5734 expected,
5735 by.to_string(),
5736 confidence,
5737 conditions,
5738 );
5739
5740 if project.predictions.iter().any(|p| p.id == prediction.id) {
5741 if json {
5742 println!(
5743 "{}",
5744 serde_json::to_string_pretty(&json!({
5745 "ok": false,
5746 "command": "predict",
5747 "reason": "prediction_already_exists",
5748 "id": prediction.id,
5749 }))
5750 .expect("serialize")
5751 );
5752 } else {
5753 println!(
5754 "{} prediction {} already exists in {}; skipping.",
5755 style::warn("predict"),
5756 prediction.id,
5757 frontier.display()
5758 );
5759 }
5760 return;
5761 }
5762
5763 let new_id = prediction.id.clone();
5764 project.predictions.push(prediction);
5765 repo::save_to_path(frontier, &project).unwrap_or_else(|e| fail_return(&e));
5766
5767 if json {
5768 println!(
5769 "{}",
5770 serde_json::to_string_pretty(&json!({
5771 "ok": true,
5772 "command": "predict",
5773 "id": new_id,
5774 "made_by": by,
5775 "confidence": confidence,
5776 "frontier": frontier.display().to_string(),
5777 }))
5778 .expect("serialize predict result")
5779 );
5780 } else {
5781 println!();
5782 println!(
5783 " {}",
5784 format!("VELA · PREDICT · {}", new_id)
5785 .to_uppercase()
5786 .dimmed()
5787 );
5788 println!(" {}", style::tick_row(60));
5789 println!(" by: {by}");
5790 println!(" confidence: {confidence:.3}");
5791 if let Some(d) = resolves_by {
5792 println!(" resolves by: {d}");
5793 }
5794 println!(" outcome: {outcome}");
5795 println!(" claim: {}", truncate(claim, 88));
5796 println!();
5797 println!(
5798 " {} prediction recorded in {}",
5799 style::ok("ok"),
5800 frontier.display()
5801 );
5802 }
5803}
5804
#[allow(clippy::too_many_arguments)]
/// Record the resolution of an existing prediction in the frontier.
///
/// Validates that `prediction_id` carries the `vpred_` prefix and exists in
/// the frontier, and that `confidence` is in [0, 1]; then appends a
/// `Resolution` (idempotently — a duplicate resolution id is skipped) and
/// saves the frontier back to disk. Output is pretty JSON when `json` is
/// set, otherwise a human-readable summary.
fn cmd_resolve(
    frontier: &Path,
    prediction_id: &str,
    actual_outcome: &str,
    matched: bool,
    by: &str,
    confidence: f64,
    source_title: &str,
    doi: Option<&str>,
    json: bool,
) {
    // Argument validation before touching the repository.
    if !prediction_id.starts_with("vpred_") {
        fail(&format!("prediction `{prediction_id}` is not a vpred_ id"));
    }
    if !(0.0..=1.0).contains(&confidence) {
        fail(&format!("confidence must be in [0, 1]; got {confidence}"));
    }
    let mut project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
    if !project.predictions.iter().any(|p| p.id == prediction_id) {
        fail(&format!(
            "prediction `{prediction_id}` not present in frontier"
        ));
    }

    // Minimal evidence record for the resolution; the source title (when
    // non-empty) is stored as a single evidence span.
    let evidence = crate::bundle::Evidence {
        evidence_type: "experimental".to_string(),
        model_system: String::new(),
        species: None,
        method: "prediction_resolution".to_string(),
        sample_size: None,
        effect_size: None,
        p_value: None,
        replicated: false,
        replication_count: None,
        evidence_spans: if source_title.is_empty() {
            Vec::new()
        } else {
            vec![serde_json::json!({"text": source_title})]
        },
    };

    // NOTE(review): `doi` is accepted by the CLI but discarded here — it is
    // not persisted anywhere on the resolution. Confirm whether it should be
    // recorded (e.g. alongside the evidence spans) before relying on it.
    let _ = doi; let resolution = crate::bundle::Resolution::new(
        prediction_id.to_string(),
        actual_outcome.to_string(),
        matched,
        by.to_string(),
        evidence,
        confidence,
    );

    // Idempotency guard: a resolution with the same derived id already exists.
    if project.resolutions.iter().any(|r| r.id == resolution.id) {
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "ok": false,
                    "command": "resolve",
                    "reason": "resolution_already_exists",
                    "id": resolution.id,
                }))
                .expect("serialize")
            );
        } else {
            println!(
                "{} resolution {} already exists in {}; skipping.",
                style::warn("resolve"),
                resolution.id,
                frontier.display()
            );
        }
        return;
    }

    let new_id = resolution.id.clone();
    project.resolutions.push(resolution);
    repo::save_to_path(frontier, &project).unwrap_or_else(|e| fail_return(&e));

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "resolve",
                "id": new_id,
                "prediction": prediction_id,
                "matched": matched,
                "frontier": frontier.display().to_string(),
            }))
            .expect("serialize resolve result")
        );
    } else {
        println!();
        println!(
            " {}",
            format!("VELA · RESOLVE · {}", new_id)
                .to_uppercase()
                .dimmed()
        );
        println!(" {}", style::tick_row(60));
        println!(" prediction: {prediction_id}");
        println!(
            " matched: {}",
            if matched {
                style::ok("yes")
            } else {
                style::lost("no")
            }
        );
        println!(" by: {by}");
        println!(" outcome: {}", truncate(actual_outcome, 80));
        println!();
        println!(
            " {} resolution recorded in {}",
            style::ok("ok"),
            frontier.display()
        );
    }
}
5931
5932fn cmd_predictions(frontier: &Path, by: Option<&str>, open: bool, json: bool) {
5934 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
5935
5936 let resolved_ids: std::collections::HashSet<&str> = project
5937 .resolutions
5938 .iter()
5939 .map(|r| r.prediction_id.as_str())
5940 .collect();
5941
5942 let mut filtered: Vec<&crate::bundle::Prediction> = project
5943 .predictions
5944 .iter()
5945 .filter(|p| by.is_none_or(|b| p.made_by == b))
5946 .filter(|p| !open || !resolved_ids.contains(p.id.as_str()))
5947 .collect();
5948 filtered.sort_by(|a, b| {
5949 a.resolves_by
5950 .as_deref()
5951 .unwrap_or("9999")
5952 .cmp(b.resolves_by.as_deref().unwrap_or("9999"))
5953 });
5954
5955 if json {
5956 let payload: Vec<serde_json::Value> = filtered
5957 .iter()
5958 .map(|p| {
5959 json!({
5960 "id": p.id,
5961 "claim_text": p.claim_text,
5962 "made_by": p.made_by,
5963 "confidence": p.confidence,
5964 "predicted_at": p.predicted_at,
5965 "resolves_by": p.resolves_by,
5966 "expected_outcome": p.expected_outcome,
5967 "resolved": resolved_ids.contains(p.id.as_str()),
5968 })
5969 })
5970 .collect();
5971 println!(
5972 "{}",
5973 serde_json::to_string_pretty(&json!({
5974 "ok": true,
5975 "command": "predictions",
5976 "frontier": frontier.display().to_string(),
5977 "count": payload.len(),
5978 "predictions": payload,
5979 }))
5980 .expect("serialize predictions")
5981 );
5982 return;
5983 }
5984
5985 println!();
5986 println!(
5987 " {}",
5988 format!("VELA · PREDICTIONS · {}", frontier.display())
5989 .to_uppercase()
5990 .dimmed()
5991 );
5992 println!(" {}", style::tick_row(60));
5993 if filtered.is_empty() {
5994 println!(" (no predictions matching filters)");
5995 return;
5996 }
5997 for p in &filtered {
5998 let resolved = resolved_ids.contains(p.id.as_str());
5999 let chip = if resolved {
6000 style::ok("resolved")
6001 } else {
6002 style::warn("open")
6003 };
6004 let deadline = p.resolves_by.as_deref().unwrap_or("(no deadline)");
6005 println!(
6006 " · {} {} by {} → {}",
6007 p.id.dimmed(),
6008 chip,
6009 p.made_by,
6010 deadline,
6011 );
6012 println!(" claim: {}", truncate(&p.claim_text, 90));
6013 println!(" confidence: {:.2}", p.confidence);
6014 }
6015}
6016
6017fn cmd_predictions_expire(frontier: &Path, now_override: Option<&str>, dry_run: bool, json: bool) {
6022 use chrono::DateTime;
6023
6024 let now_dt = match now_override {
6025 Some(s) => DateTime::parse_from_rfc3339(s)
6026 .map(|dt| dt.with_timezone(&chrono::Utc))
6027 .unwrap_or_else(|e| fail_return(&format!("invalid --now '{s}': {e}"))),
6028 None => chrono::Utc::now(),
6029 };
6030
6031 let mut project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
6032 if dry_run {
6033 let mut probe = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
6035 let report = crate::calibration::expire_overdue_predictions(&mut probe, now_dt);
6036 if json {
6037 println!(
6038 "{}",
6039 serde_json::to_string_pretty(&json!({
6040 "ok": true,
6041 "command": "predictions.expire",
6042 "dry_run": true,
6043 "report": report,
6044 }))
6045 .expect("serialize predictions.expire (dry-run)")
6046 );
6047 } else {
6048 println!(
6049 "{} dry-run @ {}: {} would expire, {} already expired, {} resolved, {} still open",
6050 style::ok("ok"),
6051 report.now,
6052 report.newly_expired.len(),
6053 report.already_expired.len(),
6054 report.already_resolved.len(),
6055 report.still_open.len(),
6056 );
6057 for id in &report.newly_expired {
6058 println!(" · {id}");
6059 }
6060 }
6061 return;
6062 }
6063
6064 let report = crate::calibration::expire_overdue_predictions(&mut project, now_dt);
6065 repo::save_to_path(frontier, &project).unwrap_or_else(|e| fail_return(&e));
6066
6067 if json {
6068 println!(
6069 "{}",
6070 serde_json::to_string_pretty(&json!({
6071 "ok": true,
6072 "command": "predictions.expire",
6073 "report": report,
6074 }))
6075 .expect("serialize predictions.expire")
6076 );
6077 } else {
6078 println!(
6079 "{} @ {}: {} newly expired, {} already expired, {} resolved, {} still open",
6080 style::ok("expired"),
6081 report.now,
6082 report.newly_expired.len(),
6083 report.already_expired.len(),
6084 report.already_resolved.len(),
6085 report.still_open.len(),
6086 );
6087 for id in &report.newly_expired {
6088 println!(" · {id}");
6089 }
6090 }
6091}
6092
6093fn cmd_calibration(frontier: &Path, actor: Option<&str>, json: bool) {
6094 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
6095 let records = match actor {
6096 Some(a) => {
6097 crate::calibration::calibration_for_actor(a, &project.predictions, &project.resolutions)
6098 .map(|r| vec![r])
6099 .unwrap_or_default()
6100 }
6101 None => crate::calibration::calibration_records(&project.predictions, &project.resolutions),
6102 };
6103
6104 if json {
6105 println!(
6106 "{}",
6107 serde_json::to_string_pretty(&json!({
6108 "ok": true,
6109 "command": "calibration",
6110 "frontier": frontier.display().to_string(),
6111 "filter_actor": actor,
6112 "records": records,
6113 }))
6114 .expect("serialize calibration")
6115 );
6116 return;
6117 }
6118
6119 println!();
6120 println!(
6121 " {}",
6122 format!("VELA · CALIBRATION · {}", frontier.display())
6123 .to_uppercase()
6124 .dimmed()
6125 );
6126 println!(" {}", style::tick_row(60));
6127 if records.is_empty() {
6128 println!(" (no calibration records)");
6129 return;
6130 }
6131 for r in &records {
6132 println!(" · {}", r.actor);
6133 println!(
6134 " predictions: {} resolved: {} hits: {}",
6135 r.n_predictions, r.n_resolved, r.n_hit
6136 );
6137 match r.hit_rate {
6138 Some(h) => println!(" hit rate: {:.1}%", h * 100.0),
6139 None => println!(" hit rate: n/a"),
6140 }
6141 match r.brier_score {
6142 Some(b) => println!(
6143 " brier: {:.4} (lower is better; 0.25 = chance)",
6144 b
6145 ),
6146 None => println!(" brier: n/a"),
6147 }
6148 match r.log_score {
6149 Some(l) => println!(
6150 " log score: {:.4} (higher is better; 0 = perfect)",
6151 l
6152 ),
6153 None => println!(" log score: n/a"),
6154 }
6155 }
6156}
6157
#[allow(clippy::too_many_arguments)]
/// Register a dataset in the frontier with manual-curation provenance.
///
/// Builds a `Provenance` stamped with this binary's version and the current
/// UTC time, constructs the `Dataset` (its id is derived inside
/// `Dataset::new`), skips the insert idempotently when a dataset with the
/// same id already exists, and otherwise appends it and saves the frontier.
fn cmd_dataset_add(
    frontier: &Path,
    name: &str,
    version: Option<&str>,
    content_hash: &str,
    url: Option<&str>,
    license: Option<&str>,
    source_title: &str,
    doi: Option<&str>,
    row_count: Option<u64>,
    json: bool,
) {
    let mut project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));

    // Provenance for a manually curated data release.
    let provenance = crate::bundle::Provenance {
        source_type: "data_release".to_string(),
        doi: doi.map(|s| s.to_string()),
        pmid: None,
        pmc: None,
        openalex_id: None,
        url: url.map(|s| s.to_string()),
        title: source_title.to_string(),
        authors: Vec::new(),
        year: None,
        journal: None,
        license: license.map(|s| s.to_string()),
        publisher: None,
        funders: Vec::new(),
        extraction: crate::bundle::Extraction {
            method: "manual_curation".to_string(),
            model: None,
            model_version: None,
            extracted_at: chrono::Utc::now().to_rfc3339(),
            extractor_version: env!("CARGO_PKG_VERSION").to_string(),
        },
        review: None,
        citation_count: None,
    };

    let mut dataset = crate::bundle::Dataset::new(
        name.to_string(),
        version.map(|s| s.to_string()),
        content_hash.to_string(),
        url.map(|s| s.to_string()),
        license.map(|s| s.to_string()),
        provenance,
    );
    // row_count is not a constructor argument; set it after the fact.
    dataset.row_count = row_count;

    // Idempotency guard: same derived dataset id already registered.
    if project.datasets.iter().any(|d| d.id == dataset.id) {
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "ok": false,
                    "command": "dataset.add",
                    "reason": "dataset_already_exists",
                    "id": dataset.id,
                }))
                .expect("serialize")
            );
        } else {
            println!(
                "{} dataset {} already exists in {}; skipping.",
                style::warn("dataset"),
                dataset.id,
                frontier.display()
            );
        }
        return;
    }

    let new_id = dataset.id.clone();
    project.datasets.push(dataset);
    repo::save_to_path(frontier, &project).unwrap_or_else(|e| fail_return(&e));

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "dataset.add",
                "id": new_id,
                "name": name,
                "version": version,
                "frontier": frontier.display().to_string(),
            }))
            .expect("failed to serialize dataset.add result")
        );
    } else {
        println!();
        println!(
            " {}",
            format!("VELA · DATASET · {}", new_id)
                .to_uppercase()
                .dimmed()
        );
        println!(" {}", style::tick_row(60));
        println!(" name: {name}");
        if let Some(v) = version {
            println!(" version: {v}");
        }
        println!(" content_hash: {content_hash}");
        if let Some(u) = url {
            println!(" url: {u}");
        }
        println!(" source: {source_title}");
        println!();
        println!(
            " {} dataset recorded in {}",
            style::ok("ok"),
            frontier.display()
        );
    }
}
6275
#[allow(clippy::too_many_arguments)]
/// Deposit a negative result into the frontier.
///
/// `kind` selects the variant and which optional flags become mandatory:
/// - "registered_trial" requires --endpoint, --intervention, --comparator,
///   --population, --n-enrolled, --power, --ci-lower, --ci-upper
///   (--effect-size-threshold and --registry-id stay optional);
/// - "exploratory" requires --reagent, --observation, --attempts.
/// Any other `kind` aborts with an error. The record is appended via
/// `state::add_negative_result` and reported as JSON or human-readable text.
fn cmd_negative_result_add(
    frontier: &Path,
    kind: &str,
    deposited_by: &str,
    reason: &str,
    conditions_text: &str,
    notes: &str,
    targets: Vec<String>,
    endpoint: Option<&str>,
    intervention: Option<&str>,
    comparator: Option<&str>,
    population: Option<&str>,
    n_enrolled: Option<u32>,
    power: Option<f64>,
    ci_lower: Option<f64>,
    ci_upper: Option<f64>,
    effect_size_threshold: Option<f64>,
    registry_id: Option<&str>,
    reagent: Option<&str>,
    observation: Option<&str>,
    attempts: Option<u32>,
    source_title: &str,
    doi: Option<&str>,
    url: Option<&str>,
    year: Option<i32>,
    json: bool,
) {
    // Validate kind-specific required flags and build the matching variant.
    // Each `unwrap_or_else(fail_return)` aborts the process on a missing flag.
    let nr_kind = match kind {
        "registered_trial" => {
            let endpoint =
                endpoint.unwrap_or_else(|| fail_return("--endpoint required for registered_trial"));
            let intervention = intervention
                .unwrap_or_else(|| fail_return("--intervention required for registered_trial"));
            let comparator = comparator
                .unwrap_or_else(|| fail_return("--comparator required for registered_trial"));
            let population = population
                .unwrap_or_else(|| fail_return("--population required for registered_trial"));
            let n_enrolled = n_enrolled
                .unwrap_or_else(|| fail_return("--n-enrolled required for registered_trial"));
            let power =
                power.unwrap_or_else(|| fail_return("--power required for registered_trial"));
            let ci_lower =
                ci_lower.unwrap_or_else(|| fail_return("--ci-lower required for registered_trial"));
            let ci_upper =
                ci_upper.unwrap_or_else(|| fail_return("--ci-upper required for registered_trial"));
            crate::bundle::NegativeResultKind::RegisteredTrial {
                endpoint: endpoint.to_string(),
                intervention: intervention.to_string(),
                comparator: comparator.to_string(),
                population: population.to_string(),
                n_enrolled,
                power,
                effect_size_ci: (ci_lower, ci_upper),
                effect_size_threshold,
                registry_id: registry_id.map(|s| s.to_string()),
            }
        }
        "exploratory" => {
            let reagent =
                reagent.unwrap_or_else(|| fail_return("--reagent required for exploratory"));
            let observation = observation
                .unwrap_or_else(|| fail_return("--observation required for exploratory"));
            let attempts =
                attempts.unwrap_or_else(|| fail_return("--attempts required for exploratory"));
            crate::bundle::NegativeResultKind::Exploratory {
                reagent: reagent.to_string(),
                observation: observation.to_string(),
                attempts,
            }
        }
        other => fail_return(&format!(
            "--kind must be 'registered_trial' or 'exploratory', got '{other}'"
        )),
    };

    // Experimental conditions: only clinical_trial is inferred from `kind`;
    // all other flags default to false/None for a manual deposit.
    let conditions = crate::bundle::Conditions {
        text: conditions_text.to_string(),
        species_verified: Vec::new(),
        species_unverified: Vec::new(),
        in_vitro: false,
        in_vivo: false,
        human_data: false,
        clinical_trial: matches!(kind, "registered_trial"),
        concentration_range: None,
        duration: None,
        age_group: None,
        cell_type: None,
    };

    // Provenance source_type mirrors the kind: trials come from a trial
    // registry, exploratory results from a lab notebook.
    let provenance = crate::bundle::Provenance {
        source_type: if matches!(kind, "registered_trial") {
            "clinical_trial".to_string()
        } else {
            "lab_notebook".to_string()
        },
        doi: doi.map(|s| s.to_string()),
        pmid: None,
        pmc: None,
        openalex_id: None,
        url: url.map(|s| s.to_string()),
        title: source_title.to_string(),
        authors: Vec::new(),
        year,
        journal: None,
        license: None,
        publisher: None,
        funders: Vec::new(),
        extraction: crate::bundle::Extraction {
            method: "manual_curation".to_string(),
            model: None,
            model_version: None,
            extracted_at: chrono::Utc::now().to_rfc3339(),
            extractor_version: env!("CARGO_PKG_VERSION").to_string(),
        },
        review: None,
        citation_count: None,
    };

    let report = state::add_negative_result(
        frontier,
        nr_kind,
        targets,
        deposited_by,
        conditions,
        provenance,
        notes,
        reason,
    )
    .unwrap_or_else(|e| fail_return(&e));

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&report).expect("serialize report")
        );
    } else {
        println!();
        println!(
            " {}",
            format!("VELA · NEGATIVE-RESULT · {}", report.finding_id)
                .to_uppercase()
                .dimmed()
        );
        println!(" {}", style::tick_row(60));
        println!(" kind: {kind}");
        println!(" deposited_by: {deposited_by}");
        if let Some(ev) = &report.applied_event_id {
            println!(" event: {ev}");
        }
        println!(
            " {} negative_result deposited in {}",
            style::ok("ok"),
            frontier.display()
        );
    }
}
6438
6439fn cmd_negative_results(frontier: &Path, target: Option<&str>, json: bool) {
6442 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
6443 let filtered: Vec<&crate::bundle::NegativeResult> = project
6444 .negative_results
6445 .iter()
6446 .filter(|nr| {
6447 target
6448 .map(|t| nr.target_findings.iter().any(|f| f == t))
6449 .unwrap_or(true)
6450 })
6451 .collect();
6452
6453 if json {
6454 println!(
6455 "{}",
6456 serde_json::to_string_pretty(&json!({
6457 "ok": true,
6458 "command": "negative_results",
6459 "frontier": frontier.display().to_string(),
6460 "count": filtered.len(),
6461 "negative_results": filtered,
6462 }))
6463 .expect("serialize negative_results")
6464 );
6465 return;
6466 }
6467
6468 if filtered.is_empty() {
6469 println!(" no negative_results in {}", frontier.display());
6470 return;
6471 }
6472
6473 println!();
6474 println!(
6475 " {} ({})",
6476 "VELA · NEGATIVE RESULTS".dimmed(),
6477 filtered.len()
6478 );
6479 println!(" {}", style::tick_row(60));
6480 for nr in &filtered {
6481 let kind_label = match &nr.kind {
6482 crate::bundle::NegativeResultKind::RegisteredTrial {
6483 endpoint, power, ..
6484 } => format!("trial · {endpoint} · power {power:.2}"),
6485 crate::bundle::NegativeResultKind::Exploratory {
6486 reagent, attempts, ..
6487 } => format!("exploratory · {reagent} · {attempts} attempts"),
6488 };
6489 let retracted = if nr.retracted { " [retracted]" } else { "" };
6490 let review = nr
6491 .review_state
6492 .as_ref()
6493 .map(|s| format!(" [{s:?}]"))
6494 .unwrap_or_default();
6495 println!(" {}{}{}", nr.id, retracted, review);
6496 println!(" {kind_label}");
6497 if !nr.target_findings.is_empty() {
6498 println!(" targets: {}", nr.target_findings.join(", "));
6499 }
6500 }
6501 println!();
6502}
6503
6504#[allow(clippy::too_many_arguments)]
6506fn cmd_tier_set(
6507 frontier: &Path,
6508 object_type: &str,
6509 object_id: &str,
6510 tier: &str,
6511 actor: &str,
6512 reason: &str,
6513 json: bool,
6514) {
6515 let parsed_tier =
6516 crate::access_tier::AccessTier::parse(tier).unwrap_or_else(|e| fail_return(&e));
6517 let report = state::set_tier(frontier, object_type, object_id, parsed_tier, actor, reason)
6518 .unwrap_or_else(|e| fail_return(&e));
6519
6520 if json {
6521 println!(
6522 "{}",
6523 serde_json::to_string_pretty(&report).expect("serialize tier-set report")
6524 );
6525 } else {
6526 println!();
6527 println!(
6528 " {}",
6529 format!("VELA · TIER · {}", object_id)
6530 .to_uppercase()
6531 .dimmed()
6532 );
6533 println!(" {}", style::tick_row(60));
6534 println!(" object_type: {object_type}");
6535 println!(" new_tier: {}", parsed_tier.canonical());
6536 println!(" actor: {actor}");
6537 if let Some(ev) = &report.applied_event_id {
6538 println!(" event: {ev}");
6539 }
6540 println!(" {} tier set in {}", style::ok("ok"), frontier.display());
6541 }
6542}
6543
6544#[allow(clippy::too_many_arguments)]
6546fn cmd_trajectory_create(
6547 frontier: &Path,
6548 deposited_by: &str,
6549 reason: &str,
6550 targets: Vec<String>,
6551 notes: &str,
6552 json: bool,
6553) {
6554 let report = state::create_trajectory(frontier, targets, deposited_by, notes, reason)
6555 .unwrap_or_else(|e| fail_return(&e));
6556
6557 if json {
6558 println!(
6559 "{}",
6560 serde_json::to_string_pretty(&report).expect("serialize trajectory report")
6561 );
6562 } else {
6563 println!();
6564 println!(
6565 " {}",
6566 format!("VELA · TRAJECTORY · {}", report.finding_id)
6567 .to_uppercase()
6568 .dimmed()
6569 );
6570 println!(" {}", style::tick_row(60));
6571 println!(" deposited_by: {deposited_by}");
6572 if let Some(ev) = &report.applied_event_id {
6573 println!(" event: {ev}");
6574 }
6575 println!(
6576 " {} trajectory opened in {}",
6577 style::ok("ok"),
6578 frontier.display()
6579 );
6580 }
6581}
6582
6583#[allow(clippy::too_many_arguments)]
6585fn cmd_trajectory_step(
6586 frontier: &Path,
6587 trajectory_id: &str,
6588 kind: &str,
6589 description: &str,
6590 actor: &str,
6591 reason: &str,
6592 references: Vec<String>,
6593 json: bool,
6594) {
6595 let parsed_kind = match kind {
6596 "hypothesis" => crate::bundle::TrajectoryStepKind::Hypothesis,
6597 "tried" => crate::bundle::TrajectoryStepKind::Tried,
6598 "ruled_out" => crate::bundle::TrajectoryStepKind::RuledOut,
6599 "observed" => crate::bundle::TrajectoryStepKind::Observed,
6600 "refined" => crate::bundle::TrajectoryStepKind::Refined,
6601 other => fail_return(&format!(
6602 "--kind must be one of hypothesis|tried|ruled_out|observed|refined, got '{other}'"
6603 )),
6604 };
6605 let report = state::append_trajectory_step(
6606 frontier,
6607 trajectory_id,
6608 parsed_kind,
6609 description,
6610 actor,
6611 references,
6612 reason,
6613 )
6614 .unwrap_or_else(|e| fail_return(&e));
6615
6616 if json {
6617 println!(
6618 "{}",
6619 serde_json::to_string_pretty(&report).expect("serialize step report")
6620 );
6621 } else {
6622 println!();
6623 println!(
6624 " {}",
6625 format!("VELA · STEP · {}", report.finding_id)
6626 .to_uppercase()
6627 .dimmed()
6628 );
6629 println!(" {}", style::tick_row(60));
6630 println!(" trajectory: {trajectory_id}");
6631 println!(" kind: {kind}");
6632 println!(" actor: {actor}");
6633 println!(
6634 " {} step appended in {}",
6635 style::ok("ok"),
6636 frontier.display()
6637 );
6638 }
6639}
6640
/// List trajectories in the frontier, optionally restricted to those
/// targeting finding `target`, and print each with its steps. Output is
/// pretty JSON when `json` is set, otherwise a human-readable tree.
fn cmd_trajectories(frontier: &Path, target: Option<&str>, json: bool) {
    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
    // No --target filter means keep everything.
    let filtered: Vec<&crate::bundle::Trajectory> = project
        .trajectories
        .iter()
        .filter(|t| {
            target
                .map(|tg| t.target_findings.iter().any(|f| f == tg))
                .unwrap_or(true)
        })
        .collect();

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "trajectories",
                "frontier": frontier.display().to_string(),
                "count": filtered.len(),
                "trajectories": filtered,
            }))
            .expect("serialize trajectories")
        );
        return;
    }

    if filtered.is_empty() {
        println!(" no trajectories in {}", frontier.display());
        return;
    }

    println!();
    println!(" {} ({})", "VELA · TRAJECTORIES".dimmed(), filtered.len());
    println!(" {}", style::tick_row(60));
    for t in &filtered {
        let retracted = if t.retracted { " [retracted]" } else { "" };
        let review = t
            .review_state
            .as_ref()
            .map(|s| format!(" [{s:?}]"))
            .unwrap_or_default();
        println!(" {}{}{}", t.id, retracted, review);
        println!(
            " {} step(s){}",
            t.steps.len(),
            if t.target_findings.is_empty() {
                String::new()
            } else {
                format!(" · targets: {}", t.target_findings.join(", "))
            }
        );
        // One line per step: kind label plus the first 80 chars of text.
        for step in &t.steps {
            let label = match step.kind {
                crate::bundle::TrajectoryStepKind::Hypothesis => "hypothesis",
                crate::bundle::TrajectoryStepKind::Tried => "tried",
                crate::bundle::TrajectoryStepKind::RuledOut => "ruled_out",
                crate::bundle::TrajectoryStepKind::Observed => "observed",
                crate::bundle::TrajectoryStepKind::Refined => "refined",
            };
            let preview: String = step.description.chars().take(80).collect();
            println!(" [{label}] {preview}");
        }
    }
    println!();
}
6708
6709fn cmd_datasets(frontier: &Path, json: bool) {
6711 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
6712 if json {
6713 println!(
6714 "{}",
6715 serde_json::to_string_pretty(&json!({
6716 "ok": true,
6717 "command": "datasets",
6718 "frontier": frontier.display().to_string(),
6719 "count": project.datasets.len(),
6720 "datasets": project.datasets,
6721 }))
6722 .expect("serialize datasets")
6723 );
6724 return;
6725 }
6726 println!();
6727 println!(
6728 " {}",
6729 format!("VELA · DATASETS · {}", frontier.display())
6730 .to_uppercase()
6731 .dimmed()
6732 );
6733 println!(" {}", style::tick_row(60));
6734 if project.datasets.is_empty() {
6735 println!(" (no datasets registered)");
6736 return;
6737 }
6738 for ds in &project.datasets {
6739 let v = ds
6740 .version
6741 .as_deref()
6742 .map(|s| format!("@{s}"))
6743 .unwrap_or_default();
6744 println!(" · {} {}{}", ds.id.dimmed(), ds.name, v);
6745 if let Some(u) = &ds.url {
6746 println!(" url: {}", truncate(u, 80));
6747 }
6748 println!(" hash: {}", truncate(&ds.content_hash, 80));
6749 }
6750}
6751
#[allow(clippy::too_many_arguments)]
/// Register a code artifact in the frontier. The artifact id is derived in
/// `CodeArtifact::new`; an existing id is skipped idempotently, otherwise
/// the artifact is appended and the frontier saved back to disk.
fn cmd_code_add(
    frontier: &Path,
    language: &str,
    repo_url: Option<&str>,
    commit: Option<&str>,
    path: &str,
    content_hash: &str,
    line_start: Option<u32>,
    line_end: Option<u32>,
    entry_point: Option<&str>,
    json: bool,
) {
    let mut project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));

    // Line range: start+end → (start, end); start only → single line.
    // NOTE(review): --line-end without --line-start is silently dropped
    // (falls into `_ => None`); confirm whether that should be an error.
    let line_range = match (line_start, line_end) {
        (Some(a), Some(b)) => Some((a, b)),
        (Some(a), None) => Some((a, a)),
        _ => None,
    };

    let artifact = crate::bundle::CodeArtifact::new(
        language.to_string(),
        repo_url.map(|s| s.to_string()),
        commit.map(|s| s.to_string()),
        path.to_string(),
        line_range,
        content_hash.to_string(),
        entry_point.map(|s| s.to_string()),
    );

    // Idempotency guard: same derived artifact id already registered.
    if project.code_artifacts.iter().any(|c| c.id == artifact.id) {
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "ok": false,
                    "command": "code.add",
                    "reason": "artifact_already_exists",
                    "id": artifact.id,
                }))
                .expect("serialize")
            );
        } else {
            println!(
                "{} code artifact {} already exists in {}; skipping.",
                style::warn("code"),
                artifact.id,
                frontier.display()
            );
        }
        return;
    }

    let new_id = artifact.id.clone();
    project.code_artifacts.push(artifact);
    repo::save_to_path(frontier, &project).unwrap_or_else(|e| fail_return(&e));

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "code.add",
                "id": new_id,
                "language": language,
                "path": path,
                "frontier": frontier.display().to_string(),
            }))
            .expect("failed to serialize code.add result")
        );
    } else {
        println!();
        println!(
            " {}",
            format!("VELA · CODE · {}", new_id).to_uppercase().dimmed()
        );
        println!(" {}", style::tick_row(60));
        println!(" language: {language}");
        if let Some(r) = repo_url {
            println!(" repo: {r}");
        }
        if let Some(c) = commit {
            println!(" commit: {c}");
        }
        println!(" path: {path}");
        if let Some((a, b)) = line_range {
            println!(" lines: {a}-{b}");
        }
        println!(" content_hash: {content_hash}");
        println!();
        println!(
            " {} code artifact recorded in {}",
            style::ok("ok"),
            frontier.display()
        );
    }
}
6851
6852fn cmd_code_artifacts(frontier: &Path, json: bool) {
6854 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
6855 if json {
6856 println!(
6857 "{}",
6858 serde_json::to_string_pretty(&json!({
6859 "ok": true,
6860 "command": "code-artifacts",
6861 "frontier": frontier.display().to_string(),
6862 "count": project.code_artifacts.len(),
6863 "code_artifacts": project.code_artifacts,
6864 }))
6865 .expect("serialize code-artifacts")
6866 );
6867 return;
6868 }
6869 println!();
6870 println!(
6871 " {}",
6872 format!("VELA · CODE · {}", frontier.display())
6873 .to_uppercase()
6874 .dimmed()
6875 );
6876 println!(" {}", style::tick_row(60));
6877 if project.code_artifacts.is_empty() {
6878 println!(" (no code artifacts registered)");
6879 return;
6880 }
6881 for c in &project.code_artifacts {
6882 let lr = c
6883 .line_range
6884 .map(|(a, b)| format!(":{a}-{b}"))
6885 .unwrap_or_default();
6886 println!(" · {} {} {}{}", c.id.dimmed(), c.language, c.path, lr);
6887 if let Some(r) = &c.repo_url {
6888 println!(" repo: {}", truncate(r, 80));
6889 }
6890 if let Some(g) = &c.git_commit {
6891 println!(" commit: {g}");
6892 }
6893 }
6894}
6895
6896fn sha256_for_bytes(bytes: &[u8]) -> String {
6897 format!("sha256:{}", hex::encode(Sha256::digest(bytes)))
6898}
6899
/// Return the hex portion of a `sha256:<hex>` content hash; inputs without
/// the `sha256:` prefix are returned unchanged.
fn sha256_hex_part(content_hash: &str) -> &str {
    match content_hash.strip_prefix("sha256:") {
        Some(hex) => hex,
        None => content_hash,
    }
}
6903
/// Store `bytes` as a content-addressed blob inside the enclosing vela repo
/// (`.vela/artifact-blobs/sha256/<hex>`) and return the repo-relative
/// locator. Returns `None` (writing nothing) when `frontier` does not
/// resolve to a vela repo. Aborts the process on filesystem errors.
fn artifact_blob_locator(frontier: &Path, content_hash: &str, bytes: &[u8]) -> Option<String> {
    let Ok(repo::VelaSource::VelaRepo(root)) = repo::detect(frontier) else {
        return None;
    };
    let hex = sha256_hex_part(content_hash);
    let rel = format!(".vela/artifact-blobs/sha256/{hex}");
    let path = root.join(&rel);
    if let Some(parent) = path.parent() {
        std::fs::create_dir_all(parent).unwrap_or_else(|e| {
            fail(&format!(
                "Failed to create artifact blob directory {}: {e}",
                parent.display()
            ))
        });
    }
    // Content-addressed store: a file already present under this hash is
    // assumed to hold the same bytes, so the write is skipped.
    if !path.is_file() {
        std::fs::write(&path, bytes)
            .unwrap_or_else(|e| fail(&format!("Failed to write artifact blob: {e}")));
    }
    Some(rel)
}
6925
6926fn parse_metadata_pairs(pairs: Vec<String>) -> BTreeMap<String, Value> {
6927 let mut out = BTreeMap::new();
6928 for pair in pairs {
6929 let Some((key, value)) = pair.split_once('=') else {
6930 fail(&format!("--metadata must be key=value, got {pair:?}"));
6931 };
6932 let key = key.trim();
6933 if key.is_empty() {
6934 fail("--metadata key must be non-empty");
6935 }
6936 out.insert(key.to_string(), Value::String(value.trim().to_string()));
6937 }
6938 out
6939}
6940
/// Map a CLI artifact `--kind` value to the provenance `source_type`
/// recorded for it; unrecognized kinds default to "database_record".
fn artifact_source_type(kind: &str) -> &'static str {
    if kind == "clinical_trial_record" || kind == "protocol" {
        "clinical_trial"
    } else if kind == "dataset" {
        "data_release"
    } else if kind == "model_output" {
        "model_output"
    } else if kind == "registry_record" {
        "database_record"
    } else if kind == "lab_file" {
        "lab_notebook"
    } else {
        "database_record"
    }
}
6951
/// Build a `Provenance` record for a deposited artifact. The `source_type`
/// is derived from the artifact `kind` via `artifact_source_type`; the
/// extraction block is stamped with method "artifact_deposit", the current
/// UTC time, and this binary's crate version.
fn artifact_provenance(
    kind: &str,
    title: &str,
    url: Option<&str>,
    doi: Option<&str>,
    license: Option<&str>,
) -> crate::bundle::Provenance {
    crate::bundle::Provenance {
        source_type: artifact_source_type(kind).to_string(),
        doi: doi.map(str::to_string),
        pmid: None,
        pmc: None,
        openalex_id: None,
        url: url.map(str::to_string),
        title: title.to_string(),
        authors: Vec::new(),
        year: None,
        journal: None,
        license: license.map(str::to_string),
        publisher: None,
        funders: Vec::new(),
        extraction: crate::bundle::Extraction {
            method: "artifact_deposit".to_string(),
            model: None,
            model_version: None,
            extracted_at: chrono::Utc::now().to_rfc3339(),
            extractor_version: env!("CARGO_PKG_VERSION").to_string(),
        },
        review: None,
        citation_count: None,
    }
}
6984
/// Register a new artifact in the frontier from `vela artifact add` flags.
///
/// Content can come from a local `--file` (hashed and, when possible, copied
/// into the frontier's blob store), a remote `--url`, or a bare
/// `--content-hash` pointer. Exits the process on any validation or I/O
/// failure (via `fail`/`fail_return`).
#[allow(clippy::too_many_arguments)]
fn cmd_artifact_add(
    frontier: &Path,
    kind: &str,
    name: &str,
    file: Option<&Path>,
    url: Option<&str>,
    content_hash: Option<&str>,
    media_type: Option<&str>,
    license: Option<&str>,
    source_title: Option<&str>,
    source_url: Option<&str>,
    doi: Option<&str>,
    target: Vec<String>,
    metadata: Vec<String>,
    access_tier: &str,
    deposited_by: &str,
    reason: &str,
    json_out: bool,
) {
    let tier =
        crate::access_tier::AccessTier::parse(access_tier).unwrap_or_else(|e| fail_return(&e));
    // Defaults describe a bare pointer artifact; the --file branch below
    // upgrades these to a locally stored blob or file reference.
    let mut size_bytes = None;
    let mut storage_mode = "pointer".to_string();
    let mut locator = url.map(str::to_string);
    let mut computed_hash = content_hash.map(str::to_string);

    if let Some(path) = file {
        let bytes = std::fs::read(path)
            .unwrap_or_else(|e| fail(&format!("Failed to read artifact file: {e}")));
        let actual_hash = sha256_for_bytes(&bytes);
        // If the caller also supplied --content-hash, treat it as an
        // expectation and refuse to deposit on mismatch.
        if let Some(expected) = content_hash {
            let expected_hex = sha256_hex_part(expected);
            let actual_hex = sha256_hex_part(&actual_hash);
            if !expected_hex.eq_ignore_ascii_case(actual_hex) {
                fail(&format!(
                    "--content-hash does not match file bytes: expected {expected}, got {actual_hash}"
                ));
            }
        }
        size_bytes = Some(bytes.len() as u64);
        computed_hash = Some(actual_hash.clone());
        // Prefer copying the bytes into the frontier blob store; fall back to
        // referencing the original path when the store is unavailable.
        if let Some(rel) = artifact_blob_locator(frontier, &actual_hash, &bytes) {
            storage_mode = "local_blob".to_string();
            locator = Some(rel);
        } else {
            storage_mode = "local_file".to_string();
            locator = Some(path.display().to_string());
        }
    }

    // Without a file we cannot compute a hash, so one must be supplied.
    let Some(content_hash) = computed_hash else {
        fail("Provide --content-hash unless --file is present.");
    };
    let content_hash_for_print = content_hash.clone();
    // URL-only deposits are remote pointers rather than bare ones.
    if file.is_none() && url.is_some() {
        storage_mode = "remote".to_string();
    }

    // --source-url wins over --url for provenance; the artifact name doubles
    // as the source title when none was given.
    let source_url_effective = source_url.or(url);
    let source_title = source_title.unwrap_or(name);
    let provenance = artifact_provenance(kind, source_title, source_url_effective, doi, license);
    let metadata = parse_metadata_pairs(metadata);
    let artifact = crate::bundle::Artifact::new(
        kind.to_string(),
        name.to_string(),
        content_hash,
        size_bytes,
        media_type.map(str::to_string),
        storage_mode,
        locator,
        source_url_effective.map(str::to_string),
        license.map(str::to_string),
        target,
        provenance,
        metadata,
        tier,
    )
    .unwrap_or_else(|e| fail_return(&e));

    // Persist through the event-sourced state layer so the deposit is
    // attributed to an actor and a reason.
    let artifact_id = artifact.id.clone();
    let report = state::add_artifact(frontier, artifact, deposited_by, reason)
        .unwrap_or_else(|e| fail_return(&e));

    if json_out {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "artifact.add",
                "id": artifact_id,
                "frontier": frontier.display().to_string(),
                "event": report.applied_event_id,
            }))
            .expect("serialize artifact.add")
        );
    } else {
        println!();
        println!(
            "  {}",
            format!("VELA · ARTIFACT · {}", artifact_id)
                .to_uppercase()
                .dimmed()
        );
        println!("  {}", style::tick_row(60));
        println!("  kind: {kind}");
        println!("  name: {name}");
        println!("  hash: {content_hash_for_print}");
        println!(
            "  {} artifact recorded in {}",
            style::ok("ok"),
            frontier.display()
        );
    }
}
7100
7101fn cmd_artifacts(frontier: &Path, target: Option<&str>, json_out: bool) {
7102 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
7103 let filtered: Vec<&crate::bundle::Artifact> = project
7104 .artifacts
7105 .iter()
7106 .filter(|artifact| {
7107 target
7108 .map(|t| artifact.target_findings.iter().any(|f| f == t))
7109 .unwrap_or(true)
7110 })
7111 .collect();
7112
7113 if json_out {
7114 println!(
7115 "{}",
7116 serde_json::to_string_pretty(&json!({
7117 "ok": true,
7118 "command": "artifacts",
7119 "frontier": frontier.display().to_string(),
7120 "count": filtered.len(),
7121 "artifacts": filtered,
7122 }))
7123 .expect("serialize artifacts")
7124 );
7125 return;
7126 }
7127
7128 println!();
7129 println!(
7130 " {}",
7131 format!("VELA · ARTIFACTS · {}", frontier.display())
7132 .to_uppercase()
7133 .dimmed()
7134 );
7135 println!(" {}", style::tick_row(60));
7136 if filtered.is_empty() {
7137 println!(" (no artifacts registered)");
7138 return;
7139 }
7140 for artifact in filtered {
7141 println!(
7142 " · {} {} · {}",
7143 artifact.id.dimmed(),
7144 artifact.kind,
7145 artifact.name
7146 );
7147 if let Some(locator) = &artifact.locator {
7148 println!(" locator: {}", truncate(locator, 88));
7149 }
7150 if !artifact.target_findings.is_empty() {
7151 println!(" targets: {}", artifact.target_findings.join(", "));
7152 }
7153 }
7154}
7155
/// Run the artifact integrity audit and report the result; the process exits
/// with status 1 when any artifact fails validation (in both output modes).
fn cmd_artifact_audit(frontier: &Path, json_out: bool) {
    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
    let audit = crate::artifact_audit::audit_artifacts(frontier, &project);
    if json_out {
        // Machine mode: emit the full audit and mirror failure in the exit code.
        println!(
            "{}",
            serde_json::to_string_pretty(&audit).expect("serialize artifact audit")
        );
        if !audit.ok {
            std::process::exit(1);
        }
        return;
    }

    println!();
    println!(
        "  {}",
        format!("VELA · ARTIFACT AUDIT · {}", frontier.display())
            .to_uppercase()
            .dimmed()
    );
    println!("  {}", style::tick_row(60));
    println!("  artifacts: {}", audit.artifact_count);
    println!("  checked local blobs: {}", audit.checked_local_blobs);
    println!("  local blob bytes: {}", audit.local_blob_bytes);
    // Per-kind counts rendered as "kind:count" pairs on one line.
    if !audit.by_kind.is_empty() {
        let kinds = audit
            .by_kind
            .iter()
            .map(|(kind, count)| format!("{kind}:{count}"))
            .collect::<Vec<_>>()
            .join(", ");
        println!("  kinds: {kinds}");
    }
    if audit.ok {
        println!("  {} artifact audit passed.", style::ok("ok"));
        return;
    }
    // Only reached on failure: list every issue, then exit non-zero.
    for issue in &audit.issues {
        println!(
            "  {} {} {}: {}",
            style::lost("invalid"),
            issue.id,
            issue.field,
            issue.message
        );
    }
    std::process::exit(1);
}
7205
/// Print the decision-brief projection: one entry per decision question with
/// its short answer, caveat, supporting/tension/gap findings and artifacts.
/// Exits with status 1 when the projection fails to load or validate.
fn cmd_decision_brief(frontier: &Path, json_out: bool) {
    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
    let report = decision::load_decision_brief(frontier, &project);
    if json_out {
        println!(
            "{}",
            serde_json::to_string_pretty(&report).expect("serialize decision brief report")
        );
        if !report.ok {
            std::process::exit(1);
        }
        return;
    }
    println!();
    println!(
        "  {}",
        format!("VELA · DECISION BRIEF · {}", project.project.name)
            .to_uppercase()
            .dimmed()
    );
    println!("  {}", style::tick_row(60));
    if !report.ok {
        print_projection_issues(&report.issues, report.error.as_deref());
        std::process::exit(1);
    }
    // Invariant: an ok report always has a projection (validated above).
    let brief = report
        .projection
        .as_ref()
        .expect("ok decision report carries projection");
    for question in &brief.questions {
        println!("  · {} · {}", question.id.dimmed(), question.title);
        println!("    answer: {}", wrap_line(&question.short_answer, 82));
        println!("    caveat: {}", wrap_line(&question.caveat, 82));
        println!("    support: {}", question.supporting_findings.join(", "));
        // Optional sections are only printed when populated.
        if !question.tension_findings.is_empty() {
            println!("    tensions: {}", question.tension_findings.join(", "));
        }
        if !question.gap_findings.is_empty() {
            println!("    gaps: {}", question.gap_findings.join(", "));
        }
        if !question.artifact_ids.is_empty() {
            println!("    artifacts: {}", question.artifact_ids.join(", "));
        }
        println!(
            "    would change: {}",
            wrap_line(&question.what_would_change_this_answer, 82)
        );
    }
}
7255
/// Print the trial-outcomes projection: one row per trial with population,
/// endpoint, cognitive/biomarker/safety results and regulatory status.
/// Exits with status 1 when the projection fails to load or validate.
fn cmd_trial_summary(frontier: &Path, json_out: bool) {
    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
    let report = decision::load_trial_outcomes(frontier, &project);
    if json_out {
        println!(
            "{}",
            serde_json::to_string_pretty(&report).expect("serialize trial summary report")
        );
        if !report.ok {
            std::process::exit(1);
        }
        return;
    }
    println!();
    println!(
        "  {}",
        format!("VELA · TRIAL SUMMARY · {}", project.project.name)
            .to_uppercase()
            .dimmed()
    );
    println!("  {}", style::tick_row(60));
    if !report.ok {
        print_projection_issues(&report.issues, report.error.as_deref());
        std::process::exit(1);
    }
    // Invariant: an ok report always has a projection (validated above).
    let outcomes = report
        .projection
        .as_ref()
        .expect("ok trial report carries projection");
    for row in &outcomes.rows {
        println!("  · {} · {} ({})", row.id.dimmed(), row.program, row.drug);
        println!("    population: {}", wrap_line(&row.population, 82));
        println!("    endpoint: {}", wrap_line(&row.primary_endpoint, 82));
        println!("    cognition: {}", wrap_line(&row.cognitive_result, 82));
        println!("    biomarker: {}", wrap_line(&row.biomarker_result, 82));
        println!("    risk: {}", wrap_line(&row.aria_or_safety_result, 82));
        println!("    status: {}", wrap_line(&row.regulatory_status, 82));
        // Cross-references to findings/artifacts, shown only when present.
        if !row.finding_ids.is_empty() {
            println!("    findings: {}", row.finding_ids.join(", "));
        }
        if !row.artifact_ids.is_empty() {
            println!("    artifacts: {}", row.artifact_ids.join(", "));
        }
    }
}
7301
/// Print the source-verification projection: when it was verified and, per
/// source, its agency, URL and current status.
/// Exits with status 1 when the projection fails to load or validate.
fn cmd_source_verification(frontier: &Path, json_out: bool) {
    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
    let report = decision::load_source_verification(frontier, &project);
    if json_out {
        println!(
            "{}",
            serde_json::to_string_pretty(&report).expect("serialize source verification report")
        );
        if !report.ok {
            std::process::exit(1);
        }
        return;
    }
    println!();
    println!(
        "  {}",
        format!("VELA · SOURCE VERIFICATION · {}", project.project.name)
            .to_uppercase()
            .dimmed()
    );
    println!("  {}", style::tick_row(60));
    if !report.ok {
        print_projection_issues(&report.issues, report.error.as_deref());
        std::process::exit(1);
    }
    // Invariant: an ok report always has a projection (validated above).
    let verification = report
        .projection
        .as_ref()
        .expect("ok source verification report carries projection");
    println!("  verified_at: {}", verification.verified_at);
    for source in &verification.sources {
        println!("  · {} · {}", source.id.dimmed(), source.title);
        println!("    agency: {}", source.agency);
        // Long URLs are truncated for terminal display.
        println!("    url: {}", truncate(&source.url, 88));
        println!("    status: {}", wrap_line(&source.current_status, 82));
    }
}
7339
/// Print the source-ingest-plan projection: each planned source with its
/// category, priority, ingest status, locator and any frontier links.
/// Exits with status 1 when the projection fails to load or validate.
fn cmd_source_ingest_plan(frontier: &Path, json_out: bool) {
    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
    let report = decision::load_source_ingest_plan(frontier, &project);
    if json_out {
        println!(
            "{}",
            serde_json::to_string_pretty(&report).expect("serialize source ingest plan report")
        );
        if !report.ok {
            std::process::exit(1);
        }
        return;
    }
    println!();
    println!(
        "  {}",
        format!("VELA · SOURCE INGEST PLAN · {}", project.project.name)
            .to_uppercase()
            .dimmed()
    );
    println!("  {}", style::tick_row(60));
    if !report.ok {
        print_projection_issues(&report.issues, report.error.as_deref());
        std::process::exit(1);
    }
    // Invariant: an ok report always has a projection (validated above).
    let plan = report
        .projection
        .as_ref()
        .expect("ok source ingest plan report carries projection");
    println!("  verified_at: {}", plan.verified_at);
    println!("  entries: {}", plan.entries.len());
    for entry in &plan.entries {
        println!(
            "  · {} · {} · {} · {}",
            entry.id.dimmed(),
            entry.category,
            entry.priority,
            entry.ingest_status
        );
        println!("    name: {}", wrap_line(&entry.name, 82));
        println!("    locator: {}", truncate(&entry.locator, 88));
        println!("    use: {}", wrap_line(&entry.target_use, 82));
        // Links back into the frontier, shown only when present.
        if let Some(id) = &entry.current_frontier_artifact_id {
            println!("    artifact: {id}");
        }
        if !entry.target_findings.is_empty() {
            println!("    findings: {}", entry.target_findings.join(", "));
        }
    }
}
7390
7391fn print_projection_issues(issues: &[decision::ProjectionIssue], error: Option<&str>) {
7392 if let Some(error) = error {
7393 println!(" {} {error}", style::lost("unavailable"));
7394 }
7395 for issue in issues {
7396 println!(
7397 " {} {}: {}",
7398 style::lost("invalid"),
7399 issue.path,
7400 issue.message
7401 );
7402 }
7403}
7404
/// Soft-wrap `text` at roughly `max_chars` characters per line, joining words
/// with single spaces and indenting continuation lines for aligned terminal
/// output. Text that already fits is returned unchanged; a single word longer
/// than `max_chars` is never split.
fn wrap_line(text: &str, max_chars: usize) -> String {
    // Fast path: nothing to wrap.
    if text.chars().count() <= max_chars {
        return text.to_string();
    }
    let mut wrapped = String::new();
    let mut current = 0usize;
    for word in text.split_whitespace() {
        let width = word.chars().count();
        if current == 0 {
            // First word on a line always goes on as-is.
            wrapped.push_str(word);
            current = width;
        } else if current + 1 + width > max_chars {
            // Word (plus its joining space) would overflow: start a new line.
            wrapped.push('\n');
            wrapped.push_str("             ");
            wrapped.push_str(word);
            current = width;
        } else {
            wrapped.push(' ');
            wrapped.push_str(word);
            current += width + 1;
        }
    }
    wrapped
}
7429
7430fn clinical_str<'a>(study: &'a Value, pointer: &str) -> Option<&'a str> {
7431 study.pointer(pointer).and_then(Value::as_str)
7432}
7433
7434fn clinical_string_array(study: &Value, pointer: &str) -> Vec<String> {
7435 study
7436 .pointer(pointer)
7437 .and_then(Value::as_array)
7438 .map(|items| {
7439 items
7440 .iter()
7441 .filter_map(Value::as_str)
7442 .map(str::to_string)
7443 .collect()
7444 })
7445 .unwrap_or_default()
7446}
7447
7448fn clinical_named_array(study: &Value, pointer: &str, field: &str) -> Vec<String> {
7449 study
7450 .pointer(pointer)
7451 .and_then(Value::as_array)
7452 .map(|items| {
7453 items
7454 .iter()
7455 .filter_map(|item| item.get(field).and_then(Value::as_str))
7456 .map(str::to_string)
7457 .collect()
7458 })
7459 .unwrap_or_default()
7460}
7461
7462fn insert_string_vec_metadata(
7463 metadata: &mut BTreeMap<String, Value>,
7464 key: &str,
7465 values: Vec<String>,
7466) {
7467 if values.is_empty() {
7468 return;
7469 }
7470 metadata.insert(
7471 key.to_string(),
7472 Value::Array(values.into_iter().map(Value::String).collect()),
7473 );
7474}
7475
/// Import a ClinicalTrials.gov study record as a `clinical_trial_record`
/// artifact.
///
/// The record is read from `input_json` when given, otherwise fetched from
/// the v2 API. The JSON is canonicalized before hashing so the same study
/// always yields the same content hash, and a curated subset of fields
/// (status, dates, phases, conditions, interventions, primary outcomes,
/// results flag) is copied into artifact metadata.
async fn cmd_clinical_trial_import(
    frontier: &Path,
    nct_id: &str,
    input_json: Option<&Path>,
    target: Vec<String>,
    deposited_by: &str,
    reason: &str,
    license: &str,
    json_out: bool,
) {
    let api_url = format!("https://clinicaltrials.gov/api/v2/studies/{nct_id}");
    let raw = if let Some(path) = input_json {
        // Offline path: caller supplied a pre-downloaded record.
        std::fs::read_to_string(path)
            .unwrap_or_else(|e| fail(&format!("Failed to read ClinicalTrials.gov JSON: {e}")))
    } else {
        // Online path: fetch from the v2 API; any transport or HTTP-status
        // failure aborts the command.
        let response = reqwest::get(&api_url).await.unwrap_or_else(|e| {
            fail(&format!(
                "Failed to fetch ClinicalTrials.gov record {nct_id}: {e}"
            ))
        });
        let response = response.error_for_status().unwrap_or_else(|e| {
            fail(&format!(
                "Failed to fetch ClinicalTrials.gov record {nct_id}: {e}"
            ))
        });
        response.text().await.unwrap_or_else(|e| {
            fail(&format!(
                "Failed to read ClinicalTrials.gov record {nct_id}: {e}"
            ))
        })
    };
    let study: Value = serde_json::from_str(&raw)
        .unwrap_or_else(|e| fail(&format!("Failed to parse ClinicalTrials.gov JSON: {e}")));
    // Hash the canonical byte form, not the raw download, so formatting
    // differences between fetches do not change the content hash.
    let canonical_bytes = crate::canonical::to_canonical_bytes(&study)
        .unwrap_or_else(|e| fail(&format!("Failed to canonicalize trial JSON: {e}")));
    let content_hash = sha256_for_bytes(&canonical_bytes);
    // Prefer a local blob copy; fall back to pointing at the API URL.
    let locator = artifact_blob_locator(frontier, &content_hash, &canonical_bytes)
        .unwrap_or_else(|| api_url.clone());
    // A locator under .vela/ means the blob landed in local storage;
    // otherwise the locator is the remote API URL.
    let storage_mode = if locator.starts_with(".vela/") {
        "local_blob"
    } else {
        "remote"
    };

    // Trust the record's own NCT id when present, falling back to the
    // caller-supplied one.
    let parsed_nct = clinical_str(&study, "/protocolSection/identificationModule/nctId")
        .unwrap_or(nct_id)
        .to_string();
    let title = clinical_str(&study, "/protocolSection/identificationModule/briefTitle")
        .or_else(|| {
            clinical_str(
                &study,
                "/protocolSection/identificationModule/officialTitle",
            )
        })
        .unwrap_or(nct_id);
    let public_url = format!("https://clinicaltrials.gov/study/{parsed_nct}");
    let mut metadata = BTreeMap::new();
    metadata.insert("nct_id".to_string(), Value::String(parsed_nct.clone()));
    metadata.insert(
        "source_api".to_string(),
        Value::String("clinicaltrials.gov-v2".to_string()),
    );
    metadata.insert(
        "retrieved_at".to_string(),
        Value::String(chrono::Utc::now().to_rfc3339()),
    );
    // Optional scalar fields: only inserted when present in the record.
    for (key, pointer) in [
        (
            "overall_status",
            "/protocolSection/statusModule/overallStatus",
        ),
        (
            "start_date",
            "/protocolSection/statusModule/startDateStruct/date",
        ),
        (
            "completion_date",
            "/protocolSection/statusModule/completionDateStruct/date",
        ),
    ] {
        if let Some(value) = clinical_str(&study, pointer) {
            metadata.insert(key.to_string(), Value::String(value.to_string()));
        }
    }
    // Optional array fields: empty arrays are omitted by the helper.
    insert_string_vec_metadata(
        &mut metadata,
        "phases",
        clinical_string_array(&study, "/protocolSection/designModule/phases"),
    );
    insert_string_vec_metadata(
        &mut metadata,
        "conditions",
        clinical_string_array(&study, "/protocolSection/conditionsModule/conditions"),
    );
    insert_string_vec_metadata(
        &mut metadata,
        "interventions",
        clinical_named_array(
            &study,
            "/protocolSection/armsInterventionsModule/interventions",
            "name",
        ),
    );
    insert_string_vec_metadata(
        &mut metadata,
        "primary_outcomes",
        clinical_named_array(
            &study,
            "/protocolSection/outcomesModule/primaryOutcomes",
            "measure",
        ),
    );
    if let Some(has_results) = study.get("hasResults").and_then(Value::as_bool) {
        metadata.insert("has_results".to_string(), Value::Bool(has_results));
    }

    // Provenance points at the public study page, not the API endpoint.
    let provenance = artifact_provenance(
        "clinical_trial_record",
        title,
        Some(&public_url),
        None,
        Some(license),
    );
    let artifact = crate::bundle::Artifact::new(
        "clinical_trial_record",
        title.to_string(),
        content_hash,
        Some(canonical_bytes.len() as u64),
        Some("application/json".to_string()),
        storage_mode.to_string(),
        Some(locator),
        Some(public_url.clone()),
        Some(license.to_string()),
        target,
        provenance,
        metadata,
        crate::access_tier::AccessTier::Public,
    )
    .unwrap_or_else(|e| fail_return(&e));
    let artifact_id = artifact.id.clone();
    let report = state::add_artifact(frontier, artifact, deposited_by, reason)
        .unwrap_or_else(|e| fail_return(&e));

    if json_out {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "clinical-trial-import",
                "nct_id": parsed_nct,
                "id": artifact_id,
                "frontier": frontier.display().to_string(),
                "event": report.applied_event_id,
                "source_url": public_url,
            }))
            .expect("serialize clinical-trial-import")
        );
    } else {
        println!();
        println!(
            "  {}",
            format!("VELA · CLINICAL TRIAL · {}", artifact_id)
                .to_uppercase()
                .dimmed()
        );
        println!("  {}", style::tick_row(60));
        println!("  nct_id: {parsed_nct}");
        println!("  title: {}", truncate(title, 96));
        println!("  source: {public_url}");
        println!(
            "  {} trial record imported into {}",
            style::ok("ok"),
            frontier.display()
        );
    }
}
7652
/// Record a replication attempt against an existing finding and, unless
/// `no_cascade` is set, propagate the outcome to dependent findings.
///
/// The attempt is deduplicated by its deterministic id: recording the exact
/// same attempt twice is a no-op with a warning, not an error.
#[allow(clippy::too_many_arguments)]
fn cmd_replicate(
    frontier: &Path,
    target: &str,
    outcome: &str,
    attempted_by: &str,
    conditions_text: &str,
    source_title: &str,
    doi: Option<&str>,
    pmid: Option<&str>,
    sample_size: Option<&str>,
    note: &str,
    previous_attempt: Option<&str>,
    no_cascade: bool,
    json: bool,
) {
    // Validate the outcome against the closed vocabulary before touching state.
    if !crate::bundle::VALID_REPLICATION_OUTCOMES.contains(&outcome) {
        fail(&format!(
            "invalid outcome '{outcome}'; valid: {:?}",
            crate::bundle::VALID_REPLICATION_OUTCOMES
        ));
    }
    if !target.starts_with("vf_") {
        fail(&format!("target '{target}' is not a vf_ finding id"));
    }

    let mut project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));

    if !project.findings.iter().any(|f| f.id == target) {
        fail(&format!(
            "target finding '{target}' not present in frontier '{}'",
            frontier.display()
        ));
    }

    // Infer experimental-context flags from keywords in the free-text
    // conditions. NOTE(review): this is a heuristic — e.g. "phase " also
    // matches non-trial phrases; confirm acceptable for curation workflow.
    let lower = conditions_text.to_lowercase();
    let conditions = crate::bundle::Conditions {
        text: conditions_text.to_string(),
        species_verified: Vec::new(),
        species_unverified: Vec::new(),
        in_vitro: lower.contains("in vitro") || lower.contains("ipsc"),
        in_vivo: lower.contains("in vivo") || lower.contains("mouse") || lower.contains("rat"),
        human_data: lower.contains("human")
            || lower.contains("clinical")
            || lower.contains("patient"),
        clinical_trial: lower.contains("clinical trial") || lower.contains("phase "),
        concentration_range: None,
        duration: None,
        age_group: None,
        cell_type: None,
    };

    let evidence = crate::bundle::Evidence {
        evidence_type: "experimental".to_string(),
        model_system: String::new(),
        species: None,
        method: "replication_attempt".to_string(),
        sample_size: sample_size.map(|s| s.to_string()),
        effect_size: None,
        p_value: None,
        // Only the exact "replicated" outcome counts as a successful replication.
        replicated: outcome == "replicated",
        replication_count: None,
        evidence_spans: Vec::new(),
    };

    // Manually curated provenance: only the identifiers the CLI accepts.
    let provenance = crate::bundle::Provenance {
        source_type: "published_paper".to_string(),
        doi: doi.map(|s| s.to_string()),
        pmid: pmid.map(|s| s.to_string()),
        pmc: None,
        openalex_id: None,
        url: None,
        title: source_title.to_string(),
        authors: Vec::new(),
        year: None,
        journal: None,
        license: None,
        publisher: None,
        funders: Vec::new(),
        extraction: crate::bundle::Extraction {
            method: "manual_curation".to_string(),
            model: None,
            model_version: None,
            extracted_at: chrono::Utc::now().to_rfc3339(),
            extractor_version: env!("CARGO_PKG_VERSION").to_string(),
        },
        review: None,
        citation_count: None,
    };

    let mut rep = crate::bundle::Replication::new(
        target.to_string(),
        attempted_by.to_string(),
        outcome.to_string(),
        evidence,
        conditions,
        provenance,
        note.to_string(),
    );
    rep.previous_attempt = previous_attempt.map(|s| s.to_string());

    // Idempotency guard: an identical attempt (same derived id) is skipped,
    // reported as ok:false in JSON mode but without a non-zero exit.
    if project.replications.iter().any(|r| r.id == rep.id) {
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "ok": false,
                    "command": "replicate",
                    "reason": "replication_already_exists",
                    "id": rep.id,
                }))
                .expect("serialize")
            );
        } else {
            println!(
                "{} replication {} already exists in {}; skipping.",
                style::warn("replicate"),
                rep.id,
                frontier.display()
            );
        }
        return;
    }

    let new_id = rep.id.clone();
    project.replications.push(rep);

    // Cascade: flag findings that depend on the target and record review
    // events, then refresh derived statistics. Skipped with --no-cascade.
    let cascade_result = if no_cascade {
        None
    } else {
        let result = propagate::propagate_correction(
            &mut project,
            target,
            propagate::PropagationAction::ReplicationOutcome {
                outcome: outcome.to_string(),
                vrep_id: new_id.clone(),
            },
        );
        project.review_events.extend(result.events.clone());
        project::recompute_stats(&mut project);
        Some(result)
    };

    // Persist replication (and any cascade effects) in one save.
    repo::save_to_path(frontier, &project).unwrap_or_else(|e| fail_return(&e));

    if json {
        let cascade_json = cascade_result.as_ref().map(|r| {
            json!({
                "affected": r.affected,
                "events": r.events.len(),
            })
        });
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "replicate",
                "id": new_id,
                "target": target,
                "outcome": outcome,
                "attempted_by": attempted_by,
                "cascade": cascade_json,
                "frontier": frontier.display().to_string(),
            }))
            .expect("failed to serialize replicate result")
        );
    } else {
        println!();
        println!(
            "  {}",
            format!("VELA · REPLICATE · {}", new_id)
                .to_uppercase()
                .dimmed()
        );
        println!("  {}", style::tick_row(60));
        println!("  target: {target}");
        println!("  outcome: {outcome}");
        println!("  attempted by: {attempted_by}");
        println!("  conditions: {conditions_text}");
        println!("  source: {source_title}");
        if let Some(d) = doi {
            println!("  doi: {d}");
        }
        println!();
        println!(
            "  {} replication recorded in {}",
            style::ok("ok"),
            frontier.display()
        );
        if let Some(result) = cascade_result {
            println!(
                "  {} cascade: {} dependent(s) flagged, {} review event(s) recorded",
                style::ok("ok"),
                result.affected,
                result.events.len()
            );
        } else {
            println!("  {} cascade skipped (--no-cascade)", style::warn("info"));
        }
    }
}
7874
/// List replication attempts recorded in the frontier, optionally filtered to
/// those targeting a specific finding id. `json` selects a machine payload
/// over the human-readable listing.
fn cmd_replications(frontier: &Path, target: Option<&str>, json: bool) {
    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
    // No filter means every replication matches.
    let filtered: Vec<&crate::bundle::Replication> = project
        .replications
        .iter()
        .filter(|r| target.is_none_or(|t| r.target_finding == t))
        .collect();

    if json {
        let payload = json!({
            "ok": true,
            "command": "replications",
            "frontier": frontier.display().to_string(),
            "filter_target": target,
            "count": filtered.len(),
            "replications": filtered,
        });
        println!(
            "{}",
            serde_json::to_string_pretty(&payload).expect("failed to serialize replications list")
        );
        return;
    }

    println!();
    let header = match target {
        Some(t) => format!("VELA · REPLICATIONS · {t}"),
        None => format!("VELA · REPLICATIONS · {}", frontier.display()),
    };
    println!("  {}", header.to_uppercase().dimmed());
    println!("  {}", style::tick_row(60));
    if filtered.is_empty() {
        println!("  (no replications recorded)");
        return;
    }
    for rep in &filtered {
        // Color the outcome by severity; unknown outcomes are uncolored.
        let outcome_chip = match rep.outcome.as_str() {
            "replicated" => style::ok(&rep.outcome),
            "failed" => style::lost(&rep.outcome),
            "partial" => style::warn(&rep.outcome),
            _ => rep.outcome.clone().normal().to_string(),
        };
        println!(
            "  · {} {} by {}",
            rep.id.dimmed(),
            outcome_chip,
            rep.attempted_by
        );
        println!("    target: {}", rep.target_finding);
        // Optional detail lines, truncated for terminal width.
        if !rep.conditions.text.is_empty() {
            println!("    conditions: {}", truncate(&rep.conditions.text, 80));
        }
        if !rep.provenance.title.is_empty() {
            println!("    source: {}", truncate(&rep.provenance.title, 80));
        }
    }
}
7933
/// Smart ingest dispatcher: route a path or `doi:`/`pmid:`/`nct:` URI to the
/// appropriate pipeline based on its type and extension.
///
/// - URIs: metadata fetch only (no frontier state is written).
/// - Single file: `.pdf` → scout, `.md` → compile-notes, `.csv`/`.tsv` →
///   compile-data, `.json` → artifact-to-state; anything else is an error.
/// - Directory: runs each applicable pipeline once over the directory; a
///   directory with no handlable files falls back to compile-code.
async fn cmd_ingest(
    path: &str,
    frontier: &Path,
    backend: Option<&str>,
    actor: Option<&str>,
    dry_run: bool,
    json: bool,
) {
    let lowered = path.trim().to_lowercase();
    // URI branch: fetch metadata, then remind the user nothing was written.
    if lowered.starts_with("doi:") || lowered.starts_with("pmid:") || lowered.starts_with("nct:") {
        // NOTE(review): the `false` here presumably disables a fetch option
        // (e.g. save/json) — confirm against cmd_source_fetch's signature.
        cmd_source_fetch(path.trim(), None, None, false, json).await;
        if !json {
            eprintln!();
            eprintln!(
                "  vela ingest · note: doi:/pmid:/nct: URIs only fetch metadata; no frontier state was written."
            );
            eprintln!(
                "  next: turn this paper into a proposal with `vela finding add {} --assertion '...' --author 'reviewer:you' --apply`",
                frontier.display()
            );
        }
        return;
    }

    let p = std::path::PathBuf::from(path);
    if !p.exists() {
        fail(&format!(
            "ingest: path '{path}' does not exist (and is not a doi:/pmid:/nct: URI)"
        ));
    }

    let ext = p
        .extension()
        .and_then(|s| s.to_str())
        .map(|s| s.to_ascii_lowercase());

    // Single-file branch: dispatch on extension.
    if p.is_file() {
        match ext.as_deref() {
            Some("pdf") => {
                cmd_scout(&p, frontier, backend, dry_run, json).await;
            }
            Some("md") | Some("markdown") => {
                cmd_compile_notes(&p, frontier, backend, None, None, dry_run, json).await;
            }
            Some("csv") | Some("tsv") => {
                cmd_compile_data(&p, frontier, backend, None, dry_run, json).await;
            }
            Some("json") => {
                // Default bot actor when the caller did not name one.
                // NOTE(review): the `false` argument bypasses dry_run here —
                // confirm whether artifact-to-state should honor --dry-run.
                let actor_id = actor.unwrap_or("agent:vela-ingest-bot");
                cmd_artifact_to_state(frontier, &p, actor_id, false, json);
            }
            other => {
                fail(&format!(
                    "ingest: unsupported file type '{}' (expected .pdf, .md, .csv, .tsv, .json, or a doi:/pmid:/nct: URI)",
                    other.unwrap_or("(none)")
                ));
            }
        }
        return;
    }

    // Directory branch: first census the top-level files by extension
    // (non-recursive; files without an extension are ignored).
    if p.is_dir() {
        let mut pdf_count = 0usize;
        let mut md_count = 0usize;
        let mut data_count = 0usize;
        let mut json_count = 0usize;
        let mut unhandled_exts: std::collections::BTreeSet<String> =
            std::collections::BTreeSet::new();
        if let Ok(entries) = std::fs::read_dir(&p) {
            for entry in entries.flatten() {
                let path = entry.path();
                if !path.is_file() {
                    continue;
                }
                if let Some(name) = entry.file_name().to_str()
                    && let Some(dot) = name.rfind('.')
                {
                    let ext = name[dot + 1..].to_ascii_lowercase();
                    match ext.as_str() {
                        "pdf" => pdf_count += 1,
                        "md" | "markdown" => md_count += 1,
                        "csv" | "tsv" => data_count += 1,
                        "json" => json_count += 1,
                        other => {
                            // Dotfiles are skipped silently; everything else
                            // is reported at the end as unhandled.
                            if !name.starts_with('.') {
                                unhandled_exts.insert(other.to_string());
                            }
                        }
                    }
                }
            }
        }

        let dispatched_types = (pdf_count > 0) as usize
            + (md_count > 0) as usize
            + (data_count > 0) as usize
            + (json_count > 0) as usize;

        // Nothing handlable: treat the directory as a code tree.
        if dispatched_types == 0 {
            cmd_compile_code(&p, frontier, backend, None, dry_run, json).await;
            return;
        }

        if dispatched_types > 1 {
            eprintln!(
                "  vela ingest · folder has multiple handlable types; running each in sequence"
            );
            eprintln!(
                "    pdf:{pdf_count} md:{md_count} csv/tsv:{data_count} json:{json_count}"
            );
        }

        // Each pipeline receives the whole directory; JSON files are the
        // exception and are imported one by one.
        if pdf_count > 0 {
            cmd_scout(&p, frontier, backend, dry_run, json).await;
        }
        if md_count > 0 {
            cmd_compile_notes(&p, frontier, backend, None, None, dry_run, json).await;
        }
        if data_count > 0 {
            cmd_compile_data(&p, frontier, backend, None, dry_run, json).await;
        }
        if json_count > 0 {
            let actor_id = actor.unwrap_or("agent:vela-ingest-bot");
            if let Ok(entries) = std::fs::read_dir(&p) {
                for entry in entries.flatten() {
                    let path = entry.path();
                    if path.is_file()
                        && path
                            .extension()
                            .and_then(|s| s.to_str())
                            .map(|s| s.eq_ignore_ascii_case("json"))
                            .unwrap_or(false)
                    {
                        cmd_artifact_to_state(frontier, &path, actor_id, false, json);
                    }
                }
            }
        }

        if !unhandled_exts.is_empty() {
            let kinds: Vec<String> = unhandled_exts.into_iter().collect();
            eprintln!(
                "  vela ingest · skipped {} file extension(s) with no handler: {}",
                kinds.len(),
                kinds.join(", ")
            );
        }
        return;
    }

    // Exists but neither file nor directory (e.g. a special filesystem node).
    fail(&format!(
        "ingest: path '{path}' is neither a file nor a directory"
    ));
}
8133
8134#[allow(clippy::too_many_arguments)]
8135async fn cmd_compile_data(
8137 root: &Path,
8138 frontier: &Path,
8139 backend: Option<&str>,
8140 sample_rows: Option<usize>,
8141 dry_run: bool,
8142 json_out: bool,
8143) {
8144 match DATASETS_HANDLER.get() {
8145 Some(handler) => {
8146 handler(
8147 root.to_path_buf(),
8148 frontier.to_path_buf(),
8149 backend.map(String::from),
8150 sample_rows,
8151 dry_run,
8152 json_out,
8153 )
8154 .await;
8155 }
8156 None => {
8157 eprintln!(
8158 "{} `vela compile-data` requires the vela CLI binary; the library is unwired without a registered datasets handler.",
8159 style::err_prefix()
8160 );
8161 std::process::exit(1);
8162 }
8163 }
8164}
8165
8166async fn cmd_review_pending(
8169 frontier: &Path,
8170 backend: Option<&str>,
8171 max_proposals: Option<usize>,
8172 batch_size: usize,
8173 dry_run: bool,
8174 json_out: bool,
8175) {
8176 match REVIEWER_HANDLER.get() {
8177 Some(handler) => {
8178 handler(
8179 frontier.to_path_buf(),
8180 backend.map(String::from),
8181 max_proposals,
8182 batch_size,
8183 dry_run,
8184 json_out,
8185 )
8186 .await;
8187 }
8188 None => {
8189 eprintln!(
8190 "{} `vela review-pending` requires the vela CLI binary; the library is unwired without a registered reviewer handler.",
8191 style::err_prefix()
8192 );
8193 std::process::exit(1);
8194 }
8195 }
8196}
8197
8198async fn cmd_find_tensions(
8201 frontier: &Path,
8202 backend: Option<&str>,
8203 max_findings: Option<usize>,
8204 dry_run: bool,
8205 json_out: bool,
8206) {
8207 match TENSIONS_HANDLER.get() {
8208 Some(handler) => {
8209 handler(
8210 frontier.to_path_buf(),
8211 backend.map(String::from),
8212 max_findings,
8213 dry_run,
8214 json_out,
8215 )
8216 .await;
8217 }
8218 None => {
8219 eprintln!(
8220 "{} `vela find-tensions` requires the vela CLI binary; the library is unwired without a registered tensions handler.",
8221 style::err_prefix()
8222 );
8223 std::process::exit(1);
8224 }
8225 }
8226}
8227
8228async fn cmd_plan_experiments(
8231 frontier: &Path,
8232 backend: Option<&str>,
8233 max_findings: Option<usize>,
8234 dry_run: bool,
8235 json_out: bool,
8236) {
8237 match EXPERIMENTS_HANDLER.get() {
8238 Some(handler) => {
8239 handler(
8240 frontier.to_path_buf(),
8241 backend.map(String::from),
8242 max_findings,
8243 dry_run,
8244 json_out,
8245 )
8246 .await;
8247 }
8248 None => {
8249 eprintln!(
8250 "{} `vela plan-experiments` requires the vela CLI binary; the library is unwired without a registered experiments handler.",
8251 style::err_prefix()
8252 );
8253 std::process::exit(1);
8254 }
8255 }
8256}
8257
8258async fn cmd_compile_code(
8261 root: &Path,
8262 frontier: &Path,
8263 backend: Option<&str>,
8264 max_files: Option<usize>,
8265 dry_run: bool,
8266 json_out: bool,
8267) {
8268 match CODE_HANDLER.get() {
8269 Some(handler) => {
8270 handler(
8271 root.to_path_buf(),
8272 frontier.to_path_buf(),
8273 backend.map(String::from),
8274 max_files,
8275 dry_run,
8276 json_out,
8277 )
8278 .await;
8279 }
8280 None => {
8281 eprintln!(
8282 "{} `vela compile-code` requires the vela CLI binary; the library is unwired without a registered code handler.",
8283 style::err_prefix()
8284 );
8285 std::process::exit(1);
8286 }
8287 }
8288}
8289
8290async fn cmd_compile_notes(
8295 vault: &Path,
8296 frontier: &Path,
8297 backend: Option<&str>,
8298 max_files: Option<usize>,
8299 max_items_per_category: Option<usize>,
8300 dry_run: bool,
8301 json_out: bool,
8302) {
8303 match NOTES_HANDLER.get() {
8304 Some(handler) => {
8305 handler(
8306 vault.to_path_buf(),
8307 frontier.to_path_buf(),
8308 backend.map(String::from),
8309 max_files,
8310 max_items_per_category,
8311 dry_run,
8312 json_out,
8313 )
8314 .await;
8315 }
8316 None => {
8317 eprintln!(
8318 "{} `vela compile-notes` requires the vela CLI binary; the library is unwired without a registered notes handler.",
8319 style::err_prefix()
8320 );
8321 std::process::exit(1);
8322 }
8323 }
8324}
8325
8326async fn cmd_scout(
8333 folder: &Path,
8334 frontier: &Path,
8335 backend: Option<&str>,
8336 dry_run: bool,
8337 json_out: bool,
8338) {
8339 match SCOUT_HANDLER.get() {
8340 Some(handler) => {
8341 handler(
8342 folder.to_path_buf(),
8343 frontier.to_path_buf(),
8344 backend.map(String::from),
8345 dry_run,
8346 json_out,
8347 )
8348 .await;
8349 }
8350 None => {
8351 eprintln!(
8352 "{} `vela scout` requires the vela CLI binary; the library is unwired without a registered scout handler.",
8353 style::err_prefix()
8354 );
8355 std::process::exit(1);
8356 }
8357 }
8358}
8359
/// Recursively scan `root` for files whose name or extension suggests private
/// key material or credentials.
///
/// Skips well-known VCS/build directories (`.git`, `target`, `node_modules`,
/// `dist`, `build`) and explicitly exempts public-key artifacts (`*.pub`,
/// `*.pubkey`, and a file literally named `public.key`). A file is flagged
/// when its lowercased extension is one of `key`/`pem`/`p12`/`pfx`, or its
/// lowercased name contains `private`, `secret`, or `credential`.
///
/// Unreadable directories and non-UTF-8 file names are silently skipped
/// (best-effort audit). Returns the matching paths sorted for deterministic
/// output.
pub fn scan_for_sensitive_paths(root: &Path) -> Vec<PathBuf> {
    let mut hits: Vec<PathBuf> = Vec::new();
    // Directories that never hold project-owned secrets worth flagging.
    let skip_dirs: &[&str] = &[".git", "target", "node_modules", "dist", "build"];
    // Extensions that almost always denote private key material.
    let bad_exts: &[&str] = &["key", "pem", "p12", "pfx"];
    // Name fragments that commonly mark credential files.
    let bad_substrings: &[&str] = &["private", "secret", "credential"];
    // Iterative DFS with an explicit stack: arbitrarily deep trees cannot
    // overflow the call stack.
    let mut stack: Vec<PathBuf> = vec![root.to_path_buf()];
    while let Some(dir) = stack.pop() {
        let Ok(entries) = std::fs::read_dir(&dir) else {
            // Unreadable directory (permissions, race): best-effort skip.
            continue;
        };
        for entry in entries.flatten() {
            let path = entry.path();
            let Some(name) = path.file_name().and_then(|n| n.to_str()) else {
                // Non-UTF-8 file name: cannot be matched against patterns.
                continue;
            };
            let lower = name.to_lowercase();
            if path.is_dir() {
                if !skip_dirs.contains(&name) {
                    stack.push(path);
                }
                continue;
            }
            // Public-key artifacts are safe to publish; never flag them.
            if lower.ends_with(".pub") || lower.ends_with(".pubkey") || lower == "public.key" {
                continue;
            }
            let ext = path
                .extension()
                .and_then(|e| e.to_str())
                .map(str::to_lowercase)
                .unwrap_or_default();
            if bad_exts.contains(&ext.as_str())
                || bad_substrings.iter().any(|s| lower.contains(s))
            {
                hits.push(path);
            }
        }
    }
    // Deterministic ordering for stable CLI output and tests.
    hits.sort();
    hits
}
8421
/// Run the `check` subcommand: schema validation, methodology/graph lint and
/// stats, event replay, signature and signal summaries, an optional
/// conformance suite, and — in strict mode — a sensitive-path audit.
///
/// When none of `schema`, `stats`, `conformance_flag`, or `schema_only` is
/// set (or `all` is), every check runs. Exits the process with status 1 on a
/// failing check. `fix` is accepted but currently unused.
fn cmd_check(
    source: Option<&Path>,
    schema: bool,
    stats: bool,
    conformance_flag: bool,
    conformance_dir: &Path,
    all: bool,
    schema_only: bool,
    strict: bool,
    fix: bool,
    json_output: bool,
) {
    // JSON mode short-circuits: emit one machine-readable report and exit
    // non-zero unless the payload reports "ok": true.
    if json_output {
        let Some(src) = source else {
            fail("--json requires a frontier source");
        };
        let payload = check_json_payload(src, schema_only, strict);
        println!(
            "{}",
            serde_json::to_string_pretty(&payload).expect("failed to serialize check report")
        );
        if payload.get("ok").and_then(Value::as_bool) != Some(true) {
            std::process::exit(1);
        }
        return;
    }

    // Strict mode refuses to proceed when the source tree contains files that
    // look like private keys or credentials (THREAT_MODEL.md A17).
    if strict && let Some(src) = source {
        let hits = scan_for_sensitive_paths(src);
        if !hits.is_empty() {
            eprintln!(
                "{} secret-audit: {} sensitive path(s) found under {}",
                style::err_prefix(),
                hits.len(),
                src.display()
            );
            for hit in &hits {
                eprintln!(" - {}", hit.display());
            }
            eprintln!(
                " hint: add `keys/` and `*.key` to .gitignore so these never reach a public repo (see THREAT_MODEL.md A17)"
            );
            std::process::exit(1);
        }
    }

    // No explicit check selection means "run everything".
    let run_all = all || (!schema && !stats && !conformance_flag && !schema_only);
    // Schema validation (also the only check performed under --schema-only).
    if run_all || schema || schema_only {
        let Some(src) = source else {
            fail("check requires a frontier source");
        };
        validate::run(src);
    }
    // Lint/stats pass: methodology lint, event replay, signature and signal
    // summaries. Skipped entirely under --schema-only.
    if !schema_only && (run_all || stats) {
        let Some(src) = source else {
            fail("--stats requires a frontier source");
        };
        let frontier = load_frontier_or_fail(src);
        let report = lint::lint(&frontier, None, None);
        lint::print_report(&report);
        let replay_report = events::replay_report(&frontier);
        println!("event replay: {}", replay_report.status);
        if !replay_report.conflicts.is_empty() {
            for conflict in &replay_report.conflicts {
                println!("  - {conflict}");
            }
        }
        // Signature summary is only printed when at least one finding is
        // actually signed; verification errors are ignored here.
        if let Ok(signature_report) = sign::verify_frontier_data(&frontier, None)
            && signature_report.signed > 0
        {
            println!(
                "Signatures: {} valid / {} invalid / {} unsigned",
                signature_report.valid, signature_report.invalid, signature_report.unsigned
            );
        }
        let signal_report = signals::analyze(&frontier, &[]);
        print_signal_summary(&signal_report, strict);
        // Replay failure always fails the command; strict mode additionally
        // fails on a non-empty review queue or non-ready proof state.
        if !replay_report.ok
            || (strict
                && (!signal_report.review_queue.is_empty()
                    || signal_report.proof_readiness.status != "ready"))
        {
            std::process::exit(1);
        }
    }
    // Conformance suite: run when requested explicitly, or in run-all mode
    // only if the conformance directory actually exists.
    if run_all || conformance_flag {
        if conformance_flag || conformance_dir.is_dir() {
            conformance::run(conformance_dir);
        } else {
            eprintln!(
                "  conformance: skipped ({} not present; pass --conformance-dir <path> to point at the source repo's tests/conformance)",
                conformance_dir.display()
            );
        }
    }
    // `fix` is part of the CLI surface but has no effect yet.
    let _ = fix;
}
8538
/// Build the full machine-readable report for `vela check --json`.
///
/// Runs schema validation plus (unless `schema_only`) methodology and
/// frontier-graph lint, event replay, state-integrity analysis, derived
/// source/evidence/condition projections, and signal analysis, folding every
/// issue into a flat `diagnostics` array. The top-level `ok` flag is false
/// when any error was found, or — with `strict` — when warnings or
/// strict-blocking signals remain.
fn check_json_payload(src: &Path, schema_only: bool, strict: bool) -> Value {
    let report = validate::validate(src);
    // Best-effort load; `None` disables every frontier-dependent check below.
    let loaded = repo::load_from_path(src).ok();
    let (method_report, graph_report) = if schema_only {
        (None, None)
    } else if let Some(frontier) = loaded.as_ref() {
        (
            Some(lint::lint(frontier, None, None)),
            Some(lint::lint_frontier(frontier)),
        )
    } else {
        (None, None)
    };
    let source_hash = hash_path(src).unwrap_or_else(|_| "unavailable".to_string());
    let mut diagnostics = Vec::new();
    // Schema validation errors come first; suggestion/fixability/action are
    // derived per error message.
    diagnostics.extend(report.errors.iter().map(|e| {
        json!({
            "severity": "error",
            "rule_id": "schema",
            "finding_id": null,
            "file": &e.file,
            "field_path": null,
            "message": &e.error,
            "suggestion": schema_error_suggestion(&e.error),
            "fixable": schema_error_fix(&e.error),
            "normalize_action": schema_error_action(&e.error),
        })
    }));
    // Fold both lint reports into diagnostics, tagged with their check id.
    for (check_id, lint_report) in [
        ("methodology", method_report.as_ref()),
        ("frontier_graph", graph_report.as_ref()),
    ] {
        if let Some(lint_report) = lint_report {
            diagnostics.extend(lint_report.diagnostics.iter().map(|d| {
                json!({
                    "severity": d.severity.to_string(),
                    "rule_id": &d.rule_id,
                    "check": check_id,
                    "finding_id": &d.finding_id,
                    "field_path": null,
                    "message": &d.message,
                    "suggestion": &d.suggestion,
                    "fixable": false,
                    "normalize_action": null,
                })
            }));
        }
    }
    // Per-check counts default to 0 when the report was skipped.
    let method_errors = method_report.as_ref().map_or(0, |r| r.errors);
    let method_warnings = method_report.as_ref().map_or(0, |r| r.warnings);
    let method_infos = method_report.as_ref().map_or(0, |r| r.infos);
    let graph_errors = graph_report.as_ref().map_or(0, |r| r.errors);
    let graph_warnings = graph_report.as_ref().map_or(0, |r| r.warnings);
    let graph_infos = graph_report.as_ref().map_or(0, |r| r.infos);
    let replay_report = loaded.as_ref().map(events::replay_report);
    // In schema-only mode the integrity analysis runs on the in-memory
    // frontier; otherwise it re-reads the path directly.
    let state_integrity_report = if schema_only {
        loaded.as_ref().map(state_integrity::analyze)
    } else {
        state_integrity::analyze_path(src).ok()
    };
    // Event-replay conflicts become error-level diagnostics.
    if let Some(replay) = replay_report.as_ref()
        && !replay.ok
    {
        diagnostics.extend(replay.conflicts.iter().map(|conflict| {
            json!({
                "severity": "error",
                "rule_id": "event_replay",
                "check": "events",
                "finding_id": null,
                "field_path": null,
                "message": conflict,
                "suggestion": "Inspect canonical state events and repair the frontier event log before proof export.",
                "fixable": false,
                "normalize_action": null,
            })
        }));
    }
    // Replay contributes at most one error to the totals regardless of how
    // many conflicts it reported.
    let event_errors = replay_report
        .as_ref()
        .map_or(0, |replay| usize::from(!replay.ok));
    let state_integrity_errors = state_integrity_report
        .as_ref()
        .map_or(0, |report| report.structural_errors.len());
    // Summaries of derived registries; defaults keep the payload shape stable
    // when the frontier failed to load.
    let (source_registry, evidence_atoms, conditions, proposal_summary, proof_state) = loaded
        .as_ref()
        .map(|frontier| {
            (
                sources::source_summary(frontier),
                sources::evidence_summary(frontier),
                sources::condition_summary(frontier),
                proposals::summary(frontier),
                proposals::proof_state_json(&frontier.proof_state),
            )
        })
        .unwrap_or_else(|| {
            (
                sources::SourceRegistrySummary::default(),
                sources::EvidenceAtomSummary::default(),
                sources::ConditionSummary::default(),
                proposals::ProposalSummary::default(),
                Value::Null,
            )
        });
    let signature_report = loaded
        .as_ref()
        .and_then(|frontier| sign::verify_frontier_data(frontier, None).ok());
    // Compare the derivable projection against materialized state and flag
    // anything derivable-but-missing (fixable via `vela normalize`), plus
    // reviewer-identity problems on accepted/applied proposals.
    if let Some(frontier) = loaded.as_ref()
        && !schema_only
    {
        let projection = sources::derive_projection(frontier);
        let existing_sources = frontier
            .sources
            .iter()
            .map(|source| source.id.as_str())
            .collect::<std::collections::BTreeSet<_>>();
        let existing_atoms = frontier
            .evidence_atoms
            .iter()
            .map(|atom| atom.id.as_str())
            .collect::<std::collections::BTreeSet<_>>();
        let existing_conditions = frontier
            .condition_records
            .iter()
            .map(|record| record.id.as_str())
            .collect::<std::collections::BTreeSet<_>>();
        for source in projection
            .sources
            .iter()
            .filter(|source| !existing_sources.contains(source.id.as_str()))
        {
            diagnostics.push(json!({
                "severity": "warning",
                "rule_id": "missing_source_record",
                "check": "source_registry",
                "finding_id": source.finding_ids.first(),
                "field_path": "sources",
                "message": format!("Source record {} is derivable but not materialized in frontier state.", source.id),
                "suggestion": "Run `vela normalize` to materialize source records before proof export.",
                "fixable": true,
                "normalize_action": "materialize_source_record",
            }));
        }
        for atom in projection
            .evidence_atoms
            .iter()
            .filter(|atom| !existing_atoms.contains(atom.id.as_str()))
        {
            diagnostics.push(json!({
                "severity": "warning",
                "rule_id": "missing_evidence_atom",
                "check": "evidence_atoms",
                "finding_id": atom.finding_id,
                "field_path": "evidence_atoms",
                "message": format!("Evidence atom {} is derivable but not materialized in frontier state.", atom.id),
                "suggestion": "Run `vela normalize` to materialize evidence atoms before proof export.",
                "fixable": true,
                "normalize_action": "materialize_evidence_atom",
            }));
        }
        // Atoms without any locator are warnings but not auto-fixable.
        for atom in projection
            .evidence_atoms
            .iter()
            .filter(|atom| atom.locator.is_none())
        {
            diagnostics.push(json!({
                "severity": "warning",
                "rule_id": "missing_evidence_locator",
                "check": "evidence_atoms",
                "finding_id": atom.finding_id,
                "field_path": "evidence_atoms[].locator",
                "message": format!("Evidence atom {} has no source locator.", atom.id),
                "suggestion": "Add or verify evidence spans, table rows, pages, sections, or run locators.",
                "fixable": false,
                "normalize_action": null,
            }));
        }
        for condition in projection
            .condition_records
            .iter()
            .filter(|condition| !existing_conditions.contains(condition.id.as_str()))
        {
            diagnostics.push(json!({
                "severity": "warning",
                "rule_id": "condition_record_missing",
                "check": "conditions",
                "finding_id": condition.finding_id,
                "field_path": "condition_records",
                "message": format!("Condition record {} is derivable but not materialized in frontier state.", condition.id),
                "suggestion": "Run `vela normalize` to materialize condition boundaries before proof export.",
                "fixable": true,
                "normalize_action": "materialize_condition_record",
            }));
        }
        // Accepted/applied proposals must carry a real reviewer identity;
        // missing or placeholder reviewers are hard errors.
        for proposal in frontier.proposals.iter().filter(|proposal| {
            matches!(proposal.status.as_str(), "accepted" | "applied")
                && proposal
                    .reviewed_by
                    .as_deref()
                    .is_none_or(proposals::is_placeholder_reviewer)
        }) {
            diagnostics.push(json!({
                "severity": "error",
                "rule_id": "reviewer_identity_missing",
                "check": "proposals",
                "finding_id": proposal.target.id,
                "field_path": "proposals[].reviewed_by",
                "message": format!("Accepted or applied proposal {} uses a missing or placeholder reviewer identity.", proposal.id),
                "suggestion": "Accept the proposal with a stable named reviewer id before strict proof use.",
                "fixable": false,
                "normalize_action": null,
            }));
        }
    }
    // Signals are computed over the accumulated diagnostics so far.
    let signal_report = loaded
        .as_ref()
        .map(|frontier| signals::analyze(frontier, &diagnostics))
        .unwrap_or_else(empty_signal_report);
    let errors =
        report.errors.len() + method_errors + graph_errors + event_errors + state_integrity_errors;
    let warnings = method_warnings + graph_warnings + signal_report.proof_readiness.warnings;
    let infos = method_infos + graph_infos;
    // Signals explicitly marked as blocking strict checks.
    let strict_blockers = signal_report
        .signals
        .iter()
        .filter(|signal| signal.blocks.iter().any(|block| block == "strict_check"))
        .count();
    let fixable = diagnostics
        .iter()
        .filter(|d| d.get("fixable").and_then(Value::as_bool).unwrap_or(false))
        .count();
    // Non-strict: errors alone decide. Strict: warnings and blockers also
    // fail the check.
    let ok = errors == 0 && (!strict || (warnings == 0 && strict_blockers == 0));

    json!({
        "ok": ok,
        "command": "check",
        "schema_version": project::VELA_SCHEMA_VERSION,
        "source": {
            "path": src.display().to_string(),
            "hash": format!("sha256:{source_hash}"),
        },
        "summary": {
            "status": if ok { "pass" } else { "fail" },
            "checked_findings": report.total_files,
            "valid_findings": report.valid,
            "invalid_findings": report.invalid,
            "errors": errors,
            "warnings": warnings,
            "info": infos,
            "fixable": fixable,
            "strict": strict,
            "schema_only": schema_only,
        },
        "checks": [
            {
                "id": "schema",
                "status": if report.invalid == 0 { "pass" } else { "fail" },
                "checked": report.total_files,
                "failed": report.invalid,
                "errors": report.errors.iter().map(|e| json!({
                    "file": e.file,
                    "message": e.error,
                })).collect::<Vec<_>>(),
            },
            {
                "id": "methodology",
                "status": if method_errors == 0 { "pass" } else { "fail" },
                "checked": method_report.as_ref().map_or(0, |r| r.findings_checked),
                "failed": method_errors,
                "warnings": method_warnings,
                "info": method_infos,
                "skipped": schema_only,
            },
            {
                "id": "frontier_graph",
                "status": if graph_errors == 0 { "pass" } else { "fail" },
                "checked": graph_report.as_ref().map_or(0, |r| r.findings_checked),
                "failed": graph_errors,
                "warnings": graph_warnings,
                "info": graph_infos,
                "skipped": schema_only,
            },
            {
                "id": "signals",
                "status": if strict_blockers == 0 { "pass" } else { "fail" },
                "checked": signal_report.signals.len(),
                "failed": strict_blockers,
                "warnings": signal_report.proof_readiness.warnings,
                "skipped": loaded.is_none(),
                "blockers": signal_report.signals.iter()
                    .filter(|s| s.blocks.iter().any(|b| b == "strict_check"))
                    .map(|s| json!({
                        "id": s.id,
                        "kind": s.kind,
                        "severity": s.severity,
                        "reason": s.reason,
                    }))
                    .collect::<Vec<_>>(),
            },
            {
                "id": "events",
                "status": if replay_report.as_ref().is_none_or(|replay| replay.ok) { "pass" } else { "fail" },
                "checked": replay_report.as_ref().map_or(0, |replay| replay.event_log.count),
                "failed": event_errors,
                "skipped": schema_only || loaded.is_none(),
            },
            {
                "id": "state_integrity",
                "status": if state_integrity_report.as_ref().is_none_or(|report| report.status != "fail") { "pass" } else { "fail" },
                "checked": state_integrity_report.as_ref().map_or(0, |report| report.summary.get("events").copied().unwrap_or_default()),
                "failed": state_integrity_errors,
                "skipped": schema_only || loaded.is_none(),
            }
        ],
        "event_log": replay_report.as_ref().map(|replay| &replay.event_log),
        "replay": replay_report,
        "state_integrity": state_integrity_report,
        "source_registry": source_registry,
        "evidence_atoms": evidence_atoms,
        "conditions": conditions,
        "proposals": proposal_summary,
        "proof_state": proof_state,
        "signatures": signature_report,
        "diagnostics": diagnostics,
        "signals": signal_report.signals,
        "review_queue": signal_report.review_queue,
        "proof_readiness": signal_report.proof_readiness,
        "repair_plan": build_repair_plan(&diagnostics),
    })
}
8868
/// Run the `normalize` subcommand: apply safe, mechanical repairs to a
/// frontier (entity type/name fixes, confidence recomputation, optional
/// content-address id rewrites, materialization of derivable records).
///
/// Output modes are mutually exclusive: `--write` saves in place, `--out`
/// saves to a new path, and neither performs a dry run. Frontiers whose
/// event log already contains substantive (non-creation) events refuse
/// in-place or out-path writes. Exits the process on any precondition or
/// I/O failure.
#[allow(clippy::too_many_arguments)]
fn cmd_normalize(
    source: &Path,
    out: Option<&Path>,
    write: bool,
    dry_run: bool,
    rewrite_ids: bool,
    id_map: Option<&Path>,
    resync_provenance: bool,
    json_output: bool,
) {
    // Flag-combination preconditions; each failure exits the process.
    if write && out.is_some() {
        fail("Use either --write or --out, not both.");
    }
    if dry_run && (write || out.is_some()) {
        fail("--dry-run cannot be combined with --write or --out.");
    }
    if id_map.is_some() && !rewrite_ids {
        fail("--id-map requires --rewrite-ids.");
    }

    let detected = repo::detect(source).unwrap_or_else(|e| {
        eprintln!("{e}");
        std::process::exit(1);
    });
    // Proof packets are immutable exports; they must never be normalized.
    if matches!(detected, repo::VelaSource::PacketDir(_)) {
        fail(
            "Cannot normalize a proof packet directory. Export a new packet from frontier state instead.",
        );
    }
    let mut frontier = repo::load(&detected).unwrap_or_else(|e| fail_return(&e));
    // A frontier with anything beyond its creation event has canonical
    // history; writing a normalization over it would bypass review.
    let has_substantive_events = frontier
        .events
        .iter()
        .any(|event| event.kind != "frontier.created");
    if has_substantive_events && (write || out.is_some()) {
        fail(
            "Refusing to normalize a frontier with canonical events. Normalize before proposal-backed writes, or create a new reviewed transition for the intended change.",
        );
    }
    let source_hash = hash_path(source).unwrap_or_else(|_| "unavailable".to_string());
    // Snapshot stats before mutation so the report can flag a stats change.
    let before_stats = serde_json::to_value(&frontier.stats).unwrap_or(Value::Null);
    let (entity_type_fixes, entity_name_fixes) =
        normalize::normalize_findings(&mut frontier.findings);
    let confidence_updates =
        bundle::recompute_all_confidence(&mut frontier.findings, &frontier.replications);
    let provenance_resync_count = if resync_provenance {
        sources::resync_provenance_from_sources(&mut frontier)
    } else {
        0
    };
    // Record counts prior to materialization so the deltas can be reported.
    let before_source_count = frontier.sources.len();
    let before_evidence_atom_count = frontier.evidence_atoms.len();
    let before_condition_record_count = frontier.condition_records.len();

    // Optional id rewrite: re-derive each finding's content address and remap
    // ids (and intra-frontier links) where they have drifted.
    let mut id_rewrites = Vec::new();
    if rewrite_ids {
        let mut id_map_values = std::collections::BTreeMap::<String, String>::new();
        for finding in &frontier.findings {
            let expected =
                bundle::FindingBundle::content_address(&finding.assertion, &finding.provenance);
            if expected != finding.id {
                id_map_values.insert(finding.id.clone(), expected);
            }
        }
        // Guard against two findings collapsing onto one content address.
        let new_ids = id_map_values
            .values()
            .map(String::as_str)
            .collect::<std::collections::HashSet<_>>();
        if new_ids.len() != id_map_values.len() {
            fail("Refusing to rewrite IDs because two findings map to the same content address.");
        }
        for finding in &mut frontier.findings {
            if let Some(new_id) = id_map_values.get(&finding.id) {
                id_rewrites.push(json!({"old": finding.id, "new": new_id}));
                // Preserve the old id as the previous version for traceability.
                finding.previous_version = Some(finding.id.clone());
                finding.id = new_id.clone();
            }
        }
        // Retarget links that pointed at a rewritten id.
        for finding in &mut frontier.findings {
            for link in &mut finding.links {
                if let Some(new_target) = id_map_values.get(&link.target) {
                    link.target = new_target.clone();
                }
            }
        }
        // Optionally persist the old→new id map for external consumers.
        if let Some(path) = id_map {
            std::fs::write(
                path,
                serde_json::to_string_pretty(&id_map_values)
                    .expect("failed to serialize normalize id map"),
            )
            .unwrap_or_else(|e| fail(&format!("Failed to write {}: {e}", path.display())));
        }
    }

    // Materialize derivable source/evidence/condition records, then compute
    // how many each category gained.
    sources::materialize_project(&mut frontier);
    let source_records_materialized = frontier.sources.len().saturating_sub(before_source_count);
    let evidence_atoms_materialized = frontier
        .evidence_atoms
        .len()
        .saturating_sub(before_evidence_atom_count);
    let condition_records_materialized = frontier
        .condition_records
        .len()
        .saturating_sub(before_condition_record_count);
    let after_stats = serde_json::to_value(&frontier.stats).unwrap_or(Value::Null);
    let id_rewrite_count = id_rewrites.len();
    // Persist: in place (--write), to a new path (--out), or not at all.
    let wrote_to = if write {
        repo::save(&detected, &frontier).unwrap_or_else(|e| fail(&e));
        Some(source.display().to_string())
    } else if let Some(out_path) = out {
        repo::save_to_path(out_path, &frontier).unwrap_or_else(|e| fail(&e));
        Some(out_path.display().to_string())
    } else {
        None
    };
    let wrote = wrote_to.is_some();
    // All normalize changes are considered safe; total them for the summary.
    let planned_changes = entity_type_fixes
        + entity_name_fixes
        + confidence_updates
        + id_rewrite_count
        + source_records_materialized
        + evidence_atoms_materialized
        + condition_records_materialized
        + provenance_resync_count;
    let payload = json!({
        "ok": true,
        "command": "normalize",
        "schema_version": project::VELA_SCHEMA_VERSION,
        "source": {
            "path": source.display().to_string(),
            "hash": format!("sha256:{source_hash}"),
        },
        "dry_run": wrote_to.is_none(),
        "wrote_to": wrote_to,
        "summary": {
            "planned": planned_changes,
            "safe": planned_changes,
            "unsafe": 0,
            "applied": if wrote { planned_changes } else { 0 },
        },
        "changes": {
            "entity_type_fixes": entity_type_fixes,
            "entity_name_fixes": entity_name_fixes,
            "confidence_updates": confidence_updates,
            "id_rewrites": id_rewrite_count,
            "source_records_materialized": source_records_materialized,
            "evidence_atoms_materialized": evidence_atoms_materialized,
            "condition_records_materialized": condition_records_materialized,
            "provenance_resyncs": provenance_resync_count,
            "stats_changed": before_stats != after_stats,
        },
        "id_rewrites": id_rewrites,
        "repair_plan": if wrote { Vec::<Value>::new() } else {
            vec![json!({
                "action": "apply_normalization",
                "command": "vela normalize <frontier> --out frontier.normalized.json"
            })]
        },
    });
    // Human-readable output distinguishes "written" from "dry run".
    if json_output {
        println!(
            "{}",
            serde_json::to_string_pretty(&payload).expect("failed to serialize normalize report")
        );
    } else if let Some(path) = payload.get("wrote_to").and_then(Value::as_str) {
        println!("{} normalized frontier written to {path}", style::ok("ok"));
        println!(
            "  entity type fixes: {}, entity name fixes: {}, confidence updates: {}, id rewrites: {}",
            entity_type_fixes, entity_name_fixes, confidence_updates, id_rewrite_count
        );
    } else {
        println!("normalize dry run for {}", source.display());
        println!(
            "  would apply entity type fixes: {}, entity name fixes: {}, confidence updates: {}, id rewrites: {}",
            entity_type_fixes, entity_name_fixes, confidence_updates, id_rewrite_count
        );
    }
}
9056
/// Run the `proof` subcommand: export a proof packet from a frontier,
/// optionally run a gold-standard benchmark, validate the packet, and record
/// the export in frontier proof state.
///
/// Only the `bbb-alzheimer` template is supported. Any failure (export,
/// benchmark, validation, save) exits the process via `fail`. When
/// `record_proof_state` is set, the updated frontier is saved back to its
/// source path.
fn cmd_proof(
    frontier: &Path,
    out: &Path,
    template: &str,
    gold: Option<&Path>,
    record_proof_state: bool,
    json_output: bool,
) {
    if template != "bbb-alzheimer" {
        fail(&format!(
            "Unsupported proof template '{template}'. Supported: bbb-alzheimer"
        ));
    }
    let mut loaded = load_frontier_or_fail(frontier);
    let source_hash = hash_path_or_fail(frontier);
    // Export the packet first; the record it returns feeds proof-state
    // bookkeeping below.
    let export_record = export::export_packet_with_source(&loaded, Some(frontier), out)
        .unwrap_or_else(|e| fail(&e));
    // Optional benchmark against a gold dataset; its summary is written into
    // the packet and a failing benchmark aborts the command.
    let benchmark_summary = gold.map(|gold_path| {
        let summary = benchmark::run_suite(gold_path).unwrap_or_else(|e| {
            fail(&format!(
                "Failed to run proof benchmark '{}': {e}",
                gold_path.display()
            ))
        });
        append_packet_json_file(out, "benchmark-summary.json", &summary).unwrap_or_else(|e| {
            fail(&format!("Failed to write benchmark summary: {e}"));
        });
        if summary.get("ok").and_then(Value::as_bool) != Some(true) {
            fail(&format!(
                "Proof benchmark failed for {}",
                gold_path.display()
            ));
        }
        summary
    });
    // Validate the freshly written packet before recording anything.
    let validation_summary = packet::validate(out).unwrap_or_else(|e| {
        fail(&format!("Proof packet validation failed: {e}"));
    });
    // Record the export hashes in frontier proof state and refresh stats.
    proposals::record_proof_export(
        &mut loaded,
        proposals::ProofPacketRecord {
            generated_at: export_record.generated_at.clone(),
            snapshot_hash: export_record.snapshot_hash.clone(),
            event_log_hash: export_record.event_log_hash.clone(),
            packet_manifest_hash: export_record.packet_manifest_hash.clone(),
        },
    );
    project::recompute_stats(&mut loaded);
    // Persist the updated proof state only when explicitly requested.
    if record_proof_state {
        repo::save_to_path(frontier, &loaded).unwrap_or_else(|e| fail(&e));
    }
    let signal_report = signals::analyze(&loaded, &[]);
    if json_output {
        let payload = json!({
            "ok": true,
            "command": "proof",
            "schema_version": project::VELA_SCHEMA_VERSION,
            "recorded_proof_state": record_proof_state,
            "frontier": {
                "name": &loaded.project.name,
                "source": frontier.display().to_string(),
                "hash": format!("sha256:{source_hash}"),
            },
            "template": template,
            "gold": gold.map(|p| p.display().to_string()),
            "benchmark": benchmark_summary,
            "output": out.display().to_string(),
            "packet": {
                "manifest_path": out.join("manifest.json").display().to_string(),
            },
            "validation": {
                "status": "ok",
                "summary": validation_summary,
            },
            "proposals": proposals::summary(&loaded),
            "proof_state": loaded.proof_state,
            "signals": signal_report.signals,
            "review_queue": signal_report.review_queue,
            "proof_readiness": signal_report.proof_readiness,
            "trace_path": out.join("proof-trace.json").display().to_string(),
        });
        println!(
            "{}",
            serde_json::to_string_pretty(&payload).expect("failed to serialize proof response")
        );
    } else {
        println!("vela proof");
        println!("  source:   {}", frontier.display());
        println!("  template: {template}");
        println!("  output:   {}", out.display());
        println!("  trace:    {}", out.join("proof-trace.json").display());
        println!(
            "  proof state: {}",
            if record_proof_state {
                "recorded"
            } else {
                "not recorded"
            }
        );
        println!();
        println!("{validation_summary}");
    }
}
9160
/// Print (or emit as JSON with `--json`) a high-level health summary of the
/// frontier at `path`: pending proposals, causal-audit verdicts, replication
/// outcomes, and federation sync/conflict history.
fn cmd_status(path: &Path, json: bool) {
    let project = repo::load_from_path(path).unwrap_or_else(|e| fail_return(&e));

    // Tally proposals awaiting review, broken down by proposal kind.
    let mut pending_total = 0usize;
    let mut pending_by_kind: std::collections::BTreeMap<String, usize> =
        std::collections::BTreeMap::new();
    for p in &project.proposals {
        if p.status == "pending_review" {
            pending_total += 1;
            *pending_by_kind.entry(p.kind.clone()).or_insert(0) += 1;
        }
    }

    // Causal identifiability audit over the frontier's findings.
    let audit = crate::causal_reasoning::audit_frontier(&project);
    let audit_summary = crate::causal_reasoning::summarize_audit(&audit);

    // Scan the event log for the newest peer-sync and conflict events
    // (comparison is on the raw timestamp strings).
    let mut last_sync: Option<&crate::events::StateEvent> = None;
    let mut last_conflict: Option<&crate::events::StateEvent> = None;
    let mut total_conflicts = 0usize;
    for e in &project.events {
        match e.kind.as_str() {
            "frontier.synced_with_peer" => {
                if last_sync
                    .map(|prev| e.timestamp > prev.timestamp)
                    .unwrap_or(true)
                {
                    last_sync = Some(e);
                }
            }
            "frontier.conflict_detected" => {
                total_conflicts += 1;
                if last_conflict
                    .map(|prev| e.timestamp > prev.timestamp)
                    .unwrap_or(true)
                {
                    last_conflict = Some(e);
                }
            }
            _ => {}
        }
    }

    // Replication outcomes: distinct findings with at least one success,
    // plus the count of failed replication records.
    let mut targets_with_success = std::collections::HashSet::new();
    let mut failed_replications = 0usize;
    for r in &project.replications {
        if r.outcome == "replicated" {
            targets_with_success.insert(r.target_finding.clone());
        } else if r.outcome == "failed" {
            failed_replications += 1;
        }
    }

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "status",
                "frontier": frontier_label(&project),
                "vfr_id": project.frontier_id(),
                "findings": project.findings.len(),
                "events": project.events.len(),
                "actors": project.actors.len(),
                "peers": project.peers.len(),
                "inbox": {
                    "pending_total": pending_total,
                    "pending_by_kind": pending_by_kind,
                },
                "causal_audit": {
                    "identified": audit_summary.identified,
                    "conditional": audit_summary.conditional,
                    "underidentified": audit_summary.underidentified,
                    "underdetermined": audit_summary.underdetermined,
                },
                "replications": {
                    "total": project.replications.len(),
                    "findings_with_success": targets_with_success.len(),
                    "failed": failed_replications,
                },
                "federation": {
                    "peers": project.peers.len(),
                    "last_sync": last_sync.map(|e| e.timestamp.clone()),
                    "last_conflict": last_conflict.map(|e| e.timestamp.clone()),
                    "total_conflicts": total_conflicts,
                },
            }))
            .expect("serialize status")
        );
        return;
    }

    // Human-readable report: header, counts, inbox, audit, replications,
    // federation — one section per concern.
    println!();
    println!(
        " {}",
        format!("VELA · STATUS · {}", path.display())
            .to_uppercase()
            .dimmed()
    );
    println!(" {}", style::tick_row(60));
    println!();
    println!(" frontier: {}", frontier_label(&project));
    println!(" vfr_id: {}", project.frontier_id());
    println!(
        " findings: {} events: {} peers: {} actors: {}",
        project.findings.len(),
        project.events.len(),
        project.peers.len(),
        project.actors.len(),
    );
    println!();
    if pending_total > 0 {
        println!(
            " {} {pending_total} pending proposals",
            style::warn("inbox")
        );
        for (k, n) in &pending_by_kind {
            println!(" · {n:>3} {k}");
        }
    } else {
        println!(" {} inbox clean", style::ok("ok"));
    }
    println!();
    // Audit chip escalates: red when anything is underidentified, amber when
    // only conditional findings remain, green when everything is identified.
    if audit_summary.underidentified > 0 || audit_summary.conditional > 0 {
        let chip = if audit_summary.underidentified > 0 {
            style::lost("audit")
        } else {
            style::warn("audit")
        };
        println!(
            " {} identified {} · conditional {} · underidentified {} · underdetermined {}",
            chip,
            audit_summary.identified,
            audit_summary.conditional,
            audit_summary.underidentified,
            audit_summary.underdetermined,
        );
        if audit_summary.underidentified > 0 {
            println!(
                " next: vela causal audit {} --problems-only",
                path.display()
            );
        }
    } else if audit_summary.underdetermined == 0 {
        println!(
            " {} causal audit: all {} identified",
            style::ok("ok"),
            audit_summary.identified
        );
    } else {
        println!(
            " {} causal audit: {} identified, {} ungraded",
            style::warn("audit"),
            audit_summary.identified,
            audit_summary.underdetermined,
        );
    }
    println!();
    if !project.replications.is_empty() {
        println!(
            " {} {} records · {} findings replicated · {} failed",
            style::ok("replications"),
            project.replications.len(),
            targets_with_success.len(),
            failed_replications,
        );
    }
    if project.peers.is_empty() {
        println!(
            " {} no federation peers registered",
            style::warn("federation")
        );
    } else {
        let last = last_sync
            .map(|e| fmt_timestamp(&e.timestamp))
            .unwrap_or_else(|| "never".to_string());
        let chip = if total_conflicts > 0 {
            style::warn("federation")
        } else {
            style::ok("federation")
        };
        println!(
            " {} {} peer(s) · last sync {} · {} conflict events",
            chip,
            project.peers.len(),
            last,
            total_conflicts,
        );
    }
    println!();
}
9358
9359fn cmd_log(path: &Path, limit: usize, kind_filter: Option<&str>, json: bool) {
9361 let project = repo::load_from_path(path).unwrap_or_else(|e| fail_return(&e));
9362 let mut events: Vec<&crate::events::StateEvent> = project
9363 .events
9364 .iter()
9365 .filter(|e| match kind_filter {
9366 Some(k) => e.kind.contains(k),
9367 None => true,
9368 })
9369 .collect();
9370 events.sort_by(|a, b| b.timestamp.cmp(&a.timestamp));
9371 events.truncate(limit);
9372
9373 if json {
9374 let payload: Vec<_> = events
9375 .iter()
9376 .map(|e| {
9377 json!({
9378 "id": e.id,
9379 "kind": e.kind,
9380 "actor": e.actor.id,
9381 "target": &e.target.id,
9382 "target_type": &e.target.r#type,
9383 "timestamp": e.timestamp,
9384 "reason": e.reason,
9385 })
9386 })
9387 .collect();
9388 println!(
9389 "{}",
9390 serde_json::to_string_pretty(&json!({
9391 "ok": true,
9392 "command": "log",
9393 "events": payload,
9394 }))
9395 .expect("serialize log")
9396 );
9397 return;
9398 }
9399
9400 println!();
9401 println!(
9402 " {}",
9403 format!("VELA · LOG · {} (latest {})", path.display(), events.len())
9404 .to_uppercase()
9405 .dimmed()
9406 );
9407 println!(" {}", style::tick_row(60));
9408 if events.is_empty() {
9409 println!(" (no events)");
9410 return;
9411 }
9412 for e in &events {
9413 let when = fmt_timestamp(&e.timestamp);
9414 let target_short = if e.target.id.len() > 22 {
9415 format!("{}…", &e.target.id[..21])
9416 } else {
9417 e.target.id.clone()
9418 };
9419 let reason: String = e.reason.chars().take(70).collect();
9420 println!(
9421 " {:<19} {:<32} {:<24} {}",
9422 when, e.kind, target_short, reason
9423 );
9424 }
9425 println!();
9426}
9427
9428fn cmd_inbox(path: &Path, kind_filter: Option<&str>, limit: usize, json: bool) {
9430 let project = repo::load_from_path(path).unwrap_or_else(|e| fail_return(&e));
9431
9432 let mut score_map: std::collections::HashMap<String, (f64, f64, f64, f64)> =
9435 std::collections::HashMap::new();
9436 for p in &project.proposals {
9437 if p.kind != "finding.note" {
9438 continue;
9439 }
9440 if p.actor.id != "agent:reviewer-agent" {
9441 continue;
9442 }
9443 let reason = &p.reason;
9444 let Some(target) = reason.split_whitespace().find(|s| s.starts_with("vpr_")) else {
9445 continue;
9446 };
9447 let text = p.payload.get("text").and_then(|v| v.as_str()).unwrap_or("");
9448 let extract = |k: &str| -> f64 {
9449 let pat = format!("{k} ");
9450 text.find(&pat)
9451 .and_then(|idx| text[idx + pat.len()..].split_whitespace().next())
9452 .and_then(|t| t.parse::<f64>().ok())
9453 .unwrap_or(0.0)
9454 };
9455 score_map.insert(
9456 target.to_string(),
9457 (
9458 extract("plausibility"),
9459 extract("evidence"),
9460 extract("scope"),
9461 extract("duplicate-risk"),
9462 ),
9463 );
9464 }
9465
9466 let mut pending: Vec<&crate::proposals::StateProposal> = project
9467 .proposals
9468 .iter()
9469 .filter(|p| {
9470 p.status == "pending_review"
9471 && match kind_filter {
9472 Some(k) => p.kind.contains(k),
9473 None => true,
9474 }
9475 })
9476 .collect();
9477 pending.sort_by(|a, b| {
9479 let sa = score_map
9480 .get(&a.id)
9481 .map(|(p, e, s, d)| 0.4 * p + 0.3 * e + 0.2 * s - 0.3 * d);
9482 let sb = score_map
9483 .get(&b.id)
9484 .map(|(p, e, s, d)| 0.4 * p + 0.3 * e + 0.2 * s - 0.3 * d);
9485 sb.partial_cmp(&sa).unwrap_or(std::cmp::Ordering::Equal)
9486 });
9487 pending.truncate(limit);
9488
9489 if json {
9490 let payload: Vec<_> = pending
9491 .iter()
9492 .map(|p| {
9493 let assertion_text = p
9494 .payload
9495 .get("finding")
9496 .and_then(|f| f.get("assertion"))
9497 .and_then(|a| a.get("text"))
9498 .and_then(|t| t.as_str());
9499 let assertion_type = p
9500 .payload
9501 .get("finding")
9502 .and_then(|f| f.get("assertion"))
9503 .and_then(|a| a.get("type"))
9504 .and_then(|t| t.as_str());
9505 let composite = score_map
9506 .get(&p.id)
9507 .map(|(pl, e, s, d)| 0.4 * pl + 0.3 * e + 0.2 * s - 0.3 * d);
9508 json!({
9509 "proposal_id": p.id,
9510 "kind": p.kind,
9511 "actor": p.actor,
9512 "reason": p.reason,
9513 "assertion_text": assertion_text,
9514 "assertion_type": assertion_type,
9515 "reviewer_composite": composite,
9516 })
9517 })
9518 .collect();
9519 println!(
9520 "{}",
9521 serde_json::to_string_pretty(&json!({
9522 "ok": true,
9523 "command": "inbox",
9524 "shown": pending.len(),
9525 "proposals": payload,
9526 }))
9527 .expect("serialize inbox")
9528 );
9529 return;
9530 }
9531
9532 println!();
9533 println!(
9534 " {}",
9535 format!(
9536 "VELA · INBOX · {} ({} pending shown)",
9537 path.display(),
9538 pending.len()
9539 )
9540 .to_uppercase()
9541 .dimmed()
9542 );
9543 println!(" {}", style::tick_row(60));
9544 if pending.is_empty() {
9545 println!(" (inbox clean)");
9546 return;
9547 }
9548 for p in &pending {
9549 let assertion_text = p
9550 .payload
9551 .get("finding")
9552 .and_then(|f| f.get("assertion"))
9553 .and_then(|a| a.get("text"))
9554 .and_then(|t| t.as_str())
9555 .unwrap_or("");
9556 let assertion_type = p
9557 .payload
9558 .get("finding")
9559 .and_then(|f| f.get("assertion"))
9560 .and_then(|a| a.get("type"))
9561 .and_then(|t| t.as_str())
9562 .unwrap_or("");
9563 let composite = score_map
9564 .get(&p.id)
9565 .map(|(pl, e, s, d)| 0.4 * pl + 0.3 * e + 0.2 * s - 0.3 * d);
9566 let score_str = composite
9567 .map(|c| format!("[{:.2}]", c))
9568 .unwrap_or_else(|| "[—] ".to_string());
9569 let kind_short = if p.kind.len() > 12 {
9570 format!("{}…", &p.kind[..11])
9571 } else {
9572 p.kind.clone()
9573 };
9574 let summary: String = if !assertion_text.is_empty() {
9575 assertion_text.chars().take(80).collect()
9576 } else {
9577 p.reason.chars().take(80).collect()
9578 };
9579 println!(
9580 " {} {} {:<13} {:<18} {}",
9581 score_str, p.id, kind_short, assertion_type, summary
9582 );
9583 }
9584 println!();
9585}
9586
9587fn cmd_ask(path: &Path, question: &str, json: bool) {
9592 let project = repo::load_from_path(path).unwrap_or_else(|e| fail_return(&e));
9593
9594 if question.trim().is_empty() {
9595 use std::io::{BufRead, Write};
9597 println!();
9598 println!(
9599 " {}",
9600 format!("VELA · ASK · {}", path.display())
9601 .to_uppercase()
9602 .dimmed()
9603 );
9604 println!(" {}", style::tick_row(60));
9605 println!(" Ask a question. Type `exit` to quit.");
9606 println!(" Examples:");
9607 println!(" · what's pending?");
9608 println!(" · what's underidentified?");
9609 println!(" · how many findings?");
9610 println!(" · what changed recently?");
9611 println!(" · who has what calibration?");
9612 println!();
9613 let stdin = std::io::stdin();
9614 let mut stdout = std::io::stdout();
9615 loop {
9616 print!(" ask> ");
9617 stdout.flush().ok();
9618 let mut line = String::new();
9619 if stdin.lock().read_line(&mut line).is_err() {
9620 break;
9621 }
9622 let q = line.trim();
9623 if q.is_empty() {
9624 continue;
9625 }
9626 if matches!(q, "exit" | "quit" | "q") {
9627 break;
9628 }
9629 answer(&project, q, false);
9630 }
9631 return;
9632 }
9633
9634 answer(&project, question, json);
9635}
9636
/// Route a free-text question to one of several canned answers by keyword
/// matching on the lowercased question. Each branch prints either JSON or
/// plain text and returns; unmatched questions get a routing hint.
fn answer(project: &crate::project::Project, q: &str, json: bool) {
    let lower = q.to_lowercase();

    // Branch 1: inbox / pending-proposal questions.
    if lower.contains("pending")
        || lower.contains("inbox")
        || lower.contains("queue")
        || lower.contains("to review")
    {
        let pending: Vec<&crate::proposals::StateProposal> = project
            .proposals
            .iter()
            .filter(|p| p.status == "pending_review")
            .collect();
        let mut by_kind: std::collections::BTreeMap<String, usize> = Default::default();
        for p in &pending {
            *by_kind.entry(p.kind.clone()).or_insert(0) += 1;
        }
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "answer": "pending",
                    "total": pending.len(),
                    "by_kind": by_kind,
                }))
                .unwrap()
            );
        } else {
            println!(" {} pending proposals.", pending.len());
            for (k, n) in &by_kind {
                println!(" · {n:>3} {k}");
            }
            if pending.is_empty() {
                println!(" Inbox is clean.");
            } else {
                println!(" Run `vela inbox <frontier>` to triage.");
            }
        }
        return;
    }

    // Branch 2: causal-audit / identifiability questions.
    if lower.contains("underident")
        || lower.contains("audit")
        || lower.contains("identif")
        || lower.contains("causal")
    {
        let entries = crate::causal_reasoning::audit_frontier(project);
        let summary = crate::causal_reasoning::summarize_audit(&entries);
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "answer": "audit",
                    "summary": {
                        "identified": summary.identified,
                        "conditional": summary.conditional,
                        "underidentified": summary.underidentified,
                        "underdetermined": summary.underdetermined,
                    },
                }))
                .unwrap()
            );
        } else {
            println!(
                " Causal audit: {} identified · {} conditional · {} underidentified · {} underdetermined.",
                summary.identified,
                summary.conditional,
                summary.underidentified,
                summary.underdetermined,
            );
            if summary.underidentified > 0 {
                println!(
                    " The {} underidentified findings are concrete review items:",
                    summary.underidentified
                );
                // List at most 8 underidentified findings, truncated to 70 chars.
                for e in entries
                    .iter()
                    .filter(|e| {
                        matches!(
                            e.verdict,
                            crate::causal_reasoning::Identifiability::Underidentified
                        )
                    })
                    .take(8)
                {
                    let txt: String = e.assertion_text.chars().take(70).collect();
                    println!(" · {} {}", e.finding_id, txt);
                }
            }
        }
        return;
    }

    // Branch 3: recent-activity questions (last 8 events, newest first).
    if lower.contains("recent")
        || lower.contains("changed")
        || lower.contains("latest")
        || lower.contains("happen")
    {
        let mut events: Vec<&crate::events::StateEvent> = project.events.iter().collect();
        events.sort_by(|a, b| b.timestamp.cmp(&a.timestamp));
        events.truncate(8);
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "answer": "recent_events",
                    "events": events.iter().map(|e| json!({
                        "id": e.id, "kind": e.kind, "timestamp": e.timestamp,
                        "actor": e.actor.id, "target": e.target.id,
                    })).collect::<Vec<_>>(),
                }))
                .unwrap()
            );
        } else {
            println!(" Most recent {} events:", events.len());
            for e in &events {
                let when = fmt_timestamp(&e.timestamp);
                println!(" · {when} {:<28} {}", e.kind, e.target.id);
            }
        }
        return;
    }

    // Branch 4: counting questions.
    if lower.starts_with("how many") || lower.contains("count") || lower.contains("total") {
        let n = project.findings.len();
        let evs = project.events.len();
        let peers = project.peers.len();
        let actors = project.actors.len();
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "answer": "counts",
                    "findings": n,
                    "events": evs,
                    "peers": peers,
                    "actors": actors,
                    "replications": project.replications.len(),
                    "predictions": project.predictions.len(),
                }))
                .unwrap()
            );
        } else {
            println!(" {n} findings · {evs} events · {actors} actors · {peers} peers.");
            println!(
                " {} replications · {} predictions · {} datasets · {} code artifacts.",
                project.replications.len(),
                project.predictions.len(),
                project.datasets.len(),
                project.code_artifacts.len(),
            );
        }
        return;
    }

    // Branch 5: calibration / prediction questions (per-actor Brier scores).
    if lower.contains("calibration") || lower.contains("brier") || lower.contains("predict") {
        let records =
            crate::calibration::calibration_records(&project.predictions, &project.resolutions);
        if json {
            println!("{}", serde_json::to_string_pretty(&records).unwrap());
        } else if records.is_empty() {
            println!(" No predictions yet. The calibration ledger is empty.");
        } else {
            println!(" Calibration over {} actor(s):", records.len());
            for r in &records {
                // "—" when the actor has no resolved predictions to score.
                let brier = r
                    .brier_score
                    .map(|b| format!("{:.3}", b))
                    .unwrap_or_else(|| "—".into());
                println!(
                    " · {:<28} predictions {} · resolved {} · expired {} · Brier {}",
                    r.actor, r.n_predictions, r.n_resolved, r.n_expired, brier
                );
            }
        }
        return;
    }

    // Branch 6: federation / peer / conflict questions.
    if lower.contains("peer")
        || lower.contains("federat")
        || lower.contains("sync")
        || lower.contains("conflict")
    {
        let mut total_conflicts = 0usize;
        for e in &project.events {
            if e.kind == "frontier.conflict_detected" {
                total_conflicts += 1;
            }
        }
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "answer": "federation",
                    "peers": project.peers.iter().map(|p| &p.id).collect::<Vec<_>>(),
                    "total_conflicts": total_conflicts,
                }))
                .unwrap()
            );
        } else {
            println!(" {} peer(s) registered:", project.peers.len());
            for p in &project.peers {
                println!(" · {:<24} {}", p.id, p.url);
            }
            println!(" {total_conflicts} conflict events on the canonical log.");
        }
        return;
    }

    // Fallback: no keyword matched — tell the user what can be asked.
    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "answer": "unknown_question",
                "question": q,
                "hint": "Try: pending, audit, recent, how many, calibration, peers."
            }))
            .unwrap()
        );
    } else {
        println!(" Don't know how to route that question yet.");
        println!(" Try: pending · audit · recent · how many · calibration · peers");
    }
}
9868
9869fn frontier_label(p: &crate::project::Project) -> String {
9870 if p.project.name.trim().is_empty() {
9871 "(unnamed)".to_string()
9872 } else {
9873 p.project.name.clone()
9874 }
9875}
9876
9877fn fmt_timestamp(ts: &str) -> String {
9878 chrono::DateTime::parse_from_rfc3339(ts)
9881 .map(|dt| dt.format("%m-%d %H:%M").to_string())
9882 .unwrap_or_else(|_| ts.chars().take(16).collect())
9883}
9884
9885fn cmd_stats(path: &Path) {
9886 let frontier = load_frontier_or_fail(path);
9887 let s = &frontier.stats;
9888 println!();
9889 println!(" {}", "FRONTIER · V0.36.0".dimmed());
9890 println!(" {}", frontier.project.name.bold());
9891 println!(" {}", style::tick_row(60));
9892 println!(" id: {}", frontier.frontier_id());
9893 println!(" compiled: {}", frontier.project.compiled_at);
9894 println!(" papers: {}", frontier.project.papers_processed);
9895 println!(" findings: {}", s.findings);
9896 println!(" links: {}", s.links);
9897 println!(" replicated: {}", s.replicated);
9898 println!(" avg confidence: {}", s.avg_confidence);
9899 println!(" gaps: {}", s.gaps);
9900 println!(" contested: {}", s.contested);
9901 println!(" reviewed: {}", s.human_reviewed);
9902 println!(" proposals: {}", s.proposal_count);
9903 println!(
9904 " recorded proof: {}",
9905 frontier.proof_state.latest_packet.status
9906 );
9907 if frontier.proof_state.latest_packet.status != "never_exported" {
9908 println!(
9909 " proof note: recorded frontier metadata; packet files are checked by `vela packet validate`"
9910 );
9911 }
9912 if !s.categories.is_empty() {
9913 println!();
9914 println!(" {}", "categories".dimmed());
9915 let mut categories = s.categories.iter().collect::<Vec<_>>();
9916 categories.sort_by(|a, b| b.1.cmp(a.1));
9917 for (category, count) in categories {
9918 println!(" {category}: {}", count);
9919 }
9920 }
9921 println!();
9922 println!(" {}", style::tick_row(60));
9923 println!();
9924}
9925
/// Dispatch a `vela proposals` subcommand: list, show, preview, import,
/// validate, export, accept, or reject. Each arm prints JSON when `--json`
/// is set and a terse human summary otherwise.
fn cmd_proposals(action: ProposalAction) {
    match action {
        // List proposals, optionally filtered by status.
        ProposalAction::List {
            frontier,
            status,
            json,
        } => {
            let frontier_state =
                repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let proposals_list = proposals::list(&frontier_state, status.as_deref());
            let payload = json!({
                "ok": true,
                "command": "proposals.list",
                "frontier": frontier_state.project.name,
                "status_filter": status,
                "summary": proposals::summary(&frontier_state),
                "proposals": proposals_list,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposals list")
                );
            } else {
                println!("vela proposals list");
                println!(" frontier: {}", frontier_state.project.name);
                println!(
                    " proposals: {}",
                    payload["proposals"].as_array().map_or(0, Vec::len)
                );
            }
        }
        // Show a single proposal by id.
        ProposalAction::Show {
            frontier,
            proposal_id,
            json,
        } => {
            let frontier_state =
                repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let proposal =
                proposals::show(&frontier_state, &proposal_id).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "proposals.show",
                "frontier": frontier_state.project.name,
                "proposal": proposal,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal show")
                );
            } else {
                println!("vela proposals show");
                println!(" frontier: {}", frontier_state.project.name);
                println!(" proposal: {}", proposal_id);
                println!(" kind: {}", proposal.kind);
                println!(" status: {}", proposal.status);
            }
        }
        // Dry-run a proposal: report the before/after deltas without applying.
        ProposalAction::Preview {
            frontier,
            proposal_id,
            reviewer,
            json,
        } => {
            let preview = proposals::preview_at_path(&frontier, &proposal_id, &reviewer)
                .unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "proposals.preview",
                "frontier": frontier.display().to_string(),
                "preview": preview,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal preview")
                );
            } else {
                println!("vela proposals preview");
                println!(" proposal: {}", proposal_id);
                println!(" kind: {}", preview.kind);
                println!(
                    " findings: {} -> {}",
                    preview.findings_before, preview.findings_after
                );
                println!(
                    " artifacts: {} -> {}",
                    preview.artifacts_before, preview.artifacts_after
                );
                println!(
                    " events: {} -> {}",
                    preview.events_before, preview.events_after
                );
                // Only print delta sections that are non-empty.
                if !preview.changed_findings.is_empty() {
                    println!(
                        " findings changed: {}",
                        preview.changed_findings.join(", ")
                    );
                }
                if !preview.changed_artifacts.is_empty() {
                    println!(
                        " artifacts changed: {}",
                        preview.changed_artifacts.join(", ")
                    );
                }
                if !preview.event_kinds.is_empty() {
                    println!(" event kinds: {}", preview.event_kinds.join(", "));
                }
                println!(" event: {}", preview.applied_event_id);
            }
        }
        // Import proposals from an external file into the frontier.
        ProposalAction::Import {
            frontier,
            source,
            json,
        } => {
            let report =
                proposals::import_from_path(&frontier, &source).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "proposals.import",
                "frontier": frontier.display().to_string(),
                "source": source.display().to_string(),
                "summary": {
                    "imported": report.imported,
                    "applied": report.applied,
                    "rejected": report.rejected,
                    "duplicates": report.duplicates,
                },
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal import")
                );
            } else {
                println!(
                    "Imported {} proposals into {}",
                    report.imported, report.wrote_to
                );
            }
        }
        // Validate a proposal file without touching any frontier; exits
        // non-zero when any proposal is invalid.
        ProposalAction::Validate { source, json } => {
            let report = proposals::validate_source(&source).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": report.ok,
                "command": "proposals.validate",
                "source": source.display().to_string(),
                "summary": {
                    "checked": report.checked,
                    "valid": report.valid,
                    "invalid": report.invalid,
                },
                "proposal_ids": report.proposal_ids,
                "errors": report.errors,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal validation")
                );
            } else if report.ok {
                println!("{} validated {} proposals", style::ok("ok"), report.valid);
            } else {
                println!(
                    "{} validated {} proposals, {} invalid",
                    style::lost("lost"),
                    report.valid,
                    report.invalid
                );
                for error in &report.errors {
                    println!(" · {error}");
                }
                std::process::exit(1);
            }
        }
        // Export proposals (optionally filtered by status) to a file.
        ProposalAction::Export {
            frontier,
            output,
            status,
            json,
        } => {
            let count = proposals::export_to_path(&frontier, &output, status.as_deref())
                .unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "proposals.export",
                "frontier": frontier.display().to_string(),
                "output": output.display().to_string(),
                "status": status,
                "exported": count,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal export")
                );
            } else {
                println!("sealed · {count} proposals · {}", output.display());
            }
        }
        // Accept a proposal: applies it and records the resulting event id.
        ProposalAction::Accept {
            frontier,
            proposal_id,
            reviewer,
            reason,
            json,
        } => {
            let event_id = proposals::accept_at_path(&frontier, &proposal_id, &reviewer, &reason)
                .unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "proposals.accept",
                "frontier": frontier.display().to_string(),
                "proposal_id": proposal_id,
                "reviewer": reviewer,
                "applied_event_id": event_id,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal accept")
                );
            } else {
                println!(
                    "{} accepted and applied proposal {}",
                    style::ok("ok"),
                    proposal_id
                );
                println!(" event: {}", event_id);
            }
        }
        // Reject a proposal; nothing is applied.
        ProposalAction::Reject {
            frontier,
            proposal_id,
            reviewer,
            reason,
            json,
        } => {
            proposals::reject_at_path(&frontier, &proposal_id, &reviewer, &reason)
                .unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "proposals.reject",
                "frontier": frontier.display().to_string(),
                "proposal_id": proposal_id,
                "reviewer": reviewer,
                "status": "rejected",
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal reject")
                );
            } else {
                println!(
                    "{} rejected proposal {}",
                    style::warn("rejected"),
                    proposal_id
                );
            }
        }
    }
}
10200
10201fn cmd_artifact_to_state(
10202 frontier: &Path,
10203 packet: &Path,
10204 actor: &str,
10205 apply_artifacts: bool,
10206 json: bool,
10207) {
10208 let report =
10209 crate::artifact_to_state::import_packet_at_path(frontier, packet, actor, apply_artifacts)
10210 .unwrap_or_else(|e| fail_return(&e));
10211 if json {
10212 println!(
10213 "{}",
10214 serde_json::to_string_pretty(&report)
10215 .expect("failed to serialize artifact-to-state report")
10216 );
10217 } else {
10218 println!("vela artifact-to-state");
10219 println!(" packet: {}", report.packet_id);
10220 println!(" frontier: {}", report.frontier);
10221 println!(" artifact proposals: {}", report.artifact_proposals);
10222 println!(" finding proposals: {}", report.finding_proposals);
10223 println!(" gap proposals: {}", report.gap_proposals);
10224 println!(
10225 " applied artifact events: {}",
10226 report.applied_artifact_events
10227 );
10228 println!(
10229 " pending truth proposals: {}",
10230 report.pending_truth_proposals
10231 );
10232 }
10233}
10234
/// Dispatch a `vela bridge-kit` subcommand: validate a kit of artifact
/// packets, or verify (and optionally cross-check) the provenance
/// identifiers inside one packet. Exits with status 1 when validation fails
/// or any identifier is unresolved, so CI can gate on the result.
async fn cmd_bridge_kit(action: BridgeKitAction) {
    match action {
        BridgeKitAction::Validate { source, json } => {
            let report = crate::artifact_to_state::validate_bridge_kit_path(&source);
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&report)
                        .expect("failed to serialize bridge-kit validation report")
                );
            } else {
                println!("vela bridge-kit validate");
                println!(" source: {}", report.source);
                println!(" packets: {}", report.packet_count);
                println!(" valid: {}", report.valid_packet_count);
                println!(" invalid: {}", report.invalid_packet_count);
                // One line per packet: counts for valid ones, joined error
                // messages for invalid ones.
                for packet in &report.packets {
                    if packet.ok {
                        println!(
                            " ok: {} · {} artifacts · {} claims · {} needs",
                            packet
                                .packet_id
                                .as_deref()
                                .unwrap_or("packet id unavailable"),
                            packet.artifact_count,
                            packet.candidate_claim_count,
                            packet.open_need_count
                        );
                    } else {
                        println!(" invalid: {} · {}", packet.path, packet.errors.join("; "));
                    }
                }
                // Kit-level errors (not tied to a single packet).
                for error in &report.errors {
                    println!(" error: {error}");
                }
            }
            if !report.ok {
                std::process::exit(1);
            }
        }
        BridgeKitAction::VerifyProvenance {
            packet,
            json,
            cross_check,
        } => {
            let mut report = verify_packet_provenance(&packet).await;
            if cross_check {
                // Augments the report in place with cross-source comparisons.
                cross_check_packet_provenance(&packet, &mut report).await;
            }
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&report)
                        .expect("failed to serialize provenance verification report")
                );
            } else {
                println!("vela bridge-kit verify-provenance");
                println!(" packet: {}", report.packet);
                println!(" identifiers: {}", report.identifiers.len());
                println!(" resolved: {}", report.resolved_count);
                println!(" unresolved: {}", report.unresolved_count);
                println!(" skipped: {}", report.skipped_count);
                for entry in &report.identifiers {
                    // Map the status string to a fixed-width tag for alignment.
                    let status = match entry.status.as_str() {
                        "resolved" => "ok ",
                        "unresolved" => "FAIL",
                        "skipped" => "skip",
                        _ => "? ",
                    };
                    println!(
                        " {} {} ({})",
                        status,
                        entry.identifier,
                        entry.note.as_deref().unwrap_or(entry.kind.as_str())
                    );
                }
            }
            if report.unresolved_count > 0 {
                std::process::exit(1);
            }
        }
    }
}
10318
/// Serializable result of `bridge-kit verify-provenance`: one entry per
/// distinct identifier found in the packet, plus aggregate counts.
#[derive(Debug, Clone, Serialize)]
struct ProvenanceVerificationReport {
    /// Command name echoed into the JSON output.
    command: String,
    /// Display path of the packet that was checked.
    packet: String,
    /// Per-identifier verification outcomes.
    identifiers: Vec<ProvenanceVerificationEntry>,
    resolved_count: usize,
    unresolved_count: usize,
    skipped_count: usize,
    /// Present only when `--cross-check` was requested.
    #[serde(skip_serializing_if = "Option::is_none")]
    cross_check: Option<Vec<CrossCheckEntry>>,
}
10334
/// Verification outcome for a single provenance identifier.
#[derive(Debug, Clone, Serialize)]
struct ProvenanceVerificationEntry {
    /// The identifier as written in the packet (e.g. "doi:…", "pmid:…").
    identifier: String,
    /// Identifier family (doi, pmid, s2, arxiv, or "unknown").
    kind: String,
    /// "resolved", "unresolved", or "skipped".
    status: String,
    /// Optional human-readable detail about the outcome.
    #[serde(skip_serializing_if = "Option::is_none")]
    note: Option<String>,
    /// Title reported by the resolving source, when available.
    #[serde(skip_serializing_if = "Option::is_none")]
    title: Option<String>,
    /// First author reported by the resolving source, when available.
    #[serde(skip_serializing_if = "Option::is_none")]
    first_author: Option<String>,
}
10351
/// Result of comparing one work's metadata across bibliographic sources
/// during `--cross-check`.
#[derive(Debug, Clone, Serialize)]
struct CrossCheckEntry {
    /// Identifiers grouped as referring to the same work.
    identifiers: Vec<String>,
    /// Metadata fetched from each source for comparison.
    sources: Vec<CrossCheckSource>,
    /// Agreement verdict across the sources.
    consensus: String,
    /// Optional detail about the verdict.
    #[serde(skip_serializing_if = "Option::is_none")]
    note: Option<String>,
}
10368
/// Metadata one bibliographic source reported for an identifier, used when
/// cross-checking provenance.
#[derive(Debug, Clone, Serialize)]
struct CrossCheckSource {
    /// Name of the bibliographic source consulted.
    source: String,
    /// Identifier queried at that source.
    identifier: String,
    /// Title returned by the source, when available.
    #[serde(skip_serializing_if = "Option::is_none")]
    title: Option<String>,
    /// First author returned by the source, when available.
    #[serde(skip_serializing_if = "Option::is_none")]
    first_author: Option<String>,
}
10378
10379async fn verify_packet_provenance(packet_path: &Path) -> ProvenanceVerificationReport {
10384 use crate::artifact_to_state::ArtifactPacket;
10385 let raw = std::fs::read_to_string(packet_path)
10386 .unwrap_or_else(|e| fail_return(&format!("read packet: {e}")));
10387 let parsed: ArtifactPacket =
10388 serde_json::from_str(&raw).unwrap_or_else(|e| fail_return(&format!("parse packet: {e}")));
10389 let packet = parsed
10390 .validate()
10391 .unwrap_or_else(|e| fail_return(&format!("validate packet: {e}")));
10392
10393 let mut candidates: std::collections::BTreeSet<String> = std::collections::BTreeSet::new();
10395 for artifact in &packet.artifacts {
10396 if let Some(ident) = extract_identifier(&artifact.locator) {
10397 candidates.insert(ident);
10398 }
10399 }
10400 for claim in &packet.candidate_claims {
10401 for source_ref in &claim.source_refs {
10402 if let Some(ident) = extract_identifier(source_ref) {
10403 candidates.insert(ident);
10404 }
10405 }
10406 }
10407
10408 let client = reqwest::Client::builder()
10409 .user_agent("vela/0.108 (+https://github.com/vela-science/vela)")
10410 .timeout(std::time::Duration::from_secs(15))
10411 .build()
10412 .unwrap_or_else(|e| fail_return(&format!("build http client: {e}")));
10413
10414 let mut entries: Vec<ProvenanceVerificationEntry> = Vec::new();
10415 let mut resolved = 0usize;
10416 let mut unresolved = 0usize;
10417 let mut skipped = 0usize;
10418 for candidate in &candidates {
10419 let entry = if let Some(doi) = candidate.strip_prefix("doi:") {
10420 verify_doi(&client, doi).await
10421 } else if let Some(pmid) = candidate.strip_prefix("pmid:") {
10422 verify_pmid(&client, pmid).await
10423 } else if let Some(s2_id) = candidate.strip_prefix("s2:") {
10424 verify_s2(&client, s2_id).await
10425 } else if let Some(arxiv_id) = candidate.strip_prefix("arxiv:") {
10426 verify_arxiv(&client, arxiv_id).await
10427 } else {
10428 ProvenanceVerificationEntry {
10429 identifier: candidate.clone(),
10430 kind: "unknown".to_string(),
10431 status: "skipped".to_string(),
10432 note: Some("no recognized identifier prefix".to_string()),
10433 title: None,
10434 first_author: None,
10435 }
10436 };
10437 match entry.status.as_str() {
10438 "resolved" => resolved += 1,
10439 "unresolved" => unresolved += 1,
10440 _ => skipped += 1,
10441 }
10442 entries.push(entry);
10443 }
10444
10445 ProvenanceVerificationReport {
10446 command: "bridge-kit.verify-provenance".to_string(),
10447 packet: packet_path.display().to_string(),
10448 identifiers: entries,
10449 resolved_count: resolved,
10450 unresolved_count: unresolved,
10451 skipped_count: skipped,
10452 cross_check: None,
10453 }
10454}
10455
/// Augment `report` with cross-source agreement checks and store the result
/// in `report.cross_check`.
///
/// Re-reads and validates the packet at `packet_path`, then:
/// 1. for each artifact whose identifier resolved, records an "insufficient"
///    entry (a single source cannot corroborate itself), and
/// 2. for each candidate claim with at least two resolved source refs,
///    compares normalized titles and first-author surnames pairwise and
///    records a consensus verdict.
async fn cross_check_packet_provenance(
    packet_path: &Path,
    report: &mut ProvenanceVerificationReport,
) {
    use crate::artifact_to_state::ArtifactPacket;
    // Same load/parse/validate steps as verify_packet_provenance; any failure
    // aborts via fail_return.
    let raw = std::fs::read_to_string(packet_path)
        .unwrap_or_else(|e| fail_return(&format!("read packet: {e}")));
    let parsed: ArtifactPacket =
        serde_json::from_str(&raw).unwrap_or_else(|e| fail_return(&format!("parse packet: {e}")));
    let packet = parsed
        .validate()
        .unwrap_or_else(|e| fail_return(&format!("validate packet: {e}")));

    // Index only the successfully resolved entries by identifier; unresolved
    // or skipped identifiers cannot contribute metadata to a comparison.
    let by_ident: std::collections::HashMap<String, &ProvenanceVerificationEntry> = report
        .identifiers
        .iter()
        .filter(|e| e.status == "resolved")
        .map(|e| (e.identifier.clone(), e))
        .collect();

    let mut cross_entries: Vec<CrossCheckEntry> = Vec::new();
    // Each artifact locator contributes at most one resolved identifier, so
    // its cluster always has a single source and is marked "insufficient".
    for artifact in &packet.artifacts {
        let Some(ident) = extract_identifier(&artifact.locator) else {
            continue;
        };
        let Some(entry) = by_ident.get(&ident) else {
            continue;
        };
        let source = CrossCheckSource {
            source: entry.kind.clone(),
            identifier: entry.identifier.clone(),
            title: entry.title.clone(),
            first_author: entry.first_author.clone(),
        };
        cross_entries.push(CrossCheckEntry {
            identifiers: vec![entry.identifier.clone()],
            sources: vec![source],
            consensus: "insufficient".to_string(),
            note: Some("only one source resolved for this artifact".to_string()),
        });
    }

    // A claim's source refs should all describe the same work; cluster the
    // resolved refs and compare their metadata pairwise.
    for claim in &packet.candidate_claims {
        let mut cluster_idents: Vec<String> = Vec::new();
        let mut cluster_sources: Vec<CrossCheckSource> = Vec::new();
        for source_ref in &claim.source_refs {
            let Some(ident) = extract_identifier(source_ref) else {
                continue;
            };
            let Some(entry) = by_ident.get(&ident) else {
                continue;
            };
            cluster_idents.push(entry.identifier.clone());
            cluster_sources.push(CrossCheckSource {
                source: entry.kind.clone(),
                identifier: entry.identifier.clone(),
                title: entry.title.clone(),
                first_author: entry.first_author.clone(),
            });
        }
        // Only clusters with two or more resolved sources can agree or disagree.
        if cluster_sources.len() >= 2 {
            let mut title_mismatch = false;
            let mut author_mismatch = false;
            for i in 0..cluster_sources.len() {
                for j in (i + 1)..cluster_sources.len() {
                    let a = &cluster_sources[i];
                    let b = &cluster_sources[j];
                    // Titles are compared exactly (both were normalized by
                    // normalize_title when fetched).
                    if let (Some(ta), Some(tb)) = (&a.title, &b.title)
                        && ta != tb
                    {
                        title_mismatch = true;
                    }
                    // Surnames tolerate prefix differences (e.g. initials or
                    // truncated forms); empty strings never mismatch.
                    if let (Some(la), Some(lb)) = (&a.first_author, &b.first_author) {
                        if !la.is_empty()
                            && !lb.is_empty()
                            && !la.starts_with(lb.as_str())
                            && !lb.starts_with(la.as_str())
                        {
                            author_mismatch = true;
                        }
                    }
                }
            }
            let consensus = match (title_mismatch, author_mismatch) {
                (false, false) => "agree".to_string(),
                (true, false) => "title_mismatch".to_string(),
                (false, true) => "author_mismatch".to_string(),
                (true, true) => "both_mismatch".to_string(),
            };
            cross_entries.push(CrossCheckEntry {
                identifiers: cluster_idents,
                sources: cluster_sources,
                consensus,
                note: None,
            });
        }
    }

    report.cross_check = Some(cross_entries);
}
10593
/// Normalize a free-form locator or source reference into a canonical
/// `doi:` / `pmid:` / `s2:` / `arxiv:` identifier.
///
/// Accepts strings that already carry one of those prefixes, well-known
/// resolver URLs (doi.org, PubMed, Semantic Scholar, arXiv), and bare DOIs
/// (strings starting with "10." that contain a '/'). Returns `None` when
/// nothing recognizable matches.
fn extract_identifier(s: &str) -> Option<String> {
    let trimmed = s.trim();
    if trimmed.is_empty() {
        return None;
    }
    // Already in canonical prefixed form — pass through unchanged.
    if ["doi:", "pmid:", "s2:", "arxiv:"]
        .iter()
        .any(|p| trimmed.starts_with(p))
    {
        return Some(trimmed.to_string());
    }
    // DOI resolver URLs: everything after the host path is the DOI itself.
    for prefix in ["https://doi.org/", "http://doi.org/", "https://dx.doi.org/"] {
        if let Some(rest) = trimmed.strip_prefix(prefix) {
            return Some(format!("doi:{rest}"));
        }
    }
    // PubMed URLs: the path segment (sans trailing slash) is the PMID.
    for prefix in [
        "https://pubmed.ncbi.nlm.nih.gov/",
        "http://pubmed.ncbi.nlm.nih.gov/",
    ] {
        if let Some(rest) = trimmed.strip_prefix(prefix) {
            let pmid = rest.trim_end_matches('/');
            return Some(format!("pmid:{pmid}"));
        }
    }
    // Semantic Scholar URLs: the paper id is the last path segment, with any
    // query string removed.
    for prefix in [
        "https://www.semanticscholar.org/paper/",
        "http://www.semanticscholar.org/paper/",
        "https://api.semanticscholar.org/graph/v1/paper/",
        "https://api.semanticscholar.org/v1/paper/",
    ] {
        if let Some(rest) = trimmed.strip_prefix(prefix) {
            let s2_id = rest
                .split('/')
                .next_back()
                .unwrap_or(rest)
                .split('?')
                .next()
                .unwrap_or(rest);
            if !s2_id.is_empty() {
                return Some(format!("s2:{s2_id}"));
            }
        }
    }
    // arXiv URLs: strip the query string FIRST, then a trailing '/' and a
    // ".pdf" suffix. (Stripping ".pdf" before the query missed the suffix in
    // URLs like ".../pdf/2101.00001.pdf?download=1", which previously yielded
    // "arxiv:2101.00001.pdf".)
    for prefix in [
        "https://arxiv.org/abs/",
        "http://arxiv.org/abs/",
        "https://arxiv.org/pdf/",
        "http://arxiv.org/pdf/",
        "https://www.arxiv.org/abs/",
    ] {
        if let Some(rest) = trimmed.strip_prefix(prefix) {
            let arxiv_id = rest
                .split('?')
                .next()
                .unwrap_or(rest)
                .trim_end_matches('/')
                .trim_end_matches(".pdf");
            if !arxiv_id.is_empty() {
                return Some(format!("arxiv:{arxiv_id}"));
            }
        }
    }
    // Bare DOI heuristic: DOI registrant prefixes start with "10." and every
    // DOI contains a '/'; a space rules out prose.
    if trimmed.starts_with("10.") && trimmed.contains('/') && !trimmed.contains(' ') {
        return Some(format!("doi:{trimmed}"));
    }
    None
}
10679
10680async fn verify_doi(client: &reqwest::Client, doi: &str) -> ProvenanceVerificationEntry {
10681 let url = format!("https://api.crossref.org/works/{doi}");
10682 match client.get(&url).send().await {
10683 Ok(resp) if resp.status().is_success() => {
10684 let body: serde_json::Value = resp.json().await.unwrap_or(serde_json::Value::Null);
10688 let title = body
10689 .pointer("/message/title/0")
10690 .and_then(serde_json::Value::as_str)
10691 .map(normalize_title);
10692 let first_author = body
10693 .pointer("/message/author/0/family")
10694 .and_then(serde_json::Value::as_str)
10695 .map(normalize_last_name);
10696 ProvenanceVerificationEntry {
10697 identifier: format!("doi:{doi}"),
10698 kind: "doi".to_string(),
10699 status: "resolved".to_string(),
10700 note: None,
10701 title,
10702 first_author,
10703 }
10704 }
10705 Ok(resp) => ProvenanceVerificationEntry {
10706 identifier: format!("doi:{doi}"),
10707 kind: "doi".to_string(),
10708 status: "unresolved".to_string(),
10709 note: Some(format!("crossref returned {}", resp.status())),
10710 title: None,
10711 first_author: None,
10712 },
10713 Err(e) => ProvenanceVerificationEntry {
10714 identifier: format!("doi:{doi}"),
10715 kind: "doi".to_string(),
10716 status: "skipped".to_string(),
10717 note: Some(format!("crossref unreachable: {e}")),
10718 title: None,
10719 first_author: None,
10720 },
10721 }
10722}
10723
/// Canonicalize a title for comparison: lowercase, drop punctuation, and
/// collapse all whitespace runs to single spaces (no leading/trailing space).
fn normalize_title(s: &str) -> String {
    // Lowercase the whole string first so multi-char case mappings behave
    // exactly like `str::to_lowercase`.
    let lowered = s.to_lowercase();
    let mut out = String::with_capacity(lowered.len());
    let mut pending_space = false;
    for ch in lowered.chars() {
        if ch.is_whitespace() {
            // Only emit a separator once a word has already been written.
            pending_space = !out.is_empty();
        } else if ch.is_alphanumeric() {
            if pending_space {
                out.push(' ');
                pending_space = false;
            }
            out.push(ch);
        }
        // Punctuation is dropped without acting as a separator.
    }
    out
}
10737
/// Extract and canonicalize a surname from an author string.
///
/// "Surname, Given" keeps the part before the first comma; otherwise the
/// token after the last space is used (the whole string if there is no
/// space). The result is lowercased with everything except alphanumerics
/// and '-' removed.
fn normalize_last_name(s: &str) -> String {
    let trimmed = s.trim();
    let surname = match trimmed.split_once(',') {
        Some((before_comma, _)) => before_comma,
        None => trimmed.rsplit(' ').next().unwrap_or(trimmed),
    };
    surname
        .to_lowercase()
        .chars()
        .filter(|c| c.is_alphanumeric() || *c == '-')
        .collect()
}
10758
10759async fn verify_pmid(client: &reqwest::Client, pmid: &str) -> ProvenanceVerificationEntry {
10760 let url = format!(
10761 "https://eutils.ncbi.nlm.nih.gov/entrez/eutils/esummary.fcgi?db=pubmed&id={pmid}&retmode=json"
10762 );
10763 match client.get(&url).send().await {
10764 Ok(resp) if resp.status().is_success() => {
10765 let body: serde_json::Value = resp.json().await.unwrap_or(serde_json::Value::Null);
10768 let result = body.get("result");
10769 let uids = result
10770 .and_then(|r| r.get("uids"))
10771 .and_then(|u| u.as_array());
10772 let resolved = uids.is_some_and(|a| !a.is_empty());
10773 if resolved {
10774 let record = result.and_then(|r| r.get(pmid));
10778 let title = record
10779 .and_then(|r| r.get("title"))
10780 .and_then(serde_json::Value::as_str)
10781 .map(normalize_title);
10782 let first_author = record
10783 .and_then(|r| r.get("authors"))
10784 .and_then(serde_json::Value::as_array)
10785 .and_then(|a| a.first())
10786 .and_then(|a| a.get("name"))
10787 .and_then(serde_json::Value::as_str)
10788 .map(normalize_last_name);
10789 ProvenanceVerificationEntry {
10790 identifier: format!("pmid:{pmid}"),
10791 kind: "pmid".to_string(),
10792 status: "resolved".to_string(),
10793 note: None,
10794 title,
10795 first_author,
10796 }
10797 } else {
10798 ProvenanceVerificationEntry {
10799 identifier: format!("pmid:{pmid}"),
10800 kind: "pmid".to_string(),
10801 status: "unresolved".to_string(),
10802 note: Some("eutils returned empty uids".to_string()),
10803 title: None,
10804 first_author: None,
10805 }
10806 }
10807 }
10808 Ok(resp) => ProvenanceVerificationEntry {
10809 identifier: format!("pmid:{pmid}"),
10810 kind: "pmid".to_string(),
10811 status: "unresolved".to_string(),
10812 note: Some(format!("eutils returned {}", resp.status())),
10813 title: None,
10814 first_author: None,
10815 },
10816 Err(e) => ProvenanceVerificationEntry {
10817 identifier: format!("pmid:{pmid}"),
10818 kind: "pmid".to_string(),
10819 status: "skipped".to_string(),
10820 note: Some(format!("eutils unreachable: {e}")),
10821 title: None,
10822 first_author: None,
10823 },
10824 }
10825}
10826
10827async fn verify_s2(client: &reqwest::Client, s2_id: &str) -> ProvenanceVerificationEntry {
10834 let url = format!("https://api.semanticscholar.org/graph/v1/paper/{s2_id}");
10835 match client.get(&url).send().await {
10836 Ok(resp) if resp.status().is_success() => {
10837 let body: serde_json::Value = resp.json().await.unwrap_or(serde_json::Value::Null);
10838 let has_paper_id = body
10839 .get("paperId")
10840 .and_then(serde_json::Value::as_str)
10841 .is_some_and(|v| !v.is_empty());
10842 if has_paper_id {
10843 let title = body
10848 .get("title")
10849 .and_then(serde_json::Value::as_str)
10850 .map(normalize_title);
10851 let first_author = body
10852 .get("authors")
10853 .and_then(serde_json::Value::as_array)
10854 .and_then(|a| a.first())
10855 .and_then(|a| a.get("name"))
10856 .and_then(serde_json::Value::as_str)
10857 .map(normalize_last_name);
10858 ProvenanceVerificationEntry {
10859 identifier: format!("s2:{s2_id}"),
10860 kind: "s2".to_string(),
10861 status: "resolved".to_string(),
10862 note: None,
10863 title,
10864 first_author,
10865 }
10866 } else {
10867 ProvenanceVerificationEntry {
10868 identifier: format!("s2:{s2_id}"),
10869 kind: "s2".to_string(),
10870 status: "unresolved".to_string(),
10871 note: Some("semantic scholar returned 200 with no paperId".to_string()),
10872 title: None,
10873 first_author: None,
10874 }
10875 }
10876 }
10877 Ok(resp) => ProvenanceVerificationEntry {
10878 identifier: format!("s2:{s2_id}"),
10879 kind: "s2".to_string(),
10880 status: "unresolved".to_string(),
10881 note: Some(format!("semantic scholar returned {}", resp.status())),
10882 title: None,
10883 first_author: None,
10884 },
10885 Err(e) => ProvenanceVerificationEntry {
10886 identifier: format!("s2:{s2_id}"),
10887 kind: "s2".to_string(),
10888 status: "skipped".to_string(),
10889 note: Some(format!("semantic scholar unreachable: {e}")),
10890 title: None,
10891 first_author: None,
10892 },
10893 }
10894}
10895
10896async fn verify_arxiv(client: &reqwest::Client, arxiv_id: &str) -> ProvenanceVerificationEntry {
10905 let url = format!("https://export.arxiv.org/api/query?id_list={arxiv_id}&max_results=1");
10906 match client.get(&url).send().await {
10907 Ok(resp) if resp.status().is_success() => {
10908 let body = resp.text().await.unwrap_or_default();
10909 let has_entry = body.contains("<entry>") || body.contains("<entry ");
10913 let has_id_url = body.contains("http://arxiv.org/abs/");
10917 if has_entry && has_id_url {
10918 let title = atom_inner_text(&body, "<entry>", "<title>", "</title>")
10928 .as_deref()
10929 .map(normalize_title);
10930 let first_author = atom_inner_text(&body, "<author>", "<name>", "</name>")
10931 .as_deref()
10932 .map(normalize_last_name);
10933 ProvenanceVerificationEntry {
10934 identifier: format!("arxiv:{arxiv_id}"),
10935 kind: "arxiv".to_string(),
10936 status: "resolved".to_string(),
10937 note: None,
10938 title,
10939 first_author,
10940 }
10941 } else {
10942 ProvenanceVerificationEntry {
10943 identifier: format!("arxiv:{arxiv_id}"),
10944 kind: "arxiv".to_string(),
10945 status: "unresolved".to_string(),
10946 note: Some("arxiv returned 200 with no matching entry".to_string()),
10947 title: None,
10948 first_author: None,
10949 }
10950 }
10951 }
10952 Ok(resp) => ProvenanceVerificationEntry {
10953 identifier: format!("arxiv:{arxiv_id}"),
10954 kind: "arxiv".to_string(),
10955 status: "unresolved".to_string(),
10956 note: Some(format!("arxiv returned {}", resp.status())),
10957 title: None,
10958 first_author: None,
10959 },
10960 Err(e) => ProvenanceVerificationEntry {
10961 identifier: format!("arxiv:{arxiv_id}"),
10962 kind: "arxiv".to_string(),
10963 status: "skipped".to_string(),
10964 note: Some(format!("arxiv unreachable: {e}")),
10965 title: None,
10966 first_author: None,
10967 },
10968 }
10969}
10970
/// Crude substring scraper for Atom XML: find `scope_open`, then the first
/// `open`…`close` pair at or after it, and return the trimmed text between
/// them. Returns `None` when any marker is missing.
fn atom_inner_text(body: &str, scope_open: &str, open: &str, close: &str) -> Option<String> {
    let scoped = &body[body.find(scope_open)?..];
    let inner = &scoped[scoped.find(open)? + open.len()..];
    let end = inner.find(close)?;
    Some(inner[..end].trim().to_string())
}
10985
/// Handle `vela source-adapter` subcommands.
///
/// `Run` executes a registered source adapter against a frontier and prints
/// the resulting report either as pretty JSON (`--json`) or as a fixed
/// human-readable summary. Any adapter error aborts the process via
/// `fail_return`.
async fn cmd_source_adapter(action: SourceAdapterAction) {
    match action {
        SourceAdapterAction::Run {
            frontier,
            adapter,
            actor,
            entries,
            priority,
            include_excluded,
            allow_partial,
            dry_run,
            input_dir,
            apply_artifacts,
            json,
        } => {
            // Forward all CLI flags verbatim to the adapter runtime.
            let report = crate::source_adapters::run(
                &frontier,
                crate::source_adapters::SourceAdapterRunOptions {
                    adapter,
                    actor,
                    entries,
                    priority,
                    include_excluded,
                    allow_partial,
                    dry_run,
                    input_dir,
                    apply_artifacts,
                },
            )
            .await
            .unwrap_or_else(|e| fail_return(&e));
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&report)
                        .expect("failed to serialize source adapter report")
                );
            } else {
                println!("vela source-adapter run");
                println!(" adapter: {}", report.adapter);
                println!(" run: {}", report.run_id);
                println!(" frontier: {}", report.frontier);
                println!(" selected entries: {}", report.selected_entries);
                println!(" fetched records: {}", report.fetched_records);
                println!(" changed records: {}", report.changed_records);
                println!(" unchanged records: {}", report.unchanged_records);
                println!(" failed records: {}", report.failed_records.len());
                // The packet line is only shown when the run produced one.
                if let Some(packet_id) = report.packet_id {
                    println!(" packet: {packet_id}");
                }
                println!(" artifact proposals: {}", report.artifact_proposals);
                println!(" review note proposals: {}", report.review_note_proposals);
                println!(" applied events: {}", report.applied_event_ids.len());
            }
        }
    }
}
11043
/// Handle `vela runtime-adapter` subcommands.
///
/// `Run` executes a runtime adapter against a frontier and prints the report
/// as pretty JSON (`--json`) or as a fixed human-readable summary. Any
/// adapter error aborts the process via `fail_return`.
fn cmd_runtime_adapter(action: RuntimeAdapterAction) {
    match action {
        RuntimeAdapterAction::Run {
            frontier,
            adapter,
            input,
            actor,
            dry_run,
            apply_artifacts,
            json,
        } => {
            // Forward all CLI flags verbatim to the adapter runtime.
            let report = crate::runtime_adapters::run(
                &frontier,
                crate::runtime_adapters::RuntimeAdapterRunOptions {
                    adapter,
                    input,
                    actor,
                    dry_run,
                    apply_artifacts,
                },
            )
            .unwrap_or_else(|e| fail_return(&e));
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&report)
                        .expect("failed to serialize runtime adapter report")
                );
            } else {
                println!("vela runtime-adapter run");
                println!(" adapter: {}", report.adapter);
                println!(" run: {}", report.run_id);
                println!(" frontier: {}", report.frontier);
                // The packet line is only shown when the run produced one.
                if let Some(packet_id) = report.packet_id {
                    println!(" packet: {packet_id}");
                }
                println!(" artifact proposals: {}", report.artifact_proposals);
                println!(" finding proposals: {}", report.finding_proposals);
                println!(" gap proposals: {}", report.gap_proposals);
                println!(" review note proposals: {}", report.review_note_proposals);
                println!(
                    " applied artifact events: {}",
                    report.applied_artifact_events
                );
                println!(
                    " pending truth proposals: {}",
                    report.pending_truth_proposals
                );
            }
        }
    }
}
11096
/// Handle `vela sign` subcommands: keypair generation, signing a frontier's
/// findings, verifying signatures, and setting per-finding signature
/// thresholds. All errors abort via `fail` / `fail_return`.
fn cmd_sign(action: SignAction) {
    match action {
        SignAction::GenerateKeypair { out, json } => {
            // Writes a keypair under `out`; only the public key is echoed back.
            let public_key = sign::generate_keypair(&out).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "sign.generate-keypair",
                "output_dir": out.display().to_string(),
                "public_key": public_key,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize sign.generate-keypair")
                );
            } else {
                println!("{} keypair · {}", style::ok("generated"), out.display());
                println!(" public key: {public_key}");
            }
        }
        SignAction::Apply {
            frontier,
            private_key,
            json,
        } => {
            // `count` is the number of findings signed by sign_frontier.
            let count =
                sign::sign_frontier(&frontier, &private_key).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "sign.apply",
                "frontier": frontier.display().to_string(),
                "private_key": private_key.display().to_string(),
                "signed": count,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload).expect("failed to serialize sign.apply")
                );
            } else {
                println!(
                    "{} {count} findings in {}",
                    style::ok("signed"),
                    frontier.display()
                );
            }
        }
        SignAction::Verify {
            frontier,
            public_key,
            json,
        } => {
            // public_key is optional; verify_frontier accepts None.
            let report = sign::verify_frontier(&frontier, public_key.as_deref())
                .unwrap_or_else(|e| fail_return(&e));
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&report).expect("failed to serialize sign.verify")
                );
            } else {
                println!();
                println!(
                    " {}",
                    format!("VELA · SIGN · VERIFY · {}", frontier.display())
                        .to_uppercase()
                        .dimmed()
                );
                println!(" {}", style::tick_row(60));
                println!(" total findings: {}", report.total_findings);
                println!(" signed: {}", report.signed);
                println!(" unsigned: {}", report.unsigned);
                println!(" valid: {}", report.valid);
                println!(" invalid: {}", report.invalid);
                // Threshold stats are only meaningful when at least one
                // finding declares a threshold.
                if report.findings_with_threshold > 0 {
                    println!(" with threshold: {}", report.findings_with_threshold);
                    println!(" jointly accepted: {}", report.jointly_accepted);
                }
            }
        }
        SignAction::ThresholdSet {
            frontier,
            finding_id,
            to,
            json,
        } => {
            // A threshold of zero would be trivially satisfied; reject it.
            if to == 0 {
                fail("--to must be >= 1");
            }
            let mut project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let Some(idx) = project.findings.iter().position(|f| f.id == finding_id) else {
                fail(&format!("finding '{finding_id}' not present in frontier"));
            };
            project.findings[idx].flags.signature_threshold = Some(to);
            // Recompute derived jointly_accepted flags BEFORE reading the
            // flag back for the status message below.
            sign::refresh_jointly_accepted(&mut project);
            let met = project.findings[idx].flags.jointly_accepted;
            repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));

            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&json!({
                        "ok": true,
                        "command": "sign.threshold-set",
                        "finding_id": finding_id,
                        "threshold": to,
                        "jointly_accepted": met,
                        "frontier": frontier.display().to_string(),
                    }))
                    .expect("failed to serialize sign.threshold-set")
                );
            } else {
                println!(
                    "{} signature_threshold={to} on {finding_id} ({})",
                    style::ok("set"),
                    if met {
                        "jointly accepted"
                    } else {
                        "awaiting signatures"
                    }
                );
            }
        }
    }
}
11225
/// Handle `vela actor` subcommands: register a signing actor (`Add`), rotate
/// an actor's key by retiring the old record and registering a new one
/// (`Rotate`), and list registered actors (`List`). All errors abort via
/// `fail` / `fail_return`.
fn cmd_actor(action: ActorAction) {
    match action {
        ActorAction::Add {
            frontier,
            id,
            pubkey,
            tier,
            orcid,
            clearance,
            json,
        } => {
            // An Ed25519 public key is 32 bytes, i.e. exactly 64 hex chars.
            let trimmed = pubkey.trim();
            if trimmed.len() != 64 || hex::decode(trimmed).is_err() {
                fail("Public key must be 64 hex characters (32-byte Ed25519 pubkey).");
            }
            // Normalize/validate the optional ORCID and clearance inputs up
            // front so bad values fail before the frontier is touched.
            let orcid_normalized = orcid
                .as_deref()
                .map(|s| sign::validate_orcid(s).unwrap_or_else(|e| fail_return(&e)));
            let clearance: Option<crate::access_tier::AccessTier> = clearance.as_deref().map(|s| {
                crate::access_tier::AccessTier::parse(s).unwrap_or_else(|e| fail_return(&e))
            });

            let mut project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            // Actor ids are unique within a frontier.
            if project.actors.iter().any(|actor| actor.id == id) {
                fail(&format!(
                    "Actor '{id}' already registered in this frontier."
                ));
            }
            project.actors.push(sign::ActorRecord {
                id: id.clone(),
                public_key: trimmed.to_string(),
                algorithm: "ed25519".to_string(),
                created_at: chrono::Utc::now().to_rfc3339(),
                tier: tier.clone(),
                orcid: orcid_normalized.clone(),
                access_clearance: clearance,
                revoked_at: None,
                revoked_reason: None,
            });
            repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "actor.add",
                "frontier": frontier.display().to_string(),
                "actor_id": id,
                "public_key": trimmed,
                "tier": tier,
                "orcid": orcid_normalized,
                "registered_count": project.actors.len(),
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload).expect("failed to serialize actor.add")
                );
            } else {
                let tier_suffix = tier
                    .as_deref()
                    .map_or_else(String::new, |t| format!(" tier={t}"));
                // Slice is safe here: length was validated to be exactly 64
                // ASCII hex characters above.
                println!(
                    "{} actor {} (pubkey {}{tier_suffix})",
                    style::ok("registered"),
                    id,
                    &trimmed[..16]
                );
            }
        }
        ActorAction::Rotate {
            frontier,
            id,
            new_id,
            new_pubkey,
            reason,
            json,
        } => {
            // Same 64-hex-char Ed25519 validation as Add.
            let trimmed = new_pubkey.trim();
            if trimmed.len() != 64 || hex::decode(trimmed).is_err() {
                fail("--new-pubkey must be 64 hex characters (32-byte Ed25519 pubkey).");
            }
            if reason.trim().is_empty() {
                fail("--reason must be non-empty (record why the rotation is happening).");
            }
            if id == new_id {
                fail("--id and --new-id must differ; rotation registers a fresh actor record.");
            }

            let mut project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));

            if project.actors.iter().any(|a| a.id == new_id) {
                fail(&format!(
                    "Refusing to rotate: actor '{new_id}' is already registered."
                ));
            }

            // One timestamp is shared by the revocation and the new record.
            let now = chrono::Utc::now().to_rfc3339();
            let mut found_old = false;
            let mut old_pubkey_prefix: Option<String> = None;
            for actor in project.actors.iter_mut() {
                if actor.id == id {
                    // Double rotation is an error, not a no-op.
                    if actor.revoked_at.is_some() {
                        fail(&format!(
                            "Refusing to rotate: actor '{id}' is already revoked at {}.",
                            actor.revoked_at.as_deref().unwrap_or("?")
                        ));
                    }
                    actor.revoked_at = Some(now.clone());
                    actor.revoked_reason = Some(reason.clone());
                    // NOTE(review): this slice panics if a stored record's
                    // public_key is shorter than 16 bytes. Add/Rotate validate
                    // 64 hex chars, but records written by other tools are not
                    // re-validated here — TODO confirm.
                    old_pubkey_prefix = Some(actor.public_key[..16].to_string());
                    found_old = true;
                }
            }
            if !found_old {
                fail(&format!(
                    "Cannot rotate: actor '{id}' is not registered in this frontier."
                ));
            }

            // The replacement record starts fresh: tier/orcid/clearance are
            // not carried over from the retired actor.
            project.actors.push(sign::ActorRecord {
                id: new_id.clone(),
                public_key: trimmed.to_string(),
                algorithm: "ed25519".to_string(),
                created_at: now.clone(),
                tier: None,
                orcid: None,
                access_clearance: None,
                revoked_at: None,
                revoked_reason: None,
            });

            repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));

            let payload = json!({
                "ok": true,
                "command": "actor.rotate",
                "frontier": frontier.display().to_string(),
                "retired_actor_id": id,
                "retired_pubkey_prefix": old_pubkey_prefix,
                "new_actor_id": new_id,
                "new_pubkey": trimmed,
                "revoked_at": now,
                "reason": reason,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize actor.rotate")
                );
            } else {
                println!(
                    "{} actor {} retired (pubkey {}...), {} registered (pubkey {}...)",
                    style::ok("rotated"),
                    id,
                    old_pubkey_prefix.as_deref().unwrap_or("?"),
                    new_id,
                    &trimmed[..16]
                );
                println!(" revoked_at: {now}");
                println!(" reason: {reason}");
            }
        }
        ActorAction::List { frontier, json } => {
            let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            if json {
                let payload = json!({
                    "ok": true,
                    "command": "actor.list",
                    "frontier": frontier.display().to_string(),
                    "actors": project.actors,
                });
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload).expect("failed to serialize actor.list")
                );
            } else {
                println!();
                println!(
                    " {}",
                    format!("VELA · ACTOR · LIST · {}", frontier.display())
                        .to_uppercase()
                        .dimmed()
                );
                println!(" {}", style::tick_row(60));
                if project.actors.is_empty() {
                    println!(" (no actors registered)");
                } else {
                    for actor in &project.actors {
                        // NOTE(review): `[..16]` panics on a stored public_key
                        // shorter than 16 bytes (not re-validated on load) —
                        // TODO confirm records are always 64 hex chars.
                        println!(
                            " {:<28} {}… registered {}",
                            actor.id,
                            &actor.public_key[..16],
                            actor.created_at
                        );
                    }
                }
            }
        }
    }
}
11433
11434fn cmd_causal(action: CausalAction) {
11436 use crate::causal_reasoning;
11437
11438 match action {
11439 CausalAction::Audit {
11440 frontier,
11441 problems_only,
11442 json,
11443 } => {
11444 let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
11445 let mut entries = causal_reasoning::audit_frontier(&project);
11446 if problems_only {
11447 entries.retain(|e| e.verdict.needs_reviewer_attention());
11448 }
11449 let summary = causal_reasoning::summarize_audit(&entries);
11450
11451 if json {
11452 println!(
11453 "{}",
11454 serde_json::to_string_pretty(&json!({
11455 "ok": true,
11456 "command": "causal.audit",
11457 "frontier": frontier.display().to_string(),
11458 "summary": summary,
11459 "entries": entries,
11460 }))
11461 .expect("serialize causal.audit")
11462 );
11463 return;
11464 }
11465
11466 println!();
11467 println!(
11468 " {}",
11469 format!("VELA · CAUSAL · AUDIT · {}", frontier.display())
11470 .to_uppercase()
11471 .dimmed()
11472 );
11473 println!(" {}", style::tick_row(60));
11474 println!(
11475 " total: {} identified: {} conditional: {} underidentified: {} underdetermined: {}",
11476 summary.total,
11477 summary.identified,
11478 summary.conditional,
11479 summary.underidentified,
11480 summary.underdetermined,
11481 );
11482 if entries.is_empty() {
11483 println!(" (no entries to report)");
11484 return;
11485 }
11486 for e in &entries {
11487 let chip = match e.verdict {
11488 crate::causal_reasoning::Identifiability::Identified => style::ok("identified"),
11489 crate::causal_reasoning::Identifiability::Conditional => {
11490 style::warn("conditional")
11491 }
11492 crate::causal_reasoning::Identifiability::Underidentified => {
11493 style::lost("underidentified")
11494 }
11495 crate::causal_reasoning::Identifiability::Underdetermined => {
11496 style::warn("underdetermined")
11497 }
11498 };
11499 let claim = e
11500 .causal_claim
11501 .map_or("none".to_string(), |c| format!("{c:?}").to_lowercase());
11502 let grade = e
11503 .causal_evidence_grade
11504 .map_or("none".to_string(), |g| format!("{g:?}").to_lowercase());
11505 println!();
11506 println!(" {chip} {} ({}/{})", e.finding_id, claim, grade);
11507 let assertion_short: String = e.assertion_text.chars().take(78).collect();
11508 println!(" {assertion_short}");
11509 println!(" {} {}", style::ok("why:"), e.rationale);
11510 if e.verdict.needs_reviewer_attention()
11511 || matches!(
11512 e.verdict,
11513 crate::causal_reasoning::Identifiability::Underdetermined
11514 )
11515 {
11516 println!(" {} {}", style::ok("fix:"), e.remediation);
11517 }
11518 }
11519 }
11520 CausalAction::Effect {
11521 frontier,
11522 source,
11523 on: target,
11524 json,
11525 } => {
11526 use crate::causal_graph::{CausalEffectVerdict, identify_effect};
11527
11528 let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
11529 let verdict = identify_effect(&project, &source, &target);
11530
11531 if json {
11532 println!(
11533 "{}",
11534 serde_json::to_string_pretty(&json!({
11535 "ok": true,
11536 "command": "causal.effect",
11537 "frontier": frontier.display().to_string(),
11538 "source": source,
11539 "target": target,
11540 "verdict": verdict,
11541 }))
11542 .expect("serialize causal.effect")
11543 );
11544 return;
11545 }
11546
11547 println!();
11548 println!(
11549 " {}",
11550 format!("VELA · CAUSAL · EFFECT · {} → {}", source, target)
11551 .to_uppercase()
11552 .dimmed()
11553 );
11554 println!(" {}", style::tick_row(60));
11555 match verdict {
11556 CausalEffectVerdict::Identified {
11557 adjustment_set,
11558 back_door_paths_considered,
11559 } => {
11560 if adjustment_set.is_empty() {
11561 println!(
11562 " {} no back-door adjustment needed",
11563 style::ok("identified")
11564 );
11565 } else {
11566 println!(" {} identified by adjusting on:", style::ok("identified"));
11567 for z in &adjustment_set {
11568 println!(" · {z}");
11569 }
11570 }
11571 println!(
11572 " back-door paths considered: {}",
11573 back_door_paths_considered
11574 );
11575 }
11576 CausalEffectVerdict::IdentifiedByFrontDoor { mediator_set } => {
11577 println!(
11578 " {} identified via front-door criterion (Pearl 1995 §3.3)",
11579 style::ok("identified")
11580 );
11581 println!(" mediators that intercept all directed paths:");
11582 for m in &mediator_set {
11583 println!(" · {m}");
11584 }
11585 println!(
11586 " applies when source-target confounders are unobserved but the mediator chain is."
11587 );
11588 }
11589 CausalEffectVerdict::NoCausalPath { reason } => {
11590 println!(" {} no causal path: {reason}", style::warn("no_path"));
11591 }
11592 CausalEffectVerdict::Underidentified {
11593 unblocked_back_door_paths,
11594 candidates_tried,
11595 } => {
11596 println!(
11597 " {} no observational adjustment set found ({} candidates tried)",
11598 style::lost("underidentified"),
11599 candidates_tried
11600 );
11601 println!(" open back-door paths:");
11602 for path in unblocked_back_door_paths.iter().take(5) {
11603 println!(" · {}", path.join(" — "));
11604 }
11605 println!(
11606 " remediation: either intervene experimentally on {source}, or extend the link graph to make a confounder observable."
11607 );
11608 }
11609 CausalEffectVerdict::UnknownNode { which } => {
11610 fail(&which);
11611 }
11612 }
11613 println!();
11614 }
11615 CausalAction::Graph {
11616 frontier,
11617 node,
11618 json,
11619 } => {
11620 use crate::causal_graph::CausalGraph;
11621 let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
11622 let graph = CausalGraph::from_project(&project);
11623
11624 let nodes: Vec<&str> = if let Some(n) = node.as_deref() {
11627 if !graph.contains(n) {
11628 fail(&format!("node not in frontier: {n}"));
11629 }
11630 vec![n]
11631 } else {
11632 project.findings.iter().map(|f| f.id.as_str()).collect()
11633 };
11634
11635 if json {
11636 let payload: Vec<_> = nodes
11637 .iter()
11638 .map(|n| {
11639 let parents: Vec<&str> = graph.parents_of(n).collect();
11640 let children: Vec<&str> = graph.children_of(n).collect();
11641 json!({
11642 "node": n,
11643 "parents": parents,
11644 "children": children,
11645 })
11646 })
11647 .collect();
11648 println!(
11649 "{}",
11650 serde_json::to_string_pretty(&json!({
11651 "ok": true,
11652 "command": "causal.graph",
11653 "node_count": graph.node_count(),
11654 "edge_count": graph.edge_count(),
11655 "nodes": payload,
11656 }))
11657 .expect("serialize causal.graph")
11658 );
11659 return;
11660 }
11661
11662 println!();
11663 println!(
11664 " {}",
11665 format!("VELA · CAUSAL · GRAPH · {}", frontier.display())
11666 .to_uppercase()
11667 .dimmed()
11668 );
11669 println!(" {}", style::tick_row(60));
11670 println!(
11671 " {} nodes · {} edges",
11672 graph.node_count(),
11673 graph.edge_count()
11674 );
11675 println!();
11676 for n in &nodes {
11677 let parents: Vec<&str> = graph.parents_of(n).collect();
11678 let children: Vec<&str> = graph.children_of(n).collect();
11679 if parents.is_empty() && children.is_empty() && nodes.len() > 1 {
11680 continue; }
11682 println!(" {n}");
11683 if !parents.is_empty() {
11684 println!(" parents: {}", parents.join(", "));
11685 }
11686 if !children.is_empty() {
11687 println!(" children: {}", children.join(", "));
11688 }
11689 }
11690 }
11691 CausalAction::Counterfactual {
11692 frontier,
11693 intervene_on,
11694 set_to,
11695 target,
11696 json,
11697 } => {
11698 use crate::counterfactual::{
11699 CounterfactualQuery, CounterfactualVerdict, answer_counterfactual,
11700 };
11701
11702 let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
11703 let query = CounterfactualQuery {
11704 intervene_on: intervene_on.clone(),
11705 set_to,
11706 target: target.clone(),
11707 };
11708 let verdict = answer_counterfactual(&project, &query);
11709
11710 if json {
11711 println!(
11712 "{}",
11713 serde_json::to_string_pretty(&json!({
11714 "ok": true,
11715 "command": "causal.counterfactual",
11716 "frontier": frontier.display().to_string(),
11717 "query": query,
11718 "verdict": verdict,
11719 }))
11720 .expect("serialize causal.counterfactual")
11721 );
11722 return;
11723 }
11724
11725 println!();
11726 println!(
11727 " {}",
11728 format!(
11729 "VELA · CAUSAL · COUNTERFACTUAL · do({intervene_on} := {set_to:.3}) → {target}"
11730 )
11731 .to_uppercase()
11732 .dimmed()
11733 );
11734 println!(" {}", style::tick_row(72));
11735 match verdict {
11736 CounterfactualVerdict::Resolved {
11737 factual,
11738 counterfactual,
11739 delta,
11740 paths_used,
11741 } => {
11742 println!(
11743 " {} factual: {factual:.3} counterfactual: {counterfactual:.3} delta: {delta:+.3}",
11744 style::ok("resolved")
11745 );
11746 println!(
11747 " twin-network propagation through {} causal path(s):",
11748 paths_used.len()
11749 );
11750 for p in paths_used.iter().take(5) {
11751 println!(" · {}", p.join(" → "));
11752 }
11753 println!(
11754 " reading: \"if {intervene_on}'s confidence had been {set_to:.3} \
11755 instead of factual, {target}'s confidence would shift by {delta:+.3}.\""
11756 );
11757 }
11758 CounterfactualVerdict::MechanismUnspecified { unspecified_edges } => {
11759 println!(
11760 " {} causal path exists but {} edge(s) lack a mechanism annotation",
11761 style::warn("mechanism_unspecified"),
11762 unspecified_edges.len()
11763 );
11764 for (parent, child) in unspecified_edges.iter().take(8) {
11765 println!(" · {parent} → {child}");
11766 }
11767 println!(
11768 " remediation: annotate one of the link mechanisms (linear / monotonic / threshold / saturating)."
11769 );
11770 }
11771 CounterfactualVerdict::NoCausalPath { factual } => {
11772 println!(
11773 " {} no directed path from {intervene_on} to {target}; counterfactual = factual = {factual:.3}",
11774 style::warn("no_path")
11775 );
11776 }
11777 CounterfactualVerdict::UnknownNode { which } => {
11778 fail(&format!("node not in frontier: {which}"));
11779 }
11780 CounterfactualVerdict::InvalidIntervention { reason } => {
11781 fail(&reason);
11782 }
11783 }
11784 println!();
11785 }
11786 }
11787}
11788
11789fn cmd_bridges(action: BridgesAction) {
11792 use crate::bridge::{Bridge, BridgeStatus, derive_bridges};
11793 use std::collections::HashMap;
11794
11795 fn bridges_dir(frontier: &Path) -> PathBuf {
11796 frontier.join(".vela/bridges")
11797 }
11798
11799 fn load_bridge(frontier: &Path, id: &str) -> Result<Bridge, String> {
11800 let path = bridges_dir(frontier).join(format!("{id}.json"));
11801 if !path.is_file() {
11802 return Err(format!("bridge not found: {id}"));
11803 }
11804 let data = std::fs::read_to_string(&path).map_err(|e| format!("read {id}: {e}"))?;
11805 serde_json::from_str(&data).map_err(|e| format!("parse {id}: {e}"))
11806 }
11807
11808 fn save_bridge(frontier: &Path, b: &Bridge) -> Result<(), String> {
11809 let dir = bridges_dir(frontier);
11810 std::fs::create_dir_all(&dir).map_err(|e| format!("mkdir bridges/: {e}"))?;
11811 let path = dir.join(format!("{}.json", b.id));
11812 let data = serde_json::to_string_pretty(b).map_err(|e| format!("serialize bridge: {e}"))?;
11813 std::fs::write(&path, format!("{data}\n")).map_err(|e| format!("write bridge: {e}"))
11814 }
11815
11816 fn default_reviewer_id() -> String {
11819 std::env::var("VELA_REVIEWER_ID").unwrap_or_else(|_| "reviewer:will-blair".to_string())
11820 }
11821
11822 fn emit_bridge_reviewed_event(
11833 frontier: &Path,
11834 bridge_id: &str,
11835 status: &str,
11836 reviewer_id: &str,
11837 note: Option<&str>,
11838 ) -> Result<(), String> {
11839 let mut payload = serde_json::json!({
11840 "bridge_id": bridge_id,
11841 "status": status,
11842 });
11843 if let Some(n) = note
11844 && !n.trim().is_empty()
11845 {
11846 payload["note"] = serde_json::Value::String(n.to_string());
11847 }
11848 let known_ids: Vec<String> = list_bridges(frontier)
11850 .unwrap_or_default()
11851 .into_iter()
11852 .map(|b| b.id)
11853 .collect();
11854 crate::events::validate_bridge_reviewed_against_state(&payload, &known_ids)?;
11855 let event = crate::events::new_bridge_reviewed_event(
11856 bridge_id,
11857 reviewer_id,
11858 "human",
11859 &format!("Bridge {status} by {reviewer_id}"),
11860 payload,
11861 Vec::new(),
11862 );
11863 let events_dir = frontier.join(".vela/events");
11864 std::fs::create_dir_all(&events_dir).map_err(|e| format!("mkdir .vela/events: {e}"))?;
11865 let event_path = events_dir.join(format!("{}.json", event.id));
11866 let data =
11867 serde_json::to_string_pretty(&event).map_err(|e| format!("serialize event: {e}"))?;
11868 std::fs::write(&event_path, format!("{data}\n")).map_err(|e| format!("write event: {e}"))
11869 }
11870
11871 fn list_bridges(frontier: &Path) -> Result<Vec<Bridge>, String> {
11872 let dir = bridges_dir(frontier);
11873 if !dir.is_dir() {
11874 return Ok(Vec::new());
11875 }
11876 let mut out = Vec::new();
11877 for entry in std::fs::read_dir(&dir).map_err(|e| format!("read bridges/: {e}"))? {
11878 let entry = entry.map_err(|e| format!("read entry: {e}"))?;
11879 let path = entry.path();
11880 if path.extension().and_then(|s| s.to_str()) != Some("json") {
11881 continue;
11882 }
11883 let data = std::fs::read_to_string(&path).map_err(|e| format!("read {path:?}: {e}"))?;
11884 let b: Bridge =
11885 serde_json::from_str(&data).map_err(|e| format!("parse {path:?}: {e}"))?;
11886 out.push(b);
11887 }
11888 out.sort_by(|a, b| {
11889 b.finding_refs
11890 .len()
11891 .cmp(&a.finding_refs.len())
11892 .then(a.entity_name.cmp(&b.entity_name))
11893 });
11894 Ok(out)
11895 }
11896
11897 match action {
11898 BridgesAction::Derive {
11899 frontier_a,
11900 label_a,
11901 frontier_b,
11902 label_b,
11903 json,
11904 } => {
11905 let a = repo::load_from_path(&frontier_a).unwrap_or_else(|e| fail_return(&e));
11906 let b = repo::load_from_path(&frontier_b).unwrap_or_else(|e| fail_return(&e));
11907 let now = chrono::Utc::now().to_rfc3339();
11908 let new_bridges =
11909 derive_bridges(&[(label_a.as_str(), &a), (label_b.as_str(), &b)], &now);
11910
11911 let existing = list_bridges(&frontier_a).unwrap_or_default();
11915 let existing_by_id: HashMap<String, Bridge> =
11916 existing.iter().map(|b| (b.id.clone(), b.clone())).collect();
11917 let mut written = 0;
11918 let mut preserved = 0;
11919 let mut new_ids = Vec::new();
11920 for mut bridge in new_bridges {
11921 if let Some(prev) = existing_by_id.get(&bridge.id)
11922 && prev.status != BridgeStatus::Derived
11923 {
11924 bridge.status = prev.status;
11926 bridge.derived_at = prev.derived_at.clone();
11927 preserved += 1;
11928 }
11929 save_bridge(&frontier_a, &bridge).unwrap_or_else(|e| fail_return(&e));
11930 new_ids.push(bridge.id.clone());
11931 written += 1;
11932 }
11933
11934 if json {
11935 println!(
11936 "{}",
11937 serde_json::to_string_pretty(&json!({
11938 "ok": true,
11939 "command": "bridges.derive",
11940 "frontier_a": frontier_a.display().to_string(),
11941 "frontier_b": frontier_b.display().to_string(),
11942 "bridges_written": written,
11943 "reviewer_judgments_preserved": preserved,
11944 "ids": new_ids,
11945 }))
11946 .expect("serialize bridges.derive")
11947 );
11948 return;
11949 }
11950
11951 println!();
11952 println!(
11953 " {}",
11954 format!("VELA · BRIDGES · DERIVE · {} ↔ {}", label_a, label_b)
11955 .to_uppercase()
11956 .dimmed()
11957 );
11958 println!(" {}", style::tick_row(60));
11959 println!(" {} {} bridge(s) materialized", style::ok("ok"), written);
11960 if preserved > 0 {
11961 println!(
11962 " {} {} reviewer judgment(s) preserved",
11963 style::ok("kept"),
11964 preserved
11965 );
11966 }
11967 for id in new_ids.iter().take(10) {
11968 println!(" · {id}");
11969 }
11970 if new_ids.len() > 10 {
11971 println!(" … and {} more", new_ids.len() - 10);
11972 }
11973 println!();
11974 }
11975 BridgesAction::List {
11976 frontier,
11977 status,
11978 json,
11979 } => {
11980 let mut bridges = list_bridges(&frontier).unwrap_or_else(|e| fail_return(&e));
11981 if let Some(s) = status.as_deref() {
11982 let want = match s.to_lowercase().as_str() {
11983 "derived" => BridgeStatus::Derived,
11984 "confirmed" => BridgeStatus::Confirmed,
11985 "refuted" => BridgeStatus::Refuted,
11986 other => fail_return(&format!(
11987 "unknown bridge status '{other}' (try derived|confirmed|refuted)"
11988 )),
11989 };
11990 bridges.retain(|b| b.status == want);
11991 }
11992 if json {
11993 println!(
11994 "{}",
11995 serde_json::to_string_pretty(&json!({
11996 "ok": true,
11997 "command": "bridges.list",
11998 "frontier": frontier.display().to_string(),
11999 "count": bridges.len(),
12000 "bridges": bridges,
12001 }))
12002 .expect("serialize bridges.list")
12003 );
12004 return;
12005 }
12006 println!();
12007 println!(
12008 " {}",
12009 format!("VELA · BRIDGES · LIST · {}", frontier.display())
12010 .to_uppercase()
12011 .dimmed()
12012 );
12013 println!(" {}", style::tick_row(60));
12014 println!(" {} bridge(s)", bridges.len());
12015 for b in &bridges {
12016 let chip = match b.status {
12017 BridgeStatus::Derived => style::warn("derived"),
12018 BridgeStatus::Confirmed => style::ok("confirmed"),
12019 BridgeStatus::Refuted => style::lost("refuted"),
12020 };
12021 println!();
12022 println!(
12023 " {chip} {} {} ↔ findings:{}",
12024 b.id,
12025 b.entity_name,
12026 b.finding_refs.len()
12027 );
12028 println!(" frontiers: {}", b.frontiers.join(", "));
12029 if let Some(t) = &b.tension {
12030 println!(" tension: {t}");
12031 }
12032 }
12033 println!();
12034 }
12035 BridgesAction::Show {
12036 frontier,
12037 bridge_id,
12038 json,
12039 } => {
12040 let b = load_bridge(&frontier, &bridge_id).unwrap_or_else(|e| fail_return(&e));
12041 if json {
12042 println!("{}", serde_json::to_string_pretty(&b).expect("serialize"));
12043 return;
12044 }
12045 println!();
12046 println!(
12047 " {}",
12048 format!("VELA · BRIDGES · SHOW · {}", b.id)
12049 .to_uppercase()
12050 .dimmed()
12051 );
12052 println!(" {}", style::tick_row(60));
12053 println!(" entity: {}", b.entity_name);
12054 println!(" status: {:?}", b.status);
12055 println!(" frontiers: {}", b.frontiers.join(", "));
12056 if !b.frontier_ids.is_empty() {
12057 println!(" frontier_ids: {}", b.frontier_ids.join(", "));
12058 }
12059 if let Some(t) = &b.tension {
12060 println!(" tension: {t}");
12061 }
12062 println!(" derived_at: {}", b.derived_at);
12063 println!(" finding refs ({}):", b.finding_refs.len());
12064 for r in &b.finding_refs {
12065 let dir = r.direction.as_deref().unwrap_or("—");
12066 let truncated: String = r.assertion_text.chars().take(72).collect();
12067 println!(
12068 " · [{}] {} (conf={:.2}, dir={})",
12069 r.frontier, r.finding_id, r.confidence, dir
12070 );
12071 println!(" {truncated}");
12072 }
12073 println!();
12074 }
12075 BridgesAction::Confirm {
12076 frontier,
12077 bridge_id,
12078 reviewer,
12079 note,
12080 json,
12081 } => {
12082 let mut b = load_bridge(&frontier, &bridge_id).unwrap_or_else(|e| fail_return(&e));
12083 let reviewer_id = reviewer.unwrap_or_else(default_reviewer_id);
12084 b.status = BridgeStatus::Confirmed;
12085 save_bridge(&frontier, &b).unwrap_or_else(|e| fail_return(&e));
12086 let _ = emit_bridge_reviewed_event(
12090 &frontier,
12091 &bridge_id,
12092 "confirmed",
12093 &reviewer_id,
12094 note.as_deref(),
12095 );
12096 if json {
12097 println!("{}", serde_json::to_string_pretty(&b).expect("serialize"));
12098 return;
12099 }
12100 println!();
12101 println!(" {} {} now confirmed", style::ok("confirmed"), b.id);
12102 println!();
12103 }
12104 BridgesAction::Refute {
12105 frontier,
12106 bridge_id,
12107 reviewer,
12108 note,
12109 json,
12110 } => {
12111 let mut b = load_bridge(&frontier, &bridge_id).unwrap_or_else(|e| fail_return(&e));
12112 let reviewer_id = reviewer.unwrap_or_else(default_reviewer_id);
12113 b.status = BridgeStatus::Refuted;
12114 save_bridge(&frontier, &b).unwrap_or_else(|e| fail_return(&e));
12115 let _ = emit_bridge_reviewed_event(
12116 &frontier,
12117 &bridge_id,
12118 "refuted",
12119 &reviewer_id,
12120 note.as_deref(),
12121 );
12122 if json {
12123 println!("{}", serde_json::to_string_pretty(&b).expect("serialize"));
12124 return;
12125 }
12126 println!();
12127 println!(" {} {} now refuted", style::lost("refuted"), b.id);
12128 println!();
12129 }
12130 }
12131}
12132
12133fn cmd_federation(action: FederationAction) {
12135 use crate::federation::PeerHub;
12136
12137 match action {
12138 FederationAction::PeerAdd {
12139 frontier,
12140 id,
12141 url,
12142 pubkey,
12143 note,
12144 json,
12145 } => {
12146 let peer = PeerHub {
12147 id: id.clone(),
12148 url: url.clone(),
12149 public_key: pubkey.trim().to_string(),
12150 added_at: chrono::Utc::now().to_rfc3339(),
12151 note: note.clone(),
12152 };
12153 peer.validate().unwrap_or_else(|e| fail_return(&e));
12154
12155 let mut project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
12156 if project.peers.iter().any(|p| p.id == id) {
12157 fail(&format!("peer '{id}' already in registry"));
12158 }
12159 project.peers.push(peer.clone());
12160 repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));
12161
12162 if json {
12163 println!(
12164 "{}",
12165 serde_json::to_string_pretty(&json!({
12166 "ok": true,
12167 "command": "federation.peer-add",
12168 "frontier": frontier.display().to_string(),
12169 "peer": peer,
12170 "registered_count": project.peers.len(),
12171 }))
12172 .expect("serialize federation.peer-add")
12173 );
12174 } else {
12175 println!(
12176 "{} peer {} (pubkey {}…) at {}",
12177 style::ok("registered"),
12178 id,
12179 &peer.public_key[..16],
12180 peer.url
12181 );
12182 }
12183 }
12184 FederationAction::PeerList { frontier, json } => {
12185 let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
12186 if json {
12187 println!(
12188 "{}",
12189 serde_json::to_string_pretty(&json!({
12190 "ok": true,
12191 "command": "federation.peer-list",
12192 "frontier": frontier.display().to_string(),
12193 "peers": project.peers,
12194 }))
12195 .expect("serialize federation.peer-list")
12196 );
12197 } else {
12198 println!();
12199 println!(
12200 " {}",
12201 format!("VELA · FEDERATION · PEERS · {}", frontier.display())
12202 .to_uppercase()
12203 .dimmed()
12204 );
12205 println!(" {}", style::tick_row(60));
12206 if project.peers.is_empty() {
12207 println!(" (no peers registered)");
12208 } else {
12209 for p in &project.peers {
12210 let note_suffix = if p.note.is_empty() {
12211 String::new()
12212 } else {
12213 format!(" · {}", p.note)
12214 };
12215 println!(
12216 " {:<24} {} {}…{note_suffix}",
12217 p.id,
12218 p.url,
12219 &p.public_key[..16]
12220 );
12221 }
12222 }
12223 }
12224 }
12225 FederationAction::Sync {
12226 frontier,
12227 peer_id,
12228 url,
12229 via_hub,
12230 vfr_id,
12231 allow_cross_vfr,
12232 dry_run,
12233 json,
12234 } => {
12235 use crate::federation::{self, DiscoveryResult};
12236
12237 let mut project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
12238 let Some(peer) = project.peers.iter().find(|p| p.id == peer_id).cloned() else {
12239 fail(&format!(
12240 "peer '{peer_id}' not in registry; run `vela federation peer add` first"
12241 ));
12242 };
12243 let local_frontier_id = project.frontier_id();
12244
12245 if via_hub
12252 && let Some(target) = vfr_id.as_deref()
12253 && target != local_frontier_id
12254 && !allow_cross_vfr
12255 {
12256 fail(&format!(
12257 "cross-vfr sync refused: --vfr-id {target} differs from local frontier_id {local_frontier_id}. \
12258 Pass --allow-cross-vfr to opt in (every peer-side finding will be recorded as a \
12259 missing_locally conflict). Or omit --vfr-id to default to the local frontier id."
12260 ));
12261 }
12262
12263 #[derive(Debug)]
12265 enum SyncOutcome {
12266 Resolved(crate::project::Project, String), BrokenLocator(String, String, u16), UnverifiedEntry(String, String), EntryNotFound(String, u16),
12270 }
12271
12272 let outcome = if via_hub {
12273 let target_vfr = vfr_id.clone().unwrap_or_else(|| local_frontier_id.clone());
12274 match federation::discover_peer_frontier(
12275 &peer.url,
12276 &target_vfr,
12277 Some(&peer.public_key),
12278 ) {
12279 DiscoveryResult::Resolved(p) => {
12280 let src =
12281 format!("{}/entries/{}", peer.url.trim_end_matches('/'), target_vfr);
12282 SyncOutcome::Resolved(p, src)
12283 }
12284 DiscoveryResult::BrokenLocator {
12285 vfr_id,
12286 locator,
12287 status,
12288 } => SyncOutcome::BrokenLocator(vfr_id, locator, status),
12289 DiscoveryResult::UnverifiedEntry { vfr_id, reason } => {
12290 SyncOutcome::UnverifiedEntry(vfr_id, reason)
12291 }
12292 DiscoveryResult::EntryNotFound { vfr_id, status } => {
12293 SyncOutcome::EntryNotFound(vfr_id, status)
12294 }
12295 DiscoveryResult::Unreachable { url, error } => {
12296 fail(&format!("peer hub unreachable ({url}): {error}"));
12297 }
12298 }
12299 } else {
12300 let resolved_url = url.unwrap_or_else(|| {
12301 let base = peer.url.trim_end_matches('/');
12302 format!("{base}/manifest/{local_frontier_id}.json")
12303 });
12304 match federation::fetch_peer_frontier(&resolved_url) {
12305 Ok(p) => SyncOutcome::Resolved(p, resolved_url),
12306 Err(e) => fail(&format!("direct fetch failed: {e}")),
12307 }
12308 };
12309
12310 let peer_source: String;
12313 let peer_state = match outcome {
12314 SyncOutcome::Resolved(p, src) => {
12315 if !json {
12316 println!(" · resolved via {src}");
12317 }
12318 peer_source = src;
12319 p
12320 }
12321 SyncOutcome::BrokenLocator(vfr, locator, status) => {
12322 if dry_run {
12323 if json {
12324 println!(
12325 "{}",
12326 serde_json::to_string_pretty(&json!({
12327 "ok": true,
12328 "command": "federation.sync",
12329 "dry_run": true,
12330 "outcome": "broken_locator",
12331 "vfr_id": vfr,
12332 "locator": locator,
12333 "http_status": status,
12334 }))
12335 .expect("serialize")
12336 );
12337 } else {
12338 println!(
12339 "{} dry-run: peer entry resolved but locator dead",
12340 style::warn("broken_locator")
12341 );
12342 println!(" vfr_id: {vfr}");
12343 println!(" locator: {locator} (HTTP {status})");
12344 }
12345 return;
12346 }
12347 let report = federation::record_locator_failure(
12348 &mut project,
12349 &peer_id,
12350 &vfr,
12351 &locator,
12352 status,
12353 );
12354 repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));
12355 if json {
12356 println!(
12357 "{}",
12358 serde_json::to_string_pretty(&json!({
12359 "ok": true,
12360 "command": "federation.sync",
12361 "outcome": "broken_locator",
12362 "report": report,
12363 }))
12364 .expect("serialize")
12365 );
12366 } else {
12367 println!(
12368 "{} sync recorded broken-locator conflict against {peer_id}",
12369 style::warn("broken_locator")
12370 );
12371 println!(" vfr_id: {vfr}");
12372 println!(" locator: {locator} (HTTP {status})");
12373 println!(" events appended: {}", report.events_appended);
12374 }
12375 return;
12376 }
12377 SyncOutcome::UnverifiedEntry(vfr, reason) => {
12378 if dry_run {
12379 if json {
12380 println!(
12381 "{}",
12382 serde_json::to_string_pretty(&json!({
12383 "ok": true,
12384 "command": "federation.sync",
12385 "dry_run": true,
12386 "outcome": "unverified_peer_entry",
12387 "vfr_id": vfr,
12388 "reason": reason,
12389 }))
12390 .expect("serialize")
12391 );
12392 } else {
12393 println!(
12394 "{} dry-run: peer entry signature did not verify",
12395 style::lost("unverified_peer_entry")
12396 );
12397 println!(" vfr_id: {vfr}");
12398 println!(" reason: {reason}");
12399 }
12400 return;
12401 }
12402 let report =
12403 federation::record_unverified_entry(&mut project, &peer_id, &vfr, &reason);
12404 repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));
12405 if json {
12406 println!(
12407 "{}",
12408 serde_json::to_string_pretty(&json!({
12409 "ok": true,
12410 "command": "federation.sync",
12411 "outcome": "unverified_peer_entry",
12412 "report": report,
12413 }))
12414 .expect("serialize")
12415 );
12416 } else {
12417 println!(
12418 "{} sync halted; peer's registry entry signature did not verify",
12419 style::lost("unverified_peer_entry")
12420 );
12421 println!(" vfr_id: {vfr}");
12422 println!(" reason: {reason}");
12423 }
12424 return;
12425 }
12426 SyncOutcome::EntryNotFound(vfr, status) => {
12427 if json {
12428 println!(
12429 "{}",
12430 serde_json::to_string_pretty(&json!({
12431 "ok": false,
12432 "command": "federation.sync",
12433 "outcome": "entry_not_found",
12434 "vfr_id": vfr,
12435 "http_status": status,
12436 }))
12437 .expect("serialize")
12438 );
12439 } else {
12440 println!(
12441 "{} peer's hub does not publish vfr_id {vfr} (HTTP {status})",
12442 style::warn("entry_not_found")
12443 );
12444 }
12445 return;
12446 }
12447 };
12448
12449 if dry_run {
12450 let conflicts = federation::diff_frontiers(&project, &peer_state);
12451 if json {
12452 println!(
12453 "{}",
12454 serde_json::to_string_pretty(&json!({
12455 "ok": true,
12456 "command": "federation.sync",
12457 "dry_run": true,
12458 "peer_id": peer_id,
12459 "peer_source": peer_source,
12460 "conflicts": conflicts,
12461 }))
12462 .expect("serialize federation.sync (dry-run)")
12463 );
12464 } else {
12465 println!(
12466 "{} dry-run vs {peer_id} ({}): {} conflict(s)",
12467 style::ok("ok"),
12468 peer_source,
12469 conflicts.len()
12470 );
12471 for c in &conflicts {
12472 println!(" · {} {} {}", c.kind.as_str(), c.finding_id, c.detail);
12473 }
12474 }
12475 return;
12476 }
12477
12478 let report = federation::sync_with_peer(&mut project, &peer_id, &peer_state);
12479 repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));
12480
12481 if json {
12482 println!(
12483 "{}",
12484 serde_json::to_string_pretty(&json!({
12485 "ok": true,
12486 "command": "federation.sync",
12487 "peer_id": peer_id,
12488 "peer_source": peer_source,
12489 "report": report,
12490 }))
12491 .expect("serialize federation.sync")
12492 );
12493 } else {
12494 println!(
12495 "{} synced with {} ({})",
12496 style::ok("ok"),
12497 peer_id,
12498 peer_source
12499 );
12500 println!(
12501 " our: {}",
12502 &report.our_snapshot_hash[..16.min(report.our_snapshot_hash.len())]
12503 );
12504 println!(
12505 " peer: {}",
12506 &report.peer_snapshot_hash[..16.min(report.peer_snapshot_hash.len())]
12507 );
12508 println!(
12509 " conflicts: {} events appended: {}",
12510 report.conflicts.len(),
12511 report.events_appended
12512 );
12513 for c in &report.conflicts {
12514 println!(" · {} {} {}", c.kind.as_str(), c.finding_id, c.detail);
12515 }
12516 }
12517 }
12518 FederationAction::PushResolution {
12519 frontier,
12520 conflict_event_id,
12521 to,
12522 key,
12523 vfr_id,
12524 json,
12525 } => {
12526 cmd_federation_push_resolution(frontier, conflict_event_id, to, key, vfr_id, json);
12527 }
12528 FederationAction::PeerRemove { frontier, id, json } => {
12529 let mut project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
12530 let before = project.peers.len();
12531 project.peers.retain(|p| p.id != id);
12532 if project.peers.len() == before {
12533 fail(&format!("peer '{id}' not found in registry"));
12534 }
12535 repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));
12536
12537 if json {
12538 println!(
12539 "{}",
12540 serde_json::to_string_pretty(&json!({
12541 "ok": true,
12542 "command": "federation.peer-remove",
12543 "frontier": frontier.display().to_string(),
12544 "removed": id,
12545 "remaining": project.peers.len(),
12546 }))
12547 .expect("serialize federation.peer-remove")
12548 );
12549 } else {
12550 println!(
12551 "{} peer {} ({} remaining)",
12552 style::ok("removed"),
12553 id,
12554 project.peers.len()
12555 );
12556 }
12557 }
12558 }
12559}
12560
12561fn cmd_federation_push_resolution(
12573 frontier: PathBuf,
12574 conflict_event_id: String,
12575 to: String,
12576 key: Option<PathBuf>,
12577 vfr_id: Option<String>,
12578 json: bool,
12579) {
12580 use crate::canonical;
12581 use crate::sign;
12582
12583 let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
12584
12585 let Some(peer) = project.peers.iter().find(|p| p.id == to).cloned() else {
12586 fail(&format!(
12587 "peer '{to}' not in registry; run `vela federation peer-add` first"
12588 ));
12589 };
12590
12591 let Some(resolution) = project
12593 .events
12594 .iter()
12595 .find(|e| {
12596 e.kind == "frontier.conflict_resolved"
12597 && e.payload.get("conflict_event_id").and_then(|v| v.as_str())
12598 == Some(conflict_event_id.as_str())
12599 })
12600 .cloned()
12601 else {
12602 fail(&format!(
12603 "no frontier.conflict_resolved event paired with conflict {conflict_event_id} in {}",
12604 frontier.display()
12605 ));
12606 };
12607
12608 let actor_id = resolution.actor.id.clone();
12611 let Some(actor) = project.actors.iter().find(|a| a.id == actor_id) else {
12612 fail(&format!(
12613 "resolution event's actor.id ({actor_id}) is not in the frontier's actor registry; \
12614 register the reviewer with `vela actor add` before pushing"
12615 ));
12616 };
12617
12618 let key_path = key.unwrap_or_else(|| {
12621 let home = std::env::var("HOME").unwrap_or_default();
12622 let base = PathBuf::from(home)
12623 .join(".config")
12624 .join("vela")
12625 .join("keys");
12626 let safe_id = actor.id.replace([':', '/'], "_");
12627 let by_actor = base.join(format!("{safe_id}.key"));
12628 if by_actor.exists() {
12629 by_actor
12630 } else {
12631 base.join("private.key")
12632 }
12633 });
12634
12635 let signing_key = sign::load_signing_key_from_path(&key_path).unwrap_or_else(|e| {
12636 fail_return(&format!(
12637 "load private key from {}: {e}",
12638 key_path.display()
12639 ))
12640 });
12641 let pubkey_hex = sign::pubkey_hex(&signing_key);
12642 if !pubkey_hex.eq_ignore_ascii_case(&actor.public_key) {
12643 fail(&format!(
12644 "private key at {} does not match actor {}'s registered public key. \
12645 Loaded pubkey {}, expected {}.",
12646 key_path.display(),
12647 actor.id,
12648 &pubkey_hex[..16],
12649 &actor.public_key[..16]
12650 ));
12651 }
12652
12653 let signature_hex = sign::sign_event(&resolution, &signing_key)
12656 .unwrap_or_else(|e| fail_return(&format!("sign event: {e}")));
12657
12658 let mut body = resolution.clone();
12663 body.signature = None;
12664 let body_value =
12665 serde_json::to_value(&body).unwrap_or_else(|e| fail_return(&format!("serialize: {e}")));
12666 let _canonical_check = canonical::to_canonical_bytes(&body_value)
12667 .unwrap_or_else(|e| fail_return(&format!("canonicalize: {e}")));
12668
12669 let target_vfr = vfr_id.unwrap_or_else(|| project.frontier_id());
12670 let url = format!(
12671 "{}/entries/{}/events",
12672 peer.url.trim_end_matches('/'),
12673 target_vfr
12674 );
12675
12676 let url_owned = url.clone();
12678 let pubkey_owned = pubkey_hex.clone();
12679 let signature_owned = signature_hex.clone();
12680 let body_owned = body_value.clone();
12681 let response: Result<(u16, String), String> = std::thread::spawn(move || {
12682 let client = reqwest::blocking::Client::new();
12683 let resp = client
12684 .post(&url_owned)
12685 .header("X-Vela-Signer-Pubkey", &pubkey_owned)
12686 .header("X-Vela-Signature", &signature_owned)
12687 .json(&body_owned)
12688 .send()
12689 .map_err(|e| format!("HTTP POST {url_owned}: {e}"))?;
12690 let status = resp.status().as_u16();
12691 let text = resp.text().unwrap_or_default();
12692 Ok((status, text))
12693 })
12694 .join()
12695 .map_err(|_| "push thread panicked".to_string())
12696 .unwrap_or_else(|e| fail_return(&e));
12697
12698 let (status, text) = response.unwrap_or_else(|e| fail_return(&e));
12699 let parsed: serde_json::Value =
12700 serde_json::from_str(&text).unwrap_or_else(|_| json!({ "raw": text }));
12701
12702 let accepted = matches!(status, 200..=202);
12703 if json {
12704 println!(
12705 "{}",
12706 serde_json::to_string_pretty(&json!({
12707 "ok": accepted,
12708 "command": "federation.push-resolution",
12709 "frontier": frontier.display().to_string(),
12710 "peer_id": to,
12711 "url": url,
12712 "conflict_event_id": conflict_event_id,
12713 "event_id": resolution.id,
12714 "actor_id": actor.id,
12715 "http_status": status,
12716 "response": parsed,
12717 }))
12718 .expect("serialize federation.push-resolution")
12719 );
12720 } else if accepted {
12721 println!(
12722 "{} resolution {} pushed to {} (HTTP {})",
12723 style::ok("ok"),
12724 &resolution.id[..16.min(resolution.id.len())],
12725 to,
12726 status
12727 );
12728 println!(" url: {url}");
12729 println!(" signer: {} (actor {})", &pubkey_hex[..16], actor.id);
12730 } else {
12731 println!("{} push refused (HTTP {})", style::lost("rejected"), status);
12732 println!(" url: {url}");
12733 println!(" response: {text}");
12734 std::process::exit(1);
12735 }
12736}
12737
/// Handle `vela queue <action>`: inspect or process the offline action queue.
///
/// Queued actions are mutations (proposals, accept/reject decisions) prepared
/// while offline and persisted in a queue file until an operator signs and
/// applies them. All subcommands accept an optional `--queue-file` override
/// (falling back to `queue::default_queue_path`) and a `--json` flag for
/// machine-readable output.
fn cmd_queue(action: QueueAction) {
    use crate::queue;
    match action {
        // `queue list` — read-only dump of every pending action.
        QueueAction::List { queue_file, json } => {
            let path = queue_file.unwrap_or_else(queue::default_queue_path);
            let q = queue::load(&path).unwrap_or_else(|e| fail_return(&e));
            if json {
                let payload = json!({
                    "ok": true,
                    "command": "queue.list",
                    "queue_file": path.display().to_string(),
                    "schema": q.schema,
                    "actions": q.actions,
                });
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload).expect("failed to serialize queue.list")
                );
            } else {
                println!();
                println!(
                    " {}",
                    format!("VELA · QUEUE · LIST · {}", path.display())
                        .to_uppercase()
                        .dimmed()
                );
                println!(" {}", style::tick_row(60));
                if q.actions.is_empty() {
                    println!(" (queue is empty)");
                } else {
                    for (idx, action) in q.actions.iter().enumerate() {
                        println!(
                            " [{idx}] {} → {} queued {}",
                            action.kind,
                            action.frontier.display(),
                            action.queued_at
                        );
                    }
                }
            }
        }
        // `queue clear` — drop all pending actions; reports how many were removed.
        QueueAction::Clear { queue_file, json } => {
            let path = queue_file.unwrap_or_else(queue::default_queue_path);
            let dropped = queue::clear(&path).unwrap_or_else(|e| fail_return(&e));
            if json {
                let payload = json!({
                    "ok": true,
                    "command": "queue.clear",
                    "queue_file": path.display().to_string(),
                    "dropped": dropped,
                });
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize queue.clear")
                );
            } else {
                println!("{} dropped {dropped} queued action(s)", style::ok("ok"));
            }
        }
        // `queue sign` — load the private key, then walk the queue: each action
        // is either confirmed interactively (unless --yes-to-all), signed and
        // applied, or kept in the queue for a later run.
        QueueAction::Sign {
            actor,
            key,
            queue_file,
            yes_to_all,
            json,
        } => {
            let path = queue_file.unwrap_or_else(queue::default_queue_path);
            let q = queue::load(&path).unwrap_or_else(|e| fail_return(&e));
            // Fast path: nothing to sign.
            if q.actions.is_empty() {
                if json {
                    println!("{}", json!({"ok": true, "signed": 0, "remaining": 0}));
                } else {
                    println!("{} queue is empty", style::ok("ok"));
                }
                return;
            }
            // The key file contains the hex-encoded private key; trim to
            // tolerate a trailing newline.
            let key_hex = std::fs::read_to_string(&key)
                .map(|s| s.trim().to_string())
                .unwrap_or_else(|e| fail_return(&format!("read key {}: {e}", key.display())));
            let signing_key = parse_signing_key(&key_hex);
            let mut signed_count = 0usize;
            // Actions that were skipped or failed stay queued for next time.
            let mut remaining = Vec::new();
            for action in q.actions.iter() {
                if !yes_to_all && !confirm_action(action) {
                    remaining.push(action.clone());
                    continue;
                }
                match sign_and_apply(&signing_key, &actor, action) {
                    Ok(report) => {
                        signed_count += 1;
                        if !json {
                            println!(
                                "{} {} on {} → {}",
                                style::ok("signed"),
                                action.kind,
                                action.frontier.display(),
                                report
                            );
                        }
                    }
                    Err(error) => {
                        // Failed actions are retained so the operator can retry
                        // after fixing the underlying problem.
                        remaining.push(action.clone());
                        if !json {
                            eprintln!(
                                "{} {} on {}: {error}",
                                style::warn("failed"),
                                action.kind,
                                action.frontier.display()
                            );
                        }
                    }
                }
            }
            // Persist the survivors back to the queue file.
            queue::replace_actions(&path, remaining.clone()).unwrap_or_else(|e| fail_return(&e));
            if json {
                let payload = json!({
                    "ok": true,
                    "command": "queue.sign",
                    "signed": signed_count,
                    "remaining": remaining.len(),
                });
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload).expect("failed to serialize queue.sign")
                );
            } else {
                println!(
                    "{} signed {signed_count} action(s); {} remaining in queue",
                    style::ok("ok"),
                    remaining.len()
                );
            }
        }
    }
}
12879
/// Location of the governance event chain for a frontier: always
/// `<dir>/.vela/governance/chain.json`, where `<dir>` is the frontier itself
/// when it is a directory, otherwise the frontier file's parent (falling back
/// to `"."` for a bare filename with no parent component).
fn governance_chain_path(frontier: &Path) -> PathBuf {
    let base = if frontier.is_dir() {
        frontier.to_path_buf()
    } else {
        frontier
            .parent()
            .map_or_else(|| PathBuf::from("."), Path::to_path_buf)
    };
    base.join(".vela").join("governance").join("chain.json")
}
12894
/// Default location of the local registry: `$HOME/.vela/registry/entries.json`.
/// When `HOME` is unset, falls back to the current directory (`.`).
fn default_registry_path() -> PathBuf {
    let home = std::env::var("HOME").unwrap_or_else(|_| String::from("."));
    let mut path = PathBuf::from(home);
    path.push(".vela");
    path.push("registry");
    path.push("entries.json");
    path
}
12904
12905fn resolve_vfr_to_path(vfr_id: &str, from: Option<&str>, dest: &Path) -> Result<(), String> {
12912 use crate::registry;
12913 let registry_data = match from {
12914 Some(loc) if loc.starts_with("http") => registry::load_any(loc)?,
12915 Some(loc) => {
12916 let p = registry::resolve_local(loc)?;
12917 registry::load_local(&p)?
12918 }
12919 None => {
12920 let home = std::env::var("HOME").unwrap_or_else(|_| ".".to_string());
12921 let p = PathBuf::from(home)
12922 .join(".vela")
12923 .join("registry")
12924 .join("entries.json");
12925 registry::load_local(&p)?
12926 }
12927 };
12928 let entry = registry::find_latest(®istry_data, vfr_id)
12929 .ok_or_else(|| format!("{vfr_id} not found in registry"))?;
12930 registry::fetch_frontier_to_prefer_event_hub(&entry, from, dest)
12931 .map_err(|e| format!("fetch frontier for {vfr_id}: {e}"))?;
12932 registry::verify_pull(&entry, dest)
12933 .map_err(|e| format!("pull verification failed for {vfr_id}: {e}"))?;
12934 Ok(())
12935}
12936
12937fn parse_signing_key(hex_str: &str) -> ed25519_dalek::SigningKey {
12938 let bytes = hex::decode(hex_str)
12939 .unwrap_or_else(|e| fail_return(&format!("invalid private-key hex: {e}")));
12940 let key_bytes: [u8; 32] = bytes
12941 .try_into()
12942 .unwrap_or_else(|_| fail_return("private key must be 32 bytes"));
12943 ed25519_dalek::SigningKey::from_bytes(&key_bytes)
12944}
12945
12946fn confirm_action(action: &crate::queue::QueuedAction) -> bool {
12947 use std::io::{self, BufRead, Write};
12948 let mut stdout = io::stdout().lock();
12949 let _ = writeln!(
12950 stdout,
12951 " sign {} on {}? [y/N] ",
12952 action.kind,
12953 action.frontier.display()
12954 );
12955 let _ = stdout.flush();
12956 drop(stdout);
12957 let stdin = io::stdin();
12958 let mut line = String::new();
12959 if stdin.lock().read_line(&mut line).is_err() {
12960 return false;
12961 }
12962 matches!(line.trim().to_lowercase().as_str(), "y" | "yes")
12963}
12964
/// Sign one queued action with `signing_key` and apply it to the frontier
/// referenced by `action.frontier`.
///
/// Supported kinds:
///   * `propose_review` / `propose_note` / `propose_revise_confidence` /
///     `propose_retract` — build and submit a proposal targeting a finding.
///   * `accept_proposal` / `reject_proposal` — record a review decision on an
///     existing proposal.
///
/// Returns a short human-readable report string on success, or an error
/// message naming the missing argument or failed step.
fn sign_and_apply(
    signing_key: &ed25519_dalek::SigningKey,
    actor: &str,
    action: &crate::queue::QueuedAction,
) -> Result<String, String> {
    use crate::events::StateTarget;
    use crate::proposals;
    let args = &action.args;
    match action.kind.as_str() {
        "propose_review" | "propose_note" | "propose_revise_confidence" | "propose_retract" => {
            // Map the queued-action kind onto the proposal event kind.
            let kind = match action.kind.as_str() {
                "propose_review" => "finding.review",
                "propose_note" => "finding.note",
                "propose_revise_confidence" => "finding.confidence_revise",
                "propose_retract" => "finding.retract",
                _ => unreachable!(),
            };
            // Every proposal kind requires a target finding and a reason.
            let target_id = args
                .get("target_finding_id")
                .and_then(Value::as_str)
                .ok_or("target_finding_id missing")?;
            let reason = args
                .get("reason")
                .and_then(Value::as_str)
                .ok_or("reason missing")?;
            // Kind-specific payload: each variant validates its own required arg.
            let payload = match action.kind.as_str() {
                "propose_review" => {
                    let status = args
                        .get("status")
                        .and_then(Value::as_str)
                        .ok_or("status missing")?;
                    json!({"status": status})
                }
                "propose_note" => {
                    let text = args
                        .get("text")
                        .and_then(Value::as_str)
                        .ok_or("text missing")?;
                    json!({"text": text})
                }
                "propose_revise_confidence" => {
                    let new_score = args
                        .get("new_score")
                        .and_then(Value::as_f64)
                        .ok_or("new_score missing")?;
                    json!({"new_score": new_score})
                }
                "propose_retract" => json!({}),
                _ => unreachable!(),
            };
            // Prefer the timestamp captured when the action was queued; fall
            // back to "now" if it was not recorded.
            let created_at = args
                .get("created_at")
                .and_then(Value::as_str)
                .map(String::from)
                .unwrap_or_else(|| chrono::Utc::now().to_rfc3339());
            let mut proposal = proposals::new_proposal(
                kind,
                StateTarget {
                    r#type: "finding".to_string(),
                    id: target_id.to_string(),
                },
                actor,
                "human",
                reason,
                payload,
                Vec::new(),
                Vec::new(),
            );
            proposal.created_at = created_at;
            // Recompute the content-derived id after overriding created_at.
            proposal.id = proposals::proposal_id(&proposal);
            // NOTE(review): the signature value is computed and then discarded;
            // the `?` still propagates signing failures, so this appears to act
            // as a key/serialization validation step — confirm whether
            // create_or_apply attaches its own signature.
            let _signature = crate::sign::sign_proposal(&proposal, signing_key)?;
            let result = proposals::create_or_apply(&action.frontier, proposal, false)
                .map_err(|e| format!("create_or_apply: {e}"))?;
            Ok(format!("proposal {}", result.proposal_id))
        }
        "accept_proposal" | "reject_proposal" => {
            let proposal_id = args
                .get("proposal_id")
                .and_then(Value::as_str)
                .ok_or("proposal_id missing")?;
            let reason = args
                .get("reason")
                .and_then(Value::as_str)
                .ok_or("reason missing")?;
            let timestamp = args
                .get("timestamp")
                .and_then(Value::as_str)
                .map(String::from)
                .unwrap_or_else(|| chrono::Utc::now().to_rfc3339());
            // Canonical preimage of the review decision that gets signed.
            let preimage = json!({
                "action": if action.kind == "accept_proposal" { "accept" } else { "reject" },
                "proposal_id": proposal_id,
                "reviewer_id": actor,
                "reason": reason,
                "timestamp": timestamp,
            });
            let bytes = crate::canonical::to_canonical_bytes(&preimage)?;
            use ed25519_dalek::Signer;
            // NOTE(review): signature is computed but not passed to
            // accept_at_path/reject_at_path — presumably those record their own
            // attribution; confirm before relying on this signature existing.
            let _signature = hex::encode(signing_key.sign(&bytes).to_bytes());
            if action.kind == "accept_proposal" {
                let event_id =
                    crate::proposals::accept_at_path(&action.frontier, proposal_id, actor, reason)
                        .map_err(|e| format!("accept_at_path: {e}"))?;
                Ok(format!("event {event_id}"))
            } else {
                crate::proposals::reject_at_path(&action.frontier, proposal_id, actor, reason)
                    .map_err(|e| format!("reject_at_path: {e}"))?;
                Ok(format!("rejected {proposal_id}"))
            }
        }
        other => Err(format!("unsupported queued action kind '{other}'")),
    }
}
13086
/// Handle `vela entity <action>`: entity resolution over a frontier, or a
/// listing of the bundled entity dictionary.
fn cmd_entity(action: EntityAction) {
    use crate::entity_resolve;
    match action {
        // `entity resolve` — run entity resolution over all findings in the
        // frontier (optionally forcing re-resolution) and save the result.
        EntityAction::Resolve {
            frontier,
            force,
            json,
        } => {
            let mut p = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let report = entity_resolve::resolve_frontier(&mut p, force);
            // Persist the mutated frontier before reporting.
            repo::save_to_path(&frontier, &p).unwrap_or_else(|e| fail_return(&e));
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&serde_json::json!({
                        "ok": true,
                        "command": "entity.resolve",
                        "frontier_path": frontier.display().to_string(),
                        "report": report,
                    }))
                    .expect("serialize")
                );
            } else {
                println!(
                    "{} resolved {} of {} entities ({} already, {} unresolved) across {} findings",
                    style::ok("entity"),
                    report.resolved,
                    report.total_entities,
                    report.already_resolved,
                    report.unresolved_count,
                    report.findings_touched,
                );
                // Deduplicate unresolved names across findings (BTreeSet also
                // gives a stable, sorted order) and show at most the first 8.
                let unresolved_summary: std::collections::BTreeSet<&str> = report
                    .per_finding
                    .iter()
                    .flat_map(|f| f.unresolved.iter().map(String::as_str))
                    .collect();
                if !unresolved_summary.is_empty() {
                    let take = unresolved_summary.iter().take(8).collect::<Vec<_>>();
                    println!(
                        " unresolved (first {}): {}",
                        take.len(),
                        take.iter().copied().cloned().collect::<Vec<_>>().join(", ")
                    );
                }
            }
        }
        // `entity list` — dump the bundled canonical-entity dictionary.
        EntityAction::List { json } => {
            let entries: Vec<serde_json::Value> = entity_resolve::iter_bundled()
                .map(|(name, etype, source, id)| {
                    serde_json::json!({
                        "canonical_name": name,
                        "entity_type": etype,
                        "source": source,
                        "id": id,
                    })
                })
                .collect();
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&serde_json::json!({
                        "ok": true,
                        "command": "entity.list",
                        "count": entries.len(),
                        "entries": entries,
                    }))
                    .expect("serialize")
                );
            } else {
                println!("{} {} bundled entries", style::ok("entity"), entries.len());
                for e in &entries {
                    println!(
                        " {:32} {:18} {} {}",
                        e["canonical_name"].as_str().unwrap_or("?"),
                        e["entity_type"].as_str().unwrap_or("?"),
                        e["source"].as_str().unwrap_or("?"),
                        e["id"].as_str().unwrap_or("?"),
                    );
                }
            }
        }
    }
}
13182
/// Handle `vela link <action>`: currently only `link add`, which records a
/// typed link from one finding to another (locally or across frontiers).
///
/// Validation order for `add`:
///   1. `--type` and `--inferred-by` must be from their allowed sets.
///   2. `--to` must parse as `vf_<hex>` (local) or `vf_<hex>@vfr_<hex>` (cross).
///   3. The `--from` finding must exist; a local target must exist; a cross
///      target's `vfr_id` must have a declared dependency.
///   4. For cross-frontier targets with an HTTP locator, a best-effort fetch
///      checks the target still exists and is not superseded (skippable via
///      `--no-check-target`); problems produce a warning, not a failure.
fn cmd_link(action: LinkAction) {
    use crate::bundle::{Link, LinkRef};
    match action {
        LinkAction::Add {
            frontier,
            from,
            to,
            r#type,
            note,
            inferred_by,
            no_check_target,
            json,
        } => {
            validate_enum_arg("--type", &r#type, bundle::VALID_LINK_TYPES);
            if !["compiler", "reviewer", "author"].contains(&inferred_by.as_str()) {
                fail(&format!(
                    "invalid --inferred-by '{inferred_by}'. Valid: compiler, reviewer, author"
                ));
            }
            let parsed = LinkRef::parse(&to).unwrap_or_else(|e| {
                fail(&format!(
                    "invalid --to '{to}': {e}. Expected vf_<hex> or vf_<hex>@vfr_<hex>"
                ))
            });
            let mut p = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            // The source finding must exist; remember its index for the final push.
            let source_idx = p
                .findings
                .iter()
                .position(|f| f.id == from)
                .unwrap_or_else(|| {
                    fail_return(&format!("--from finding '{from}' not in frontier"))
                });
            // Local targets must already be present in this frontier.
            if let LinkRef::Local { vf_id } = &parsed
                && !p.findings.iter().any(|f| &f.id == vf_id)
            {
                fail(&format!(
                    "local --to target '{vf_id}' not in frontier; add the target finding first"
                ));
            }
            // Cross-frontier targets require a declared dependency pin.
            if let LinkRef::Cross { vfr_id, .. } = &parsed
                && p.dep_for_vfr(vfr_id).is_none()
            {
                fail(&format!(
                    "cross-frontier --to references vfr_id '{vfr_id}' but no matching dep is declared. Run `vela frontier add-dep {vfr_id} --locator <url> --snapshot <hash>` first."
                ));
            }

            // Best-effort remote target check: only when the dep has an HTTP(S)
            // locator and --no-check-target was not passed. All failures along
            // the way (client build, fetch, parse) silently skip the check —
            // the link is still added; only a confirmed problem warns.
            let mut target_warning: Option<String> = None;
            if let LinkRef::Cross {
                vfr_id: target_vfr,
                vf_id: target_vf,
            } = &parsed
                && !no_check_target
                && let Some(dep) = p.dep_for_vfr(target_vfr)
                && let Some(locator) = dep.locator.as_deref()
                && (locator.starts_with("http://") || locator.starts_with("https://"))
            {
                let client = reqwest::blocking::Client::builder()
                    .timeout(std::time::Duration::from_secs(15))
                    .build()
                    .ok();
                if let Some(client) = client
                    && let Ok(resp) = client.get(locator).send()
                    && resp.status().is_success()
                    && let Ok(dep_project) = resp.json::<crate::project::Project>()
                {
                    if let Some(target_finding) =
                        dep_project.findings.iter().find(|f| &f.id == target_vf)
                    {
                        if target_finding.flags.superseded {
                            target_warning = Some(format!(
                                "warn · cross-frontier target '{target_vf}' in '{target_vfr}' has flags.superseded = true. \
You may be linking to outdated wording. Pull --transitive and inspect the supersedes chain to find the current finding. \
Use --no-check-target to skip this check."
                            ));
                        }
                    } else {
                        target_warning = Some(format!(
                            "warn · cross-frontier target '{target_vf}' not found in dep '{target_vfr}' (fetched from {locator}). \
The target may have been removed or never existed in the pinned snapshot."
                        ));
                    }
                }
            }

            // Append the link, refresh derived stats, and save.
            let now = chrono::Utc::now().to_rfc3339_opts(chrono::SecondsFormat::Secs, true);
            let link = Link {
                target: to.clone(),
                link_type: r#type.clone(),
                note: note.clone(),
                inferred_by: inferred_by.clone(),
                created_at: now,
                mechanism: None,
            };
            p.findings[source_idx].links.push(link);
            project::recompute_stats(&mut p);
            repo::save_to_path(&frontier, &p).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "link.add",
                "frontier": frontier.display().to_string(),
                "from": from,
                "to": to,
                "type": r#type,
                "cross_frontier": parsed.is_cross_frontier(),
            });
            if json {
                // Attach the warning (if any) to the JSON payload.
                let mut p2 = payload.clone();
                if let Some(w) = &target_warning
                    && let serde_json::Value::Object(m) = &mut p2
                {
                    m.insert(
                        "target_warning".to_string(),
                        serde_json::Value::String(w.clone()),
                    );
                }
                println!(
                    "{}",
                    serde_json::to_string_pretty(&p2).expect("failed to serialize link.add")
                );
            } else {
                println!(
                    "{} {} --[{}]--> {}{}",
                    style::ok("link"),
                    from,
                    r#type,
                    to,
                    if parsed.is_cross_frontier() {
                        " (cross-frontier)"
                    } else {
                        ""
                    }
                );
                if let Some(w) = target_warning {
                    println!(" {w}");
                }
            }
        }
    }
}
13328
/// Handle `vela frontier <action>`: scaffolding a new frontier, materializing
/// its repo layout, managing cross-frontier dependencies, and diffing.
fn cmd_frontier(action: FrontierAction) {
    use crate::project::ProjectDependency;
    use crate::repo;
    match action {
        // `frontier new` — write an empty frontier skeleton to `path`.
        FrontierAction::New {
            path,
            name,
            description,
            force,
            json,
        } => {
            // Refuse to clobber an existing file unless --force is given.
            if path.exists() && !force {
                fail(&format!(
                    "{} already exists; pass --force to overwrite",
                    path.display()
                ));
            }
            let now = chrono::Utc::now().to_rfc3339_opts(chrono::SecondsFormat::Secs, true);
            // Fresh project with all collections empty; only metadata is filled.
            let project = project::Project {
                vela_version: project::VELA_SCHEMA_VERSION.to_string(),
                schema: project::VELA_SCHEMA_URL.to_string(),
                frontier_id: None,
                project: project::ProjectMeta {
                    name: name.clone(),
                    description: description.clone(),
                    compiled_at: now,
                    compiler: project::VELA_COMPILER_VERSION.to_string(),
                    papers_processed: 0,
                    errors: 0,
                    dependencies: Vec::new(),
                },
                stats: project::ProjectStats::default(),
                findings: Vec::new(),
                sources: Vec::new(),
                evidence_atoms: Vec::new(),
                condition_records: Vec::new(),
                review_events: Vec::new(),
                confidence_updates: Vec::new(),
                events: Vec::new(),
                proposals: Vec::new(),
                proof_state: proposals::ProofState::default(),
                signatures: Vec::new(),
                actors: Vec::new(),
                replications: Vec::new(),
                datasets: Vec::new(),
                code_artifacts: Vec::new(),
                artifacts: Vec::new(),
                predictions: Vec::new(),
                resolutions: Vec::new(),
                peers: Vec::new(),
                negative_results: Vec::new(),
                trajectories: Vec::new(),
            };
            repo::save_to_path(&path, &project).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "frontier.new",
                "path": path.display().to_string(),
                "name": name,
                "schema": project::VELA_SCHEMA_URL,
                "vela_version": env!("CARGO_PKG_VERSION"),
                "next_steps": [
                    "vela finding add <path> --assertion '...' --author 'reviewer:you' --apply",
                    "vela sign generate-keypair --out keys",
                    "vela actor add <path> reviewer:you --pubkey \"$(cat keys/public.key)\"",
                    "vela registry publish <path> --owner reviewer:you --key keys/private.key --locator <url> --to https://vela-hub.fly.dev",
                ],
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize frontier.new")
                );
            } else {
                println!(
                    "{} scaffolded frontier '{name}' at {}",
                    style::ok("frontier"),
                    path.display()
                );
                println!(" next steps:");
                println!(
                    " 1. vela finding add {} --assertion '...' --author 'reviewer:you' --apply",
                    path.display()
                );
                println!(" 2. vela sign generate-keypair --out keys");
                println!(
                    " 3. vela actor add {} reviewer:you --pubkey \"$(cat keys/public.key)\"",
                    path.display()
                );
                println!(
                    " 4. vela registry publish {} --owner reviewer:you --key keys/private.key --locator <url> --to https://vela-hub.fly.dev",
                    path.display()
                );
            }
        }
        // `frontier materialize` — delegate to frontier_repo and report.
        FrontierAction::Materialize { frontier, json } => {
            let payload = frontier_repo::materialize(&frontier).unwrap_or_else(|e| fail_return(&e));
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize frontier materialize")
                );
            } else {
                println!(
                    "{} materialized frontier repo at {}",
                    style::ok("frontier"),
                    frontier.display()
                );
            }
        }
        // `frontier add-dep` — declare a pinned cross-frontier dependency.
        FrontierAction::AddDep {
            frontier,
            vfr_id,
            locator,
            snapshot,
            name,
            json,
        } => {
            let mut p = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            // Dependencies are keyed by vfr_id; duplicates must be removed first.
            if p.project
                .dependencies
                .iter()
                .any(|d| d.vfr_id.as_deref() == Some(&vfr_id))
            {
                fail(&format!(
                    "cross-frontier dependency '{vfr_id}' already declared; remove it first via `vela frontier remove-dep`"
                ));
            }
            let dep = ProjectDependency {
                name: name.unwrap_or_else(|| vfr_id.clone()),
                source: "vela.hub".into(),
                version: None,
                pinned_hash: None,
                vfr_id: Some(vfr_id.clone()),
                locator: Some(locator.clone()),
                pinned_snapshot_hash: Some(snapshot.clone()),
            };
            p.project.dependencies.push(dep);
            repo::save_to_path(&frontier, &p).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "frontier.add-dep",
                "frontier": frontier.display().to_string(),
                "vfr_id": vfr_id,
                "locator": locator,
                "pinned_snapshot_hash": snapshot,
                "declared_count": p.project.dependencies.len(),
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize frontier.add-dep")
                );
            } else {
                println!(
                    "{} declared cross-frontier dep {vfr_id}",
                    style::ok("frontier")
                );
                println!(" locator: {locator}");
                println!(" snapshot: {snapshot}");
            }
        }
        // `frontier list-deps` — read-only listing of declared dependencies.
        FrontierAction::ListDeps { frontier, json } => {
            let p = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let deps: Vec<&ProjectDependency> = p.project.dependencies.iter().collect();
            if json {
                let payload = json!({
                    "ok": true,
                    "command": "frontier.list-deps",
                    "frontier": frontier.display().to_string(),
                    "count": deps.len(),
                    "dependencies": deps,
                });
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize frontier.list-deps")
                );
            } else {
                println!();
                println!(
                    " {}",
                    format!("VELA · FRONTIER · LIST-DEPS · {}", frontier.display())
                        .to_uppercase()
                        .dimmed()
                );
                println!(" {}", style::tick_row(60));
                if deps.is_empty() {
                    println!(" (no dependencies declared)");
                } else {
                    for d in &deps {
                        // Cross-frontier deps carry a vfr_id pin; the rest are
                        // ordinary compile-time dependencies.
                        let kind = if d.is_cross_frontier() {
                            "cross-frontier"
                        } else {
                            "compile-time"
                        };
                        println!(" · {} [{kind}]", d.name);
                        if let Some(v) = &d.vfr_id {
                            println!(" vfr_id: {v}");
                        }
                        if let Some(l) = &d.locator {
                            println!(" locator: {l}");
                        }
                        if let Some(s) = &d.pinned_snapshot_hash {
                            println!(" snapshot: {s}");
                        }
                    }
                }
            }
        }
        // `frontier remove-dep` — drop a cross-frontier dep, but only when no
        // finding still links into it.
        FrontierAction::RemoveDep {
            frontier,
            vfr_id,
            json,
        } => {
            let mut p = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            // Guard: refuse removal while any link still targets this vfr.
            for f in &p.findings {
                for l in &f.links {
                    if let Ok(crate::bundle::LinkRef::Cross { vfr_id: ref v, .. }) =
                        crate::bundle::LinkRef::parse(&l.target)
                        && v == &vfr_id
                    {
                        fail(&format!(
                            "cannot remove dep '{vfr_id}': finding {} still links to it via {}",
                            f.id, l.target
                        ));
                    }
                }
            }
            let before = p.project.dependencies.len();
            p.project
                .dependencies
                .retain(|d| d.vfr_id.as_deref() != Some(&vfr_id));
            let removed = before - p.project.dependencies.len();
            if removed == 0 {
                fail(&format!("no cross-frontier dependency '{vfr_id}' found"));
            }
            repo::save_to_path(&frontier, &p).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "frontier.remove-dep",
                "frontier": frontier.display().to_string(),
                "vfr_id": vfr_id,
                "removed": removed,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize frontier.remove-dep")
                );
            } else {
                println!(
                    "{} removed cross-frontier dep {vfr_id}",
                    style::ok("frontier")
                );
            }
        }
        // `frontier refresh-deps` — for each declared cross-frontier dep, ask
        // the hub for the latest snapshot hash and (unless --dry-run) update
        // the local pin. Each dep ends up in exactly one of: refreshed,
        // unchanged, missing (404), unreachable (any other failure).
        FrontierAction::RefreshDeps {
            frontier,
            from,
            dry_run,
            json,
        } => {
            let mut p = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let cross_deps: Vec<String> = p
                .project
                .dependencies
                .iter()
                .filter_map(|d| d.vfr_id.clone())
                .collect();
            // Nothing to do: emit an empty summary and return early.
            if cross_deps.is_empty() {
                if json {
                    println!(
                        "{}",
                        serde_json::to_string_pretty(&json!({
                            "ok": true,
                            "command": "frontier.refresh-deps",
                            "frontier": frontier.display().to_string(),
                            "from": from,
                            "dry_run": dry_run,
                            "deps": [],
                            "summary": { "total": 0, "refreshed": 0, "unchanged": 0, "missing": 0, "unreachable": 0 },
                        })).expect("serialize")
                    );
                } else {
                    println!(
                        "{} no cross-frontier deps declared in {}",
                        style::ok("frontier"),
                        frontier.display()
                    );
                }
                return;
            }
            let client = reqwest::blocking::Client::builder()
                .timeout(std::time::Duration::from_secs(20))
                .build()
                .unwrap_or_else(|e| fail_return(&format!("http client init failed: {e}")));
            let base = from.trim_end_matches('/');
            // Minimal view of the hub's entry response; only the latest
            // snapshot hash is needed here.
            #[derive(serde::Deserialize)]
            struct HubEntry {
                latest_snapshot_hash: String,
            }
            let mut per_dep: Vec<serde_json::Value> = Vec::new();
            let (mut refreshed, mut unchanged, mut missing, mut unreachable) =
                (0u32, 0u32, 0u32, 0u32);
            for vfr in &cross_deps {
                let url = format!("{base}/entries/{vfr}");
                let resp = client.get(&url).send();
                let outcome = match resp {
                    // 404 → the hub no longer knows this frontier.
                    Ok(r) if r.status().as_u16() == 404 => {
                        missing += 1;
                        json!({ "vfr_id": vfr, "status": "missing", "url": url })
                    }
                    Ok(r) if !r.status().is_success() => {
                        unreachable += 1;
                        json!({ "vfr_id": vfr, "status": "unreachable", "http_status": r.status().as_u16() })
                    }
                    Err(e) => {
                        unreachable += 1;
                        json!({ "vfr_id": vfr, "status": "unreachable", "error": e.to_string() })
                    }
                    Ok(r) => match r.json::<HubEntry>() {
                        Err(e) => {
                            unreachable += 1;
                            json!({ "vfr_id": vfr, "status": "unreachable", "error": format!("invalid hub response: {e}") })
                        }
                        Ok(entry) => {
                            // Re-locate the dep by vfr_id (defensive: treat a
                            // vanished dep as unreachable rather than panicking).
                            match p
                                .project
                                .dependencies
                                .iter()
                                .position(|d| d.vfr_id.as_deref() == Some(vfr.as_str()))
                            {
                                None => {
                                    unreachable += 1;
                                    json!({ "vfr_id": vfr, "status": "unreachable", "error": "dep disappeared mid-scan" })
                                }
                                Some(idx) => {
                                    let local_pin =
                                        p.project.dependencies[idx].pinned_snapshot_hash.clone();
                                    let new_pin = entry.latest_snapshot_hash;
                                    if local_pin.as_deref() == Some(new_pin.as_str()) {
                                        unchanged += 1;
                                        json!({ "vfr_id": vfr, "status": "unchanged", "snapshot": new_pin })
                                    } else {
                                        // Only mutate the pin when not a dry run;
                                        // the report counts it as refreshed either way.
                                        if !dry_run {
                                            p.project.dependencies[idx].pinned_snapshot_hash =
                                                Some(new_pin.clone());
                                        }
                                        refreshed += 1;
                                        json!({
                                            "vfr_id": vfr,
                                            "status": "refreshed",
                                            "old_snapshot": local_pin,
                                            "new_snapshot": new_pin,
                                        })
                                    }
                                }
                            }
                        }
                    },
                };
                per_dep.push(outcome);
            }
            // Save only when something actually changed.
            if !dry_run && refreshed > 0 {
                repo::save_to_path(&frontier, &p).unwrap_or_else(|e| fail_return(&e));
            }
            let payload = json!({
                "ok": true,
                "command": "frontier.refresh-deps",
                "frontier": frontier.display().to_string(),
                "from": from,
                "dry_run": dry_run,
                "deps": per_dep,
                "summary": {
                    "total": cross_deps.len(),
                    "refreshed": refreshed,
                    "unchanged": unchanged,
                    "missing": missing,
                    "unreachable": unreachable,
                },
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize frontier.refresh-deps")
                );
            } else {
                let mode = if dry_run { " (dry-run)" } else { "" };
                println!(
                    "{} refresh-deps{mode} · {} total · {refreshed} refreshed · {unchanged} unchanged · {missing} missing · {unreachable} unreachable",
                    style::ok("frontier"),
                    cross_deps.len()
                );
                for d in &per_dep {
                    let vfr = d["vfr_id"].as_str().unwrap_or("?");
                    let status = d["status"].as_str().unwrap_or("?");
                    match status {
                        // Snapshot hashes are truncated to 16 chars for display.
                        "refreshed" => println!(
                            " {vfr} refreshed {} → {}",
                            d["old_snapshot"]
                                .as_str()
                                .unwrap_or("(none)")
                                .chars()
                                .take(16)
                                .collect::<String>(),
                            d["new_snapshot"]
                                .as_str()
                                .unwrap_or("?")
                                .chars()
                                .take(16)
                                .collect::<String>(),
                        ),
                        "unchanged" => println!(" {vfr} unchanged"),
                        "missing" => println!(" {vfr} missing on hub"),
                        _ => println!(" {vfr} unreachable"),
                    }
                }
            }
        }
        // `frontier diff` — delegated to the dedicated diff handler.
        FrontierAction::Diff {
            frontier,
            since,
            week,
            json,
        } => cmd_frontier_diff(&frontier, since.as_deref(), week.as_deref(), json),
    }
}
13764
/// Handle `vela repo <action>`: summarize (`status`) or health-check
/// (`doctor`) a frontier repo. Both delegate to `frontier_repo` and only
/// format the returned JSON payload for display.
fn cmd_repo(action: RepoAction) {
    match action {
        // `repo status` — print event/proposal counts and freshness fields
        // pulled out of the status payload; missing fields render as defaults.
        RepoAction::Status { frontier, json } => {
            let payload = frontier_repo::repo_status(&frontier).unwrap_or_else(|e| fail_return(&e));
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize repo status")
                );
            } else {
                // Fall back to Null so the .get() chains below degrade to
                // defaults instead of panicking on absent sections.
                let summary = payload.get("summary").unwrap_or(&Value::Null);
                let freshness = payload.get("freshness").unwrap_or(&Value::Null);
                println!("vela repo status");
                println!(" frontier: {}", frontier.display());
                println!(
                    " events: {}",
                    summary
                        .get("accepted_events")
                        .and_then(Value::as_u64)
                        .unwrap_or_default()
                );
                println!(
                    " open proposals: {}",
                    summary
                        .get("open_proposals")
                        .and_then(Value::as_u64)
                        .unwrap_or_default()
                );
                println!(
                    " state: {}",
                    freshness
                        .get("materialized_state")
                        .and_then(Value::as_str)
                        .unwrap_or("unknown")
                );
                println!(
                    " proof: {}",
                    freshness
                        .get("proof")
                        .and_then(Value::as_str)
                        .unwrap_or("unknown")
                );
            }
        }
        // `repo doctor` — print overall ok/needs-attention plus issue count.
        RepoAction::Doctor { frontier, json } => {
            let payload = frontier_repo::repo_doctor(&frontier).unwrap_or_else(|e| fail_return(&e));
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize repo doctor")
                );
            } else {
                let ok = payload.get("ok").and_then(Value::as_bool) == Some(true);
                let issues = payload
                    .get("issues")
                    .and_then(Value::as_array)
                    .map_or(0, Vec::len);
                println!("vela repo doctor");
                println!(" frontier: {}", frontier.display());
                println!(" status: {}", if ok { "ok" } else { "needs attention" });
                println!(" issues: {issues}");
            }
        }
    }
}
13832
13833fn cmd_proof_verify(frontier: &Path, json_output: bool) {
13834 let payload = frontier_repo::proof_verify(frontier).unwrap_or_else(|e| fail_return(&e));
13835 if json_output {
13836 println!(
13837 "{}",
13838 serde_json::to_string_pretty(&payload).expect("failed to serialize proof verify")
13839 );
13840 if payload.get("ok").and_then(Value::as_bool) != Some(true) {
13841 std::process::exit(1);
13842 }
13843 } else {
13844 let ok = payload.get("ok").and_then(Value::as_bool) == Some(true);
13845 println!("vela proof verify");
13846 println!(" frontier: {}", frontier.display());
13847 println!(" status: {}", if ok { "ok" } else { "failed" });
13848 if let Some(issues) = payload.get("issues").and_then(Value::as_array) {
13849 for issue in issues {
13850 if let Some(message) = issue.get("message").and_then(Value::as_str) {
13851 println!(" issue: {message}");
13852 }
13853 }
13854 }
13855 if !ok {
13856 std::process::exit(1);
13857 }
13858 }
13859}
13860
13861fn cmd_proof_explain(frontier: &Path) {
13862 let text = frontier_repo::proof_explain(frontier).unwrap_or_else(|e| fail_return(&e));
13863 print!("{text}");
13864}
13865
/// Handle `vela frontier diff`: summarize findings added/updated inside a
/// time window.
///
/// The window is either `--since <rfc3339>` .. now, or an ISO week given as
/// `--week YYYY-Www` (defaulting to the week containing "now"). Output is
/// JSON with `--json`, otherwise a styled text report.
fn cmd_frontier_diff(frontier: &Path, since: Option<&str>, week: Option<&str>, json: bool) {
    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));

    let now = chrono::Utc::now();
    // Resolve the reporting window. `--since` wins over `--week`; the week
    // label is kept only for the week-based path so the header can show it.
    let (window_start, window_end, week_label): (
        chrono::DateTime<chrono::Utc>,
        chrono::DateTime<chrono::Utc>,
        Option<String>,
    ) = if let Some(s) = since {
        let parsed = chrono::DateTime::parse_from_rfc3339(s)
            .map(|d| d.with_timezone(&chrono::Utc))
            .unwrap_or_else(|e| fail_return(&format!("invalid --since timestamp '{s}': {e}")));
        (parsed, now, None)
    } else {
        let key = week
            .map(str::to_owned)
            .unwrap_or_else(|| iso_week_key_for(now.date_naive()));
        let (start, end) = iso_week_bounds(&key)
            .unwrap_or_else(|e| fail_return(&format!("invalid --week '{key}': {e}")));
        (start, end, Some(key))
    };

    // Classify each finding: newly created in-window, merely updated
    // in-window, or a new contradiction; `cumulative` counts everything
    // created before the window's end regardless of the window's start.
    let mut added: Vec<&crate::bundle::FindingBundle> = Vec::new();
    let mut updated: Vec<&crate::bundle::FindingBundle> = Vec::new();
    let mut new_contradictions: Vec<&crate::bundle::FindingBundle> = Vec::new();
    let mut cumulative: usize = 0;

    for f in &project.findings {
        // Timestamps are stored as RFC 3339 strings; unparsable ones are
        // simply skipped for the corresponding classification.
        let created = chrono::DateTime::parse_from_rfc3339(&f.created)
            .map(|d| d.with_timezone(&chrono::Utc))
            .ok();
        let updated_ts = f
            .updated
            .as_deref()
            .and_then(|u| chrono::DateTime::parse_from_rfc3339(u).ok())
            .map(|d| d.with_timezone(&chrono::Utc));

        if let Some(c) = created
            && c < window_end
        {
            cumulative += 1;
        }

        if let Some(c) = created
            && c >= window_start
            && c < window_end
        {
            added.push(f);
            // A finding counts as a contradiction when it is flagged
            // contested or typed as a "tension" assertion.
            let is_tension = f.flags.contested || f.assertion.assertion_type == "tension";
            if is_tension {
                new_contradictions.push(f);
            }
            // `continue` ensures a finding added in-window is never also
            // reported under "updated".
            continue;
        }
        if let Some(u) = updated_ts
            && u >= window_start
            && u < window_end
        {
            updated.push(f);
        }
    }

    // Project a list of findings into compact JSON summaries for the payload.
    let summary_for = |list: &[&crate::bundle::FindingBundle]| -> Vec<serde_json::Value> {
        list.iter()
            .map(|f| {
                json!({
                    "id": f.id,
                    "assertion": f.assertion.text,
                    "evidence_type": f.evidence.evidence_type,
                    "confidence": f.confidence.score,
                    "doi": f.provenance.doi,
                    "pmid": f.provenance.pmid,
                })
            })
            .collect()
    };

    let payload = json!({
        "ok": true,
        "command": "frontier.diff",
        "frontier": frontier.display().to_string(),
        "frontier_id": project.frontier_id,
        "window": {
            "start": window_start.to_rfc3339_opts(chrono::SecondsFormat::Secs, true),
            "end": window_end.to_rfc3339_opts(chrono::SecondsFormat::Secs, true),
            "iso_week": week_label,
        },
        "totals": {
            "added": added.len(),
            "updated": updated.len(),
            "new_contradictions": new_contradictions.len(),
            "cumulative_claims": cumulative,
        },
        "added": summary_for(&added),
        "updated": summary_for(&updated),
        "new_contradictions": summary_for(&new_contradictions),
    });

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&payload).expect("failed to serialize frontier.diff")
        );
        return;
    }

    // Text report below: header, totals, then per-finding lines.
    let label = week_label
        .clone()
        .unwrap_or_else(|| format!("since {}", window_start.format("%Y-%m-%d %H:%M UTC")));
    println!();
    println!(
        " {}",
        format!("VELA · FRONTIER · DIFF · {label}")
            .to_uppercase()
            .dimmed()
    );
    println!(" {}", style::tick_row(60));
    println!(
        " range: {} → {}",
        window_start.format("%Y-%m-%d %H:%M"),
        window_end.format("%Y-%m-%d %H:%M")
    );
    println!(" added: {}", added.len());
    println!(" updated: {}", updated.len());
    println!(" contradictions: {}", new_contradictions.len());
    println!(" cumulative: {cumulative}");
    if added.is_empty() && updated.is_empty() {
        println!();
        println!(" (quiet window — no findings added or updated)");
    } else {
        println!();
        println!(" added:");
        for f in &added {
            println!(
                " · {} {}",
                f.id.dimmed(),
                truncate(&f.assertion.text, 88)
            );
        }
        if !updated.is_empty() {
            println!();
            println!(" updated:");
            for f in &updated {
                println!(
                    " · {} {}",
                    f.id.dimmed(),
                    truncate(&f.assertion.text, 88)
                );
            }
        }
    }
}
14029
/// Truncate `s` to at most `n` characters (Unicode scalar values, not
/// bytes). When truncation happens, the last character of the budget is
/// spent on an ellipsis, so the result never exceeds `n` characters.
fn truncate(s: &str, n: usize) -> String {
    if s.chars().count() <= n {
        return s.to_string();
    }
    // A zero budget cannot even hold the ellipsis; the old
    // `saturating_sub` path returned "…" (1 char), overflowing the
    // requested width.
    if n == 0 {
        return String::new();
    }
    let mut out: String = s.chars().take(n - 1).collect();
    out.push('…');
    out
}
14039
14040fn iso_week_key_for(d: chrono::NaiveDate) -> String {
14042 use chrono::Datelike;
14043 let iso = d.iso_week();
14044 format!("{:04}-W{:02}", iso.year(), iso.week())
14045}
14046
14047fn iso_week_bounds(
14050 key: &str,
14051) -> Result<(chrono::DateTime<chrono::Utc>, chrono::DateTime<chrono::Utc>), String> {
14052 let (year_str, week_str) = key
14053 .split_once("-W")
14054 .ok_or_else(|| format!("expected YYYY-Www, got '{key}'"))?;
14055 let year: i32 = year_str
14056 .parse()
14057 .map_err(|e| format!("bad year in '{key}': {e}"))?;
14058 let week: u32 = week_str
14059 .parse()
14060 .map_err(|e| format!("bad week in '{key}': {e}"))?;
14061 let monday = chrono::NaiveDate::from_isoywd_opt(year, week, chrono::Weekday::Mon)
14062 .ok_or_else(|| format!("invalid ISO week: {key}"))?;
14063 let next_monday = monday + chrono::Duration::days(7);
14064 let start = monday.and_hms_opt(0, 0, 0).expect("00:00 valid").and_utc();
14065 let end = next_monday
14066 .and_hms_opt(0, 0, 0)
14067 .expect("00:00 valid")
14068 .and_utc();
14069 Ok((start, end))
14070}
14071
/// Handle every `vela registry …` subcommand: list/publish/pull entries,
/// mirror them between hubs, cross-check hub witnesses, query dependents,
/// and rotate frontier owners. Governance-adjacent variants are delegated
/// to dedicated handlers; the remaining arms are implemented inline.
fn cmd_registry(action: RegistryAction) {
    use crate::registry;
    // Default local registry location: $HOME/.vela/registry/entries.json
    // (falls back to the current directory when $HOME is unset).
    let default_registry = || -> PathBuf {
        let home = std::env::var("HOME").unwrap_or_else(|_| ".".to_string());
        PathBuf::from(home)
            .join(".vela")
            .join("registry")
            .join("entries.json")
    };
    match action {
        // Delegated subcommand families.
        RegistryAction::Governance { action } => cmd_governance(action),
        RegistryAction::OwnerRotateGoverned { action } => cmd_owner_rotate_governed(action),
        RegistryAction::Checkpoint { action } => cmd_checkpoint(action),
        RegistryAction::VerifyAll { from, json } => cmd_verify_all(from, json),
        RegistryAction::HubFederation { action } => cmd_hub_federation(action),
        RegistryAction::VerifyChain {
            frontier,
            artifacts,
            json,
        } => cmd_verify_chain(frontier, artifacts, json),
        // Ask a hub which frontiers declare a dependency on `vfr_id`.
        RegistryAction::DependsOn { vfr_id, from, json } => {
            let base = from.trim_end_matches('/');
            let url = format!("{base}/entries/{vfr_id}/depends-on");
            let client = reqwest::blocking::Client::builder()
                .timeout(std::time::Duration::from_secs(30))
                .build()
                .unwrap_or_else(|e| fail_return(&format!("http client init: {e}")));
            let resp = client
                .get(&url)
                .send()
                .unwrap_or_else(|e| fail_return(&format!("GET {url}: {e}")));
            if !resp.status().is_success() {
                fail(&format!("GET {url}: HTTP {}", resp.status()));
            }
            let body: serde_json::Value = resp
                .json()
                .unwrap_or_else(|e| fail_return(&format!("parse response: {e}")));
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&body).expect("serialize")
                );
            } else {
                let dependents = body
                    .get("dependents")
                    .and_then(|v| v.as_array())
                    .cloned()
                    .unwrap_or_default();
                let count = dependents.len();
                println!(
                    "{} {count} {} on {vfr_id}",
                    style::ok("registry"),
                    // Singular/plural phrasing for the dependent count.
                    if count == 1 {
                        "frontier depends"
                    } else {
                        "frontiers depend"
                    },
                );
                for e in &dependents {
                    let v = e.get("vfr_id").and_then(|v| v.as_str()).unwrap_or("?");
                    let n = e.get("name").and_then(|v| v.as_str()).unwrap_or("?");
                    let o = e
                        .get("owner_actor_id")
                        .and_then(|v| v.as_str())
                        .unwrap_or("?");
                    println!(" {v} {n} ({o})");
                }
            }
        }
        // Copy a single registry entry from one hub to another verbatim,
        // preserving its signature; the destination reports whether it
        // already held an entry with that signature.
        RegistryAction::Mirror {
            vfr_id,
            from,
            to,
            json,
        } => {
            let src_base = from.trim_end_matches('/');
            let dst_base = to.trim_end_matches('/');
            let src_url = format!("{src_base}/entries/{vfr_id}");
            let dst_url = format!("{dst_base}/entries");
            let client = reqwest::blocking::Client::builder()
                .timeout(std::time::Duration::from_secs(30))
                .build()
                .unwrap_or_else(|e| fail_return(&format!("http client init: {e}")));

            // Fetch the source entry as raw JSON (not a typed struct) so
            // mirroring never re-serializes fields it doesn't know about.
            let entry: serde_json::Value = client
                .get(&src_url)
                .send()
                .unwrap_or_else(|e| fail_return(&format!("GET {src_url}: {e}")))
                .error_for_status()
                .unwrap_or_else(|e| fail_return(&format!("GET {src_url}: {e}")))
                .json()
                .unwrap_or_else(|e| fail_return(&format!("parse {src_url}: {e}")));

            let resp = client
                .post(&dst_url)
                .header("content-type", "application/json")
                .body(
                    serde_json::to_vec(&entry)
                        .unwrap_or_else(|e| fail_return(&format!("serialize: {e}"))),
                )
                .send()
                .unwrap_or_else(|e| fail_return(&format!("POST {dst_url}: {e}")));
            let status = resp.status();
            if !status.is_success() {
                // Include a truncated response body to keep the error line short.
                let body = resp.text().unwrap_or_default();
                fail(&format!(
                    "POST {dst_url}: HTTP {status}: {}",
                    body.chars().take(300).collect::<String>()
                ));
            }
            let body: serde_json::Value = resp
                .json()
                .unwrap_or_else(|e| fail_return(&format!("parse POST response: {e}")));
            let duplicate = body
                .get("duplicate")
                .and_then(serde_json::Value::as_bool)
                .unwrap_or(false);
            let payload = json!({
                "ok": true,
                "command": "registry.mirror",
                "vfr_id": vfr_id,
                "from": src_base,
                "to": dst_base,
                "duplicate_on_destination": duplicate,
                "destination_response": body,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload).expect("serialize")
                );
            } else {
                println!(
                    "{} mirrored {vfr_id} from {src_base} → {dst_base}{}",
                    style::ok("registry"),
                    if duplicate {
                        " (duplicate; signature already known)"
                    } else {
                        " (fresh insert)"
                    }
                );
            }
        }
        // Fetch the same entry from several hubs and compare canonical
        // hashes to detect divergence (a lightweight witness scheme).
        RegistryAction::WitnessCheck { vfr_id, hubs, json } => {
            if hubs.len() < 2 {
                fail("--hubs requires at least two hub URLs (comma-separated).");
            }
            let client = reqwest::blocking::Client::builder()
                .timeout(std::time::Duration::from_secs(30))
                .build()
                .unwrap_or_else(|e| fail_return(&format!("http client init: {e}")));

            // Per-hub outcome; optional fields are omitted from JSON output.
            #[derive(serde::Serialize)]
            struct HubResponse {
                hub: String,
                status: String,
                #[serde(skip_serializing_if = "Option::is_none")]
                canonical_hash: Option<String>,
                #[serde(skip_serializing_if = "Option::is_none")]
                note: Option<String>,
            }

            let mut responses: Vec<HubResponse> = Vec::new();
            // BTreeMap keeps hash buckets in a deterministic order.
            let mut hash_counts: std::collections::BTreeMap<String, usize> =
                std::collections::BTreeMap::new();

            for hub_url in &hubs {
                let base = hub_url.trim_end_matches('/');
                let url = format!("{base}/entries/{vfr_id}");
                match client.get(&url).send() {
                    Ok(resp) if resp.status().is_success() => {
                        match resp.json::<serde_json::Value>() {
                            Ok(entry) => {
                                // Canonicalize before hashing so formatting
                                // differences between hubs don't split consensus.
                                let canonical = crate::canonical::to_canonical_bytes(&entry)
                                    .unwrap_or_else(|e| fail_return(&format!("canonicalize: {e}")));
                                let hash =
                                    format!("sha256:{}", hex::encode(Sha256::digest(&canonical)));
                                *hash_counts.entry(hash.clone()).or_insert(0) += 1;
                                responses.push(HubResponse {
                                    hub: base.to_string(),
                                    status: "ok".to_string(),
                                    canonical_hash: Some(hash),
                                    note: None,
                                });
                            }
                            Err(e) => responses.push(HubResponse {
                                hub: base.to_string(),
                                status: "parse_error".to_string(),
                                canonical_hash: None,
                                note: Some(format!("parse: {e}")),
                            }),
                        }
                    }
                    Ok(resp) => responses.push(HubResponse {
                        hub: base.to_string(),
                        status: "http_error".to_string(),
                        canonical_hash: None,
                        note: Some(format!("HTTP {}", resp.status())),
                    }),
                    Err(e) => responses.push(HubResponse {
                        hub: base.to_string(),
                        status: "unreachable".to_string(),
                        canonical_hash: None,
                        note: Some(format!("{e}")),
                    }),
                }
            }

            // Consensus classification: fewer than two successful fetches is
            // "insufficient"; one distinct hash is "unanimous"; a strict
            // majority (> half of resolved) is "majority"; otherwise "split".
            let resolved_count = responses
                .iter()
                .filter(|r| r.canonical_hash.is_some())
                .count();
            let consensus = if resolved_count < 2 {
                "insufficient".to_string()
            } else if hash_counts.len() == 1 {
                "unanimous".to_string()
            } else {
                let max = hash_counts.values().copied().max().unwrap_or(0);
                if max * 2 > resolved_count {
                    "majority".to_string()
                } else {
                    "split".to_string()
                }
            };

            let payload = json!({
                "ok": consensus == "unanimous" || consensus == "majority",
                "command": "registry.witness-check",
                "vfr_id": vfr_id,
                "hubs_queried": hubs.len(),
                "hubs_resolved": resolved_count,
                "distinct_canonical_hashes": hash_counts.len(),
                "consensus": consensus,
                "responses": responses,
            });

            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize witness-check")
                );
            } else {
                println!(
                    "{} witness-check {} across {} hub(s): {}",
                    style::ok("registry"),
                    vfr_id,
                    hubs.len(),
                    consensus
                );
                for r in &responses {
                    // Show the short hash when resolved, otherwise the note.
                    let hash_display = r
                        .canonical_hash
                        .as_deref()
                        .map(|h| h.chars().take(16).collect::<String>())
                        .unwrap_or_else(|| r.note.clone().unwrap_or_default());
                    println!(" {} {} {hash_display}", r.status, r.hub);
                }
            }
            // A split witness set is a hard failure for scripted callers.
            if consensus == "split" {
                std::process::exit(1);
            }
        }
        // List registry entries from a hub URL, a local path, or the default
        // local registry.
        RegistryAction::List { from, json } => {
            let (label, registry_data) = match &from {
                Some(loc) if loc.starts_with("http") => (
                    loc.clone(),
                    registry::load_any(loc).unwrap_or_else(|e| fail_return(&e)),
                ),
                Some(loc) => {
                    let p = registry::resolve_local(loc).unwrap_or_else(|e| fail_return(&e));
                    (
                        p.display().to_string(),
                        registry::load_local(&p).unwrap_or_else(|e| fail_return(&e)),
                    )
                }
                None => {
                    let p = default_registry();
                    (
                        p.display().to_string(),
                        registry::load_local(&p).unwrap_or_else(|e| fail_return(&e)),
                    )
                }
            };
            let r = registry_data;
            let path_label = label;
            if json {
                let payload = json!({
                    "ok": true,
                    "command": "registry.list",
                    "registry": path_label,
                    "entry_count": r.entries.len(),
                    "entries": r.entries,
                });
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize registry.list")
                );
            } else {
                println!();
                println!(
                    " {}",
                    format!("VELA · REGISTRY · LIST · {}", path_label)
                        .to_uppercase()
                        .dimmed()
                );
                println!(" {}", style::tick_row(60));
                if r.entries.is_empty() {
                    println!(" (registry is empty)");
                } else {
                    for entry in &r.entries {
                        println!(
                            " {} {} ({}) by {} published {}",
                            entry.vfr_id,
                            entry.name,
                            entry.network_locator,
                            entry.owner_actor_id,
                            entry.signed_publish_at
                        );
                    }
                }
            }
        }
        // Sign the frontier's current snapshot/event-log hashes as a registry
        // entry and publish it to a remote hub or a local registry file.
        RegistryAction::Publish {
            frontier,
            owner,
            key,
            locator,
            to,
            json,
        } => {
            let key_hex = std::fs::read_to_string(&key)
                .map(|s| s.trim().to_string())
                .unwrap_or_else(|e| fail_return(&format!("read key {}: {e}", key.display())));
            let signing_key = parse_signing_key(&key_hex);
            // Pubkey derived from the provided private key; compared against
            // the actor record below to catch key/owner mismatches.
            let derived = hex::encode(signing_key.verifying_key().to_bytes());

            let mut frontier_data =
                repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));

            let pubkey = match frontier_data.actors.iter().find(|actor| actor.id == owner) {
                Some(actor) => actor.public_key.clone(),
                None => {
                    // Unknown owner: auto-register an actor record using the
                    // derived pubkey and persist the frontier before signing.
                    eprintln!(
                        " vela registry publish · auto-registering actor {owner} (derived pubkey {})",
                        &derived[..16]
                    );
                    frontier_data.actors.push(sign::ActorRecord {
                        id: owner.clone(),
                        public_key: derived.clone(),
                        algorithm: "ed25519".to_string(),
                        created_at: chrono::Utc::now().to_rfc3339(),
                        tier: None,
                        orcid: None,
                        access_clearance: None,
                        revoked_at: None,
                        revoked_reason: None,
                    });
                    repo::save_to_path(&frontier, &frontier_data)
                        .unwrap_or_else(|e| fail_return(&format!("save actor: {e}")));
                    derived.clone()
                }
            };

            let snapshot_hash = events::snapshot_hash(&frontier_data);
            let event_log_hash = events::event_log_hash(&frontier_data.events);
            let vfr_id = frontier_data.frontier_id();
            let name = frontier_data.project.name.clone();

            if derived != pubkey {
                fail(&format!(
                    "private key does not match registered pubkey for owner '{owner}'"
                ));
            }

            // Remote destinations get a default snapshot locator derived from
            // the hub URL; local publishes must supply --locator explicitly.
            let to_is_remote = matches!(
                to.as_deref(),
                Some(loc) if loc.starts_with("http://") || loc.starts_with("https://")
            );
            let resolved_locator = match locator {
                Some(l) => l,
                None => {
                    if to_is_remote {
                        let hub = to.as_deref().unwrap().trim_end_matches('/');
                        let hub_root = hub.trim_end_matches("/entries");
                        format!("{hub_root}/entries/{vfr_id}/snapshot")
                    } else {
                        fail_return(
                            "--locator is required for local publishes; pass e.g. \
                             --locator file:///path/to/frontier.json or an HTTPS URL.",
                        )
                    }
                }
            };

            // Build the entry with an empty signature, then sign it in place.
            let mut entry = registry::RegistryEntry {
                schema: registry::ENTRY_SCHEMA.to_string(),
                vfr_id: vfr_id.clone(),
                name: name.clone(),
                owner_actor_id: owner.clone(),
                owner_pubkey: pubkey,
                latest_snapshot_hash: snapshot_hash,
                latest_event_log_hash: event_log_hash,
                network_locator: resolved_locator,
                signed_publish_at: chrono::Utc::now().to_rfc3339(),
                signature: String::new(),
            };
            entry.signature =
                registry::sign_entry(&entry, &signing_key).unwrap_or_else(|e| fail_return(&e));

            let (registry_label, duplicate) = if to_is_remote {
                let hub_url = to.clone().unwrap();
                let resp = registry::publish_remote(&entry, &hub_url, Some(&frontier_data))
                    .unwrap_or_else(|e| fail_return(&e));
                (hub_url, resp.duplicate)
            } else {
                let registry_path = match &to {
                    Some(loc) => registry::resolve_local(loc).unwrap_or_else(|e| fail_return(&e)),
                    None => default_registry(),
                };
                registry::publish_entry(&registry_path, entry.clone())
                    .unwrap_or_else(|e| fail_return(&e));
                (registry_path.display().to_string(), false)
            };

            let payload = json!({
                "ok": true,
                "command": "registry.publish",
                "registry": registry_label,
                "vfr_id": vfr_id,
                "name": name,
                "owner": owner,
                "snapshot_hash": entry.latest_snapshot_hash,
                "event_log_hash": entry.latest_event_log_hash,
                "signed_publish_at": entry.signed_publish_at,
                "signature": entry.signature,
                "duplicate": duplicate,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize registry.publish")
                );
            } else {
                let dup_suffix = if duplicate { " (duplicate, no-op)" } else { "" };
                println!(
                    "{} published {vfr_id} → {}{}",
                    style::ok("registry"),
                    registry_label,
                    dup_suffix
                );
                println!(" snapshot: {}", entry.latest_snapshot_hash);
                println!(" event_log: {}", entry.latest_event_log_hash);
                println!(" signature: {}…", &entry.signature[..16]);
            }
        }
        // Revoke the current owner's actor record, register (or verify) the
        // new owner, and republish a registry entry signed by the new key.
        RegistryAction::OwnerRotate {
            frontier,
            current_owner,
            new_owner,
            new_key,
            reason,
            locator,
            to,
            json,
        } => {
            if reason.trim().is_empty() {
                fail("--reason must be non-empty (record why the rotation is happening).");
            }
            if current_owner == new_owner {
                fail(
                    "--current-owner and --new-owner must differ; rotation registers a fresh owner actor record.",
                );
            }

            let key_hex = std::fs::read_to_string(&new_key)
                .map(|s| s.trim().to_string())
                .unwrap_or_else(|e| {
                    fail_return(&format!("read new key {}: {e}", new_key.display()))
                });
            let signing_key = parse_signing_key(&key_hex);
            let derived = hex::encode(signing_key.verifying_key().to_bytes());

            let mut frontier_data =
                repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));

            // Mark the outgoing owner revoked (refusing double-revocation)
            // and remember a pubkey prefix for the summary output.
            let now = chrono::Utc::now().to_rfc3339();
            let mut retired_pubkey_prefix: Option<String> = None;
            let mut found_current = false;
            for actor in frontier_data.actors.iter_mut() {
                if actor.id == current_owner {
                    if actor.revoked_at.is_some() {
                        fail(&format!(
                            "Refusing to rotate: actor '{current_owner}' is already revoked at {}.",
                            actor.revoked_at.as_deref().unwrap_or("?")
                        ));
                    }
                    actor.revoked_at = Some(now.clone());
                    actor.revoked_reason = Some(reason.clone());
                    // NOTE(review): slices the first 16 chars of the pubkey;
                    // assumes registered pubkeys are ≥16 hex chars — confirm.
                    retired_pubkey_prefix = Some(actor.public_key[..16].to_string());
                    found_current = true;
                }
            }
            if !found_current {
                fail(&format!(
                    "Cannot rotate: actor '{current_owner}' is not registered in this frontier."
                ));
            }

            // The new owner must either match the supplied key (if already
            // registered) or be freshly registered with the derived pubkey.
            let new_pubkey = match frontier_data.actors.iter().find(|a| a.id == new_owner) {
                Some(existing) => {
                    if existing.revoked_at.is_some() {
                        fail(&format!(
                            "Refusing to rotate: target actor '{new_owner}' is already revoked at {}.",
                            existing.revoked_at.as_deref().unwrap_or("?")
                        ));
                    }
                    if existing.public_key != derived {
                        fail(&format!(
                            "private key does not match registered pubkey for new owner '{new_owner}'"
                        ));
                    }
                    existing.public_key.clone()
                }
                None => {
                    frontier_data.actors.push(sign::ActorRecord {
                        id: new_owner.clone(),
                        public_key: derived.clone(),
                        algorithm: "ed25519".to_string(),
                        created_at: now.clone(),
                        tier: None,
                        orcid: None,
                        access_clearance: None,
                        revoked_at: None,
                        revoked_reason: None,
                    });
                    derived.clone()
                }
            };

            // Persist the rotation before hashing so the published hashes
            // reflect the rotated actor set.
            repo::save_to_path(&frontier, &frontier_data)
                .unwrap_or_else(|e| fail_return(&format!("save rotation: {e}")));

            let snapshot_hash = events::snapshot_hash(&frontier_data);
            let event_log_hash = events::event_log_hash(&frontier_data.events);
            let vfr_id = frontier_data.frontier_id();
            let name = frontier_data.project.name.clone();

            // Same locator-resolution rules as `registry publish` above.
            let to_is_remote = matches!(
                to.as_deref(),
                Some(loc) if loc.starts_with("http://") || loc.starts_with("https://")
            );
            let resolved_locator = match locator {
                Some(l) => l,
                None => {
                    if to_is_remote {
                        let hub = to.as_deref().unwrap().trim_end_matches('/');
                        let hub_root = hub.trim_end_matches("/entries");
                        format!("{hub_root}/entries/{vfr_id}/snapshot")
                    } else {
                        fail_return(
                            "--locator is required for local publishes; pass e.g. \
                             --locator file:///path/to/frontier.json or an HTTPS URL.",
                        )
                    }
                }
            };

            let mut entry = registry::RegistryEntry {
                schema: registry::ENTRY_SCHEMA.to_string(),
                vfr_id: vfr_id.clone(),
                name: name.clone(),
                owner_actor_id: new_owner.clone(),
                owner_pubkey: new_pubkey,
                latest_snapshot_hash: snapshot_hash,
                latest_event_log_hash: event_log_hash,
                network_locator: resolved_locator,
                signed_publish_at: chrono::Utc::now().to_rfc3339(),
                signature: String::new(),
            };
            entry.signature =
                registry::sign_entry(&entry, &signing_key).unwrap_or_else(|e| fail_return(&e));

            let (registry_label, duplicate) = if to_is_remote {
                let hub_url = to.clone().unwrap();
                let resp = registry::publish_remote(&entry, &hub_url, Some(&frontier_data))
                    .unwrap_or_else(|e| fail_return(&e));
                (hub_url, resp.duplicate)
            } else {
                let registry_path = match &to {
                    Some(loc) => registry::resolve_local(loc).unwrap_or_else(|e| fail_return(&e)),
                    None => default_registry(),
                };
                registry::publish_entry(&registry_path, entry.clone())
                    .unwrap_or_else(|e| fail_return(&e));
                (registry_path.display().to_string(), false)
            };

            let payload = json!({
                "ok": true,
                "command": "registry.owner_rotate",
                "registry": registry_label,
                "vfr_id": vfr_id,
                "name": name,
                "retired_owner": current_owner,
                "retired_pubkey_prefix": retired_pubkey_prefix,
                "new_owner": new_owner,
                "new_pubkey": derived,
                "revoked_at": now,
                "reason": reason,
                "snapshot_hash": entry.latest_snapshot_hash,
                "event_log_hash": entry.latest_event_log_hash,
                "signed_publish_at": entry.signed_publish_at,
                "signature": entry.signature,
                "duplicate": duplicate,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize registry.owner_rotate")
                );
            } else {
                println!(
                    "{} owner rotated: {} (pubkey {}...) retired, {} (pubkey {}...) active",
                    style::ok("registry"),
                    current_owner,
                    retired_pubkey_prefix.as_deref().unwrap_or("?"),
                    new_owner,
                    &derived[..16]
                );
                println!(" revoked_at: {now}");
                println!(" reason: {reason}");
                let dup_suffix = if duplicate { " (duplicate, no-op)" } else { "" };
                println!(" registry: {vfr_id} → {registry_label}{dup_suffix}");
                println!(" signature: {}…", &entry.signature[..16]);
            }
        }
        // Download a frontier named by a registry entry, verifying it against
        // the pinned hashes; `--transitive` also pulls its dependencies.
        RegistryAction::Pull {
            vfr_id,
            from,
            out,
            transitive,
            depth,
            json,
        } => {
            let (registry_label, registry_data) = match &from {
                Some(loc) if loc.starts_with("http") => (
                    loc.clone(),
                    registry::load_any(loc).unwrap_or_else(|e| fail_return(&e)),
                ),
                Some(loc) => {
                    let p = registry::resolve_local(loc).unwrap_or_else(|e| fail_return(&e));
                    (
                        p.display().to_string(),
                        registry::load_local(&p).unwrap_or_else(|e| fail_return(&e)),
                    )
                }
                None => {
                    let p = default_registry();
                    (
                        p.display().to_string(),
                        registry::load_local(&p).unwrap_or_else(|e| fail_return(&e)),
                    )
                }
            };
            let entry = registry::find_latest(&registry_data, &vfr_id)
                .unwrap_or_else(|| fail_return(&format!("{vfr_id} not found in registry")));

            if transitive {
                let result = registry::pull_transitive(&registry_data, &vfr_id, &out, depth)
                    .unwrap_or_else(|e| fail_return(&format!("transitive pull failed: {e}")));

                // Flatten dep-name → local path into a JSON object.
                let dep_paths_json: serde_json::Value = serde_json::Value::Object(
                    result
                        .deps
                        .iter()
                        .map(|(k, v)| (k.clone(), serde_json::json!(v.display().to_string())))
                        .collect(),
                );
                let payload = json!({
                    "ok": true,
                    "command": "registry.pull",
                    "registry": registry_label,
                    "vfr_id": vfr_id,
                    "transitive": true,
                    "depth": depth,
                    "out_dir": out.display().to_string(),
                    "primary": result.primary_path.display().to_string(),
                    "verified": result.verified,
                    "deps": dep_paths_json,
                });
                if json {
                    println!(
                        "{}",
                        serde_json::to_string_pretty(&payload)
                            .expect("failed to serialize registry.pull")
                    );
                } else {
                    println!(
                        "{} pulled {vfr_id} (transitive) → {}",
                        style::ok("registry"),
                        out.display()
                    );
                    println!(" verified {} frontier(s):", result.verified.len());
                    for v in &result.verified {
                        println!(" · {v}");
                    }
                    println!(" every cross-frontier dependency's pinned snapshot hash matched");
                }
                return;
            }

            // Non-transitive: fetch, then verify; a failed verification
            // removes the partially-written output file.
            registry::fetch_frontier_to_prefer_event_hub(&entry, from.as_deref(), &out)
                .unwrap_or_else(|e| fail_return(&format!("fetch frontier: {e}")));
            registry::verify_pull(&entry, &out).unwrap_or_else(|e| {
                let _ = std::fs::remove_file(&out);
                fail_return(&format!("pull verification failed: {e}"))
            });

            let payload = json!({
                "ok": true,
                "command": "registry.pull",
                "registry": registry_label,
                "vfr_id": vfr_id,
                "out": out.display().to_string(),
                "snapshot_hash": entry.latest_snapshot_hash,
                "event_log_hash": entry.latest_event_log_hash,
                "verified": true,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize registry.pull")
                );
            } else {
                println!(
                    "{} pulled {vfr_id} → {}",
                    style::ok("registry"),
                    out.display()
                );
                println!(" verified snapshot+event_log hashes match registry; signature ok");
            }
        }
    }
}
14883
/// Drives the three-phase governed owner-rotation workflow for a frontier:
/// `Propose` drafts and writes a rotation proposal, `Attest` collects quorum
/// signatures into a bundle, and `Apply` verifies quorum, retires the old
/// owner, registers the new one, republishes the registry entry under the new
/// key, and appends the transition to the owner-epoch chain.
fn cmd_owner_rotate_governed(action: OwnerRotateGovernedAction) {
    use crate::governance::{
        AttestationEntry, GovernancePolicy, OwnerRotateAttestationBundle, OwnerRotateProposal,
        ProposalDraft, verify_quorum,
    };
    use crate::registry;
    use ed25519_dalek::Signer;

    match action {
        OwnerRotateGovernedAction::Propose {
            frontier,
            old_owner,
            new_owner,
            new_pubkey_hex,
            target_epoch,
            previous_entry_hash,
            policy,
            reason,
            ttl_hours,
            out,
            json,
        } => {
            // Basic argument validation before touching any files.
            if target_epoch == 0 {
                fail("--target-epoch must be >= 1; the first governed rotation produces epoch 1.");
            }
            if new_pubkey_hex.len() != 64 || hex::decode(&new_pubkey_hex).is_err() {
                fail("--new-pubkey-hex must be 64 hex chars (32-byte Ed25519 pubkey).");
            }
            let frontier_data = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let frontier_id = frontier_data.frontier_id().to_string();

            // The governance policy is content-addressed; reject a policy file
            // whose body does not hash to its declared policy_id.
            let policy_raw = std::fs::read_to_string(&policy)
                .unwrap_or_else(|e| fail_return(&format!("read policy: {e}")));
            let policy_obj: GovernancePolicy = serde_json::from_str(&policy_raw)
                .unwrap_or_else(|e| fail_return(&format!("parse policy: {e}")));
            policy_obj
                .verify_content_address()
                .unwrap_or_else(|e| fail_return(&e));

            // The old owner must already be a registered actor; its pubkey is
            // embedded in the proposal so attesters sign over it.
            let old_actor = frontier_data
                .actors
                .iter()
                .find(|a| a.id == old_owner)
                .unwrap_or_else(|| {
                    fail_return(&format!(
                        "old owner `{old_owner}` is not registered in the frontier"
                    ))
                });
            let old_pubkey = old_actor.public_key.clone();

            let now = chrono::Utc::now();
            let expires = now + chrono::Duration::hours(i64::from(ttl_hours));

            // Random nonce makes each proposal preimage unique even for
            // otherwise identical rotation parameters.
            let draft = ProposalDraft {
                frontier_id,
                old_owner_actor_id: old_owner,
                old_owner_pubkey: old_pubkey,
                new_owner_actor_id: new_owner,
                new_owner_pubkey: new_pubkey_hex,
                owner_epoch: target_epoch,
                previous_registry_entry_hash: previous_entry_hash,
                governance_policy_id: policy_obj.policy_id.clone(),
                reason,
                created_at: now.to_rfc3339(),
                expires_at: expires.to_rfc3339(),
                nonce: hex::encode(rand::random::<[u8; 8]>()),
            };
            let proposal =
                OwnerRotateProposal::from_draft(draft).unwrap_or_else(|e| fail_return(&e));

            let body =
                serde_json::to_string_pretty(&proposal).expect("serialize owner-rotate proposal");
            std::fs::write(&out, format!("{body}\n"))
                .unwrap_or_else(|e| fail_return(&format!("write {}: {e}", out.display())));

            let preimage_hash = proposal.preimage_hash().unwrap_or_else(|e| fail_return(&e));
            if json {
                let payload = json!({
                    "ok": true,
                    "command": "registry.owner-rotate-governed.propose",
                    "proposal_id": proposal.proposal_id,
                    "frontier_id": proposal.frontier_id,
                    "target_epoch": proposal.owner_epoch,
                    "governance_policy_id": proposal.governance_policy_id,
                    "proposal_preimage_hash": preimage_hash,
                    "expires_at": proposal.expires_at,
                    "out": out.display().to_string(),
                });
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload).expect("serialize propose summary")
                );
            } else {
                println!(
                    "{} proposed owner rotation: {} (target epoch {})",
                    style::ok("registry"),
                    proposal.proposal_id,
                    proposal.owner_epoch
                );
                println!(" preimage hash: {preimage_hash}");
                println!(" policy: {}", proposal.governance_policy_id);
                println!(" expires_at: {}", proposal.expires_at);
                println!(" out: {}", out.display());
            }
        }
        OwnerRotateGovernedAction::Attest {
            proposal,
            attester_id,
            key,
            bundle,
            json,
        } => {
            // Re-derive the proposal id before signing so an attester never
            // endorses a tampered proposal file.
            let proposal_raw = std::fs::read_to_string(&proposal)
                .unwrap_or_else(|e| fail_return(&format!("read proposal: {e}")));
            let proposal_obj: OwnerRotateProposal = serde_json::from_str(&proposal_raw)
                .unwrap_or_else(|e| fail_return(&format!("parse proposal: {e}")));
            let derived = proposal_obj.derive_id().unwrap_or_else(|e| fail_return(&e));
            if derived != proposal_obj.proposal_id {
                fail(&format!(
                    "proposal id mismatch: stored `{}`, derived `{}`",
                    proposal_obj.proposal_id, derived
                ));
            }

            // Load the attester's signing key (hex-encoded Ed25519 secret).
            let key_hex = std::fs::read_to_string(&key)
                .map(|s| s.trim().to_string())
                .unwrap_or_else(|e| fail_return(&format!("read key: {e}")));
            let sk = parse_signing_key(&key_hex);
            let pubkey_hex = hex::encode(sk.verifying_key().to_bytes());

            // Sign the canonical proposal preimage bytes.
            let preimage = proposal_obj
                .preimage_bytes()
                .unwrap_or_else(|e| fail_return(&e));
            let sig = sk.sign(&preimage);

            let entry = AttestationEntry {
                attester_id: attester_id.clone(),
                attester_pubkey: pubkey_hex.clone(),
                judgment: "approve_owner_rotate".to_string(),
                signature: hex::encode(sig.to_bytes()),
                signed_at: chrono::Utc::now().to_rfc3339(),
            };

            // Merge with an existing bundle on disk, if one is present.
            let existing: Option<OwnerRotateAttestationBundle> = if bundle.exists() {
                let raw = std::fs::read_to_string(&bundle)
                    .unwrap_or_else(|e| fail_return(&format!("read bundle: {e}")));
                Some(
                    serde_json::from_str(&raw)
                        .unwrap_or_else(|e| fail_return(&format!("parse bundle: {e}"))),
                )
            } else {
                None
            };

            // Replace any prior attestation by this attester (re-attesting is
            // idempotent per attester id) and append the new one.
            let mut attestations: Vec<AttestationEntry> = existing
                .as_ref()
                .map(|b| b.attestations.clone())
                .unwrap_or_default();
            attestations.retain(|a| a.attester_id != attester_id);
            attestations.push(entry);

            let new_bundle = OwnerRotateAttestationBundle::new(&proposal_obj, attestations)
                .unwrap_or_else(|e| fail_return(&e));

            let body =
                serde_json::to_string_pretty(&new_bundle).expect("serialize attestation bundle");
            std::fs::write(&bundle, format!("{body}\n"))
                .unwrap_or_else(|e| fail_return(&format!("write {}: {e}", bundle.display())));

            if json {
                let payload = json!({
                    "ok": true,
                    "command": "registry.owner-rotate-governed.attest",
                    "bundle_id": new_bundle.bundle_id,
                    "proposal_id": new_bundle.proposal_id,
                    "attester_id": attester_id,
                    "attester_pubkey": pubkey_hex,
                    "attestation_count": new_bundle.attestations.len(),
                    "bundle": bundle.display().to_string(),
                });
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload).expect("serialize attest summary")
                );
            } else {
                println!(
                    "{} attested {} ({} attestation(s) total)",
                    style::ok("attest"),
                    new_bundle.bundle_id,
                    new_bundle.attestations.len()
                );
                println!(" attester: {attester_id}");
                // pubkey_hex is hex of a 32-byte key (64 chars), so [..16] is safe.
                println!(" pubkey: {}...", &pubkey_hex[..16]);
                println!(" bundle: {}", bundle.display());
            }
        }
        OwnerRotateGovernedAction::Apply {
            frontier,
            proposal,
            bundle,
            policy,
            new_key,
            locator,
            to,
            json,
        } => {
            // Load and validate all three governance artifacts.
            let proposal_obj: OwnerRotateProposal = serde_json::from_str(
                &std::fs::read_to_string(&proposal)
                    .unwrap_or_else(|e| fail_return(&format!("read proposal: {e}"))),
            )
            .unwrap_or_else(|e| fail_return(&format!("parse proposal: {e}")));
            let bundle_obj: OwnerRotateAttestationBundle = serde_json::from_str(
                &std::fs::read_to_string(&bundle)
                    .unwrap_or_else(|e| fail_return(&format!("read bundle: {e}"))),
            )
            .unwrap_or_else(|e| fail_return(&format!("parse bundle: {e}")));
            let policy_obj: GovernancePolicy = serde_json::from_str(
                &std::fs::read_to_string(&policy)
                    .unwrap_or_else(|e| fail_return(&format!("read policy: {e}"))),
            )
            .unwrap_or_else(|e| fail_return(&format!("parse policy: {e}")));
            policy_obj
                .verify_content_address()
                .unwrap_or_else(|e| fail_return(&e));

            // Snapshot current revocations so revoked actors cannot count
            // toward quorum.
            let mut frontier_data =
                repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let revocation = FrontierRevocation {
                map: frontier_data
                    .actors
                    .iter()
                    .filter_map(|a| a.revoked_at.as_ref().map(|r| (a.id.clone(), r.clone())))
                    .collect(),
            };

            // Quorum must pass before any mutation happens.
            let now = chrono::Utc::now().to_rfc3339();
            let report = verify_quorum(&proposal_obj, &bundle_obj, &policy_obj, &revocation, &now)
                .unwrap_or_else(|e| fail_return(&format!("quorum verification failed: {e}")));

            // The supplied new key must derive exactly the pubkey that the
            // attesters signed over.
            let key_hex = std::fs::read_to_string(&new_key)
                .map(|s| s.trim().to_string())
                .unwrap_or_else(|e| fail_return(&format!("read new key: {e}")));
            let sk = parse_signing_key(&key_hex);
            let derived_pubkey = hex::encode(sk.verifying_key().to_bytes());
            if derived_pubkey != proposal_obj.new_owner_pubkey {
                fail(&format!(
                    "--new-key derives to pubkey `{}`, but proposal declares new_owner_pubkey `{}`",
                    derived_pubkey, proposal_obj.new_owner_pubkey
                ));
            }

            // Retire the old owner in place; double-revocation is refused.
            // NOTE(review): if the old owner id is absent from the actor list,
            // this loop is a silent no-op — presumably verify_quorum catches
            // that case upstream; confirm.
            let mut retired_pubkey_prefix: Option<String> = None;
            for actor in frontier_data.actors.iter_mut() {
                if actor.id == proposal_obj.old_owner_actor_id {
                    if actor.revoked_at.is_some() {
                        fail(&format!(
                            "refusing to apply: old owner `{}` is already revoked at {}",
                            actor.id,
                            actor.revoked_at.as_deref().unwrap_or("?")
                        ));
                    }
                    actor.revoked_at = Some(now.clone());
                    actor.revoked_reason = Some(proposal_obj.reason.clone());
                    retired_pubkey_prefix = Some(actor.public_key[..16].to_string());
                }
            }

            // Register the new owner as an actor unless already present.
            if !frontier_data
                .actors
                .iter()
                .any(|a| a.id == proposal_obj.new_owner_actor_id)
            {
                frontier_data.actors.push(sign::ActorRecord {
                    id: proposal_obj.new_owner_actor_id.clone(),
                    public_key: proposal_obj.new_owner_pubkey.clone(),
                    algorithm: "ed25519".to_string(),
                    created_at: now.clone(),
                    tier: None,
                    orcid: None,
                    access_clearance: None,
                    revoked_at: None,
                    revoked_reason: None,
                });
            }

            repo::save_to_path(&frontier, &frontier_data)
                .unwrap_or_else(|e| fail_return(&format!("save rotation: {e}")));

            // Hashes are computed over the already-rotated frontier state.
            let snapshot_hash = events::snapshot_hash(&frontier_data);
            let event_log_hash = events::event_log_hash(&frontier_data.events);
            let vfr_id = frontier_data.frontier_id();
            let name = frontier_data.project.name.clone();

            // Derive a locator when publishing to a remote hub; local
            // publishes must spell one out explicitly.
            let to_is_remote = matches!(
                to.as_deref(),
                Some(loc) if loc.starts_with("http://") || loc.starts_with("https://")
            );
            let resolved_locator = match locator {
                Some(l) => l,
                None => {
                    if to_is_remote {
                        let hub = to.as_deref().unwrap().trim_end_matches('/');
                        let hub_root = hub.trim_end_matches("/entries");
                        format!("{hub_root}/entries/{vfr_id}/snapshot")
                    } else {
                        fail_return(
                            "--locator is required for local publishes; pass e.g. \
                            --locator file:///path/to/frontier.json or an HTTPS URL.",
                        )
                    }
                }
            };

            // Build the new registry entry and sign it with the NEW owner key.
            let mut entry = registry::RegistryEntry {
                schema: registry::ENTRY_SCHEMA.to_string(),
                vfr_id: vfr_id.clone(),
                name: name.clone(),
                owner_actor_id: proposal_obj.new_owner_actor_id.clone(),
                owner_pubkey: proposal_obj.new_owner_pubkey.clone(),
                latest_snapshot_hash: snapshot_hash,
                latest_event_log_hash: event_log_hash,
                network_locator: resolved_locator,
                signed_publish_at: chrono::Utc::now().to_rfc3339(),
                signature: String::new(),
            };
            entry.signature = registry::sign_entry(&entry, &sk).unwrap_or_else(|e| fail_return(&e));

            let (registry_label, duplicate) = if to_is_remote {
                let hub_url = to.clone().unwrap();
                let resp = registry::publish_remote(&entry, &hub_url, Some(&frontier_data))
                    .unwrap_or_else(|e| fail_return(&e));
                (hub_url, resp.duplicate)
            } else {
                let registry_path = match &to {
                    Some(loc) => registry::resolve_local(loc).unwrap_or_else(|e| fail_return(&e)),
                    None => default_registry_path(),
                };
                registry::publish_entry(&registry_path, entry.clone())
                    .unwrap_or_else(|e| fail_return(&e));
                (registry_path.display().to_string(), false)
            };

            // Append the transition to the owner-epoch chain (created on
            // first governed rotation).
            let chain_path = governance_chain_path(&frontier);
            let mut chain = if chain_path.exists() {
                let raw = std::fs::read_to_string(&chain_path).unwrap_or_else(|e| {
                    fail_return(&format!("read chain {}: {e}", chain_path.display()))
                });
                serde_json::from_str::<crate::governance::OwnerEpochChain>(&raw)
                    .unwrap_or_else(|e| fail_return(&format!("parse chain: {e}")))
            } else {
                crate::governance::OwnerEpochChain::new(vfr_id.clone())
            };
            let transition = crate::governance::ChainTransition {
                owner_epoch: proposal_obj.owner_epoch,
                policy_id: policy_obj.policy_id.clone(),
                proposal_id: proposal_obj.proposal_id.clone(),
                bundle_id: bundle_obj.bundle_id.clone(),
                previous_entry_hash: proposal_obj.previous_registry_entry_hash.clone(),
                new_owner_actor_id: proposal_obj.new_owner_actor_id.clone(),
                new_owner_pubkey: proposal_obj.new_owner_pubkey.clone(),
                signed_at: now.clone(),
            };
            chain
                .append(transition)
                .unwrap_or_else(|e| fail_return(&format!("append chain: {e}")));
            if let Some(parent) = chain_path.parent() {
                std::fs::create_dir_all(parent).unwrap_or_else(|e| {
                    fail_return(&format!("create chain dir {}: {e}", parent.display()))
                });
            }
            let chain_json =
                serde_json::to_string_pretty(&chain).expect("serialize owner-epoch chain");
            std::fs::write(&chain_path, format!("{chain_json}\n")).unwrap_or_else(|e| {
                fail_return(&format!("write chain {}: {e}", chain_path.display()))
            });

            let payload = json!({
                "ok": true,
                "command": "registry.owner-rotate-governed.apply",
                "proposal_id": proposal_obj.proposal_id,
                "bundle_id": bundle_obj.bundle_id,
                "policy_id": policy_obj.policy_id,
                "quorum_report": report,
                "vfr_id": vfr_id,
                "name": name,
                "retired_owner": proposal_obj.old_owner_actor_id,
                "retired_pubkey_prefix": retired_pubkey_prefix,
                "new_owner": proposal_obj.new_owner_actor_id,
                "new_pubkey": derived_pubkey,
                "registry": registry_label,
                "signature": entry.signature,
                "duplicate": duplicate,
                "chain_file": chain_path.display().to_string(),
                "chain_length": chain.transitions.len(),
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload).expect("serialize apply summary")
                );
            } else {
                println!(
                    "{} governed rotation applied: {} -> {} (epoch {})",
                    style::ok("registry"),
                    proposal_obj.old_owner_actor_id,
                    proposal_obj.new_owner_actor_id,
                    proposal_obj.owner_epoch
                );
                println!(
                    " approving signers: {}",
                    report.approving_signers.join(", ")
                );
                println!(" threshold: {}", report.threshold);
                println!(" bundle: {}", bundle_obj.bundle_id);
                println!(" registry: {}", registry_label);
                println!(" signature: {}…", &entry.signature[..16]);
            }
        }
    }
}
15325
15326#[allow(clippy::too_many_arguments)]
15328fn cmd_proof_attest_verification(
15329 proof_id: String,
15330 tool: String,
15331 tool_version: String,
15332 script_locator: String,
15333 lake_manifest_hash: Option<String>,
15334 verifier_output_hash: String,
15335 status: String,
15336 verifier_actor: String,
15337 key: PathBuf,
15338 out: PathBuf,
15339 json: bool,
15340) {
15341 use crate::proof_verification::{ProofVerification, VerificationDraft};
15342
15343 let key_hex = std::fs::read_to_string(&key)
15344 .map(|s| s.trim().to_string())
15345 .unwrap_or_else(|e| fail_return(&format!("read key: {e}")));
15346 let sk = parse_signing_key(&key_hex);
15347
15348 let draft = VerificationDraft {
15349 proof_id,
15350 tool,
15351 tool_version,
15352 script_locator,
15353 lake_manifest_hash,
15354 verifier_output_hash,
15355 status,
15356 verified_at: chrono::Utc::now().to_rfc3339(),
15357 verifier_actor,
15358 };
15359 let record = ProofVerification::build(draft, &sk).unwrap_or_else(|e| fail_return(&e));
15360
15361 let body = serde_json::to_string_pretty(&record).expect("serialize proof verification record");
15362 std::fs::write(&out, format!("{body}\n"))
15363 .unwrap_or_else(|e| fail_return(&format!("write {}: {e}", out.display())));
15364
15365 if json {
15366 let payload = json!({
15367 "ok": true,
15368 "command": "proof-attest-verification",
15369 "verification_id": record.verification_id,
15370 "proof_id": record.proof_id,
15371 "tool": record.tool,
15372 "tool_version": record.tool_version,
15373 "status": record.status,
15374 "verifier_actor": record.verifier_actor,
15375 "out": out.display().to_string(),
15376 });
15377 println!(
15378 "{}",
15379 serde_json::to_string_pretty(&payload).expect("serialize summary")
15380 );
15381 } else {
15382 println!(
15383 "{} attested {} verifying {} ({} {})",
15384 style::ok("proof"),
15385 record.verification_id,
15386 record.proof_id,
15387 record.tool,
15388 record.tool_version
15389 );
15390 println!(" status: {}", record.status);
15391 println!(" verifier_actor: {}", record.verifier_actor);
15392 println!(" verifier_output_hash: {}", record.verifier_output_hash);
15393 println!(" out: {}", out.display());
15394 }
15395}
15396
15397fn cmd_proof_verify_attestation(record: PathBuf, json: bool) {
15399 use crate::proof_verification::ProofVerification;
15400
15401 let raw = std::fs::read_to_string(&record)
15402 .unwrap_or_else(|e| fail_return(&format!("read record: {e}")));
15403 let parsed: ProofVerification =
15404 serde_json::from_str(&raw).unwrap_or_else(|e| fail_return(&format!("parse record: {e}")));
15405
15406 if let Err(e) = parsed.verify() {
15407 if json {
15408 let payload = json!({
15409 "ok": false,
15410 "command": "proof-verify-attestation",
15411 "verification_id": parsed.verification_id,
15412 "error": e,
15413 });
15414 println!(
15415 "{}",
15416 serde_json::to_string_pretty(&payload).expect("serialize verify failure")
15417 );
15418 } else {
15419 eprintln!("err · {e}");
15420 }
15421 std::process::exit(1);
15422 }
15423
15424 if json {
15425 let payload = json!({
15426 "ok": true,
15427 "command": "proof-verify-attestation",
15428 "verification_id": parsed.verification_id,
15429 "proof_id": parsed.proof_id,
15430 "tool": parsed.tool,
15431 "tool_version": parsed.tool_version,
15432 "status": parsed.status,
15433 "verifier_actor": parsed.verifier_actor,
15434 "verifier_pubkey": parsed.verifier_pubkey,
15435 });
15436 println!(
15437 "{}",
15438 serde_json::to_string_pretty(&payload).expect("serialize verify success")
15439 );
15440 } else {
15441 println!(
15442 "{} verification {} ok ({} {} verified {})",
15443 style::ok("verify"),
15444 parsed.verification_id,
15445 parsed.tool,
15446 parsed.tool_version,
15447 parsed.proof_id
15448 );
15449 }
15450}
15451
15452async fn cmd_search_index(action: SearchAction) {
15456 match action {
15457 SearchAction::Build {
15458 frontiers,
15459 out,
15460 include_bootstrap,
15461 include_broken,
15462 json,
15463 } => match SEARCH_BUILD_HANDLER.get() {
15464 Some(handler) => {
15465 handler(frontiers, out, include_bootstrap, include_broken, json).await;
15466 }
15467 None => fail("search build handler not registered"),
15468 },
15469 SearchAction::Query {
15470 query,
15471 index,
15472 kind,
15473 entity,
15474 status,
15475 frontier_id,
15476 source_id,
15477 chain_status,
15478 limit,
15479 json,
15480 } => match SEARCH_QUERY_HANDLER.get() {
15481 Some(handler) => {
15482 handler(
15483 query,
15484 index,
15485 kind,
15486 entity,
15487 status,
15488 frontier_id,
15489 source_id,
15490 chain_status,
15491 limit,
15492 json,
15493 )
15494 .await;
15495 }
15496 None => fail("search query handler not registered"),
15497 },
15498 }
15499}
15500
/// Queries multiple federated hub checkpoint endpoints and reports whether
/// they agree on the registry root. Consensus outcomes: `insufficient`
/// (< 2 resolvable sources), `unanimous`, `majority` (> half of resolved
/// sources share one (root, sequence) pair), or `split` (exit code 1).
fn cmd_hub_federation(action: HubFederationAction) {
    use crate::checkpoint::RegistryCheckpoint;

    match action {
        HubFederationAction::Status { sources, json } => {
            if sources.len() < 2 {
                fail("--source requires at least two id=url pairs (comma-separated or repeated).");
            }

            // Per-source outcome row; optional fields are omitted from JSON
            // output when absent.
            #[derive(serde::Serialize)]
            struct SourceResponse {
                id: String,
                url: String,
                status: String,
                #[serde(skip_serializing_if = "Option::is_none")]
                checkpoint_id: Option<String>,
                #[serde(skip_serializing_if = "Option::is_none")]
                registry_root: Option<String>,
                #[serde(skip_serializing_if = "Option::is_none")]
                sequence: Option<u64>,
                #[serde(skip_serializing_if = "Option::is_none")]
                hub_id: Option<String>,
                #[serde(skip_serializing_if = "Option::is_none")]
                note: Option<String>,
            }

            // Blocking HTTP client with a 15s timeout per request.
            let client = reqwest::blocking::Client::builder()
                .timeout(std::time::Duration::from_secs(15))
                .build()
                .unwrap_or_else(|e| fail_return(&format!("http client init: {e}")));

            let mut responses: Vec<SourceResponse> = Vec::new();
            // Tally of (registry_root, sequence) pairs among sources whose
            // checkpoint parsed and self-verified.
            let mut root_counts: std::collections::BTreeMap<(String, u64), usize> =
                std::collections::BTreeMap::new();

            for entry in &sources {
                // Each source is an `id=url` pair; malformed entries are
                // recorded, not fatal.
                let (id, url) = match entry.split_once('=') {
                    Some(pair) => pair,
                    None => {
                        responses.push(SourceResponse {
                            id: entry.clone(),
                            url: String::new(),
                            status: "malformed".to_string(),
                            checkpoint_id: None,
                            registry_root: None,
                            sequence: None,
                            hub_id: None,
                            note: Some("source must be `id=url`".to_string()),
                        });
                        continue;
                    }
                };

                // Fetch the checkpoint body: file:// paths read from disk,
                // http(s) URLs go through the client, anything else is an error.
                let body_result: Result<String, String> =
                    if let Some(path) = url.strip_prefix("file://") {
                        std::fs::read_to_string(path).map_err(|e| format!("read {path}: {e}"))
                    } else if url.starts_with("http://") || url.starts_with("https://") {
                        match client.get(url).send() {
                            Ok(resp) if resp.status().is_success() => {
                                resp.text().map_err(|e| format!("body: {e}"))
                            }
                            Ok(resp) => Err(format!("HTTP {}", resp.status())),
                            Err(e) => Err(format!("{e}")),
                        }
                    } else {
                        Err(format!("unsupported url scheme: {url}"))
                    };

                match body_result {
                    Ok(body) => match serde_json::from_str::<RegistryCheckpoint>(&body) {
                        Ok(cp) => {
                            // Re-derive the checkpoint id to detect tampering;
                            // a derivation failure counts as a mismatch.
                            let derived = cp.derive_id().unwrap_or_else(|_| String::new());
                            if derived != cp.checkpoint_id {
                                responses.push(SourceResponse {
                                    id: id.to_string(),
                                    url: url.to_string(),
                                    status: "id_mismatch".to_string(),
                                    checkpoint_id: Some(cp.checkpoint_id.clone()),
                                    registry_root: None,
                                    sequence: None,
                                    hub_id: Some(cp.hub_id.clone()),
                                    note: Some(format!(
                                        "id mismatch: stored {}, derived {}",
                                        cp.checkpoint_id, derived
                                    )),
                                });
                            } else {
                                // Valid checkpoint: count its root/sequence
                                // pair toward consensus.
                                *root_counts
                                    .entry((cp.registry_root.clone(), cp.sequence))
                                    .or_insert(0) += 1;
                                responses.push(SourceResponse {
                                    id: id.to_string(),
                                    url: url.to_string(),
                                    status: "ok".to_string(),
                                    checkpoint_id: Some(cp.checkpoint_id),
                                    registry_root: Some(cp.registry_root),
                                    sequence: Some(cp.sequence),
                                    hub_id: Some(cp.hub_id),
                                    note: None,
                                });
                            }
                        }
                        Err(e) => responses.push(SourceResponse {
                            id: id.to_string(),
                            url: url.to_string(),
                            status: "parse_error".to_string(),
                            checkpoint_id: None,
                            registry_root: None,
                            sequence: None,
                            hub_id: None,
                            note: Some(format!("parse: {e}")),
                        }),
                    },
                    Err(e) => responses.push(SourceResponse {
                        id: id.to_string(),
                        url: url.to_string(),
                        status: "unreachable".to_string(),
                        checkpoint_id: None,
                        registry_root: None,
                        sequence: None,
                        hub_id: None,
                        note: Some(e),
                    }),
                }
            }

            // Classify consensus among the sources that resolved cleanly.
            let resolved_count = responses.iter().filter(|r| r.status == "ok").count();
            let consensus = if resolved_count < 2 {
                "insufficient"
            } else if root_counts.len() == 1 {
                "unanimous"
            } else {
                let max = root_counts.values().copied().max().unwrap_or(0);
                if max * 2 > resolved_count {
                    "majority"
                } else {
                    "split"
                }
            };

            let payload = json!({
                "ok": consensus == "unanimous" || consensus == "majority",
                "command": "registry.federation.status",
                "sources_queried": sources.len(),
                "sources_resolved": resolved_count,
                "distinct_roots": root_counts.len(),
                "consensus": consensus,
                "responses": responses,
            });

            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload).expect("serialize federation status")
                );
            } else {
                println!(
                    "{} federation status across {} source(s): {}",
                    style::ok("registry"),
                    sources.len(),
                    consensus
                );
                for r in &responses {
                    let summary = match (&r.registry_root, r.sequence) {
                        // Truncate the root for display; .min() guards short
                        // roots against slice panics.
                        (Some(root), Some(seq)) => {
                            format!("seq {seq} root {}...", &root[..root.len().min(23)])
                        }
                        _ => r.note.clone().unwrap_or_default(),
                    };
                    println!(" {} {} ({}) {summary}", r.status, r.id, r.url);
                }
            }
            // A split federation is an error condition for scripting callers.
            if consensus == "split" {
                std::process::exit(1);
            }
        }
    }
}
15684
15685fn cmd_verify_all(from: Option<PathBuf>, json: bool) {
15687 use crate::registry;
15688
15689 let registry_path = match from {
15690 Some(p) => registry::resolve_local(p.to_str().unwrap_or_default())
15691 .unwrap_or_else(|e| fail_return(&e)),
15692 None => default_registry_path(),
15693 };
15694 let registry_data = registry::load_local(®istry_path).unwrap_or_else(|e| fail_return(&e));
15695
15696 #[derive(serde::Serialize)]
15697 struct EntryReport {
15698 vfr_id: String,
15699 signature_ok: bool,
15700 #[serde(skip_serializing_if = "Option::is_none")]
15701 error: Option<String>,
15702 }
15703
15704 let mut reports: Vec<EntryReport> = Vec::new();
15705 let mut pass = 0usize;
15706 let mut fail = 0usize;
15707 for entry in ®istry_data.entries {
15708 match registry::verify_entry(entry) {
15709 Ok(true) => {
15710 pass += 1;
15711 reports.push(EntryReport {
15712 vfr_id: entry.vfr_id.clone(),
15713 signature_ok: true,
15714 error: None,
15715 });
15716 }
15717 Ok(false) => {
15718 fail += 1;
15719 reports.push(EntryReport {
15720 vfr_id: entry.vfr_id.clone(),
15721 signature_ok: false,
15722 error: Some("signature did not verify against owner_pubkey".to_string()),
15723 });
15724 }
15725 Err(e) => {
15726 fail += 1;
15727 reports.push(EntryReport {
15728 vfr_id: entry.vfr_id.clone(),
15729 signature_ok: false,
15730 error: Some(e),
15731 });
15732 }
15733 }
15734 }
15735
15736 let ok = fail == 0;
15737 let payload = json!({
15738 "ok": ok,
15739 "command": "registry.verify-all",
15740 "registry": registry_path.display().to_string(),
15741 "entry_count": registry_data.entries.len(),
15742 "pass": pass,
15743 "fail": fail,
15744 "entries": reports,
15745 });
15746 if json {
15747 println!(
15748 "{}",
15749 serde_json::to_string_pretty(&payload).expect("serialize verify-all")
15750 );
15751 } else {
15752 println!(
15753 "{} verify-all over {}: {} pass, {} fail",
15754 style::ok("registry"),
15755 registry_path.display(),
15756 pass,
15757 fail
15758 );
15759 for r in &reports {
15760 let badge = if r.signature_ok { "ok" } else { "FAIL" };
15761 println!(" {badge} {}", r.vfr_id);
15762 if let Some(e) = &r.error {
15763 println!(" {e}");
15764 }
15765 }
15766 }
15767 if !ok {
15768 std::process::exit(1);
15769 }
15770}
15771
/// Handles registry checkpoint subcommands: `Create` builds and signs a
/// checkpoint over a local registry and writes it to `out`; `Verify` checks a
/// checkpoint file against a local registry and exits 1 on mismatch.
fn cmd_checkpoint(action: CheckpointAction) {
    use crate::checkpoint::{CheckpointDraft, RegistryCheckpoint};
    use crate::registry;

    match action {
        CheckpointAction::Create {
            from,
            hub_id,
            sequence,
            previous,
            key,
            out,
            json,
        } => {
            // Load the registry that the checkpoint will cover.
            let registry_path = registry::resolve_local(from.to_str().unwrap_or_default())
                .unwrap_or_else(|e| fail_return(&e));
            let registry_data =
                registry::load_local(&registry_path).unwrap_or_else(|e| fail_return(&e));

            // Load the hub's signing key (hex-encoded, whitespace-trimmed).
            let key_hex = std::fs::read_to_string(&key)
                .map(|s| s.trim().to_string())
                .unwrap_or_else(|e| fail_return(&format!("read key: {e}")));
            let sk = parse_signing_key(&key_hex);

            // Build and sign the checkpoint; `previous` links checkpoints
            // into a sequence chain.
            let draft = CheckpointDraft {
                hub_id,
                sequence,
                previous_checkpoint: previous,
                created_at: chrono::Utc::now().to_rfc3339(),
            };
            let checkpoint = RegistryCheckpoint::build(&registry_data, draft, &sk)
                .unwrap_or_else(|e| fail_return(&e));

            let body = serde_json::to_string_pretty(&checkpoint).expect("serialize checkpoint");
            std::fs::write(&out, format!("{body}\n"))
                .unwrap_or_else(|e| fail_return(&format!("write {}: {e}", out.display())));

            if json {
                let payload = json!({
                    "ok": true,
                    "command": "registry.checkpoint.create",
                    "checkpoint_id": checkpoint.checkpoint_id,
                    "hub_id": checkpoint.hub_id,
                    "sequence": checkpoint.sequence,
                    "entry_count": checkpoint.entry_count,
                    "registry_root": checkpoint.registry_root,
                    "previous_checkpoint": checkpoint.previous_checkpoint,
                    "signer_pubkey": checkpoint.signer_pubkey,
                    "out": out.display().to_string(),
                });
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload).expect("serialize create summary")
                );
            } else {
                println!(
                    "{} checkpoint {} sequence {} over {} entries",
                    style::ok("registry"),
                    checkpoint.checkpoint_id,
                    checkpoint.sequence,
                    checkpoint.entry_count
                );
                println!(" registry_root: {}", checkpoint.registry_root);
                // NOTE(review): assumes signer_pubkey is >= 16 chars
                // (hex-encoded key) — [..16] would panic otherwise.
                println!(" signer pubkey: {}...", &checkpoint.signer_pubkey[..16]);
                println!(" out: {}", out.display());
            }
        }
        CheckpointAction::Verify {
            checkpoint,
            registry,
            json,
        } => {
            // Load both the checkpoint file and the registry it claims to cover.
            let cp_raw = std::fs::read_to_string(&checkpoint).unwrap_or_else(|e| {
                fail_return(&format!("read checkpoint {}: {e}", checkpoint.display()))
            });
            let cp: RegistryCheckpoint = serde_json::from_str(&cp_raw)
                .unwrap_or_else(|e| fail_return(&format!("parse checkpoint: {e}")));
            let registry_path = registry::resolve_local(registry.to_str().unwrap_or_default())
                .unwrap_or_else(|e| fail_return(&e));
            let registry_data =
                registry::load_local(&registry_path).unwrap_or_else(|e| fail_return(&e));

            // Verification failure reports in the requested format, then exits 1.
            if let Err(e) = cp.verify(&registry_data) {
                if json {
                    let payload = json!({
                        "ok": false,
                        "command": "registry.checkpoint.verify",
                        "checkpoint_id": cp.checkpoint_id,
                        "error": e,
                    });
                    println!(
                        "{}",
                        serde_json::to_string_pretty(&payload).expect("serialize verify failure")
                    );
                } else {
                    eprintln!("err · {e}");
                }
                std::process::exit(1);
            }

            if json {
                let payload = json!({
                    "ok": true,
                    "command": "registry.checkpoint.verify",
                    "checkpoint_id": cp.checkpoint_id,
                    "hub_id": cp.hub_id,
                    "sequence": cp.sequence,
                    "entry_count": cp.entry_count,
                    "registry_root": cp.registry_root,
                    "signer_pubkey": cp.signer_pubkey,
                });
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload).expect("serialize verify success")
                );
            } else {
                println!(
                    "{} checkpoint {} verified (sequence {}, {} entries)",
                    style::ok("verify"),
                    cp.checkpoint_id,
                    cp.sequence,
                    cp.entry_count
                );
            }
        }
    }
}
15900
15901fn cmd_verify_chain(frontier: PathBuf, artifacts: PathBuf, json: bool) {
15903 use crate::governance::{
15904 ChainStatus, GovernancePolicy, OwnerEpochChain, OwnerRotateAttestationBundle,
15905 OwnerRotateProposal, verify_chain,
15906 };
15907
15908 let chain_path = governance_chain_path(&frontier);
15909 if !chain_path.exists() {
15910 if json {
15912 println!(
15913 "{}",
15914 serde_json::to_string_pretty(&json!({
15915 "ok": true,
15916 "command": "registry.verify-chain",
15917 "frontier": frontier.display().to_string(),
15918 "chain_status": "legacy",
15919 "reason": format!("no chain file at {}", chain_path.display()),
15920 }))
15921 .expect("serialize legacy")
15922 );
15923 } else {
15924 println!(
15925 "{} chain status: legacy ({} not present)",
15926 style::ok("verify-chain"),
15927 chain_path.display()
15928 );
15929 }
15930 return;
15931 }
15932
15933 let chain_raw = std::fs::read_to_string(&chain_path)
15934 .unwrap_or_else(|e| fail_return(&format!("read chain: {e}")));
15935 let chain: OwnerEpochChain = serde_json::from_str(&chain_raw)
15936 .unwrap_or_else(|e| fail_return(&format!("parse chain: {e}")));
15937
15938 let mut policies: std::collections::HashMap<String, GovernancePolicy> =
15940 std::collections::HashMap::new();
15941 let mut proposals: std::collections::HashMap<String, OwnerRotateProposal> =
15942 std::collections::HashMap::new();
15943 let mut bundles: std::collections::HashMap<String, OwnerRotateAttestationBundle> =
15944 std::collections::HashMap::new();
15945
15946 for transition in &chain.transitions {
15947 let policy_path = artifacts.join(format!("{}.json", transition.policy_id));
15948 let proposal_path = artifacts.join(format!("{}.json", transition.proposal_id));
15949 let bundle_path = artifacts.join(format!("{}.json", transition.bundle_id));
15950
15951 if !policies.contains_key(&transition.policy_id) {
15952 let raw = std::fs::read_to_string(&policy_path).unwrap_or_else(|e| {
15953 fail_return(&format!("read policy {}: {e}", policy_path.display()))
15954 });
15955 let p: GovernancePolicy = serde_json::from_str(&raw)
15956 .unwrap_or_else(|e| fail_return(&format!("parse policy: {e}")));
15957 policies.insert(transition.policy_id.clone(), p);
15958 }
15959 if !proposals.contains_key(&transition.proposal_id) {
15960 let raw = std::fs::read_to_string(&proposal_path).unwrap_or_else(|e| {
15961 fail_return(&format!("read proposal {}: {e}", proposal_path.display()))
15962 });
15963 let p: OwnerRotateProposal = serde_json::from_str(&raw)
15964 .unwrap_or_else(|e| fail_return(&format!("parse proposal: {e}")));
15965 proposals.insert(transition.proposal_id.clone(), p);
15966 }
15967 if !bundles.contains_key(&transition.bundle_id) {
15968 let raw = std::fs::read_to_string(&bundle_path).unwrap_or_else(|e| {
15969 fail_return(&format!("read bundle {}: {e}", bundle_path.display()))
15970 });
15971 let b: OwnerRotateAttestationBundle = serde_json::from_str(&raw)
15972 .unwrap_or_else(|e| fail_return(&format!("parse bundle: {e}")));
15973 bundles.insert(transition.bundle_id.clone(), b);
15974 }
15975 }
15976
15977 let frontier_data = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
15978 let revocation = FrontierRevocation {
15979 map: frontier_data
15980 .actors
15981 .iter()
15982 .filter_map(|a| a.revoked_at.as_ref().map(|r| (a.id.clone(), r.clone())))
15983 .collect(),
15984 };
15985 let now = chrono::Utc::now().to_rfc3339();
15986
15987 let status = verify_chain(&chain, &policies, &proposals, &bundles, &revocation, &now);
15988
15989 let status_str = match status {
15990 ChainStatus::Bootstrap => "bootstrap",
15991 ChainStatus::Verified => "verified",
15992 ChainStatus::Legacy => "legacy",
15993 ChainStatus::Broken => "broken",
15994 };
15995
15996 if json {
15997 let payload = json!({
15998 "ok": !matches!(status, ChainStatus::Broken),
15999 "command": "registry.verify-chain",
16000 "frontier": frontier.display().to_string(),
16001 "chain_status": status_str,
16002 "transition_count": chain.transitions.len(),
16003 "current_epoch": chain.transitions.last().map_or(0, |t| t.owner_epoch),
16004 });
16005 println!(
16006 "{}",
16007 serde_json::to_string_pretty(&payload).expect("serialize verify-chain")
16008 );
16009 } else {
16010 println!(
16011 "{} chain status: {} ({} transition(s))",
16012 style::ok("verify-chain"),
16013 status_str,
16014 chain.transitions.len()
16015 );
16016 if let Some(t) = chain.transitions.last() {
16017 println!(
16018 " current epoch: {} policy: {} bundle: {}",
16019 t.owner_epoch, t.policy_id, t.bundle_id
16020 );
16021 }
16022 }
16023
16024 if matches!(status, ChainStatus::Broken) {
16025 std::process::exit(1);
16026 }
16027}
16028
/// Snapshot of actor revocations taken from a loaded frontier, used to answer
/// `ActorRevocationLookup` queries during governance chain verification.
struct FrontierRevocation {
    // actor id -> the `revoked_at` timestamp string recorded on that actor
    // (copied verbatim from the frontier; format presumably RFC 3339 — see the
    // `chrono::Utc::now().to_rfc3339()` comparisons in callers).
    map: std::collections::HashMap<String, String>,
}
16033
16034impl crate::governance::ActorRevocationLookup for FrontierRevocation {
16035 fn revoked_at(&self, actor_id: &str) -> Option<&str> {
16036 self.map.get(actor_id).map(String::as_str)
16037 }
16038}
16039
/// Handle `vela registry governance` subcommands.
///
/// * `Init` — build a `GovernancePolicy` from CLI flags via `PolicyDraft`,
///   then write it to `--out` (with a summary) or print the policy JSON.
/// * `Show` — parse and display an existing policy file.
/// * `Validate` — run structural validation plus the content-address check,
///   exiting with status 1 on the first failure.
///
/// All read/parse failures abort through `fail`/`fail_return`.
fn cmd_governance(action: GovernanceAction) {
    use crate::governance::{GovernancePolicy, PolicyDraft, Quorum};

    match action {
        GovernanceAction::Init {
            frontier,
            threshold,
            eligible,
            bootstrap,
            owner_epoch,
            current_owner_counts,
            attestation_ttl_hours,
            out,
            json,
        } => {
            // The policy is bound to the frontier it was generated for.
            let frontier_data = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let frontier_id = frontier_data.frontier_id().to_string();

            // --bootstrap pins the owner epoch to 0; otherwise --owner-epoch is
            // mandatory. `fail_return` diverges, so the match stays an expression.
            let resolved_owner_epoch = match (bootstrap, owner_epoch) {
                (true, None) => 0,
                (true, Some(e)) if e != 0 => {
                    fail_return("--bootstrap requires --owner-epoch 0 (or omit --owner-epoch).")
                }
                (true, Some(e)) => e,
                (false, None) => fail_return(
                    "--owner-epoch is required for non-bootstrap policies (or pass --bootstrap).",
                ),
                (false, Some(e)) => e,
            };

            if eligible.is_empty() {
                fail("--eligible must list at least one actor id (comma-separated).");
            }

            // Only the rotate quorum is configurable from the CLI here; the
            // emergency and policy-update quorums start unset.
            let draft = PolicyDraft {
                frontier_id,
                owner_epoch: resolved_owner_epoch,
                bootstrap_epoch: if bootstrap { 0 } else { resolved_owner_epoch },
                rotate_quorum: Quorum {
                    threshold,
                    eligible_actors: eligible,
                    current_owner_counts,
                    role_constraints: None,
                    timelock_hours: None,
                },
                emergency_quorum: None,
                policy_update_quorum: None,
                attestation_ttl_hours,
                created_at: chrono::Utc::now().to_rfc3339(),
            };
            let policy = GovernancePolicy::from_draft(draft).unwrap_or_else(|e| fail_return(&e));

            let body = serde_json::to_string_pretty(&policy).expect("serialize governance policy");

            if let Some(path) = out {
                std::fs::write(&path, format!("{body}\n"))
                    .unwrap_or_else(|e| fail_return(&format!("write {}: {e}", path.display())));
                if json {
                    let payload = json!({
                        "ok": true,
                        "command": "registry.governance.init",
                        "policy_id": policy.policy_id,
                        "frontier_id": policy.frontier_id,
                        "owner_epoch": policy.owner_epoch,
                        "bootstrap_epoch": policy.bootstrap_epoch,
                        "out": path.display().to_string(),
                    });
                    println!(
                        "{}",
                        serde_json::to_string_pretty(&payload).expect("serialize init summary")
                    );
                } else {
                    println!(
                        "{} governance policy {} (epoch {}, bootstrap_epoch {}) -> {}",
                        style::ok("registry"),
                        policy.policy_id,
                        policy.owner_epoch,
                        policy.bootstrap_epoch,
                        path.display()
                    );
                    println!(" threshold: {}", policy.rotate_quorum.threshold);
                    println!(
                        " eligible: {}",
                        policy.rotate_quorum.eligible_actors.join(", ")
                    );
                    println!(
                        " current_owner_counts: {}",
                        policy.rotate_quorum.current_owner_counts
                    );
                }
            } else {
                // No --out: emit the policy JSON itself on stdout (pipeable).
                println!("{body}");
            }
        }
        GovernanceAction::Show { policy, json } => {
            let raw = std::fs::read_to_string(&policy)
                .unwrap_or_else(|e| fail_return(&format!("read {}: {e}", policy.display())));
            let parsed: GovernancePolicy = serde_json::from_str(&raw)
                .unwrap_or_else(|e| fail_return(&format!("parse policy: {e}")));
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&parsed).expect("serialize policy")
                );
            } else {
                println!(" vela registry governance policy");
                println!(" policy_id: {}", parsed.policy_id);
                println!(" frontier_id: {}", parsed.frontier_id);
                println!(" owner_epoch: {}", parsed.owner_epoch);
                println!(" bootstrap_epoch: {}", parsed.bootstrap_epoch);
                println!(" rotate threshold: {}", parsed.rotate_quorum.threshold);
                println!(
                    " eligible: {}",
                    parsed.rotate_quorum.eligible_actors.join(", ")
                );
                println!(
                    " current_owner_counts: {}",
                    parsed.rotate_quorum.current_owner_counts
                );
                // Optional quorums are only shown when present.
                if let Some(q) = &parsed.emergency_quorum {
                    println!(" emergency threshold: {}", q.threshold);
                }
                if let Some(q) = &parsed.policy_update_quorum {
                    println!(" policy_update threshold: {}", q.threshold);
                }
                println!(" attestation_ttl_hours: {}", parsed.attestation_ttl_hours);
                println!(" created_at: {}", parsed.created_at);
            }
        }
        GovernanceAction::Validate { policy, json } => {
            let raw = std::fs::read_to_string(&policy)
                .unwrap_or_else(|e| fail_return(&format!("read {}: {e}", policy.display())));
            let parsed: GovernancePolicy = serde_json::from_str(&raw)
                .unwrap_or_else(|e| fail_return(&format!("parse policy: {e}")));
            // Structural validation first; exits 1 on failure.
            if let Err(e) = parsed.validate() {
                if json {
                    let payload = json!({
                        "ok": false,
                        "command": "registry.governance.validate",
                        "policy_id": parsed.policy_id,
                        "error": e,
                    });
                    println!(
                        "{}",
                        serde_json::to_string_pretty(&payload).expect("serialize validate failure")
                    );
                } else {
                    eprintln!("err · {e}");
                }
                std::process::exit(1);
            }
            // Then verify the policy id matches its content hash; exits 1 on failure.
            if let Err(e) = parsed.verify_content_address() {
                if json {
                    let payload = json!({
                        "ok": false,
                        "command": "registry.governance.validate",
                        "policy_id": parsed.policy_id,
                        "error": e,
                    });
                    println!(
                        "{}",
                        serde_json::to_string_pretty(&payload)
                            .expect("serialize content-address failure")
                    );
                } else {
                    eprintln!("err · {e}");
                }
                std::process::exit(1);
            }
            if json {
                let payload = json!({
                    "ok": true,
                    "command": "registry.governance.validate",
                    "policy_id": parsed.policy_id,
                    "frontier_id": parsed.frontier_id,
                    "owner_epoch": parsed.owner_epoch,
                    "bootstrap_epoch": parsed.bootstrap_epoch,
                });
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload).expect("serialize validate success")
                );
            } else {
                println!(
                    "{} governance policy {} valid (epoch {})",
                    style::ok("validate"),
                    parsed.policy_id,
                    parsed.owner_epoch
                );
            }
        }
    }
}
16237
16238fn print_stats_json(path: &Path) {
16239 let frontier = load_frontier_or_fail(path);
16240 let source_hash = hash_path_or_fail(path);
16241 let payload = json!({
16242 "ok": true,
16243 "command": "stats",
16244 "schema_version": project::VELA_SCHEMA_VERSION,
16245 "frontier": {
16246 "name": &frontier.project.name,
16247 "description": &frontier.project.description,
16248 "source": path.display().to_string(),
16249 "hash": format!("sha256:{source_hash}"),
16250 "compiled_at": &frontier.project.compiled_at,
16251 "compiler": &frontier.project.compiler,
16252 "papers_processed": frontier.project.papers_processed,
16253 "errors": frontier.project.errors,
16254 },
16255 "stats": frontier.stats,
16256 "proposals": proposals::summary(&frontier),
16257 "proof_state": frontier.proof_state,
16258 });
16259 println!(
16260 "{}",
16261 serde_json::to_string_pretty(&payload).expect("failed to serialize stats")
16262 );
16263}
16264
16265fn cmd_search(
16266 source: Option<&Path>,
16267 query: &str,
16268 entity: Option<&str>,
16269 assertion_type: Option<&str>,
16270 all: Option<&Path>,
16271 limit: usize,
16272 json_output: bool,
16273) {
16274 if let Some(dir) = all {
16275 search::run_all(dir, query, entity, assertion_type, limit);
16276 return;
16277 }
16278 let Some(src) = source else {
16279 fail("Provide --source <frontier> or --all <directory>.");
16280 };
16281 if json_output {
16282 let results = search::search(src, query, entity, assertion_type, limit);
16283 let loaded = load_frontier_or_fail(src);
16284 let source_hash = hash_path_or_fail(src);
16285 let payload = json!({
16286 "ok": true,
16287 "command": "search",
16288 "schema_version": project::VELA_SCHEMA_VERSION,
16289 "query": query,
16290 "frontier": {
16291 "name": &loaded.project.name,
16292 "source": src.display().to_string(),
16293 "hash": format!("sha256:{source_hash}"),
16294 },
16295 "filters": {
16296 "entity": entity,
16297 "assertion_type": assertion_type,
16298 "limit": limit,
16299 },
16300 "count": results.len(),
16301 "results": results.iter().map(|result| json!({
16302 "id": &result.id,
16303 "score": result.score,
16304 "assertion": &result.assertion,
16305 "assertion_type": &result.assertion_type,
16306 "confidence": result.confidence,
16307 "entities": &result.entities,
16308 "doi": &result.doi,
16309 })).collect::<Vec<_>>()
16310 });
16311 println!(
16312 "{}",
16313 serde_json::to_string_pretty(&payload).expect("failed to serialize search results")
16314 );
16315 } else {
16316 search::run(src, query, entity, assertion_type, limit);
16317 }
16318}
16319
16320fn cmd_tensions(source: &Path, both_high: bool, cross_domain: bool, top: usize, json_output: bool) {
16321 let frontier = load_frontier_or_fail(source);
16322 let result = tensions::analyze(&frontier, both_high, cross_domain, top);
16323 if json_output {
16324 let source_hash = hash_path_or_fail(source);
16325 let payload = json!({
16326 "ok": true,
16327 "command": "tensions",
16328 "schema_version": project::VELA_SCHEMA_VERSION,
16329 "frontier": {
16330 "name": &frontier.project.name,
16331 "source": source.display().to_string(),
16332 "hash": format!("sha256:{source_hash}"),
16333 },
16334 "filters": {
16335 "both_high": both_high,
16336 "cross_domain": cross_domain,
16337 "top": top,
16338 },
16339 "count": result.len(),
16340 "tensions": result.iter().map(|t| json!({
16341 "score": t.score,
16342 "resolved": t.resolved,
16343 "superseding_id": &t.superseding_id,
16344 "finding_a": {
16345 "id": &t.finding_a.id,
16346 "assertion": &t.finding_a.assertion,
16347 "confidence": t.finding_a.confidence,
16348 "assertion_type": &t.finding_a.assertion_type,
16349 "citation_count": t.finding_a.citation_count,
16350 "contradicts_count": t.finding_a.contradicts_count,
16351 },
16352 "finding_b": {
16353 "id": &t.finding_b.id,
16354 "assertion": &t.finding_b.assertion,
16355 "confidence": t.finding_b.confidence,
16356 "assertion_type": &t.finding_b.assertion_type,
16357 "citation_count": t.finding_b.citation_count,
16358 "contradicts_count": t.finding_b.contradicts_count,
16359 }
16360 })).collect::<Vec<_>>()
16361 });
16362 println!(
16363 "{}",
16364 serde_json::to_string_pretty(&payload).expect("failed to serialize tensions")
16365 );
16366 } else {
16367 tensions::print_tensions(&result);
16368 }
16369}
16370
16371fn cmd_gaps(action: GapsAction) {
16372 match action {
16373 GapsAction::Rank {
16374 frontier,
16375 top,
16376 domain,
16377 json,
16378 } => cmd_gap_rank(&frontier, top, domain.as_deref(), json),
16379 }
16380}
16381
/// Rank findings flagged as gaps / negative space into "candidate gap review
/// leads" and print the top `top`, optionally filtered by a case-insensitive
/// `domain` substring over assertion text and entity names.
///
/// Score = (number of inbound links targeting the finding) + confidence score.
/// Output is either a JSON envelope or a short human list.
fn cmd_gap_rank(frontier_path: &Path, top: usize, domain: Option<&str>, json_output: bool) {
    let frontier = load_frontier_or_fail(frontier_path);
    let mut ranked = frontier
        .findings
        .iter()
        // Only findings explicitly flagged as gaps or negative space qualify.
        .filter(|finding| finding.flags.gap || finding.flags.negative_space)
        .filter(|finding| {
            // No --domain means "match everything" (is_none_or).
            domain.is_none_or(|domain| {
                finding
                    .assertion
                    .text
                    .to_lowercase()
                    .contains(&domain.to_lowercase())
                    || finding
                        .assertion
                        .entities
                        .iter()
                        .any(|entity| entity.name.to_lowercase().contains(&domain.to_lowercase()))
            })
        })
        .map(|finding| {
            // Inbound-link count: scans all findings' links per candidate, so
            // this is O(n * links) per ranked finding — fine for CLI-scale data.
            let dependency_count = frontier
                .findings
                .iter()
                .flat_map(|candidate| candidate.links.iter())
                .filter(|link| link.target == finding.id)
                .count();
            let score = dependency_count as f64 + finding.confidence.score;
            json!({
                "id": &finding.id,
                "kind": "candidate_gap_review_lead",
                "assertion": &finding.assertion.text,
                "score": score,
                "dependency_count": dependency_count,
                "confidence": finding.confidence.score,
                "evidence_type": &finding.evidence.evidence_type,
                "entities": finding.assertion.entities.iter().map(|e| &e.name).collect::<Vec<_>>(),
                "recommended_action": "Review source scope and missing evidence before treating this as an experiment target.",
                "caveats": ["Candidate gap rankings are review leads, not guaranteed underexplored areas or experiment targets."],
            })
        })
        .collect::<Vec<_>>();
    // Sort descending by score; NaN/missing scores compare Equal (stable order).
    ranked.sort_by(|a, b| {
        b.get("score")
            .and_then(Value::as_f64)
            .partial_cmp(&a.get("score").and_then(Value::as_f64))
            .unwrap_or(std::cmp::Ordering::Equal)
    });
    ranked.truncate(top);
    if json_output {
        let source_hash = hash_path_or_fail(frontier_path);
        let payload = json!({
            "ok": true,
            "command": "gaps rank",
            "schema_version": project::VELA_SCHEMA_VERSION,
            "frontier": {
                "name": &frontier.project.name,
                "source": frontier_path.display().to_string(),
                "hash": format!("sha256:{source_hash}"),
            },
            "filters": {
                "top": top,
                "domain": domain,
            },
            "count": ranked.len(),
            "ranking_label": "candidate gap review leads",
            "caveats": ["These rankings are navigation signals over flagged findings, not scientific conclusions."],
            // "gaps" duplicates "review_leads" — presumably a legacy alias kept
            // for consumers of the old key; TODO confirm before removing either.
            "review_leads": ranked.clone(),
            "gaps": ranked,
        });
        println!(
            "{}",
            serde_json::to_string_pretty(&payload).expect("failed to serialize gap ranking")
        );
    } else {
        println!();
        println!(" {}", "CANDIDATE GAP REVIEW LEADS".dimmed());
        println!(" {}", style::tick_row(60));
        println!(" review source scope; these are not guaranteed experiment targets.");
        println!();
        for (idx, gap) in ranked.iter().enumerate() {
            println!(
                " {}. [{}] score={} {}",
                idx + 1,
                gap["id"].as_str().unwrap_or("?"),
                gap["score"].as_f64().unwrap_or(0.0),
                gap["assertion"].as_str().unwrap_or("")
            );
        }
    }
}
16473
/// Run cross-frontier bridge detection over two or more frontier files.
///
/// Loads every input frontier (named after its file stem), detects bridges via
/// `bridge::detect_bridges`, and — when `check_novelty` is set — annotates the
/// top `top_n` bridges with a rough PubMed prior-art hit count before printing
/// the report. Network failures on a single bridge are logged and skipped.
async fn cmd_bridge(inputs: &[PathBuf], check_novelty: bool, top_n: usize) {
    if inputs.len() < 2 {
        fail("need at least 2 frontier files for bridge detection.");
    }
    println!();
    // NOTE(review): banner version is hard-coded here rather than taken from
    // the crate version — confirm it is kept in sync with releases.
    println!(" {}", "VELA · BRIDGE · V0.36.0".dimmed());
    println!(" {}", style::tick_row(60));
    println!(" loading {} frontiers...", inputs.len());
    let mut named_projects = Vec::<(String, project::Project)>::new();
    let mut total_findings = 0;
    for path in inputs {
        let frontier = load_frontier_or_fail(path);
        // Frontier display name = file stem ("foo" for "foo.json").
        let name = path
            .file_stem()
            .unwrap_or_default()
            .to_string_lossy()
            .to_string();
        println!(" {} · {} findings", name, frontier.stats.findings);
        total_findings += frontier.stats.findings;
        named_projects.push((name, frontier));
    }
    // detect_bridges takes borrowed (name, project) pairs.
    let refs = named_projects
        .iter()
        .map(|(name, frontier)| (name.as_str(), frontier))
        .collect::<Vec<_>>();
    let mut bridges = bridge::detect_bridges(&refs);
    if check_novelty && !bridges.is_empty() {
        let client = Client::new();
        let check_count = bridges.len().min(top_n);
        println!(" running rough PubMed prior-art checks for top {check_count} bridges...");
        for bridge_item in bridges.iter_mut().take(check_count) {
            let query = bridge::novelty_query(&bridge_item.entity_name, bridge_item);
            match bridge::check_novelty(&client, &query).await {
                Ok(count) => bridge_item.pubmed_count = Some(count),
                // Best-effort: a failed prior-art lookup leaves pubmed_count
                // unset and does not abort the run.
                Err(e) => eprintln!(
                    " {} prior-art check failed for {}: {e}",
                    style::err_prefix(),
                    bridge_item.entity_name
                ),
            }
            // Pause between requests — presumably to respect NCBI rate limits;
            // TODO confirm the 350ms figure against the E-utilities policy.
            tokio::time::sleep(std::time::Duration::from_millis(350)).await;
        }
    }
    print!("{}", bridge::format_report(&bridges, total_findings));
}
16519
/// Parsed flag bundle for the `bench` command, grouped so `cmd_bench` keeps a
/// small signature. Semantics below are as consumed by `cmd_bench`.
struct BenchArgs {
    // Frontier to benchmark; defaults to frontiers/bbb-alzheimer.json.
    frontier: Option<PathBuf>,
    // Finding-level gold file (used when neither entity nor link gold is set).
    gold: Option<PathBuf>,
    // Entity-extraction gold file (takes precedence over `gold`).
    entity_gold: Option<PathBuf>,
    // Link-extraction gold file (takes precedence over both of the above).
    link_gold: Option<PathBuf>,
    // Suite definition; with --suite-ready defaults to
    // benchmarks/suites/bbb-core.json.
    suite: Option<PathBuf>,
    // Only check that the suite's inputs are present, then exit.
    suite_ready: bool,
    // Pass/fail gates; min_f1 falls back to 0.05 unless no_thresholds is set.
    min_f1: Option<f64>,
    min_precision: Option<f64>,
    min_recall: Option<f64>,
    // Disable every threshold gate.
    no_thresholds: bool,
    // Emit machine-readable JSON instead of the human summary.
    json: bool,
}
16533
16534fn cmd_agent_bench(
16539 gold: &Path,
16540 candidate: &Path,
16541 sources: Option<&Path>,
16542 threshold: Option<f64>,
16543 report_path: Option<&Path>,
16544 json_out: bool,
16545) {
16546 let input = crate::agent_bench::BenchInput {
16547 gold_path: gold.to_path_buf(),
16548 candidate_path: candidate.to_path_buf(),
16549 sources: sources.map(Path::to_path_buf),
16550 threshold: threshold.unwrap_or(0.0),
16551 };
16552 let report = match crate::agent_bench::run(input) {
16553 Ok(r) => r,
16554 Err(e) => {
16555 eprintln!("{} bench failed: {e}", style::err_prefix());
16556 std::process::exit(1);
16557 }
16558 };
16559
16560 let json = serde_json::to_string_pretty(&report).unwrap_or_default();
16561 if let Some(path) = report_path
16562 && let Err(e) = std::fs::write(path, &json)
16563 {
16564 eprintln!(
16565 "{} failed to write report to {}: {e}",
16566 style::err_prefix(),
16567 path.display()
16568 );
16569 }
16570
16571 if json_out {
16572 println!("{json}");
16573 } else {
16574 println!();
16575 println!(" {}", "VELA · BENCH · AGENT STATE-UPDATE".dimmed());
16576 println!(" {}", style::tick_row(60));
16577 print!("{}", crate::agent_bench::render_pretty(&report));
16578 println!();
16579 }
16580
16581 if !report.pass {
16582 std::process::exit(1);
16583 }
16584}
16585
16586fn cmd_bench(args: BenchArgs) {
16587 if args.suite_ready {
16588 let suite_path = args
16589 .suite
16590 .unwrap_or_else(|| PathBuf::from("benchmarks/suites/bbb-core.json"));
16591 let payload =
16592 benchmark::suite_ready_report(&suite_path).unwrap_or_else(|e| fail_return(&e));
16593 println!(
16594 "{}",
16595 serde_json::to_string_pretty(&payload).expect("failed to serialize suite-ready report")
16596 );
16597 if payload.get("ok").and_then(Value::as_bool) != Some(true) {
16598 std::process::exit(1);
16599 }
16600 return;
16601 }
16602 if let Some(suite_path) = args.suite {
16603 let payload = benchmark::run_suite(&suite_path).unwrap_or_else(|e| fail_return(&e));
16604 if args.json {
16605 println!(
16606 "{}",
16607 serde_json::to_string_pretty(&payload)
16608 .expect("failed to serialize benchmark suite")
16609 );
16610 } else {
16611 let ok = payload.get("ok").and_then(Value::as_bool) == Some(true);
16612 let metrics = payload.get("metrics").unwrap_or(&Value::Null);
16613 println!();
16614 println!(" {}", "VELA · BENCH · SUITE".dimmed());
16615 println!(" {}", style::tick_row(60));
16616 println!(" suite: {}", suite_path.display());
16617 println!(
16618 " status: {}",
16619 if ok {
16620 style::ok("pass")
16621 } else {
16622 style::lost("fail")
16623 }
16624 );
16625 println!(
16626 " tasks: {}/{} passed",
16627 metrics
16628 .get("tasks_passed")
16629 .and_then(Value::as_u64)
16630 .unwrap_or(0),
16631 metrics
16632 .get("tasks_total")
16633 .and_then(Value::as_u64)
16634 .unwrap_or(0)
16635 );
16636 }
16637 if payload.get("ok").and_then(Value::as_bool) != Some(true) {
16638 std::process::exit(1);
16639 }
16640 return;
16641 }
16642
16643 let frontier = args
16644 .frontier
16645 .unwrap_or_else(|| PathBuf::from("frontiers/bbb-alzheimer.json"));
16646 let thresholds = benchmark::BenchmarkThresholds {
16647 min_f1: if args.no_thresholds {
16648 None
16649 } else {
16650 args.min_f1.or(Some(0.05))
16651 },
16652 min_precision: if args.no_thresholds {
16653 None
16654 } else {
16655 args.min_precision
16656 },
16657 min_recall: if args.no_thresholds {
16658 None
16659 } else {
16660 args.min_recall
16661 },
16662 ..Default::default()
16663 };
16664 if let Some(path) = args.link_gold {
16665 print_benchmark_or_exit(benchmark::task_envelope(
16666 &frontier,
16667 None,
16668 benchmark::BenchmarkMode::Link,
16669 Some(&path),
16670 &thresholds,
16671 None,
16672 ));
16673 } else if let Some(path) = args.entity_gold {
16674 print_benchmark_or_exit(benchmark::task_envelope(
16675 &frontier,
16676 None,
16677 benchmark::BenchmarkMode::Entity,
16678 Some(&path),
16679 &thresholds,
16680 None,
16681 ));
16682 } else if let Some(path) = args.gold {
16683 if args.json {
16684 print_benchmark_or_exit(benchmark::task_envelope(
16685 &frontier,
16686 None,
16687 benchmark::BenchmarkMode::Finding,
16688 Some(&path),
16689 &thresholds,
16690 None,
16691 ));
16692 } else {
16693 benchmark::run(&frontier, &path, false);
16694 }
16695 } else {
16696 fail("Provide --suite, --gold, --entity-gold, or --link-gold.");
16697 }
16698}
16699
16700fn print_benchmark_or_exit(result: Result<Value, String>) {
16701 let payload = result.unwrap_or_else(|e| fail_return(&e));
16702 println!(
16703 "{}",
16704 serde_json::to_string_pretty(&payload).expect("failed to serialize benchmark report")
16705 );
16706 if payload.get("ok").and_then(Value::as_bool) != Some(true) {
16707 std::process::exit(1);
16708 }
16709}
16710
16711fn cmd_packet(action: PacketAction) {
16712 let (result, json_output) = match action {
16713 PacketAction::Inspect { path, json } => (packet::inspect(&path), json),
16714 PacketAction::Validate { path, json } => (packet::validate(&path), json),
16715 };
16716 match result {
16717 Ok(output) if json_output => {
16718 println!(
16719 "{}",
16720 serde_json::to_string_pretty(&json!({
16721 "ok": true,
16722 "command": "packet",
16723 "result": output,
16724 }))
16725 .expect("failed to serialize packet response")
16726 );
16727 }
16728 Ok(output) => println!("{output}"),
16729 Err(e) => fail(&e),
16730 }
16731}
16732
16733fn cmd_verify(path: &Path, json_output: bool) {
16738 let result = packet::validate(path);
16739 match result {
16740 Ok(output) if json_output => {
16741 println!(
16742 "{}",
16743 serde_json::to_string_pretty(&json!({
16744 "ok": true,
16745 "command": "verify",
16746 "result": output,
16747 }))
16748 .expect("failed to serialize verify response")
16749 );
16750 }
16751 Ok(output) => {
16752 println!("{output}");
16753 println!(
16754 "\nverify: ok\n every file in the manifest matched its claimed sha256.\n pull this packet on another machine, run the same command, see the same line."
16755 );
16756 }
16757 Err(e) => fail(&e),
16758 }
16759}
16760
16761fn cmd_init(path: &Path, name: &str, template: &str, initialize_git: bool, json_output: bool) {
16762 if path.join(".vela").exists() {
16763 fail(&format!(
16764 "already initialized: {} exists",
16765 path.join(".vela").display()
16766 ));
16767 }
16768 let payload = frontier_repo::initialize(
16769 path,
16770 frontier_repo::InitOptions {
16771 name,
16772 template,
16773 initialize_git,
16774 },
16775 )
16776 .unwrap_or_else(|e| fail_return(&e));
16777 if json_output {
16778 println!(
16779 "{}",
16780 serde_json::to_string_pretty(&payload).expect("failed to serialize init report")
16781 );
16782 } else {
16783 println!(
16784 "{} initialized frontier repository in {}",
16785 style::ok("ok"),
16786 path.display()
16787 );
16788 }
16789}
16790
16791fn cmd_agent(action: AgentAction) {
16809 use std::process::Command;
16810 match action {
16811 AgentAction::Init {
16812 name,
16813 framework,
16814 out,
16815 json,
16816 } => {
16817 let slug = name.trim();
16818 if slug.is_empty() {
16819 fail("agent name must be non-empty");
16820 }
16821 if !slug
16824 .chars()
16825 .all(|c| c.is_ascii_lowercase() || c.is_ascii_digit() || c == '-')
16826 {
16827 fail("agent name must be lowercase alphanumeric + hyphens");
16828 }
16829 let valid_frameworks = [
16830 "claude-code",
16831 "claude-api",
16832 "langchain",
16833 "openai",
16834 "agent4science",
16835 "scienceclaw",
16836 "custom",
16837 ];
16838 if !valid_frameworks.contains(&framework.as_str()) {
16839 fail(&format!(
16840 "--framework must be one of: {}",
16841 valid_frameworks.join(", ")
16842 ));
16843 }
16844
16845 let target = out
16846 .clone()
16847 .unwrap_or_else(|| PathBuf::from("agents").join(slug));
16848 if target.exists() {
16849 fail(&format!(
16850 "agent directory already exists: {}",
16851 target.display()
16852 ));
16853 }
16854 let keys_dir = target.join("keys");
16855 std::fs::create_dir_all(&keys_dir)
16856 .unwrap_or_else(|e| fail_return(&format!("create {}: {e}", keys_dir.display())));
16857
16858 let exe = std::env::current_exe()
16862 .unwrap_or_else(|e| fail_return(&format!("cannot locate current executable: {e}")));
16863 let keys_out_str = keys_dir.to_string_lossy().into_owned();
16864 let kp_out = Command::new(&exe)
16865 .args(["sign", "generate-keypair", "--out", &keys_out_str, "--json"])
16866 .output()
16867 .unwrap_or_else(|e| fail_return(&format!("sign.generate-keypair: spawn: {e}")));
16868 if !kp_out.status.success() {
16869 let stderr = String::from_utf8_lossy(&kp_out.stderr);
16870 fail(&format!("sign.generate-keypair failed:\n{stderr}"));
16871 }
16872 let kp_json: Value = serde_json::from_slice(&kp_out.stdout)
16873 .unwrap_or_else(|e| fail_return(&format!("sign.generate-keypair bad json: {e}")));
16874 let public_key = kp_json
16875 .get("public_key")
16876 .and_then(Value::as_str)
16877 .unwrap_or_else(|| fail_return("sign.generate-keypair: missing public_key"))
16878 .to_string();
16879
16880 let date = chrono::Utc::now().format("%Y-%m-%d").to_string();
16881 let agent_id = format!("agent:{slug}-{date}");
16882 let now = chrono::Utc::now().to_rfc3339();
16883
16884 let actor_record = json!({
16888 "schema": "vela.agent_kit.actor.v0.1",
16889 "id": agent_id,
16890 "public_key": public_key,
16891 "algorithm": "ed25519",
16892 "actor_type": "agent",
16893 "created_at": now,
16894 "framework": framework,
16895 "name": slug,
16896 });
16897 std::fs::write(
16898 target.join("actor.json"),
16899 serde_json::to_vec_pretty(&actor_record).expect("serialize actor.json"),
16900 )
16901 .unwrap_or_else(|e| fail_return(&format!("write actor.json: {e}")));
16902
16903 let yaml = format!(
16905 "# v0.131: portable AI-agent kit scaffolded by `vela agent init`.\n\
16906 # The substrate makes the agent-draft / human-verdict\n\
16907 # distinction load-bearing. See docs/AI_ATTRIBUTION.md.\n\
16908 \n\
16909 schema: vela.agent_kit.v0.1\n\
16910 id: {agent_id}\n\
16911 name: {slug}\n\
16912 framework: {framework}\n\
16913 created_at: {now}\n\
16914 \n\
16915 # Workflow:\n\
16916 # 1. A human reviewer registers this agent in a frontier:\n\
16917 # vela actor add <frontier> '{agent_id}' \\\n\
16918 # --pubkey {public_key}\n\
16919 # 2. The agent reads frontier state through the MCP\n\
16920 # server: `vela serve <frontier>` (stdio JSON-RPC).\n\
16921 # Tools include frontier_stats, search_findings,\n\
16922 # get_finding, list_events.\n\
16923 # 3. The agent drafts proposals signed under the\n\
16924 # keypair in keys/ via `vela propose ...` or by\n\
16925 # POSTing to `vela serve --http`.\n\
16926 # 4. A human reviewer adjudicates each proposal.\n\
16927 # No agent-drafted proposal becomes accepted state\n\
16928 # without a signed human verdict.\n"
16929 );
16930 std::fs::write(target.join("agent.yaml"), yaml)
16931 .unwrap_or_else(|e| fail_return(&format!("write agent.yaml: {e}")));
16932
16933 let payload = json!({
16934 "ok": true,
16935 "command": "agent.init",
16936 "agent_id": agent_id,
16937 "name": slug,
16938 "framework": framework,
16939 "public_key": public_key,
16940 "keys_dir": keys_dir.display().to_string(),
16941 "actor_json": target.join("actor.json").display().to_string(),
16942 "agent_yaml": target.join("agent.yaml").display().to_string(),
16943 });
16944 if json {
16945 println!(
16946 "{}",
16947 serde_json::to_string_pretty(&payload).expect("failed to serialize agent.init")
16948 );
16949 } else {
16950 println!("{} scaffolded agent {}", style::ok("agent.init"), agent_id);
16951 println!(" framework: {framework}");
16952 println!(" public_key: {}", &public_key[..16]);
16953 println!(" out: {}", target.display());
16954 println!();
16955 println!(" next: register this agent in a frontier:");
16956 println!(
16957 " vela actor add <frontier> '{agent_id}' --pubkey {}",
16958 &public_key[..16]
16959 );
16960 println!(" see docs/AGENT_QUICKSTART.md for the full workflow.");
16961 }
16962 }
16963 AgentAction::List { root, json } => {
16964 let mut entries: Vec<Value> = Vec::new();
16965 if root.is_dir() {
16966 for entry in std::fs::read_dir(&root)
16967 .unwrap_or_else(|e| fail_return(&format!("read {}: {e}", root.display())))
16968 {
16969 let entry = match entry {
16970 Ok(e) => e,
16971 Err(_) => continue,
16972 };
16973 let actor_json = entry.path().join("actor.json");
16974 if !actor_json.is_file() {
16975 continue;
16976 }
16977 if let Ok(text) = std::fs::read_to_string(&actor_json)
16978 && let Ok(v) = serde_json::from_str::<Value>(&text)
16979 {
16980 entries.push(v);
16981 }
16982 }
16983 }
16984 if json {
16985 println!(
16986 "{}",
16987 serde_json::to_string_pretty(&json!({
16988 "ok": true,
16989 "command": "agent.list",
16990 "root": root.display().to_string(),
16991 "agents": entries,
16992 }))
16993 .expect("failed to serialize agent.list")
16994 );
16995 } else {
16996 println!("agents under {}: {}", root.display(), entries.len());
16997 for a in &entries {
16998 let id = a.get("id").and_then(Value::as_str).unwrap_or("?");
16999 let fw = a.get("framework").and_then(Value::as_str).unwrap_or("?");
17000 println!(" · {id} framework={fw}");
17001 }
17002 }
17003 }
17004 }
17005}
17006
17007fn cmd_quickstart(
17008 path: &Path,
17009 name: &str,
17010 reviewer: &str,
17011 assertion: Option<&str>,
17012 keys_out: Option<&Path>,
17013 json_output: bool,
17014) {
17015 use std::process::Command;
17016
17017 if path.join(".vela").exists() {
17018 fail(&format!(
17019 "already initialized: {} exists",
17020 path.join(".vela").display()
17021 ));
17022 }
17023
17024 let exe = std::env::current_exe()
17025 .unwrap_or_else(|e| fail_return(&format!("cannot locate current executable: {e}")));
17026 let keys_dir = keys_out
17027 .map(Path::to_path_buf)
17028 .unwrap_or_else(|| path.join("keys"));
17029 let assertion_text =
17030 assertion.unwrap_or("Quickstart placeholder claim. Replace with your real assertion.");
17031
17032 let run_step = |label: &str, args: &[&str]| -> std::process::Output {
17033 let out = Command::new(&exe)
17034 .args(args)
17035 .output()
17036 .unwrap_or_else(|e| fail_return(&format!("{label}: failed to spawn: {e}")));
17037 if !out.status.success() {
17038 let stderr = String::from_utf8_lossy(&out.stderr);
17039 fail(&format!("{label} failed:\n{stderr}"));
17040 }
17041 out
17042 };
17043
17044 run_step(
17046 "init",
17047 &[
17048 "init",
17049 path.to_string_lossy().as_ref(),
17050 "--name",
17051 name,
17052 "--no-git",
17053 "--json",
17054 ],
17055 );
17056
17057 let keys_out_str = keys_dir.to_string_lossy().into_owned();
17059 let keypair_out = run_step(
17060 "sign.generate-keypair",
17061 &[
17062 "sign",
17063 "generate-keypair",
17064 "--out",
17065 keys_out_str.as_ref(),
17066 "--json",
17067 ],
17068 );
17069 let keypair_json: serde_json::Value = serde_json::from_slice(&keypair_out.stdout)
17070 .unwrap_or_else(|e| fail_return(&format!("sign.generate-keypair: bad json: {e}")));
17071 let public_key = keypair_json
17072 .get("public_key")
17073 .and_then(|v| v.as_str())
17074 .unwrap_or_else(|| fail_return("sign.generate-keypair: missing public_key in output"))
17075 .to_string();
17076
17077 run_step(
17079 "actor.add",
17080 &[
17081 "actor",
17082 "add",
17083 path.to_string_lossy().as_ref(),
17084 reviewer,
17085 "--pubkey",
17086 public_key.as_str(),
17087 "--json",
17088 ],
17089 );
17090
17091 let finding_out = run_step(
17093 "finding.add",
17094 &[
17095 "finding",
17096 "add",
17097 path.to_string_lossy().as_ref(),
17098 "--assertion",
17099 assertion_text,
17100 "--author",
17101 reviewer,
17102 "--apply",
17103 "--json",
17104 ],
17105 );
17106 let finding_json: serde_json::Value = serde_json::from_slice(&finding_out.stdout)
17107 .unwrap_or_else(|e| fail_return(&format!("finding.add: bad json: {e}")));
17108 let finding_id = finding_json
17109 .get("finding_id")
17110 .and_then(|v| v.as_str())
17111 .map(str::to_string);
17112
17113 if json_output {
17114 let payload = json!({
17115 "ok": true,
17116 "command": "quickstart",
17117 "frontier": path.display().to_string(),
17118 "name": name,
17119 "reviewer": reviewer,
17120 "public_key": public_key,
17121 "keys_dir": keys_dir.display().to_string(),
17122 "finding_id": finding_id,
17123 "next_steps": [
17124 format!("vela serve {}", path.display()),
17125 format!(
17126 "vela ingest <paper.pdf|doi:...> --frontier {}",
17127 path.display()
17128 ),
17129 format!("vela log {}", path.display()),
17130 ],
17131 });
17132 println!(
17133 "{}",
17134 serde_json::to_string_pretty(&payload).expect("failed to serialize quickstart")
17135 );
17136 return;
17137 }
17138
17139 println!();
17140 println!(
17141 " {}",
17142 format!("VELA · QUICKSTART · {}", path.display())
17143 .to_uppercase()
17144 .dimmed()
17145 );
17146 println!(" {}", style::tick_row(60));
17147 println!(" frontier: {}", path.display());
17148 println!(" name: {name}");
17149 println!(" reviewer: {reviewer}");
17150 println!(" keys: {}", keys_dir.display());
17151 println!(" pubkey: {}…", &public_key[..16]);
17152 if let Some(id) = finding_id.as_deref() {
17153 println!(" finding: {id}");
17154 }
17155 println!();
17156 println!(" {}", style::ok("done"));
17157 println!(" next:");
17158 println!(" vela serve {}", path.display());
17159 println!(
17160 " vela ingest <paper.pdf|doi:10.xxx|pmid:xxx> --frontier {}",
17161 path.display()
17162 );
17163 println!(" vela log {}", path.display());
17164 println!();
17165}
17166
17167fn cmd_lock(path: &Path, check: bool, json_output: bool) {
17173 if check {
17174 cmd_lock_check(path, json_output);
17175 return;
17176 }
17177 let payload = crate::frontier_repo::materialize(path).unwrap_or_else(|e| fail_return(&e));
17178 if json_output {
17179 println!(
17180 "{}",
17181 serde_json::to_string_pretty(&json!({
17182 "ok": true,
17183 "command": "lock",
17184 "path": path.display().to_string(),
17185 "snapshot_hash": payload.get("snapshot_hash"),
17186 "event_log_hash": payload.get("event_log_hash"),
17187 "proposal_state_hash": payload.get("proposal_state_hash"),
17188 }))
17189 .expect("failed to serialize lock report")
17190 );
17191 return;
17192 }
17193 println!();
17194 println!(
17195 " {}",
17196 format!("VELA · LOCK · {}", path.display())
17197 .to_uppercase()
17198 .dimmed()
17199 );
17200 println!(" {}", style::tick_row(60));
17201 println!(
17202 " snapshot_hash: {}",
17203 payload
17204 .get("snapshot_hash")
17205 .and_then(|v| v.as_str())
17206 .unwrap_or("?")
17207 );
17208 println!(
17209 " event_log_hash: {}",
17210 payload
17211 .get("event_log_hash")
17212 .and_then(|v| v.as_str())
17213 .unwrap_or("?")
17214 );
17215 println!(
17216 " proposal_state_hash: {}",
17217 payload
17218 .get("proposal_state_hash")
17219 .and_then(|v| v.as_str())
17220 .unwrap_or("?")
17221 );
17222 println!();
17223 println!(" {}", style::ok("locked"));
17224}
17225
17226fn cmd_lock_check(path: &Path, json_output: bool) {
17227 use crate::frontier_repo::read_lock;
17228 let lock = read_lock(path).unwrap_or_else(|e| fail_return(&e));
17229 let Some(lock) = lock else {
17230 fail("lock --check: no vela.lock found at path");
17231 };
17232 let project = repo::load_from_path(path).unwrap_or_else(|e| fail_return(&e));
17233 let current_snapshot = format!("sha256:{}", crate::events::snapshot_hash(&project));
17234 let current_event_log = format!("sha256:{}", crate::events::event_log_hash(&project.events));
17235 let mut drift: Vec<String> = Vec::new();
17236 if lock.snapshot_hash != current_snapshot {
17237 drift.push(format!(
17238 "snapshot_hash: lock={} current={}",
17239 lock.snapshot_hash, current_snapshot
17240 ));
17241 }
17242 if lock.event_log_hash != current_event_log {
17243 drift.push(format!(
17244 "event_log_hash: lock={} current={}",
17245 lock.event_log_hash, current_event_log
17246 ));
17247 }
17248 let ok = drift.is_empty();
17249 if json_output {
17250 println!(
17251 "{}",
17252 serde_json::to_string_pretty(&json!({
17253 "ok": ok,
17254 "command": "lock.check",
17255 "path": path.display().to_string(),
17256 "drift": drift,
17257 "lock_snapshot_hash": lock.snapshot_hash,
17258 "current_snapshot_hash": current_snapshot,
17259 "lock_event_log_hash": lock.event_log_hash,
17260 "current_event_log_hash": current_event_log,
17261 "dependency_count": lock.dependencies.len(),
17262 }))
17263 .expect("failed to serialize lock check report")
17264 );
17265 } else {
17266 println!();
17267 println!(
17268 " {}",
17269 format!("VELA · LOCK · CHECK · {}", path.display())
17270 .to_uppercase()
17271 .dimmed()
17272 );
17273 println!(" {}", style::tick_row(60));
17274 if ok {
17275 println!(" snapshot_hash: {}", lock.snapshot_hash);
17276 println!(" event_log_hash: {}", lock.event_log_hash);
17277 println!(" dependencies pinned: {}", lock.dependencies.len());
17278 println!();
17279 println!(" {} on-disk state matches vela.lock", style::ok("ok"));
17280 } else {
17281 println!(" {} drift detected:", style::err_prefix());
17282 for d in &drift {
17283 println!(" - {d}");
17284 }
17285 }
17286 }
17287 if !ok {
17288 std::process::exit(1);
17289 }
17290}
17291
17292fn cmd_doc(path: &Path, out: Option<&Path>, json_output: bool) {
17297 let project = repo::load_from_path(path).unwrap_or_else(|e| fail_return(&e));
17298 let out_dir = out
17299 .map(Path::to_path_buf)
17300 .unwrap_or_else(|| path.join("doc"));
17301 let report =
17302 crate::doc_render::write_site(&project, &out_dir).unwrap_or_else(|e| fail_return(&e));
17303 if json_output {
17304 println!(
17305 "{}",
17306 serde_json::to_string_pretty(&report).expect("failed to serialize doc report")
17307 );
17308 return;
17309 }
17310 println!();
17311 println!(
17312 " {}",
17313 format!("VELA · DOC · {}", path.display())
17314 .to_uppercase()
17315 .dimmed()
17316 );
17317 println!(" {}", style::tick_row(60));
17318 println!(" frontier_id: {}", report.frontier_id);
17319 println!(" out: {}", report.out);
17320 println!(" files written: {}", report.files_written);
17321 println!(" findings: {}", report.findings_documented);
17322 println!(" events: {}", report.events_documented);
17323 println!();
17324 println!(
17325 " {} open {}/index.html in a browser",
17326 style::ok("ok"),
17327 report.out
17328 );
17329}
17330
17331fn cmd_import(frontier_path: &Path, into: Option<&Path>) {
17332 let frontier = repo::load_from_path(frontier_path).unwrap_or_else(|e| fail_return(&e));
17333 let target = into
17334 .map(Path::to_path_buf)
17335 .unwrap_or_else(|| PathBuf::from(frontier.project.name.replace(' ', "-").to_lowercase()));
17336 repo::init_repo(&target, &frontier).unwrap_or_else(|e| fail(&e));
17337 println!(
17338 "{} {} findings · {}",
17339 style::ok("imported"),
17340 frontier.findings.len(),
17341 target.display()
17342 );
17343}
17344
17345fn cmd_locator_repair(
17346 path: &Path,
17347 atom_id: &str,
17348 locator_override: Option<&str>,
17349 reviewer: &str,
17350 reason: &str,
17351 apply: bool,
17352 json_output: bool,
17353) {
17354 let report = state::repair_evidence_atom_locator(
17355 path,
17356 atom_id,
17357 locator_override,
17358 reviewer,
17359 reason,
17360 apply,
17361 )
17362 .unwrap_or_else(|e| fail_return(&e));
17363 print_state_report(&report, json_output);
17364}
17365
17366async fn cmd_source_fetch(
17371 identifier: &str,
17372 cache_root: Option<&Path>,
17373 out_path: Option<&Path>,
17374 refresh: bool,
17375 _json_output: bool,
17376) {
17377 use sha2::{Digest, Sha256};
17378
17379 let normalized = normalize_source_identifier(identifier);
17380 let cache_path = cache_root.map(|root| {
17381 let hash = format!("{:x}", Sha256::digest(normalized.as_bytes()));
17382 root.join("sources")
17383 .join("cache")
17384 .join(format!("{hash}.json"))
17385 });
17386
17387 if !refresh
17388 && let Some(p) = cache_path.as_ref()
17389 && p.is_file()
17390 {
17391 let body = std::fs::read_to_string(p)
17392 .unwrap_or_else(|e| fail_return(&format!("read cache {}: {e}", p.display())));
17393 emit_source_fetch_result(&body, out_path);
17394 return;
17395 }
17396
17397 let result = fetch_source_metadata(&normalized).await;
17398 let json = match result {
17399 Ok(value) => serde_json::to_string_pretty(&value)
17400 .unwrap_or_else(|e| fail_return(&format!("serialize fetched record: {e}"))),
17401 Err(e) => fail_return(&format!("source-fetch '{identifier}': {e}")),
17402 };
17403
17404 if let Some(p) = cache_path.as_ref() {
17405 if let Some(parent) = p.parent() {
17406 std::fs::create_dir_all(parent)
17407 .unwrap_or_else(|e| fail_return(&format!("mkdir {}: {e}", parent.display())));
17408 }
17409 std::fs::write(p, &json)
17410 .unwrap_or_else(|e| fail_return(&format!("write cache {}: {e}", p.display())));
17411 }
17412 emit_source_fetch_result(&json, out_path);
17413}
17414
17415fn emit_source_fetch_result(body: &str, out_path: Option<&Path>) {
17416 if let Some(p) = out_path {
17417 if let Some(parent) = p.parent() {
17418 let _ = std::fs::create_dir_all(parent);
17419 }
17420 std::fs::write(p, body)
17421 .unwrap_or_else(|e| fail_return(&format!("write {}: {e}", p.display())));
17422 } else {
17423 println!("{body}");
17424 }
17425}
17426
/// Normalize a raw source identifier to a prefixed form (`doi:`, `pmid:`,
/// `nct:`, `pmc:`). Unrecognized inputs are returned trimmed but unchanged.
fn normalize_source_identifier(raw: &str) -> String {
    let trimmed = raw.trim();
    // Already-prefixed identifiers pass through untouched.
    if ["doi:", "pmid:", "nct:", "pmc:"]
        .iter()
        .any(|p| trimmed.starts_with(p))
    {
        return trimmed.to_string();
    }
    // Bare DOIs always begin with the "10." registrant prefix.
    if trimmed.starts_with("10.") {
        return format!("doi:{trimmed}");
    }
    // Bare trial ids: strip the NCT marker, keep the digits under nct:.
    // Fix: the original appended `.split_at(0).0`, which is always the empty
    // string, so every bare NCT id collapsed to just "nct:".
    if trimmed.starts_with("NCT") || trimmed.starts_with("nct") {
        return format!("nct:{}", trimmed.to_uppercase().trim_start_matches("NCT"));
    }
    // All-digit identifiers are assumed to be PubMed ids.
    if trimmed.chars().all(|c| c.is_ascii_digit()) {
        return format!("pmid:{trimmed}");
    }
    trimmed.to_string()
}
17455
17456async fn fetch_source_metadata(normalized: &str) -> Result<Value, String> {
17457 let client = Client::builder()
17458 .user_agent("vela/0.66.0 (vela-source-fetch; +https://vela-science.fly.dev)")
17459 .timeout(std::time::Duration::from_secs(30))
17460 .build()
17461 .map_err(|e| format!("client build: {e}"))?;
17462 if let Some(rest) = normalized.strip_prefix("doi:") {
17463 let mut record = fetch_via_crossref(&client, rest).await?;
17470 let crossref_abstract = record
17471 .get("abstract")
17472 .and_then(|v| v.as_str())
17473 .unwrap_or("");
17474 if crossref_abstract.is_empty()
17475 && let Some(pmid) = resolve_doi_to_pmid(&client, rest).await
17476 && let Ok(pubmed_record) = fetch_via_pubmed(&client, &pmid).await
17477 {
17478 let pubmed_abstract = pubmed_record
17479 .get("abstract")
17480 .and_then(|v| v.as_str())
17481 .unwrap_or("")
17482 .to_string();
17483 if !pubmed_abstract.is_empty()
17484 && let Some(obj) = record.as_object_mut()
17485 {
17486 obj.insert("abstract".to_string(), Value::String(pubmed_abstract));
17487 obj.insert(
17488 "abstract_source".to_string(),
17489 Value::String(format!("pubmed:{pmid}")),
17490 );
17491 }
17492 }
17493 return Ok(record);
17494 }
17495 if let Some(rest) = normalized.strip_prefix("pmid:") {
17496 return fetch_via_pubmed(&client, rest).await;
17497 }
17498 if let Some(rest) = normalized.strip_prefix("nct:") {
17499 return fetch_via_ctgov(&client, rest).await;
17500 }
17501 Err(format!(
17502 "unsupported source identifier '{normalized}'. Use doi:, pmid:, or nct: prefix."
17503 ))
17504}
17505
17506async fn resolve_doi_to_pmid(client: &Client, doi: &str) -> Option<String> {
17510 let url = format!(
17511 "https://eutils.ncbi.nlm.nih.gov/entrez/eutils/esearch.fcgi?db=pubmed&term={}[doi]&retmode=json",
17512 urlencoding::encode(doi)
17513 );
17514 let resp = client.get(&url).send().await.ok()?;
17515 if !resp.status().is_success() {
17516 return None;
17517 }
17518 let body: Value = resp.json().await.ok()?;
17519 let id_list = body.pointer("/esearchresult/idlist")?.as_array()?;
17520 if id_list.len() != 1 {
17521 return None;
17524 }
17525 id_list.first()?.as_str().map(|s| s.to_string())
17526}
17527
17528async fn fetch_via_crossref(client: &Client, doi: &str) -> Result<Value, String> {
17529 let url = format!("https://api.crossref.org/works/{doi}");
17530 let resp = client
17531 .get(&url)
17532 .send()
17533 .await
17534 .map_err(|e| format!("crossref get: {e}"))?;
17535 if !resp.status().is_success() {
17536 return Err(format!("crossref returned {}", resp.status()));
17537 }
17538 let body: Value = resp
17539 .json()
17540 .await
17541 .map_err(|e| format!("crossref json: {e}"))?;
17542 let work = body.get("message").cloned().unwrap_or(Value::Null);
17543 let title = work
17544 .get("title")
17545 .and_then(|v| v.as_array())
17546 .and_then(|a| a.first())
17547 .and_then(|v| v.as_str())
17548 .unwrap_or("")
17549 .to_string();
17550 let abstract_html = work
17551 .get("abstract")
17552 .and_then(|v| v.as_str())
17553 .unwrap_or("")
17554 .to_string();
17555 let abstract_text = strip_jats_tags(&abstract_html);
17556 let year = work
17557 .get("issued")
17558 .and_then(|v| v.get("date-parts"))
17559 .and_then(|v| v.as_array())
17560 .and_then(|a| a.first())
17561 .and_then(|v| v.as_array())
17562 .and_then(|a| a.first())
17563 .and_then(|v| v.as_i64());
17564 let journal = work
17565 .get("container-title")
17566 .and_then(|v| v.as_array())
17567 .and_then(|a| a.first())
17568 .and_then(|v| v.as_str())
17569 .unwrap_or("")
17570 .to_string();
17571 let authors = work
17572 .get("author")
17573 .and_then(|v| v.as_array())
17574 .map(|arr| {
17575 arr.iter()
17576 .filter_map(|a| {
17577 let given = a.get("given").and_then(|v| v.as_str()).unwrap_or("");
17578 let family = a.get("family").and_then(|v| v.as_str()).unwrap_or("");
17579 let combined = format!("{given} {family}").trim().to_string();
17580 if combined.is_empty() {
17581 None
17582 } else {
17583 Some(combined)
17584 }
17585 })
17586 .collect::<Vec<_>>()
17587 })
17588 .unwrap_or_default();
17589 Ok(json!({
17590 "schema": "vela.source_fetch.v0.1",
17591 "identifier": format!("doi:{doi}"),
17592 "source": "crossref",
17593 "title": title,
17594 "abstract": abstract_text,
17595 "year": year,
17596 "journal": journal,
17597 "authors": authors,
17598 "retrieved_at": chrono::Utc::now().to_rfc3339(),
17599 }))
17600}
17601
17602async fn fetch_via_pubmed(client: &Client, pmid: &str) -> Result<Value, String> {
17603 let url = format!(
17604 "https://eutils.ncbi.nlm.nih.gov/entrez/eutils/efetch.fcgi?db=pubmed&id={pmid}&retmode=xml"
17605 );
17606 let resp = client
17607 .get(&url)
17608 .send()
17609 .await
17610 .map_err(|e| format!("pubmed get: {e}"))?;
17611 if !resp.status().is_success() {
17612 return Err(format!("pubmed returned {}", resp.status()));
17613 }
17614 let xml = resp.text().await.map_err(|e| format!("pubmed text: {e}"))?;
17615 let title = extract_xml_text(&xml, "<ArticleTitle>", "</ArticleTitle>");
17616 let abstract_text = extract_xml_text(&xml, "<AbstractText>", "</AbstractText>");
17617 let year = extract_xml_text(&xml, "<Year>", "</Year>")
17618 .parse::<i64>()
17619 .ok();
17620 let journal = extract_xml_text(&xml, "<Title>", "</Title>");
17621 Ok(json!({
17622 "schema": "vela.source_fetch.v0.1",
17623 "identifier": format!("pmid:{pmid}"),
17624 "source": "pubmed",
17625 "title": title,
17626 "abstract": abstract_text,
17627 "year": year,
17628 "journal": journal,
17629 "authors": Vec::<String>::new(),
17630 "retrieved_at": chrono::Utc::now().to_rfc3339(),
17631 }))
17632}
17633
17634async fn fetch_via_ctgov(client: &Client, nct: &str) -> Result<Value, String> {
17635 let nct_clean = nct.trim();
17636 let nct_id = if nct_clean.starts_with("NCT") || nct_clean.starts_with("nct") {
17637 nct_clean.to_uppercase()
17638 } else {
17639 format!("NCT{nct_clean}")
17640 };
17641 let url = format!("https://clinicaltrials.gov/api/v2/studies/{nct_id}");
17642 let resp = client
17643 .get(&url)
17644 .send()
17645 .await
17646 .map_err(|e| format!("ctgov get: {e}"))?;
17647 if !resp.status().is_success() {
17648 return Err(format!("ctgov returned {}", resp.status()));
17649 }
17650 let body: Value = resp.json().await.map_err(|e| format!("ctgov json: {e}"))?;
17651 let title = body
17652 .pointer("/protocolSection/identificationModule/briefTitle")
17653 .and_then(|v| v.as_str())
17654 .unwrap_or("")
17655 .to_string();
17656 let abstract_text = body
17657 .pointer("/protocolSection/descriptionModule/briefSummary")
17658 .and_then(|v| v.as_str())
17659 .unwrap_or("")
17660 .to_string();
17661 let phase = body
17662 .pointer("/protocolSection/designModule/phases")
17663 .and_then(|v| v.as_array())
17664 .and_then(|a| a.first())
17665 .and_then(|v| v.as_str())
17666 .unwrap_or("")
17667 .to_string();
17668 Ok(json!({
17669 "schema": "vela.source_fetch.v0.1",
17670 "identifier": format!("nct:{nct_id}"),
17671 "source": "clinicaltrials.gov",
17672 "title": title,
17673 "abstract": abstract_text,
17674 "year": Value::Null,
17675 "journal": phase,
17676 "authors": Vec::<String>::new(),
17677 "retrieved_at": chrono::Utc::now().to_rfc3339(),
17678 }))
17679}
17680
/// Naive XML scrape: the trimmed text between the first `open` tag and the
/// next `close` tag, or "" when either tag is absent.
fn extract_xml_text(xml: &str, open: &str, close: &str) -> String {
    xml.split_once(open)
        .and_then(|(_, rest)| rest.split_once(close))
        .map(|(inner, _)| inner.trim().to_string())
        .unwrap_or_default()
}
17690
/// Strip JATS/XML markup: drop everything between '<' and '>' (inclusive),
/// then collapse all runs of whitespace to single spaces.
fn strip_jats_tags(html: &str) -> String {
    let mut text = String::with_capacity(html.len());
    let mut inside_tag = false;
    for ch in html.chars() {
        if ch == '<' {
            inside_tag = true;
        } else if ch == '>' {
            // A stray '>' outside a tag is also dropped, matching the
            // state-machine behavior of treating it as a tag terminator.
            inside_tag = false;
        } else if !inside_tag {
            text.push(ch);
        }
    }
    text.split_whitespace().collect::<Vec<_>>().join(" ")
}
17704
17705fn cmd_span_repair(
17706 path: &Path,
17707 finding_id: &str,
17708 section: &str,
17709 text: &str,
17710 reviewer: &str,
17711 reason: &str,
17712 apply: bool,
17713 json_output: bool,
17714) {
17715 let report =
17716 state::repair_finding_span(path, finding_id, section, text, reviewer, reason, apply)
17717 .unwrap_or_else(|e| fail_return(&e));
17718 print_state_report(&report, json_output);
17719}
17720
17721#[allow(clippy::too_many_arguments)]
17722fn cmd_entity_resolve(
17723 path: &Path,
17724 finding_id: &str,
17725 entity_name: &str,
17726 source: &str,
17727 id: &str,
17728 confidence: f64,
17729 matched_name: Option<&str>,
17730 resolution_method: &str,
17731 reviewer: &str,
17732 reason: &str,
17733 apply: bool,
17734 json_output: bool,
17735) {
17736 let report = state::resolve_finding_entity(
17737 path,
17738 finding_id,
17739 entity_name,
17740 source,
17741 id,
17742 confidence,
17743 matched_name,
17744 resolution_method,
17745 reviewer,
17746 reason,
17747 apply,
17748 )
17749 .unwrap_or_else(|e| fail_return(&e));
17750 print_state_report(&report, json_output);
17751}
17752
17753fn cmd_propagate(
17754 path: &Path,
17755 retract: Option<String>,
17756 reduce_confidence: Option<String>,
17757 to: Option<f64>,
17758 output: Option<&Path>,
17759) {
17760 let mut frontier = load_frontier_or_fail(path);
17761 let (finding_id, action, label) = if let Some(id) = retract {
17762 (id, propagate::PropagationAction::Retracted, "retraction")
17763 } else if let Some(id) = reduce_confidence {
17764 let score = to.unwrap_or_else(|| fail_return("--reduce-confidence requires --to <score>"));
17765 if !(0.0..=1.0).contains(&score) {
17766 fail("--to must be between 0.0 and 1.0");
17767 }
17768 (
17769 id,
17770 propagate::PropagationAction::ConfidenceReduced { new_score: score },
17771 "confidence reduction",
17772 )
17773 } else {
17774 fail("specify --retract <id> or --reduce-confidence <id> --to <score>");
17775 };
17776 if !frontier.findings.iter().any(|f| f.id == finding_id) {
17777 fail(&format!("finding not found: {finding_id}"));
17778 }
17779 let result = propagate::propagate_correction(&mut frontier, &finding_id, action);
17780 frontier.review_events.extend(result.events.clone());
17785 project::recompute_stats(&mut frontier);
17786 propagate::print_result(&result, label, &finding_id);
17787 let out = output.unwrap_or(path);
17788 repo::save_to_path(out, &frontier).expect("Failed to save frontier");
17789 println!(" output: {}", out.display());
17790}
17791
/// Print a ready-to-paste MCP server configuration snippet. `source` (a
/// single frontier) wins over `frontiers` (a directory); the fallback is the
/// default `frontier.json`.
fn cmd_mcp_setup(source: Option<&Path>, frontiers: Option<&Path>) {
    let (args, source_desc) = match (source, frontiers) {
        (Some(path), _) => (
            format!(r#""serve", "{}""#, path.display()),
            path.display().to_string(),
        ),
        (None, Some(path)) => (
            format!(r#""serve", "--frontiers", "{}""#, path.display()),
            path.display().to_string(),
        ),
        (None, None) => (
            r#""serve", "frontier.json""#.to_string(),
            "frontier.json".to_string(),
        ),
    };
    println!(
        r#"Add this MCP server configuration to your client:

{{
  "mcpServers": {{
    "vela": {{
      "command": "vela",
      "args": [{args}]
    }}
  }}
}}

Source: {source_desc}"#
    );
}
17819
17820fn parse_entities(input: &str) -> Vec<(String, String)> {
17821 if input.trim().is_empty() {
17822 return Vec::new();
17823 }
17824 input
17825 .split(',')
17826 .filter_map(|pair| {
17827 let parts = pair.trim().splitn(2, ':').collect::<Vec<_>>();
17828 if parts.len() == 2 {
17829 Some((parts[0].trim().to_string(), parts[1].trim().to_string()))
17830 } else {
17831 eprintln!(
17832 "{} skipping malformed entity '{}'",
17833 style::warn("warn"),
17834 pair.trim()
17835 );
17836 None
17837 }
17838 })
17839 .collect()
17840}
17841
17842fn parse_evidence_spans(inputs: &[String]) -> Vec<Value> {
17843 inputs
17844 .iter()
17845 .filter_map(|input| {
17846 let trimmed = input.trim();
17847 if trimmed.is_empty() {
17848 return None;
17849 }
17850 if trimmed.starts_with('{') {
17851 match serde_json::from_str::<Value>(trimmed) {
17852 Ok(value @ Value::Object(_)) => return Some(value),
17853 Ok(_) | Err(_) => {
17854 eprintln!(
17855 "{} evidence span JSON should be an object; storing as text",
17856 style::warn("warn")
17857 );
17858 }
17859 }
17860 }
17861 Some(json!({
17862 "section": "curator_source",
17863 "text": trimmed,
17864 }))
17865 })
17866 .collect()
17867}
17868
17869fn hash_path(path: &Path) -> Result<String, String> {
17870 let mut hasher = Sha256::new();
17871 if path.is_file() {
17872 let bytes = std::fs::read(path)
17873 .map_err(|e| format!("Failed to read {} for hashing: {e}", path.display()))?;
17874 hasher.update(&bytes);
17875 } else if path.is_dir() {
17876 let mut files = Vec::new();
17877 collect_hash_files(path, path, &mut files)?;
17878 files.sort();
17879 for rel in files {
17880 hasher.update(rel.to_string_lossy().as_bytes());
17881 let bytes = std::fs::read(path.join(&rel))
17882 .map_err(|e| format!("Failed to read {} for hashing: {e}", rel.display()))?;
17883 hasher.update(bytes);
17884 }
17885 } else {
17886 return Err(format!("Cannot hash missing path {}", path.display()));
17887 }
17888 Ok(format!("{:x}", hasher.finalize()))
17889}
17890
17891fn load_frontier_or_fail(path: &Path) -> project::Project {
17892 repo::load_from_path(path).unwrap_or_else(|e| {
17893 fail_return(&format!(
17894 "Failed to load frontier '{}': {e}",
17895 path.display()
17896 ))
17897 })
17898}
17899
17900fn hash_path_or_fail(path: &Path) -> String {
17901 hash_path(path).unwrap_or_else(|e| {
17902 fail_return(&format!(
17903 "Failed to hash frontier '{}': {e}",
17904 path.display()
17905 ))
17906 })
17907}
17908
/// Recursively gather every regular file under `dir`, pushing each path
/// relative to `root`. Symlinks and other non-file, non-dir entries are
/// silently skipped.
fn collect_hash_files(root: &Path, dir: &Path, files: &mut Vec<PathBuf>) -> Result<(), String> {
    let entries =
        std::fs::read_dir(dir).map_err(|e| format!("Failed to read {}: {e}", dir.display()))?;
    for entry in entries {
        let path = entry
            .map_err(|e| format!("Failed to read directory entry: {e}"))?
            .path();
        if path.is_dir() {
            collect_hash_files(root, &path, files)?;
        } else if path.is_file() {
            let rel = path.strip_prefix(root).map_err(|e| e.to_string())?;
            files.push(rel.to_path_buf());
        }
    }
    Ok(())
}
17927
17928fn schema_error_suggestion(error: &str) -> &'static str {
17929 if schema_error_action(error).is_some() {
17930 "Run `vela normalize` to repair deterministic frontier state."
17931 } else {
17932 "Inspect and correct the referenced frontier field."
17933 }
17934}
17935
17936fn schema_error_fix(error: &str) -> bool {
17937 schema_error_action(error).is_some()
17938}
17939
/// Map a validation error message to the normalize action that repairs it,
/// or `None` when no deterministic repair exists.
fn schema_error_action(error: &str) -> Option<&'static str> {
    // Substrings indicating metadata/stats drift that normalize recomputes.
    const METADATA_MARKERS: [&str; 5] = [
        "stats.findings",
        "stats.links",
        "Invalid compiler",
        "Invalid vela_version",
        "Invalid schema",
    ];
    if METADATA_MARKERS.iter().any(|marker| error.contains(marker)) {
        Some("normalize_metadata_and_stats")
    } else if error.contains("does not match content-address") {
        Some("rewrite_ids")
    } else {
        None
    }
}
17954
17955fn build_repair_plan(diagnostics: &[Value]) -> Vec<Value> {
17956 let mut actions = std::collections::BTreeMap::<String, usize>::new();
17957 for diagnostic in diagnostics {
17958 if let Some(action) = diagnostic.get("normalize_action").and_then(Value::as_str) {
17959 *actions.entry(action.to_string()).or_default() += 1;
17960 }
17961 }
17962 actions
17963 .into_iter()
17964 .map(|(action, count)| {
17965 let command = if action == "rewrite_ids" {
17966 "vela normalize <frontier> --write --rewrite-ids --id-map id-map.json"
17967 } else {
17968 "vela normalize <frontier> --write"
17969 };
17970 json!({
17971 "action": action,
17972 "count": count,
17973 "command": command,
17974 })
17975 })
17976 .collect()
17977}
17978
17979fn cmd_integrity(frontier: &Path, json: bool) {
17980 let report = state_integrity::analyze_path(frontier).unwrap_or_else(|e| fail_return(&e));
17981 if json {
17982 println!(
17983 "{}",
17984 serde_json::to_string_pretty(&report).expect("failed to serialize integrity report")
17985 );
17986 } else {
17987 println!("vela integrity");
17988 println!(" frontier: {}", frontier.display());
17989 println!(" status: {}", report.status);
17990 println!(" proof freshness: {}", report.proof_freshness);
17991 println!(" structural errors: {}", report.structural_errors.len());
17992 for error in report.structural_errors.iter().take(8) {
17993 println!(" - {}: {}", error.rule_id, error.message);
17994 }
17995 }
17996}
17997
17998fn cmd_impact(frontier: &Path, finding_id: &str, depth: Option<usize>, json: bool) {
17999 let report =
18000 impact::analyze_path(frontier, finding_id, depth).unwrap_or_else(|e| fail_return(&e));
18001 if json {
18002 println!(
18003 "{}",
18004 serde_json::to_string_pretty(&report).expect("failed to serialize impact report")
18005 );
18006 } else {
18007 println!("vela impact");
18008 println!(" finding: {}", report.target.id);
18009 println!(" frontier: {}", report.frontier.vfr_id);
18010 println!(" direct dependents: {}", report.summary.direct_dependents);
18011 println!(" downstream: {}", report.summary.total_downstream);
18012 println!(" open proposals: {}", report.summary.open_proposals);
18013 println!(" accepted events: {}", report.summary.accepted_events);
18014 println!(" proof: {}", report.summary.proof_status);
18015 }
18016}
18017
/// Handle `vela discord`: compute the per-finding discord assignment for a
/// frontier and print a summary (kind histogram plus affected findings),
/// either as pretty JSON or as human-readable text, optionally filtered to
/// findings carrying one specific discord kind.
fn cmd_discord(frontier: &Path, json: bool, kind_filter: Option<&str>) {
    use crate::discord::DiscordKind;
    use crate::discord_compute::compute_discord_assignment;

    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
    let assignment = compute_discord_assignment(&project);
    // Findings that carry at least one discord kind.
    let support = assignment.frontier_support();

    // One row per supported finding: (finding id, its discord kind names).
    let mut rows: Vec<(String, Vec<String>)> = Vec::new();
    for context in support.iter() {
        let set = assignment.get(context);
        let kinds: Vec<String> = set.iter().map(|k| k.as_str().to_string()).collect();
        // With a kind filter, keep only findings that carry that kind.
        if let Some(filter) = kind_filter
            && !kinds.iter().any(|k| k == filter)
        {
            continue;
        }
        rows.push((context.clone(), kinds));
    }

    // Histogram of kind -> number of findings carrying it. Note this is
    // computed over the full assignment, not the (possibly filtered) rows.
    let mut histogram: std::collections::BTreeMap<&'static str, usize> =
        std::collections::BTreeMap::new();
    for kind in DiscordKind::ALL {
        let count = assignment
            .iter()
            .filter(|(_, set)| set.contains(*kind))
            .count();
        if count > 0 {
            histogram.insert(kind.as_str(), count);
        }
    }

    let total_findings = project.findings.len();
    let frontier_id = project
        .frontier_id
        .clone()
        .unwrap_or_else(|| String::from("<unknown>"));

    // JSON mode: emit one self-contained report object and return early.
    if json {
        let row_value = |row: &(String, Vec<String>)| {
            serde_json::json!({
                "finding_id": row.0,
                "discord_kinds": row.1,
            })
        };
        let report = serde_json::json!({
            "frontier_id": frontier_id,
            "total_findings": total_findings,
            "frontier_support_size": support.len(),
            "filtered_row_count": rows.len(),
            "filter_kind": kind_filter,
            "histogram": histogram,
            "rows": rows.iter().map(row_value).collect::<Vec<_>>(),
        });
        println!(
            "{}",
            serde_json::to_string_pretty(&report).expect("serialize discord report")
        );
        return;
    }

    // Text mode: header, optional filter line, histogram, then rows.
    println!("vela discord");
    println!(" frontier: {frontier_id}");
    println!(" total findings: {total_findings}");
    println!(
        " frontier support (any discord): {} of {}",
        support.len(),
        total_findings
    );
    if let Some(k) = kind_filter {
        println!(" filter: kind = {k}");
    }
    println!();
    if histogram.is_empty() {
        println!(" no discord detected.");
    } else {
        println!(" discord histogram:");
        for (k, n) in &histogram {
            println!(" {n:>4} {k}");
        }
    }
    if !rows.is_empty() {
        println!();
        // Cap the listing at 50 findings and report the overflow count.
        println!(" findings with discord (showing up to 50):");
        for (fid, kinds) in rows.iter().take(50) {
            println!(" {fid} · {}", kinds.join(", "));
        }
        if rows.len() > 50 {
            println!(" ... and {} more", rows.len() - 50);
        }
    }
}
18115
18116fn empty_signal_report() -> signals::SignalReport {
18117 signals::SignalReport {
18118 schema: "vela.signals.v0".to_string(),
18119 frontier: "unavailable".to_string(),
18120 signals: Vec::new(),
18121 review_queue: Vec::new(),
18122 proof_readiness: signals::ProofReadiness {
18123 status: "unavailable".to_string(),
18124 blockers: 0,
18125 warnings: 0,
18126 caveats: vec!["Frontier could not be loaded for signal analysis.".to_string()],
18127 },
18128 }
18129}
18130
18131fn print_signal_summary(report: &signals::SignalReport, strict: bool) {
18132 println!();
18133 println!(" {}", "SIGNALS".dimmed());
18134 println!(" {}", style::tick_row(60));
18135 println!(" total signals: {}", report.signals.len());
18136 println!(" proof readiness: {}", report.proof_readiness.status);
18137 if !report.review_queue.is_empty() {
18138 println!(" review queue: {} items", report.review_queue.len());
18139 }
18140 if strict && report.proof_readiness.status != "ready" {
18141 println!(
18142 " {} proof readiness has blocking signals.",
18143 style::lost("strict check failed")
18144 );
18145 }
18146}
18147
/// Write `value` as pretty JSON to `relative_path` inside `packet_dir` and
/// record the new file in both packet manifests.
///
/// Bookkeeping happens in three phases:
///  1. serialize and write the payload file (creating parent dirs);
///  2. upsert its `{path, sha256, bytes}` entry into `manifest.json`
///     (under `included_files`) and `packet.lock.json` (under `files`);
///  3. because phase 2 rewrote `packet.lock.json`, re-hash the lock file
///     and refresh its own entry inside `manifest.json`.
///
/// NOTE(review): phase 3 rewrites manifest.json a second time, so if the
/// lock file tracks an entry for manifest.json itself, that entry's hash
/// would now be stale — confirm the lock format does not include it.
///
/// Returns a human-readable error string on any serialization or I/O
/// failure; partial writes are not rolled back.
fn append_packet_json_file(
    packet_dir: &Path,
    relative_path: &str,
    value: &Value,
) -> Result<(), String> {
    // Phase 1: write the payload file.
    let content = serde_json::to_vec_pretty(value)
        .map_err(|e| format!("Failed to serialize packet JSON file: {e}"))?;
    let path = packet_dir.join(relative_path);
    if let Some(parent) = path.parent() {
        std::fs::create_dir_all(parent)
            .map_err(|e| format!("Failed to create {}: {e}", parent.display()))?;
    }
    std::fs::write(&path, &content)
        .map_err(|e| format!("Failed to write {}: {e}", path.display()))?;
    // Manifest entry for the new file: path, content hash, byte size.
    let entry = json!({
        "path": relative_path,
        "sha256": hex::encode(Sha256::digest(&content)),
        "bytes": content.len(),
    });

    // Phase 2: upsert the entry in both manifests. The two files use
    // different array keys for their file listings.
    for manifest_name in ["manifest.json", "packet.lock.json"] {
        let manifest_path = packet_dir.join(manifest_name);
        let data = std::fs::read_to_string(&manifest_path)
            .map_err(|e| format!("Failed to read {}: {e}", manifest_path.display()))?;
        let mut manifest: Value = serde_json::from_str(&data)
            .map_err(|e| format!("Failed to parse {}: {e}", manifest_path.display()))?;
        let array_key = if manifest_name == "manifest.json" {
            "included_files"
        } else {
            "files"
        };
        let files = manifest
            .get_mut(array_key)
            .and_then(Value::as_array_mut)
            .ok_or_else(|| format!("{} missing {array_key} array", manifest_path.display()))?;
        // Drop any stale entry for this path before pushing the fresh one.
        files.retain(|file| {
            file.get("path")
                .and_then(Value::as_str)
                .is_none_or(|path| path != relative_path)
        });
        files.push(entry.clone());
        std::fs::write(
            &manifest_path,
            serde_json::to_vec_pretty(&manifest)
                .map_err(|e| format!("Failed to serialize {}: {e}", manifest_path.display()))?,
        )
        .map_err(|e| format!("Failed to write {}: {e}", manifest_path.display()))?;
    }

    // Phase 3: the lock file changed above, so refresh its hash entry
    // inside manifest.json.
    let lock_path = packet_dir.join("packet.lock.json");
    let lock_content = std::fs::read(&lock_path)
        .map_err(|e| format!("Failed to read {}: {e}", lock_path.display()))?;
    let lock_entry = json!({
        "path": "packet.lock.json",
        "sha256": hex::encode(Sha256::digest(&lock_content)),
        "bytes": lock_content.len(),
    });
    let manifest_path = packet_dir.join("manifest.json");
    let data = std::fs::read_to_string(&manifest_path)
        .map_err(|e| format!("Failed to read {}: {e}", manifest_path.display()))?;
    let mut manifest: Value = serde_json::from_str(&data)
        .map_err(|e| format!("Failed to parse {}: {e}", manifest_path.display()))?;
    let files = manifest
        .get_mut("included_files")
        .and_then(Value::as_array_mut)
        .ok_or_else(|| format!("{} missing included_files array", manifest_path.display()))?;
    files.retain(|file| {
        file.get("path")
            .and_then(Value::as_str)
            .is_none_or(|path| path != "packet.lock.json")
    });
    files.push(lock_entry);
    std::fs::write(
        &manifest_path,
        serde_json::to_vec_pretty(&manifest)
            .map_err(|e| format!("Failed to serialize {}: {e}", manifest_path.display()))?,
    )
    .map_err(|e| format!("Failed to write {}: {e}", manifest_path.display()))?;
    Ok(())
}
18228
18229fn print_tool_check_report(report: &Value) {
18230 let summary = report.get("summary").unwrap_or(&Value::Null);
18231 let frontier = report.get("frontier").unwrap_or(&Value::Null);
18232 println!();
18233 println!(" {}", "VELA · SERVE · CHECK-TOOLS".dimmed());
18234 println!(" {}", style::tick_row(60));
18235 println!(
18236 "frontier: {}",
18237 frontier
18238 .get("name")
18239 .and_then(Value::as_str)
18240 .unwrap_or("unknown")
18241 );
18242 println!(
18243 "findings: {}",
18244 frontier
18245 .get("findings")
18246 .and_then(Value::as_u64)
18247 .unwrap_or_default()
18248 );
18249 println!(
18250 "checks: {} passed, {} failed",
18251 summary
18252 .get("passed")
18253 .and_then(Value::as_u64)
18254 .unwrap_or_default(),
18255 summary
18256 .get("failed")
18257 .and_then(Value::as_u64)
18258 .unwrap_or_default()
18259 );
18260 if let Some(tools) = report.get("tools").and_then(Value::as_array) {
18261 let names = tools
18262 .iter()
18263 .filter_map(Value::as_str)
18264 .collect::<Vec<_>>()
18265 .join(", ");
18266 println!("tools: {names}");
18267 }
18268 if let Some(checks) = report.get("checks").and_then(Value::as_array) {
18269 for check in checks {
18270 let status = if check.get("ok").and_then(Value::as_bool) == Some(true) {
18271 style::ok("ok")
18272 } else {
18273 style::lost("lost")
18274 };
18275 println!(
18276 " {} {}",
18277 status,
18278 check
18279 .get("tool")
18280 .and_then(Value::as_str)
18281 .unwrap_or("unknown")
18282 );
18283 }
18284 }
18285}
18286
18287fn print_state_report(report: &state::StateCommandReport, json_output: bool) {
18288 if json_output {
18289 println!(
18290 "{}",
18291 serde_json::to_string_pretty(report).expect("failed to serialize state command report")
18292 );
18293 } else {
18294 println!("{}", report.message);
18295 println!(" frontier: {}", report.frontier);
18296 println!(" finding: {}", report.finding_id);
18297 println!(" proposal: {}", report.proposal_id);
18298 println!(" status: {}", report.proposal_status);
18299 if let Some(event_id) = &report.applied_event_id {
18300 println!(" event: {}", event_id);
18301 }
18302 println!(" wrote: {}", report.wrote_to);
18303 }
18304}
18305
18306fn print_history(payload: &Value) {
18307 let finding = payload.get("finding").unwrap_or(&Value::Null);
18308 println!("vela history");
18309 println!(
18310 " finding: {}",
18311 finding
18312 .get("id")
18313 .and_then(Value::as_str)
18314 .unwrap_or("unknown")
18315 );
18316 println!(
18317 " assertion: {}",
18318 finding
18319 .get("assertion")
18320 .and_then(Value::as_str)
18321 .unwrap_or("")
18322 );
18323 println!(
18324 " confidence: {:.3}",
18325 finding
18326 .get("confidence")
18327 .and_then(Value::as_f64)
18328 .unwrap_or_default()
18329 );
18330 let reviews = payload
18331 .get("review_events")
18332 .and_then(Value::as_array)
18333 .map_or(0, Vec::len);
18334 let updates = payload
18335 .get("confidence_updates")
18336 .and_then(Value::as_array)
18337 .map_or(0, Vec::len);
18338 let annotations = finding
18339 .get("annotations")
18340 .and_then(Value::as_array)
18341 .map_or(0, Vec::len);
18342 let sources = payload
18343 .get("sources")
18344 .and_then(Value::as_array)
18345 .map_or(0, Vec::len);
18346 let atoms = payload
18347 .get("evidence_atoms")
18348 .and_then(Value::as_array)
18349 .map_or(0, Vec::len);
18350 let conditions = payload
18351 .get("condition_records")
18352 .and_then(Value::as_array)
18353 .map_or(0, Vec::len);
18354 let proposals = payload
18355 .get("proposals")
18356 .and_then(Value::as_array)
18357 .map_or(0, Vec::len);
18358 let events = payload
18359 .get("events")
18360 .and_then(Value::as_array)
18361 .map_or(0, Vec::len);
18362 println!(" review events: {reviews}");
18363 println!(" confidence updates: {updates}");
18364 println!(" annotations: {annotations}");
18365 println!(" sources: {sources}");
18366 println!(" evidence atoms: {atoms}");
18367 println!(" condition records: {conditions}");
18368 println!(" proposals: {proposals}");
18369 println!(" canonical events: {events}");
18370 if let Some(status) = payload
18371 .get("proof_state")
18372 .and_then(|value| value.get("latest_packet"))
18373 .and_then(|value| value.get("status"))
18374 .and_then(Value::as_str)
18375 {
18376 println!(" proof state: {status}");
18377 }
18378 if let Some(events) = payload.get("review_events").and_then(Value::as_array) {
18379 for event in events.iter().take(8) {
18380 println!(
18381 " - {} {} {}",
18382 event
18383 .get("reviewed_at")
18384 .and_then(Value::as_str)
18385 .unwrap_or(""),
18386 event.get("id").and_then(Value::as_str).unwrap_or(""),
18387 event.get("reason").and_then(Value::as_str).unwrap_or("")
18388 );
18389 }
18390 }
18391}
18392
/// Serializable record of a proof run: what was checked, from what source,
/// and with what outcome. Serialized to JSON via `Serialize`.
#[derive(Debug, Serialize)]
pub struct ProofTrace {
    /// Version identifier of the trace format itself.
    pub trace_version: String,
    /// Command tokens that produced this trace — presumably the invoking
    /// CLI argv; TODO confirm against the producer.
    pub command: Vec<String>,
    /// Source the proof was built from.
    pub source: String,
    /// Hash of that source.
    pub source_hash: String,
    /// Schema version of the checked frontier/packet.
    pub schema_version: String,
    /// Artifacts that were examined during the run.
    pub checked_artifacts: Vec<String>,
    /// Optional benchmark results; `None` when no benchmark was run.
    pub benchmark: Option<Value>,
    /// Packet manifest reference or content summary.
    pub packet_manifest: String,
    /// Result of validating the packet.
    pub packet_validation: String,
    /// Caveats accumulated during the run.
    pub caveats: Vec<String>,
    /// Overall outcome status.
    pub status: String,
    /// Where this trace was (or will be) written.
    pub trace_path: String,
}
18408
/// Every science-mode subcommand name the CLI recognizes; consulted by
/// `is_science_subcommand` when dispatching.
/// NOTE(review): presumably this list must stay in sync with the
/// `Commands` enum and the help text — confirm when adding a subcommand.
const SCIENCE_SUBCOMMANDS: &[&str] = &[
    "compile-notes",
    "compile-code",
    "compile-data",
    "review-pending",
    "find-tensions",
    "plan-experiments",
    "scout",
    "check",
    "normalize",
    "integrity",
    "impact",
    "discord",
    "quickstart",
    "proof",
    "repo",
    "serve",
    "stats",
    "search",
    "search-index",
    "proof-attest-verification",
    "proof-verify-attestation",
    "tensions",
    "gaps",
    "bridge",
    "export",
    "packet",
    "bench",
    "conformance",
    "version",
    "sign",
    "actor",
    "frontier",
    "queue",
    "registry",
    "init",
    "import",
    "lock",
    "doc",
    "diff",
    "proposals",
    "finding",
    "link",
    "entity",
    "review",
    "note",
    "caveat",
    "revise",
    "reject",
    "history",
    "import-events",
    "retract",
    "propagate",
    "replicate",
    "replications",
    "dataset-add",
    "datasets",
    "code-add",
    "code-artifacts",
    "artifact-add",
    "artifact-to-state",
    "bridge-kit",
    "source-adapter",
    "runtime-adapter",
    "artifacts",
    "artifact-audit",
    "decision-brief",
    "trial-summary",
    "source-verification",
    "source-ingest-plan",
    "clinical-trial-import",
    "negative-result-add",
    "negative-results",
    "trajectory-create",
    "trajectory-step",
    "trajectories",
    "tier-set",
    "locator-repair",
    "span-repair",
    "entity-resolve",
    "entity-add",
    "proof-add",
    "agent",
    "source-fetch",
    "predict",
    "resolve",
    "predictions",
    "predictions-expire",
    "calibration",
    "consensus",
    "federation",
    "causal",
    "status",
    "log",
    "inbox",
    "ask",
    "bridges",
    "workbench",
    "verify",
    "ingest",
    "propose",
    "accept",
    "attest",
    "lineage",
    "carina",
    "atlas",
    "constellation",
];
18551
18552pub fn is_science_subcommand(name: &str) -> bool {
18553 SCIENCE_SUBCOMMANDS.contains(&name)
18554}
18555
/// Print the full (advanced) help text, with the crate version substituted
/// into the header line. This is the long-form listing shown for
/// `vela help advanced`; the short session help lives in
/// `print_session_help`.
fn print_strict_help() {
    // Single raw-string literal so the column layout stays exactly as authored.
    println!(
        r#"Vela {}
Version control for scientific state.

Usage:
 vela <COMMAND>

Core flow (v0.74):
 init Initialize a split frontier repo
 ingest Ingest a paper, dataset, or Carina packet (dispatches by file type)
 propose Create a finding.review proposal
 diff Preview a `vpr_*` proposal, or compare two frontier files
 accept Apply a proposal under reviewer authority
 attest Sign findings under your private key
 log Recent canonical state events
 lineage State-transition replay for one finding
 serve Local Workbench (findings, evidence, diff, lineage)

Read-only inspection:
 check Validate a frontier, repo, or proof packet
 integrity Check accepted frontier state integrity
 impact Report downstream finding impact
 normalize Apply deterministic frontier-state repairs
 proof Export and validate a proof packet
 repo Inspect split frontier repository status and shape
 stats Show frontier statistics
 search Search findings
 tensions List candidate contradictions and tensions
 gaps Inspect and rank candidate gap review leads
 bridge Find candidate cross-domain connections

Advanced (proposal-creation, agent inboxes, federation):
 scout Run Literature Scout against a folder of PDFs (writes proposals)
 compile-notes Run Notes Compiler against a Markdown vault (writes proposals)
 compile-code Run Code & Notebook Analyst against a research repo (writes proposals)
 compile-data Run Datasets agent against a folder of CSV/TSV data (writes proposals)
 review-pending Run Reviewer Agent: score every pending proposal (writes notes)
 find-tensions Run Contradiction Finder: surface real contradictions among findings
 plan-experiments Run Experiment Planner: propose experiments for open questions / hypotheses
 export Export frontier artifacts
 packet Inspect or validate proof packets
 bench Run deterministic benchmark gates
 conformance Run protocol conformance vectors
 sign Optional signing and signature verification
 runtime-adapter
 Normalize external runtime exports into reviewable proposals
 version Show version information
 import Import frontier.json into a .vela repo
 proposals Inspect, validate, export, import, accept, or reject write proposals
 artifact-to-state
 Import a Carina artifact packet as reviewable proposals
 bridge-kit
 Validate Carina artifact packets before importing runtime output
 source-adapter
 Run reviewed source adapters into artifact-to-state proposals
 finding Add or manage finding bundles as frontier state
 link Add typed links between findings (incl. cross-frontier vf_at-vfr targets)
 entity Resolve unresolved entities against a bundled common-entity table (v0.19)
 frontier Scaffold (`new`), materialize, and manage frontier metadata + deps
 actor Register Ed25519 publisher identities in a frontier
 registry Publish, list, or pull frontiers (open hub at https://vela-hub.fly.dev)
 review Create a review proposal or review interactively
 note Add a lightweight note to a finding
 caveat Create an explicit caveat proposal
 revise Create a confidence revision proposal
 reject Create a rejection proposal
 history Show state-transition history for one finding (v0.74 alias: `lineage`)
 import-events Import review/state events from a packet or JSON file
 retract Create a retraction proposal
 propagate Simulate impact over declared dependency links
 artifact-add Register a content-addressed artifact
 artifacts List content-addressed artifacts
 artifact-audit Audit artifact locators, hashes, references, and profiles
 decision-brief Show the validated decision brief projection
 trial-summary Show the validated trial outcome projection
 source-verification Show the validated source verification projection
 source-ingest-plan Show the validated source ingest plan
 clinical-trial-import Import a ClinicalTrials.gov record as an artifact
 locator-repair Mechanically repair an evidence atom's missing source locator
 span-repair Mechanically repair a finding's missing evidence span
 entity-resolve Resolve a finding entity to a canonical id
 source-fetch Fetch metadata + abstract for a doi:/pmid:/nct: source
 atlas Compose multiple frontiers into a domain-level Atlas (vat_*) (v0.78+)
 constellation Compose multiple Atlases into a cross-domain Constellation (vco_*) (v0.82+)

Quick start (the demo):
 vela init demo --name "Your bounded question"
 vela ingest paper.pdf --frontier demo
 vela propose demo <vf_id> --status accepted --reason "..." --reviewer reviewer:you --apply
 vela diff <vpr_id> --frontier demo
 vela accept demo <vpr_id> --reviewer reviewer:you --reason "applied"
 vela serve --path demo

Substrate health:
 vela frontier materialize my-frontier --json
 vela repo status my-frontier --json
 vela proof verify my-frontier --json
 vela check my-frontier --strict --json

Monolithic frontier file:
 vela frontier new frontier.json --name "Your bounded question"
 vela finding add frontier.json --assertion "..." --author "reviewer:demo" --apply
 vela check frontier.json --json
 FINDING_ID=$(jq -r '.findings[0].id' frontier.json)
 vela review frontier.json "$FINDING_ID" --status contested --reason "Mouse-only evidence" --reviewer reviewer:demo --apply

Publish your own frontier (see docs/PUBLISHING.md):
 vela frontier new ./frontier.json --name "Your bounded question"
 vela finding add ./frontier.json --assertion "..." --author "reviewer:you" --apply
 vela sign generate-keypair --out keys
 vela actor add ./frontier.json reviewer:you --pubkey "$(cat keys/public.key)"
 vela registry publish ./frontier.json --owner reviewer:you --key keys/private.key \
 --to https://vela-hub.fly.dev
"#,
        env!("CARGO_PKG_VERSION")
    );
}
18674
/// Function-pointer signature for the async Literature Scout entry point;
/// the handler returns a boxed future the caller awaits.
pub type ScoutHandler = fn(
    folder: PathBuf,
    frontier: PathBuf,
    backend: Option<String>,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

/// Process-wide registration slot for the scout handler; set once at startup.
static SCOUT_HANDLER: OnceLock<ScoutHandler> = OnceLock::new();

/// Register the scout handler. A second registration is silently ignored:
/// `OnceLock::set` returns `Err` then, and the result is discarded.
pub fn register_scout_handler(handler: ScoutHandler) {
    let _ = SCOUT_HANDLER.set(handler);
}
18699
/// Function-pointer signature for the async `atlas init` entry point.
pub type AtlasInitHandler = fn(
    atlases_root: PathBuf,
    name: String,
    domain: String,
    scope_note: Option<String>,
    frontiers: Vec<PathBuf>,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

/// Process-wide registration slot; set once at startup.
static ATLAS_INIT_HANDLER: OnceLock<AtlasInitHandler> = OnceLock::new();

/// Register the atlas-init handler; repeated registrations are ignored.
pub fn register_atlas_init_handler(handler: AtlasInitHandler) {
    let _ = ATLAS_INIT_HANDLER.set(handler);
}
18717
/// Function-pointer signature for the async search-index build entry point.
pub type SearchBuildHandler = fn(
    frontiers: Vec<PathBuf>,
    out: PathBuf,
    include_bootstrap: bool,
    include_broken: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

/// Process-wide registration slot; set once at startup.
static SEARCH_BUILD_HANDLER: OnceLock<SearchBuildHandler> = OnceLock::new();

/// Register the search-index build handler; repeated registrations are ignored.
pub fn register_search_build_handler(handler: SearchBuildHandler) {
    let _ = SEARCH_BUILD_HANDLER.set(handler);
}

/// Function-pointer signature for the async search-query entry point.
/// The argument list mirrors the CLI's search filter flags, hence the
/// clippy allowance.
#[allow(clippy::too_many_arguments)]
pub type SearchQueryHandler = fn(
    query: String,
    index: Option<PathBuf>,
    kind: Option<String>,
    entity: Option<String>,
    status: Option<String>,
    frontier_id: Option<String>,
    source_id: Option<String>,
    chain_status: Option<String>,
    limit: Option<usize>,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

/// Process-wide registration slot; set once at startup.
static SEARCH_QUERY_HANDLER: OnceLock<SearchQueryHandler> = OnceLock::new();

/// Register the search-query handler; repeated registrations are ignored.
pub fn register_search_query_handler(handler: SearchQueryHandler) {
    let _ = SEARCH_QUERY_HANDLER.set(handler);
}
18753
/// Function-pointer signature for the async `atlas materialize` entry point.
pub type AtlasMaterializeHandler =
    fn(atlases_root: PathBuf, name: String, json: bool) -> Pin<Box<dyn Future<Output = ()> + Send>>;

/// Process-wide registration slot; set once at startup.
static ATLAS_MATERIALIZE_HANDLER: OnceLock<AtlasMaterializeHandler> = OnceLock::new();

/// Register the atlas-materialize handler; repeated registrations are ignored.
pub fn register_atlas_materialize_handler(handler: AtlasMaterializeHandler) {
    let _ = ATLAS_MATERIALIZE_HANDLER.set(handler);
}

/// Function-pointer signature for the async `atlas serve` entry point.
pub type AtlasServeHandler = fn(
    atlases_root: PathBuf,
    name: String,
    port: u16,
    open_browser: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

/// Process-wide registration slot; set once at startup.
static ATLAS_SERVE_HANDLER: OnceLock<AtlasServeHandler> = OnceLock::new();

/// Register the atlas-serve handler; repeated registrations are ignored.
pub fn register_atlas_serve_handler(handler: AtlasServeHandler) {
    let _ = ATLAS_SERVE_HANDLER.set(handler);
}

/// Function-pointer signature for the async `atlas update` entry point.
pub type AtlasUpdateHandler = fn(
    atlases_root: PathBuf,
    name: String,
    add_frontier: Vec<PathBuf>,
    remove_vfr_id: Vec<String>,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

/// Process-wide registration slot; set once at startup.
static ATLAS_UPDATE_HANDLER: OnceLock<AtlasUpdateHandler> = OnceLock::new();

/// Register the atlas-update handler; repeated registrations are ignored.
pub fn register_atlas_update_handler(handler: AtlasUpdateHandler) {
    let _ = ATLAS_UPDATE_HANDLER.set(handler);
}
18798
/// Function-pointer signature for the async `constellation init` entry point.
pub type ConstellationInitHandler = fn(
    constellations_root: PathBuf,
    name: String,
    scope_note: Option<String>,
    atlases: Vec<PathBuf>,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

/// Process-wide registration slot; set once at startup.
static CONSTELLATION_INIT_HANDLER: OnceLock<ConstellationInitHandler> = OnceLock::new();

/// Register the constellation-init handler; repeated registrations are ignored.
pub fn register_constellation_init_handler(handler: ConstellationInitHandler) {
    let _ = CONSTELLATION_INIT_HANDLER.set(handler);
}

/// Function-pointer signature for the async `constellation materialize`
/// entry point.
pub type ConstellationMaterializeHandler = fn(
    constellations_root: PathBuf,
    name: String,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

/// Process-wide registration slot; set once at startup.
static CONSTELLATION_MATERIALIZE_HANDLER: OnceLock<ConstellationMaterializeHandler> =
    OnceLock::new();

/// Register the constellation-materialize handler; repeated registrations
/// are ignored.
pub fn register_constellation_materialize_handler(handler: ConstellationMaterializeHandler) {
    let _ = CONSTELLATION_MATERIALIZE_HANDLER.set(handler);
}

/// Function-pointer signature for the async `constellation serve` entry point.
pub type ConstellationServeHandler = fn(
    constellations_root: PathBuf,
    name: String,
    port: u16,
    open_browser: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

/// Process-wide registration slot; set once at startup.
static CONSTELLATION_SERVE_HANDLER: OnceLock<ConstellationServeHandler> = OnceLock::new();

/// Register the constellation-serve handler; repeated registrations are ignored.
pub fn register_constellation_serve_handler(handler: ConstellationServeHandler) {
    let _ = CONSTELLATION_SERVE_HANDLER.set(handler);
}
18841
/// Function-pointer signature for the async Notes Compiler entry point.
pub type NotesHandler = fn(
    vault: PathBuf,
    frontier: PathBuf,
    backend: Option<String>,
    max_files: Option<usize>,
    max_items_per_category: Option<usize>,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

/// Process-wide registration slot; set once at startup.
static NOTES_HANDLER: OnceLock<NotesHandler> = OnceLock::new();

/// Register the notes handler; repeated registrations are ignored.
pub fn register_notes_handler(handler: NotesHandler) {
    let _ = NOTES_HANDLER.set(handler);
}

/// Function-pointer signature for the async Code & Notebook Analyst entry point.
pub type CodeHandler = fn(
    root: PathBuf,
    frontier: PathBuf,
    backend: Option<String>,
    max_files: Option<usize>,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

/// Process-wide registration slot; set once at startup.
static CODE_HANDLER: OnceLock<CodeHandler> = OnceLock::new();

/// Register the code handler; repeated registrations are ignored.
pub fn register_code_handler(handler: CodeHandler) {
    let _ = CODE_HANDLER.set(handler);
}

/// Function-pointer signature for the async Datasets agent entry point.
pub type DatasetsHandler = fn(
    root: PathBuf,
    frontier: PathBuf,
    backend: Option<String>,
    sample_rows: Option<usize>,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

/// Process-wide registration slot; set once at startup.
static DATASETS_HANDLER: OnceLock<DatasetsHandler> = OnceLock::new();

/// Register the datasets handler; repeated registrations are ignored.
pub fn register_datasets_handler(handler: DatasetsHandler) {
    let _ = DATASETS_HANDLER.set(handler);
}

/// Function-pointer signature for the async Reviewer Agent entry point.
pub type ReviewerHandler = fn(
    frontier: PathBuf,
    backend: Option<String>,
    max_proposals: Option<usize>,
    batch_size: usize,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

/// Process-wide registration slot; set once at startup.
static REVIEWER_HANDLER: OnceLock<ReviewerHandler> = OnceLock::new();

/// Register the reviewer handler; repeated registrations are ignored.
pub fn register_reviewer_handler(handler: ReviewerHandler) {
    let _ = REVIEWER_HANDLER.set(handler);
}

/// Function-pointer signature for the async Contradiction Finder entry point.
pub type TensionsHandler = fn(
    frontier: PathBuf,
    backend: Option<String>,
    max_findings: Option<usize>,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

/// Process-wide registration slot; set once at startup.
static TENSIONS_HANDLER: OnceLock<TensionsHandler> = OnceLock::new();

/// Register the tensions handler; repeated registrations are ignored.
pub fn register_tensions_handler(handler: TensionsHandler) {
    let _ = TENSIONS_HANDLER.set(handler);
}

/// Function-pointer signature for the async Experiment Planner entry point.
pub type ExperimentsHandler = fn(
    frontier: PathBuf,
    backend: Option<String>,
    max_findings: Option<usize>,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

/// Process-wide registration slot; set once at startup.
static EXPERIMENTS_HANDLER: OnceLock<ExperimentsHandler> = OnceLock::new();

/// Register the experiments handler; repeated registrations are ignored.
pub fn register_experiments_handler(handler: ExperimentsHandler) {
    let _ = EXPERIMENTS_HANDLER.set(handler);
}
18944
/// Walk from the current working directory toward the filesystem root and
/// return the first directory that contains a `.vela/` subdirectory.
/// Returns `None` when no enclosing repo exists or the cwd is unreadable.
fn find_vela_repo() -> Option<PathBuf> {
    let start = std::env::current_dir().ok()?;
    start
        .ancestors()
        .find(|dir| dir.join(".vela").is_dir())
        .map(Path::to_path_buf)
}
18971
/// Print the short session-mode help shown when `vela` is run with no
/// subcommand: usage, the core flow, common daily verbs, reasoning and
/// composition commands, and publish pointers. The full command list lives
/// in `print_strict_help`.
fn print_session_help() {
    println!();
    println!(
        " Vela {} · Version control for scientific state.",
        env!("CARGO_PKG_VERSION")
    );
    println!();
    println!(" USAGE");
    println!(" vela Open a session against the nearest .vela/ repo");
    println!(" vela <command> Run a specific subcommand");
    println!(" vela help advanced Full subcommand list (30+ commands)");
    println!();
    println!(" CORE FLOW (v0.74)");
    println!(" init Initialize a split frontier repo");
    println!(" ingest <path> Ingest a paper, dataset, or Carina packet");
    println!(" propose Create a finding.review proposal");
    println!(" diff <vpr_id> Preview a pending proposal vs current frontier");
    println!(" accept <vpr_id> Apply a proposal under reviewer authority");
    println!(" attest Sign findings under your private key");
    println!(" log Recent canonical state events");
    println!(" lineage <vf_id> State-transition replay for one finding");
    println!(" serve Local Workbench (find, evidence, diff, lineage)");
    println!();
    println!(" DAILY ALSO-RANS");
    println!(" status One-screen frontier health");
    println!(" inbox Pending review proposals");
    println!(" review Review a proposal interactively");
    println!(" ask <question> Plain-text query against the frontier");
    println!();
    println!(" REASONING (Pearl 1 → 2 → 3)");
    println!(" causal audit Per-finding identifiability");
    println!(" causal effect <src> --on <tgt> Pairwise back-door / front-door");
    println!(" causal counterfactual <src> --target <tgt> --set-to <0..1>");
    println!();
    println!(" COMPOSITION");
    println!(" bridge <a> <b> Cross-frontier hypotheses");
    println!(" consensus <vf> Field consensus over similar claims");
    println!();
    println!(" PUBLISH");
    println!(" registry publish Push a signed manifest to the hub");
    println!(" federation peer-add Federate with another hub");
    println!();
    println!(" In session, type a single letter for a quick verb, or any");
    println!(" question in plain text. `q` or `exit` quits.");
    println!();
}
19018
19019fn print_session_dashboard(project: &crate::project::Project, repo_path: &Path) {
19020 use crate::causal_reasoning::{audit_frontier, summarize_audit};
19021
19022 let label = frontier_label(project);
19023 let vfr = project.frontier_id();
19024 let vfr_short = vfr.chars().take(16).collect::<String>();
19025
19026 let mut pending = 0usize;
19027 let mut by_kind: std::collections::BTreeMap<String, usize> = std::collections::BTreeMap::new();
19028 for p in &project.proposals {
19029 if p.status == "pending_review" {
19030 pending += 1;
19031 *by_kind.entry(p.kind.clone()).or_insert(0) += 1;
19032 }
19033 }
19034
19035 let audit = audit_frontier(project);
19036 let audit_summary = summarize_audit(&audit);
19037
19038 let bridges_dir = repo_path.join(".vela/bridges");
19039 let mut bridge_total = 0usize;
19040 let mut bridge_confirmed = 0usize;
19041 let mut bridge_derived = 0usize;
19042 if bridges_dir.is_dir()
19043 && let Ok(entries) = std::fs::read_dir(&bridges_dir)
19044 {
19045 for entry in entries.flatten() {
19046 let path = entry.path();
19047 if path.extension().and_then(|s| s.to_str()) != Some("json") {
19048 continue;
19049 }
19050 bridge_total += 1;
19051 if let Ok(data) = std::fs::read_to_string(&path)
19052 && let Ok(b) = serde_json::from_str::<crate::bridge::Bridge>(&data)
19053 {
19054 match b.status {
19055 crate::bridge::BridgeStatus::Confirmed => bridge_confirmed += 1,
19056 crate::bridge::BridgeStatus::Derived => bridge_derived += 1,
19057 _ => {}
19058 }
19059 }
19060 }
19061 }
19062
19063 let mut targets_with_success = std::collections::HashSet::new();
19064 let mut failed_replications = 0usize;
19065 for r in &project.replications {
19066 if r.outcome == "replicated" {
19067 targets_with_success.insert(r.target_finding.clone());
19068 } else if r.outcome == "failed" {
19069 failed_replications += 1;
19070 }
19071 }
19072
19073 println!();
19074 let version = crate::project::VELA_COMPILER_VERSION
19075 .strip_prefix("vela/")
19076 .unwrap_or(crate::project::VELA_COMPILER_VERSION);
19077 println!(
19078 " {}",
19079 format!("VELA · {version} · {label}")
19080 .to_uppercase()
19081 .dimmed()
19082 );
19083 println!(" {}", style::tick_row(60));
19084 println!(
19085 " vfr_id {}… repo {}",
19086 vfr_short,
19087 repo_path.display()
19088 );
19089 println!(
19090 " findings {:>4} events {} proposals pending {}",
19091 project.findings.len(),
19092 project.events.len(),
19093 pending
19094 );
19095
19096 if pending > 0 {
19097 let parts: Vec<String> = by_kind.iter().map(|(k, n)| format!("{n} {k}")).collect();
19098 println!(" {} · {}", style::warn("inbox"), parts.join(" "));
19099 }
19100 if audit_summary.underidentified > 0 || audit_summary.conditional > 0 {
19101 println!(
19102 " {} · {} underidentified · {} conditional",
19103 if audit_summary.underidentified > 0 {
19104 style::lost("audit")
19105 } else {
19106 style::warn("audit")
19107 },
19108 audit_summary.underidentified,
19109 audit_summary.conditional,
19110 );
19111 }
19112 if bridge_total > 0 {
19113 println!(
19114 " {} · {} total · {} confirmed · {} awaiting review",
19115 style::ok("bridges"),
19116 bridge_total,
19117 bridge_confirmed,
19118 bridge_derived
19119 );
19120 }
19121 if !project.replications.is_empty() {
19122 println!(
19123 " {} · {} records · {} findings replicated · {} failed",
19124 style::ok("replications"),
19125 project.replications.len(),
19126 targets_with_success.len(),
19127 failed_replications,
19128 );
19129 }
19130
19131 println!();
19132 println!(" type a verb or ask anything:");
19133 println!(" a audit problems i inbox (pending) b bridges");
19134 println!(" g causal graph l log (recent) c counterfactuals");
19135 println!(" s refresh status h help (more verbs) q quit");
19136 println!();
19137}
19138
19139fn run_session_verb(verb: &str, repo_path: &Path) -> bool {
19141 match verb {
19142 "a" | "audit" => {
19143 let action = CausalAction::Audit {
19144 frontier: repo_path.to_path_buf(),
19145 problems_only: true,
19146 json: false,
19147 };
19148 cmd_causal(action);
19149 true
19150 }
19151 "i" | "inbox" => {
19152 let action = ProposalAction::List {
19153 frontier: repo_path.to_path_buf(),
19154 status: Some("pending_review".into()),
19155 json: false,
19156 };
19157 cmd_proposals(action);
19158 true
19159 }
19160 "b" | "bridges" => {
19161 let action = BridgesAction::List {
19162 frontier: repo_path.to_path_buf(),
19163 status: None,
19164 json: false,
19165 };
19166 cmd_bridges(action);
19167 true
19168 }
19169 "g" | "graph" => {
19170 let action = CausalAction::Graph {
19171 frontier: repo_path.to_path_buf(),
19172 node: None,
19173 json: false,
19174 };
19175 cmd_causal(action);
19176 true
19177 }
19178 "l" | "log" => {
19179 cmd_log(repo_path, 10, None, false);
19180 true
19181 }
19182 "c" | "counterfactual" | "counterfactuals" => {
19183 let project = match repo::load_from_path(repo_path) {
19186 Ok(p) => p,
19187 Err(e) => {
19188 eprintln!("{} {e}", style::err_prefix());
19189 return true;
19190 }
19191 };
19192 println!();
19193 println!(" {}", "VELA · COUNTERFACTUAL · LIVE PAIRS".dimmed());
19194 println!(" {}", style::tick_row(60));
19195 let mut pairs = 0usize;
19199 for child in &project.findings {
19200 for link in &child.links {
19201 if !matches!(link.link_type.as_str(), "depends" | "supports") {
19202 continue;
19203 }
19204 if link.mechanism.is_none() {
19205 continue;
19206 }
19207 let parent = link
19208 .target
19209 .split_once(':')
19210 .map_or(link.target.as_str(), |(_, r)| r);
19211 pairs += 1;
19212 if pairs <= 10 {
19213 println!(" · do({parent}) → {}", child.id);
19214 }
19215 }
19216 }
19217 if pairs == 0 {
19218 println!(" no mechanism-annotated edges found.");
19219 println!(" add a mechanism via the link's `mechanism` field; see /counterfactual");
19220 } else {
19221 println!();
19222 println!(" {pairs} live pair(s). Run with:");
19223 println!(" vela causal counterfactual <repo> <src> --target <tgt> --set-to 0.5");
19224 }
19225 println!();
19226 true
19227 }
19228 "s" | "status" | "refresh" => {
19229 match repo::load_from_path(repo_path) {
19231 Ok(p) => print_session_dashboard(&p, repo_path),
19232 Err(e) => eprintln!("{} {e}", style::err_prefix()),
19233 }
19234 true
19235 }
19236 "h" | "help" | "?" => {
19237 print_session_help();
19238 true
19239 }
19240 _ => false,
19241 }
19242}
19243
19244fn run_session() {
19245 let repo_path = match find_vela_repo() {
19246 Some(p) => p,
19247 None => {
19248 println!();
19249 println!(
19250 " {}",
19251 "VELA · NO FRONTIER FOUND IN CWD OR ANY PARENT".dimmed()
19252 );
19253 println!(" {}", style::tick_row(60));
19254 println!(" Run `vela init` here to create a frontier, or cd into one.");
19255 println!(" Or run `vela help` for the command list.");
19256 println!();
19257 return;
19258 }
19259 };
19260
19261 let project = match repo::load_from_path(&repo_path) {
19262 Ok(p) => p,
19263 Err(e) => {
19264 eprintln!("{} failed to load .vela/ repo: {e}", style::err_prefix());
19265 std::process::exit(1);
19266 }
19267 };
19268
19269 print_session_dashboard(&project, &repo_path);
19270
19271 use std::io::{BufRead, Write};
19272 let stdin = std::io::stdin();
19273 let mut stdout = std::io::stdout();
19274 loop {
19275 print!(" > ");
19276 stdout.flush().ok();
19277 let mut line = String::new();
19278 if stdin.lock().read_line(&mut line).is_err() {
19279 break;
19280 }
19281 let input = line.trim();
19282 if input.is_empty() {
19283 continue;
19284 }
19285 if matches!(input, "q" | "quit" | "exit") {
19286 break;
19287 }
19288 if run_session_verb(input, &repo_path) {
19289 continue;
19290 }
19291 let project = match repo::load_from_path(&repo_path) {
19293 Ok(p) => p,
19294 Err(e) => {
19295 eprintln!("{} {e}", style::err_prefix());
19296 continue;
19297 }
19298 };
19299 answer(&project, input, false);
19300 }
19301}
19302
19303pub fn run_from_args() {
19304 style::init();
19305 let args = std::env::args().collect::<Vec<_>>();
19306 match args.get(1).map(String::as_str) {
19307 None => {
19311 run_session();
19312 return;
19313 }
19314 Some("-h" | "--help" | "help") => {
19315 if args.get(2).map(String::as_str) == Some("advanced") {
19318 print_strict_help();
19319 } else {
19320 print_session_help();
19321 }
19322 return;
19323 }
19324 Some("-V" | "--version" | "version") => {
19325 println!("vela {}", env!("CARGO_PKG_VERSION"));
19326 return;
19327 }
19328 Some("proof") if args.get(2).map(String::as_str) == Some("verify") => {
19329 let json = args.iter().any(|arg| arg == "--json");
19330 let frontier = args
19331 .iter()
19332 .skip(3)
19333 .find(|arg| !arg.starts_with('-'))
19334 .map(PathBuf::from)
19335 .unwrap_or_else(|| {
19336 eprintln!(
19337 "{} proof verify requires a frontier repo",
19338 style::err_prefix()
19339 );
19340 std::process::exit(2);
19341 });
19342 cmd_proof_verify(&frontier, json);
19343 return;
19344 }
19345 Some("proof") if args.get(2).map(String::as_str) == Some("explain") => {
19346 let frontier = args
19347 .iter()
19348 .skip(3)
19349 .find(|arg| !arg.starts_with('-'))
19350 .map(PathBuf::from)
19351 .unwrap_or_else(|| {
19352 eprintln!(
19353 "{} proof explain requires a frontier repo",
19354 style::err_prefix()
19355 );
19356 std::process::exit(2);
19357 });
19358 cmd_proof_explain(&frontier);
19359 return;
19360 }
19361 Some(cmd) if !is_science_subcommand(cmd) => {
19362 eprintln!(
19363 "{} unknown or non-release command: {cmd}",
19364 style::err_prefix()
19365 );
19366 eprintln!("run `vela --help` for the strict v0 command surface.");
19367 std::process::exit(2);
19368 }
19369 Some(_) => {}
19370 }
19371 let runtime = tokio::runtime::Runtime::new().expect("failed to create tokio runtime");
19372 runtime.block_on(run_command());
19373}
19374
19375fn fail(message: &str) -> ! {
19376 eprintln!("{} {message}", style::err_prefix());
19377 std::process::exit(1);
19378}
19379
19380fn validate_enum_arg(flag: &str, value: &str, valid: &[&str]) {
19385 if !valid.contains(&value) {
19386 fail(&format!(
19387 "invalid {flag} '{value}'. Valid: {}",
19388 valid.join(", ")
19389 ));
19390 }
19391}
19392
/// Adapter over [`fail`] that nominally yields a `T`.
///
/// `fail` diverges (`-> !`), so no `T` is ever produced; this wrapper lets
/// the call appear in positions requiring a value of a concrete type —
/// presumably `unwrap_or_else`-style callbacks that need a plain
/// `fn(&str) -> T` item. NOTE(review): purpose inferred from the signature;
/// confirm against call sites outside this chunk.
fn fail_return<T>(message: &str) -> T {
    fail(message)
}