use crate::{
    benchmark, bridge, bundle, carina_validate, conformance, decision, diff, events, export,
    frontier_repo, impact, lint, normalize, packet, project, propagate, proposals, repo, review,
    search, serve, sign, signals, sources, state, state_integrity, tensions, validate,
};

use std::collections::BTreeMap;
use std::future::Future;
use std::path::{Path, PathBuf};
use std::pin::Pin;
use std::sync::OnceLock;

use clap::{Parser, Subcommand};
use colored::Colorize;

use crate::cli_style as style;
use reqwest::Client;
use serde::Serialize;
use serde_json::{Value, json};
use sha2::{Digest, Sha256};
22#[derive(Parser)]
23#[command(name = "vela", version)]
24#[command(about = "Portable frontier state for science")]
25struct Cli {
26 #[command(subcommand)]
27 command: Commands,
28}
29
30#[derive(Subcommand)]
31enum Commands {
32 Scout {
39 folder: PathBuf,
41 #[arg(long)]
43 frontier: PathBuf,
44 #[arg(short, long)]
46 backend: Option<String>,
47 #[arg(long)]
49 dry_run: bool,
50 #[arg(long)]
52 json: bool,
53 },
54 CompileNotes {
61 vault: PathBuf,
63 #[arg(long)]
65 frontier: PathBuf,
66 #[arg(short, long)]
68 backend: Option<String>,
69 #[arg(long)]
71 max_files: Option<usize>,
72 #[arg(long)]
77 max_items_per_category: Option<usize>,
78 #[arg(long)]
80 dry_run: bool,
81 #[arg(long)]
83 json: bool,
84 },
85 CompileCode {
91 root: PathBuf,
93 #[arg(long)]
95 frontier: PathBuf,
96 #[arg(short, long)]
98 backend: Option<String>,
99 #[arg(long)]
101 max_files: Option<usize>,
102 #[arg(long)]
104 dry_run: bool,
105 #[arg(long)]
107 json: bool,
108 },
109 ReviewPending {
115 #[arg(long)]
116 frontier: PathBuf,
117 #[arg(short, long)]
118 backend: Option<String>,
119 #[arg(long)]
120 max_proposals: Option<usize>,
121 #[arg(long, default_value = "1")]
126 batch_size: usize,
127 #[arg(long)]
128 dry_run: bool,
129 #[arg(long)]
130 json: bool,
131 },
132 FindTensions {
136 #[arg(long)]
137 frontier: PathBuf,
138 #[arg(short, long)]
139 backend: Option<String>,
140 #[arg(long)]
141 max_findings: Option<usize>,
142 #[arg(long)]
143 dry_run: bool,
144 #[arg(long)]
145 json: bool,
146 },
147 PlanExperiments {
151 #[arg(long)]
152 frontier: PathBuf,
153 #[arg(short, long)]
154 backend: Option<String>,
155 #[arg(long)]
156 max_findings: Option<usize>,
157 #[arg(long)]
158 dry_run: bool,
159 #[arg(long)]
160 json: bool,
161 },
162 CompileData {
167 root: PathBuf,
169 #[arg(long)]
171 frontier: PathBuf,
172 #[arg(short, long)]
174 backend: Option<String>,
175 #[arg(long)]
177 sample_rows: Option<usize>,
178 #[arg(long)]
180 dry_run: bool,
181 #[arg(long)]
183 json: bool,
184 },
185 Check {
187 source: Option<PathBuf>,
189 #[arg(long)]
191 schema: bool,
192 #[arg(long)]
194 stats: bool,
195 #[arg(long)]
197 conformance: bool,
198 #[arg(long, default_value = "tests/conformance")]
200 conformance_dir: PathBuf,
201 #[arg(long)]
203 all: bool,
204 #[arg(long)]
206 schema_only: bool,
207 #[arg(long)]
209 strict: bool,
210 #[arg(long)]
212 fix: bool,
213 #[arg(long)]
215 json: bool,
216 },
217 Integrity {
219 frontier: PathBuf,
221 #[arg(long)]
223 json: bool,
224 },
225 Impact {
227 frontier: PathBuf,
229 finding_id: String,
231 #[arg(long)]
233 depth: Option<usize>,
234 #[arg(long)]
236 json: bool,
237 },
238 Discord {
245 frontier: PathBuf,
247 #[arg(long)]
249 json: bool,
250 #[arg(long)]
254 kind: Option<String>,
255 },
256 Normalize {
258 source: PathBuf,
260 #[arg(short, long)]
262 out: Option<PathBuf>,
263 #[arg(long)]
265 write: bool,
266 #[arg(long)]
268 dry_run: bool,
269 #[arg(long)]
271 rewrite_ids: bool,
272 #[arg(long)]
274 id_map: Option<PathBuf>,
275 #[arg(long)]
279 resync_provenance: bool,
280 #[arg(long)]
282 json: bool,
283 },
284 Proof {
286 frontier: PathBuf,
288 #[arg(long, short = 'o', default_value = "proof-packet")]
290 out: PathBuf,
291 #[arg(long, default_value = "bbb-alzheimer")]
293 template: String,
294 #[arg(long)]
296 gold: Option<PathBuf>,
297 #[arg(long)]
299 record_proof_state: bool,
300 #[arg(long)]
302 json: bool,
303 },
304 Repo {
306 #[command(subcommand)]
307 action: RepoAction,
308 },
309 Serve {
311 #[arg(required_unless_present_any = ["frontiers", "setup"])]
313 frontier: Option<PathBuf>,
314 #[arg(long)]
316 frontiers: Option<PathBuf>,
317 #[arg(short, long)]
319 backend: Option<String>,
320 #[arg(long)]
322 http: Option<u16>,
323 #[arg(long)]
325 setup: bool,
326 #[arg(long)]
328 check_tools: bool,
329 #[arg(long)]
331 json: bool,
332 #[arg(long)]
336 workbench: bool,
337 },
338 Status {
342 frontier: PathBuf,
343 #[arg(long)]
345 json: bool,
346 },
347 Log {
350 frontier: PathBuf,
351 #[arg(long, default_value = "20")]
353 limit: usize,
354 #[arg(long)]
356 kind: Option<String>,
357 #[arg(long)]
359 json: bool,
360 },
361 Inbox {
365 frontier: PathBuf,
366 #[arg(long)]
368 kind: Option<String>,
369 #[arg(long, default_value = "30")]
371 limit: usize,
372 #[arg(long)]
374 json: bool,
375 },
376 Ask {
381 frontier: PathBuf,
382 #[arg(trailing_var_arg = true)]
384 question: Vec<String>,
385 #[arg(long)]
387 json: bool,
388 },
389 Stats {
391 frontier: PathBuf,
393 #[arg(long)]
395 json: bool,
396 },
397 Search {
399 query: String,
401 #[arg(long)]
403 source: Option<PathBuf>,
404 #[arg(long)]
406 entity: Option<String>,
407 #[arg(long)]
409 r#type: Option<String>,
410 #[arg(long)]
412 all: Option<PathBuf>,
413 #[arg(long, default_value = "20")]
415 limit: usize,
416 #[arg(long)]
418 json: bool,
419 },
420 Tensions {
422 source: PathBuf,
423 #[arg(long)]
424 both_high: bool,
425 #[arg(long)]
426 cross_domain: bool,
427 #[arg(long, default_value = "20")]
428 top: usize,
429 #[arg(long)]
430 json: bool,
431 },
432 Gaps {
434 #[command(subcommand)]
435 action: GapsAction,
436 },
437 Bridge {
439 #[arg(required = true)]
441 inputs: Vec<PathBuf>,
442 #[arg(long, default_value = "true", action = clap::ArgAction::Set)]
444 novelty: bool,
445 #[arg(long, default_value = "30")]
447 top: usize,
448 },
449 Export {
451 frontier: PathBuf,
452 #[arg(short, long, default_value = "csv")]
453 format: String,
454 #[arg(short, long)]
455 output: Option<PathBuf>,
456 },
457 Packet {
459 #[command(subcommand)]
460 action: PacketAction,
461 },
462 Verify {
469 path: PathBuf,
471 #[arg(long)]
472 json: bool,
473 },
474 Bench {
486 frontier: Option<PathBuf>,
488 #[arg(long)]
490 gold: Option<PathBuf>,
491 #[arg(long)]
495 candidate: Option<PathBuf>,
496 #[arg(long)]
500 sources: Option<PathBuf>,
501 #[arg(long)]
504 threshold: Option<f64>,
505 #[arg(long)]
508 report: Option<PathBuf>,
509 #[arg(long)]
510 entity_gold: Option<PathBuf>,
511 #[arg(long)]
512 link_gold: Option<PathBuf>,
513 #[arg(long)]
514 suite: Option<PathBuf>,
515 #[arg(long)]
516 suite_ready: bool,
517 #[arg(long)]
518 min_f1: Option<f64>,
519 #[arg(long)]
520 min_precision: Option<f64>,
521 #[arg(long)]
522 min_recall: Option<f64>,
523 #[arg(long)]
524 no_thresholds: bool,
525 #[arg(long)]
526 json: bool,
527 },
528 Conformance {
530 #[arg(default_value = "tests/conformance")]
531 dir: PathBuf,
532 },
533 Version,
535 Sign {
537 #[command(subcommand)]
538 action: SignAction,
539 },
540 Actor {
542 #[command(subcommand)]
543 action: ActorAction,
544 },
545 Federation {
550 #[command(subcommand)]
551 action: FederationAction,
552 },
553 Causal {
560 #[command(subcommand)]
561 action: CausalAction,
562 },
563 Frontier {
567 #[command(subcommand)]
568 action: FrontierAction,
569 },
570 Queue {
573 #[command(subcommand)]
574 action: QueueAction,
575 },
576 Registry {
579 #[command(subcommand)]
580 action: RegistryAction,
581 },
582 Init {
584 #[arg(default_value = ".")]
585 path: PathBuf,
586 #[arg(long, default_value = "unnamed")]
587 name: String,
588 #[arg(long, default_value = "default")]
589 template: String,
590 #[arg(long)]
591 no_git: bool,
592 #[arg(long)]
593 json: bool,
594 },
595 Quickstart {
601 #[arg(default_value = "demo")]
603 path: PathBuf,
604 #[arg(long, default_value = "Quickstart frontier")]
606 name: String,
607 #[arg(long, default_value = "reviewer:you")]
611 reviewer: String,
612 #[arg(long)]
615 assertion: Option<String>,
616 #[arg(long)]
619 keys_out: Option<PathBuf>,
620 #[arg(long)]
622 json: bool,
623 },
624 Lock {
631 path: PathBuf,
633 #[arg(long)]
636 check: bool,
637 #[arg(long)]
639 json: bool,
640 },
641 Doc {
648 path: PathBuf,
650 #[arg(long)]
652 out: Option<PathBuf>,
653 #[arg(long)]
656 json: bool,
657 },
658 Import {
660 frontier: PathBuf,
661 #[arg(long)]
662 into: Option<PathBuf>,
663 },
664 Diff {
674 target: String,
677 frontier_b: Option<PathBuf>,
680 #[arg(long)]
684 frontier: Option<PathBuf>,
685 #[arg(long, default_value = "reviewer:preview")]
687 reviewer: String,
688 #[arg(long)]
689 json: bool,
690 #[arg(long)]
691 quiet: bool,
692 },
693 Proposals {
695 #[command(subcommand)]
696 action: ProposalAction,
697 },
698 ArtifactToState {
700 frontier: PathBuf,
702 packet: PathBuf,
704 #[arg(long)]
706 actor: String,
707 #[arg(long)]
709 apply_artifacts: bool,
710 #[arg(long)]
711 json: bool,
712 },
713 BridgeKit {
715 #[command(subcommand)]
716 action: BridgeKitAction,
717 },
718 SourceAdapter {
720 #[command(subcommand)]
721 action: SourceAdapterAction,
722 },
723 RuntimeAdapter {
725 #[command(subcommand)]
726 action: RuntimeAdapterAction,
727 },
728 Finding {
730 #[command(subcommand)]
731 command: FindingCommands,
732 },
733 Link {
737 #[command(subcommand)]
738 action: LinkAction,
739 },
740 Workbench {
745 #[arg(default_value = ".")]
747 path: PathBuf,
748 #[arg(long, default_value_t = 3850)]
750 port: u16,
751 #[arg(long)]
753 no_open: bool,
754 },
755 Bridges {
761 #[command(subcommand)]
762 action: BridgesAction,
763 },
764 Entity {
769 #[command(subcommand)]
770 action: EntityAction,
771 },
772 Review {
774 frontier: PathBuf,
776 finding_id: String,
778 #[arg(long)]
780 status: Option<String>,
781 #[arg(long)]
783 reason: Option<String>,
784 #[arg(long)]
786 reviewer: String,
787 #[arg(long)]
789 apply: bool,
790 #[arg(long)]
792 json: bool,
793 },
794 Note {
796 frontier: PathBuf,
797 finding_id: String,
798 #[arg(long)]
799 text: String,
800 #[arg(long)]
801 author: String,
802 #[arg(long)]
804 apply: bool,
805 #[arg(long)]
806 json: bool,
807 },
808 Caveat {
810 frontier: PathBuf,
811 finding_id: String,
812 #[arg(long)]
813 text: String,
814 #[arg(long)]
815 author: String,
816 #[arg(long)]
817 apply: bool,
818 #[arg(long)]
819 json: bool,
820 },
821 Revise {
823 frontier: PathBuf,
824 finding_id: String,
825 #[arg(long)]
827 confidence: f64,
828 #[arg(long)]
830 reason: String,
831 #[arg(long)]
833 reviewer: String,
834 #[arg(long)]
835 apply: bool,
836 #[arg(long)]
837 json: bool,
838 },
839 Reject {
841 frontier: PathBuf,
842 finding_id: String,
843 #[arg(long)]
844 reason: String,
845 #[arg(long)]
846 reviewer: String,
847 #[arg(long)]
848 apply: bool,
849 #[arg(long)]
850 json: bool,
851 },
852 History {
854 frontier: PathBuf,
855 finding_id: String,
856 #[arg(long)]
857 json: bool,
858 #[arg(long, value_name = "RFC3339_TIMESTAMP")]
862 as_of: Option<String>,
863 },
864 ImportEvents {
866 source: PathBuf,
867 #[arg(long)]
868 into: PathBuf,
869 #[arg(long)]
870 json: bool,
871 },
872 Retract {
874 source: PathBuf,
875 finding_id: String,
876 #[arg(long)]
877 reason: String,
878 #[arg(long)]
879 reviewer: String,
880 #[arg(long)]
881 apply: bool,
882 #[arg(long)]
883 json: bool,
884 },
885 ProofAdd {
897 frontier: PathBuf,
898 #[arg(long = "target-finding")]
900 target_finding: String,
901 #[arg(long, default_value = "lean4")]
904 tool: String,
905 #[arg(long = "tool-version", default_value = "4.29.1")]
907 tool_version: String,
908 #[arg(long = "script-path")]
910 script_path: PathBuf,
911 #[arg(long, default_value = "Proof script")]
913 name: String,
914 #[arg(long)]
916 reviewer: String,
917 #[arg(long)]
919 reason: String,
920 #[arg(long)]
921 json: bool,
922 },
923 EntityAdd {
928 frontier: PathBuf,
929 finding_id: String,
930 #[arg(long)]
931 entity: String,
932 #[arg(long)]
936 entity_type: String,
937 #[arg(long)]
938 reviewer: String,
939 #[arg(long)]
940 reason: String,
941 #[arg(long)]
942 apply: bool,
943 #[arg(long)]
944 json: bool,
945 },
946 EntityResolve {
950 frontier: PathBuf,
951 finding_id: String,
952 #[arg(long)]
953 entity: String,
954 #[arg(long)]
955 source: String,
956 #[arg(long)]
957 id: String,
958 #[arg(long)]
959 confidence: f64,
960 #[arg(long)]
961 matched_name: Option<String>,
962 #[arg(long, default_value = "manual")]
963 resolution_method: String,
964 #[arg(long)]
965 reviewer: String,
966 #[arg(long)]
967 reason: String,
968 #[arg(long)]
969 apply: bool,
970 #[arg(long)]
971 json: bool,
972 },
973 SourceFetch {
981 identifier: String,
984 #[arg(long)]
988 cache: Option<PathBuf>,
989 #[arg(long)]
991 out: Option<PathBuf>,
992 #[arg(long)]
994 refresh: bool,
995 #[arg(long)]
996 json: bool,
997 },
998 SpanRepair {
1001 frontier: PathBuf,
1002 finding_id: String,
1003 #[arg(long)]
1004 section: String,
1005 #[arg(long)]
1006 text: String,
1007 #[arg(long)]
1008 reviewer: String,
1009 #[arg(long)]
1010 reason: String,
1011 #[arg(long)]
1012 apply: bool,
1013 #[arg(long)]
1014 json: bool,
1015 },
1016 LocatorRepair {
1021 frontier: PathBuf,
1022 atom_id: String,
1023 #[arg(long)]
1026 locator: Option<String>,
1027 #[arg(long)]
1030 reviewer: String,
1031 #[arg(long)]
1033 reason: String,
1034 #[arg(long)]
1036 apply: bool,
1037 #[arg(long)]
1038 json: bool,
1039 },
1040 Propagate {
1042 frontier: PathBuf,
1043 #[arg(long)]
1044 retract: Option<String>,
1045 #[arg(long)]
1046 reduce_confidence: Option<String>,
1047 #[arg(long)]
1048 to: Option<f64>,
1049 #[arg(short, long)]
1050 output: Option<PathBuf>,
1051 },
1052 Replicate {
1061 frontier: PathBuf,
1063 target: String,
1065 #[arg(long)]
1067 outcome: String,
1068 #[arg(long)]
1070 by: String,
1071 #[arg(long)]
1075 conditions: String,
1076 #[arg(long)]
1078 source_title: String,
1079 #[arg(long)]
1081 doi: Option<String>,
1082 #[arg(long)]
1084 pmid: Option<String>,
1085 #[arg(long)]
1087 sample_size: Option<String>,
1088 #[arg(long, default_value = "")]
1091 note: String,
1092 #[arg(long)]
1094 previous_attempt: Option<String>,
1095 #[arg(long, default_value_t = false)]
1102 no_cascade: bool,
1103 #[arg(long)]
1105 json: bool,
1106 },
1107 Replications {
1110 frontier: PathBuf,
1112 #[arg(long)]
1114 target: Option<String>,
1115 #[arg(long)]
1117 json: bool,
1118 },
1119 DatasetAdd {
1126 frontier: PathBuf,
1128 #[arg(long)]
1130 name: String,
1131 #[arg(long)]
1133 version: Option<String>,
1134 #[arg(long)]
1138 content_hash: String,
1139 #[arg(long)]
1141 url: Option<String>,
1142 #[arg(long)]
1144 license: Option<String>,
1145 #[arg(long)]
1147 source_title: String,
1148 #[arg(long)]
1150 doi: Option<String>,
1151 #[arg(long)]
1153 row_count: Option<u64>,
1154 #[arg(long)]
1156 json: bool,
1157 },
1158 Datasets {
1160 frontier: PathBuf,
1161 #[arg(long)]
1162 json: bool,
1163 },
1164 CodeAdd {
1168 frontier: PathBuf,
1170 #[arg(long)]
1172 language: String,
1173 #[arg(long)]
1175 repo_url: Option<String>,
1176 #[arg(long)]
1179 commit: Option<String>,
1180 #[arg(long)]
1182 path: String,
1183 #[arg(long)]
1185 content_hash: String,
1186 #[arg(long)]
1188 line_start: Option<u32>,
1189 #[arg(long)]
1191 line_end: Option<u32>,
1192 #[arg(long)]
1194 entry_point: Option<String>,
1195 #[arg(long)]
1197 json: bool,
1198 },
1199 CodeArtifacts {
1201 frontier: PathBuf,
1202 #[arg(long)]
1203 json: bool,
1204 },
1205 ArtifactAdd {
1210 frontier: PathBuf,
1212 #[arg(long)]
1215 kind: String,
1216 #[arg(long)]
1218 name: String,
1219 #[arg(long)]
1222 file: Option<PathBuf>,
1223 #[arg(long)]
1225 url: Option<String>,
1226 #[arg(long)]
1228 content_hash: Option<String>,
1229 #[arg(long)]
1231 media_type: Option<String>,
1232 #[arg(long)]
1234 license: Option<String>,
1235 #[arg(long)]
1237 source_title: Option<String>,
1238 #[arg(long)]
1240 source_url: Option<String>,
1241 #[arg(long)]
1243 doi: Option<String>,
1244 #[arg(long)]
1246 target: Vec<String>,
1247 #[arg(long)]
1249 metadata: Vec<String>,
1250 #[arg(long, default_value = "public")]
1252 access_tier: String,
1253 #[arg(long, default_value = "reviewer:manual")]
1255 deposited_by: String,
1256 #[arg(long, default_value = "artifact deposit")]
1258 reason: String,
1259 #[arg(long)]
1261 json: bool,
1262 },
1263 Artifacts {
1265 frontier: PathBuf,
1266 #[arg(long)]
1268 target: Option<String>,
1269 #[arg(long)]
1270 json: bool,
1271 },
1272 ArtifactAudit {
1274 frontier: PathBuf,
1275 #[arg(long)]
1277 json: bool,
1278 },
1279 DecisionBrief {
1281 frontier: PathBuf,
1282 #[arg(long)]
1284 json: bool,
1285 },
1286 TrialSummary {
1288 frontier: PathBuf,
1289 #[arg(long)]
1291 json: bool,
1292 },
1293 SourceVerification {
1295 frontier: PathBuf,
1296 #[arg(long)]
1298 json: bool,
1299 },
1300 SourceIngestPlan {
1302 frontier: PathBuf,
1303 #[arg(long)]
1305 json: bool,
1306 },
1307 ClinicalTrialImport {
1310 frontier: PathBuf,
1312 nct_id: String,
1314 #[arg(long)]
1317 input_json: Option<PathBuf>,
1318 #[arg(long)]
1320 target: Vec<String>,
1321 #[arg(long, default_value = "reviewer:manual")]
1323 deposited_by: String,
1324 #[arg(long, default_value = "clinical trial record import")]
1326 reason: String,
1327 #[arg(long, default_value = "ClinicalTrials.gov public record")]
1329 license: String,
1330 #[arg(long)]
1332 json: bool,
1333 },
1334 NegativeResultAdd {
1342 frontier: PathBuf,
1344 #[arg(long)]
1346 kind: String,
1347 #[arg(long)]
1349 deposited_by: String,
1350 #[arg(long)]
1352 reason: String,
1353 #[arg(long)]
1356 conditions_text: String,
1357 #[arg(long, default_value = "")]
1359 notes: String,
1360 #[arg(long)]
1363 target: Vec<String>,
1364 #[arg(long)]
1368 endpoint: Option<String>,
1369 #[arg(long)]
1371 intervention: Option<String>,
1372 #[arg(long)]
1374 comparator: Option<String>,
1375 #[arg(long)]
1377 population: Option<String>,
1378 #[arg(long)]
1380 n_enrolled: Option<u32>,
1381 #[arg(long)]
1383 power: Option<f64>,
1384 #[arg(long)]
1386 ci_lower: Option<f64>,
1387 #[arg(long)]
1389 ci_upper: Option<f64>,
1390 #[arg(long)]
1392 effect_size_threshold: Option<f64>,
1393 #[arg(long)]
1395 registry_id: Option<String>,
1396 #[arg(long)]
1399 reagent: Option<String>,
1400 #[arg(long)]
1402 observation: Option<String>,
1403 #[arg(long)]
1405 attempts: Option<u32>,
1406 #[arg(long)]
1409 source_title: String,
1410 #[arg(long)]
1412 doi: Option<String>,
1413 #[arg(long)]
1415 url: Option<String>,
1416 #[arg(long)]
1418 year: Option<i32>,
1419 #[arg(long)]
1421 json: bool,
1422 },
1423 NegativeResults {
1425 frontier: PathBuf,
1426 #[arg(long)]
1428 target: Option<String>,
1429 #[arg(long)]
1430 json: bool,
1431 },
1432 TrajectoryCreate {
1437 frontier: PathBuf,
1439 #[arg(long)]
1441 deposited_by: String,
1442 #[arg(long)]
1444 reason: String,
1445 #[arg(long)]
1450 target: Vec<String>,
1451 #[arg(long, default_value = "")]
1453 notes: String,
1454 #[arg(long)]
1455 json: bool,
1456 },
1457 TrajectoryStep {
1460 frontier: PathBuf,
1462 trajectory_id: String,
1464 #[arg(long)]
1466 kind: String,
1467 #[arg(long)]
1471 description: String,
1472 #[arg(long)]
1474 actor: String,
1475 #[arg(long)]
1477 reason: String,
1478 #[arg(long)]
1481 reference: Vec<String>,
1482 #[arg(long)]
1483 json: bool,
1484 },
1485 Trajectories {
1487 frontier: PathBuf,
1488 #[arg(long)]
1490 target: Option<String>,
1491 #[arg(long)]
1492 json: bool,
1493 },
1494 TierSet {
1500 frontier: PathBuf,
1502 #[arg(long)]
1504 object_type: String,
1505 #[arg(long)]
1507 object_id: String,
1508 #[arg(long)]
1510 tier: String,
1511 #[arg(long)]
1514 actor: String,
1515 #[arg(long)]
1518 reason: String,
1519 #[arg(long)]
1520 json: bool,
1521 },
1522 Predict {
1529 frontier: PathBuf,
1531 #[arg(long)]
1533 by: String,
1534 #[arg(long)]
1537 claim: String,
1538 #[arg(long)]
1540 criterion: String,
1541 #[arg(long)]
1543 resolves_by: Option<String>,
1544 #[arg(long)]
1546 confidence: f64,
1547 #[arg(long, default_value = "")]
1549 target: String,
1550 #[arg(long, default_value = "affirmed")]
1552 outcome: String,
1553 #[arg(long, default_value = "")]
1555 conditions: String,
1556 #[arg(long)]
1558 json: bool,
1559 },
1560 Resolve {
1565 frontier: PathBuf,
1567 prediction: String,
1569 #[arg(long)]
1571 outcome: String,
1572 #[arg(long)]
1574 matched: bool,
1575 #[arg(long)]
1578 by: String,
1579 #[arg(long, default_value = "1.0")]
1581 confidence: f64,
1582 #[arg(long, default_value = "")]
1584 source_title: String,
1585 #[arg(long)]
1587 doi: Option<String>,
1588 #[arg(long)]
1590 json: bool,
1591 },
1592 Predictions {
1594 frontier: PathBuf,
1595 #[arg(long)]
1597 by: Option<String>,
1598 #[arg(long)]
1600 open: bool,
1601 #[arg(long)]
1603 json: bool,
1604 },
1605 Calibration {
1608 frontier: PathBuf,
1609 #[arg(long)]
1611 actor: Option<String>,
1612 #[arg(long)]
1614 json: bool,
1615 },
1616 PredictionsExpire {
1624 frontier: PathBuf,
1625 #[arg(long)]
1628 now: Option<String>,
1629 #[arg(long)]
1632 dry_run: bool,
1633 #[arg(long)]
1634 json: bool,
1635 },
1636 Consensus {
1645 frontier: PathBuf,
1647 target: String,
1649 #[arg(long, default_value = "composite")]
1652 weighting: String,
1653 #[arg(long)]
1658 causal_claim: Option<String>,
1659 #[arg(long)]
1664 causal_grade_min: Option<String>,
1665 #[arg(long)]
1667 json: bool,
1668 },
1669
1670 Ingest {
1686 path: String,
1689 #[arg(long)]
1692 frontier: PathBuf,
1693 #[arg(short, long)]
1697 backend: Option<String>,
1698 #[arg(long)]
1702 actor: Option<String>,
1703 #[arg(long)]
1705 dry_run: bool,
1706 #[arg(long)]
1707 json: bool,
1708 },
1709
1710 Propose {
1716 frontier: PathBuf,
1717 finding_id: String,
1718 #[arg(long)]
1720 status: String,
1721 #[arg(long)]
1722 reason: String,
1723 #[arg(long)]
1724 reviewer: String,
1725 #[arg(long)]
1728 apply: bool,
1729 #[arg(long)]
1730 json: bool,
1731 },
1732
1733 Accept {
1737 frontier: PathBuf,
1738 proposal_id: String,
1739 #[arg(long)]
1740 reviewer: String,
1741 #[arg(long)]
1742 reason: String,
1743 #[arg(long)]
1744 json: bool,
1745 },
1746
1747 Attest {
1759 frontier: PathBuf,
1761 #[arg(long)]
1765 event: Option<String>,
1766 #[arg(long)]
1769 attester: Option<String>,
1770 #[arg(long)]
1773 scope_note: Option<String>,
1774 #[arg(long)]
1777 proof_id: Option<String>,
1778 #[arg(long)]
1783 signature: Option<String>,
1784 #[arg(long)]
1787 key: Option<PathBuf>,
1788 #[arg(long)]
1789 json: bool,
1790 },
1791
1792 Lineage {
1795 frontier: PathBuf,
1796 finding_id: String,
1797 #[arg(long, value_name = "RFC3339_TIMESTAMP")]
1798 as_of: Option<String>,
1799 #[arg(long)]
1800 json: bool,
1801 },
1802
1803 Carina {
1806 #[command(subcommand)]
1807 action: CarinaAction,
1808 },
1809
1810 Atlas {
1815 #[command(subcommand)]
1816 action: AtlasAction,
1817 },
1818
1819 Constellation {
1825 #[command(subcommand)]
1826 action: ConstellationAction,
1827 },
1828}
1829
1830#[derive(Subcommand)]
1835enum AtlasAction {
1836 Init {
1841 name: String,
1844 #[arg(long, value_delimiter = ',', num_args = 1..)]
1846 frontiers: Vec<PathBuf>,
1847 #[arg(long, default_value = "general")]
1850 domain: String,
1851 #[arg(long)]
1853 scope_note: Option<String>,
1854 #[arg(long, default_value = "atlases")]
1856 atlases_root: PathBuf,
1857 #[arg(long)]
1858 json: bool,
1859 },
1860 Materialize {
1864 name: String,
1866 #[arg(long, default_value = "atlases")]
1867 atlases_root: PathBuf,
1868 #[arg(long)]
1869 json: bool,
1870 },
1871 Serve {
1876 name: String,
1877 #[arg(long, default_value = "atlases")]
1878 atlases_root: PathBuf,
1879 #[arg(long, default_value_t = 3848)]
1880 port: u16,
1881 #[arg(long)]
1882 no_open: bool,
1883 },
1884 Update {
1891 name: String,
1892 #[arg(long, value_delimiter = ',')]
1895 add_frontier: Vec<PathBuf>,
1896 #[arg(long, value_delimiter = ',')]
1899 remove_vfr_id: Vec<String>,
1900 #[arg(long, default_value = "atlases")]
1901 atlases_root: PathBuf,
1902 #[arg(long)]
1903 json: bool,
1904 },
1905}
1906
1907#[derive(Subcommand)]
1911enum ConstellationAction {
1912 Init {
1916 name: String,
1917 #[arg(long, value_delimiter = ',', num_args = 1..)]
1919 atlases: Vec<PathBuf>,
1920 #[arg(long)]
1921 scope_note: Option<String>,
1922 #[arg(long, default_value = "constellations")]
1923 constellations_root: PathBuf,
1924 #[arg(long)]
1925 json: bool,
1926 },
1927 Materialize {
1932 name: String,
1933 #[arg(long, default_value = "constellations")]
1934 constellations_root: PathBuf,
1935 #[arg(long)]
1936 json: bool,
1937 },
1938 Serve {
1942 name: String,
1943 #[arg(long, default_value = "constellations")]
1944 constellations_root: PathBuf,
1945 #[arg(long, default_value_t = 3849)]
1946 port: u16,
1947 #[arg(long)]
1948 no_open: bool,
1949 },
1950}
1951
1952#[derive(Subcommand)]
1956enum CarinaAction {
1957 Validate {
1962 path: PathBuf,
1966 #[arg(long)]
1969 primitive: Option<String>,
1970 #[arg(long)]
1971 json: bool,
1972 },
1973 List {
1975 #[arg(long)]
1976 json: bool,
1977 },
1978 Schema { primitive: String },
1980}
1981
1982#[derive(Subcommand)]
1983enum PacketAction {
1984 Inspect {
1986 path: PathBuf,
1987 #[arg(long)]
1988 json: bool,
1989 },
1990 Validate {
1992 path: PathBuf,
1993 #[arg(long)]
1994 json: bool,
1995 },
1996}
1997
1998#[derive(Subcommand)]
1999enum SignAction {
2000 GenerateKeypair {
2002 #[arg(long, default_value = ".vela/keys")]
2003 out: PathBuf,
2004 #[arg(long)]
2005 json: bool,
2006 },
2007 Apply {
2009 frontier: PathBuf,
2010 #[arg(long)]
2011 private_key: PathBuf,
2012 #[arg(long)]
2013 json: bool,
2014 },
2015 Verify {
2017 frontier: PathBuf,
2018 #[arg(long)]
2019 public_key: Option<PathBuf>,
2020 #[arg(long)]
2021 json: bool,
2022 },
2023 ThresholdSet {
2028 frontier: PathBuf,
2029 finding_id: String,
2031 #[arg(long)]
2033 to: u32,
2034 #[arg(long)]
2035 json: bool,
2036 },
2037}
2038
2039#[derive(Subcommand)]
2040enum ActorAction {
2041 Add {
2043 frontier: PathBuf,
2044 id: String,
2046 #[arg(long)]
2048 pubkey: String,
2049 #[arg(long)]
2053 tier: Option<String>,
2054 #[arg(long)]
2058 orcid: Option<String>,
2059 #[arg(long)]
2064 clearance: Option<String>,
2065 #[arg(long)]
2066 json: bool,
2067 },
2068 List {
2070 frontier: PathBuf,
2071 #[arg(long)]
2072 json: bool,
2073 },
2074}
2075
2076#[derive(Subcommand)]
2077enum CausalAction {
2078 Audit {
2082 frontier: PathBuf,
2083 #[arg(long)]
2086 problems_only: bool,
2087 #[arg(long)]
2088 json: bool,
2089 },
2090 Effect {
2103 frontier: PathBuf,
2104 source: String,
2106 #[arg(long)]
2108 on: String,
2109 #[arg(long)]
2110 json: bool,
2111 },
2112 Graph {
2115 frontier: PathBuf,
2116 #[arg(long)]
2118 node: Option<String>,
2119 #[arg(long)]
2120 json: bool,
2121 },
2122 Counterfactual {
2129 frontier: PathBuf,
2130 intervene_on: String,
2132 #[arg(long)]
2134 set_to: f64,
2135 #[arg(long)]
2137 target: String,
2138 #[arg(long)]
2139 json: bool,
2140 },
2141}
2142
2143#[derive(Subcommand)]
2144enum BridgesAction {
2145 Derive {
2149 frontier_a: PathBuf,
2152 #[arg(long, default_value = "a")]
2154 label_a: String,
2155 frontier_b: PathBuf,
2157 #[arg(long, default_value = "b")]
2159 label_b: String,
2160 #[arg(long)]
2161 json: bool,
2162 },
2163 List {
2165 frontier: PathBuf,
2167 #[arg(long)]
2169 status: Option<String>,
2170 #[arg(long)]
2171 json: bool,
2172 },
2173 Show {
2175 frontier: PathBuf,
2176 bridge_id: String,
2177 #[arg(long)]
2178 json: bool,
2179 },
2180 Confirm {
2185 frontier: PathBuf,
2186 bridge_id: String,
2187 #[arg(long)]
2190 reviewer: Option<String>,
2191 #[arg(long)]
2193 note: Option<String>,
2194 #[arg(long)]
2195 json: bool,
2196 },
2197 Refute {
2200 frontier: PathBuf,
2201 bridge_id: String,
2202 #[arg(long)]
2203 reviewer: Option<String>,
2204 #[arg(long)]
2205 note: Option<String>,
2206 #[arg(long)]
2207 json: bool,
2208 },
2209}
2210
2211#[derive(Subcommand)]
2212enum FederationAction {
2213 PeerAdd {
2217 frontier: PathBuf,
2218 id: String,
2220 #[arg(long)]
2222 url: String,
2223 #[arg(long)]
2225 pubkey: String,
2226 #[arg(long, default_value = "")]
2228 note: String,
2229 #[arg(long)]
2230 json: bool,
2231 },
2232 PeerList {
2234 frontier: PathBuf,
2235 #[arg(long)]
2236 json: bool,
2237 },
2238 PeerRemove {
2242 frontier: PathBuf,
2243 id: String,
2244 #[arg(long)]
2245 json: bool,
2246 },
2247 Sync {
2264 frontier: PathBuf,
2265 peer_id: String,
2267 #[arg(long)]
2269 url: Option<String>,
2270 #[arg(long)]
2274 via_hub: bool,
2275 #[arg(long)]
2278 vfr_id: Option<String>,
2279 #[arg(long)]
2286 allow_cross_vfr: bool,
2287 #[arg(long)]
2289 dry_run: bool,
2290 #[arg(long)]
2291 json: bool,
2292 },
2293 PushResolution {
2306 frontier: PathBuf,
2307 conflict_event_id: String,
2311 #[arg(long = "to")]
2313 to: String,
2314 #[arg(long)]
2318 key: Option<PathBuf>,
2319 #[arg(long)]
2322 vfr_id: Option<String>,
2323 #[arg(long)]
2324 json: bool,
2325 },
2326}
2327
2328#[derive(Subcommand)]
2329enum FrontierAction {
2330 New {
2337 path: PathBuf,
2339 #[arg(long)]
2341 name: String,
2342 #[arg(long, default_value = "")]
2344 description: String,
2345 #[arg(long)]
2347 force: bool,
2348 #[arg(long)]
2349 json: bool,
2350 },
2351 Materialize {
2353 frontier: PathBuf,
2355 #[arg(long)]
2356 json: bool,
2357 },
2358 AddDep {
2362 frontier: PathBuf,
2364 vfr_id: String,
2366 #[arg(long)]
2369 locator: String,
2370 #[arg(long)]
2373 snapshot: String,
2374 #[arg(long)]
2376 name: Option<String>,
2377 #[arg(long)]
2378 json: bool,
2379 },
2380 ListDeps {
2382 frontier: PathBuf,
2383 #[arg(long)]
2384 json: bool,
2385 },
2386 RemoveDep {
2389 frontier: PathBuf,
2390 vfr_id: String,
2391 #[arg(long)]
2392 json: bool,
2393 },
2394 RefreshDeps {
2401 frontier: PathBuf,
2402 #[arg(long, default_value = "https://vela-hub.fly.dev")]
2404 from: String,
2405 #[arg(long)]
2407 dry_run: bool,
2408 #[arg(long)]
2409 json: bool,
2410 },
2411 Diff {
2423 frontier: PathBuf,
2425 #[arg(long)]
2428 since: Option<String>,
2429 #[arg(long)]
2432 week: Option<String>,
2433 #[arg(long)]
2435 json: bool,
2436 },
2437}
2438
2439#[derive(Subcommand)]
2440enum RepoAction {
2441 Status {
2443 frontier: PathBuf,
2445 #[arg(long)]
2447 json: bool,
2448 },
2449 Doctor {
2451 frontier: PathBuf,
2453 #[arg(long)]
2455 json: bool,
2456 },
2457}
2458
2459#[derive(Subcommand)]
2460enum QueueAction {
2461 List {
2463 #[arg(long)]
2464 queue_file: Option<PathBuf>,
2465 #[arg(long)]
2466 json: bool,
2467 },
2468 Sign {
2471 #[arg(long)]
2473 actor: String,
2474 #[arg(long)]
2476 key: PathBuf,
2477 #[arg(long)]
2479 queue_file: Option<PathBuf>,
2480 #[arg(long, alias = "all")]
2486 yes_to_all: bool,
2487 #[arg(long)]
2488 json: bool,
2489 },
2490 Clear {
2492 #[arg(long)]
2493 queue_file: Option<PathBuf>,
2494 #[arg(long)]
2495 json: bool,
2496 },
2497}
2498
2499#[derive(Subcommand)]
2500enum RegistryAction {
2501 List {
2503 #[arg(long)]
2505 from: Option<String>,
2506 #[arg(long)]
2507 json: bool,
2508 },
2509 Publish {
2511 frontier: PathBuf,
2513 #[arg(long)]
2515 owner: String,
2516 #[arg(long)]
2518 key: PathBuf,
2519 #[arg(long)]
2526 locator: Option<String>,
2527 #[arg(long)]
2529 to: Option<String>,
2530 #[arg(long)]
2531 json: bool,
2532 },
2533 DependsOn {
2540 vfr_id: String,
2542 #[arg(long, default_value = "https://vela-hub.fly.dev")]
2544 from: String,
2545 #[arg(long)]
2546 json: bool,
2547 },
2548 Mirror {
2556 vfr_id: String,
2558 #[arg(long)]
2560 from: String,
2561 #[arg(long)]
2563 to: String,
2564 #[arg(long)]
2565 json: bool,
2566 },
2567 Pull {
2569 vfr_id: String,
2571 #[arg(long)]
2573 from: Option<String>,
2574 #[arg(long)]
2578 out: PathBuf,
2579 #[arg(long)]
2582 transitive: bool,
2583 #[arg(long, default_value = "4")]
2586 depth: usize,
2587 #[arg(long)]
2588 json: bool,
2589 },
2590}
2591
2592#[derive(Subcommand)]
2593enum GapsAction {
2594 Rank {
2596 frontier: PathBuf,
2597 #[arg(long, default_value = "10")]
2598 top: usize,
2599 #[arg(long)]
2600 domain: Option<String>,
2601 #[arg(long)]
2602 json: bool,
2603 },
2604}
2605
// Actions for the `vela link` subcommand (dispatched via `cmd_link`).
// `//` comments (not `///`) are deliberate: clap turns doc comments into
// CLI help text, which would alter `--help` output.
#[derive(Subcommand)]
enum LinkAction {
    // Add a directed link between two items in a frontier.
    Add {
        // Positional path to the frontier.
        frontier: PathBuf,
        // Identifier of the link's source item.
        #[arg(long)]
        from: String,
        // Identifier of the link's target item.
        #[arg(long)]
        to: String,
        // Link relationship type (`r#type` because `type` is a keyword);
        // surfaced to the CLI as `--type`.
        #[arg(long, default_value = "supports")]
        r#type: String,
        // Free-text note attached to the link; empty by default.
        #[arg(long, default_value = "")]
        note: String,
        // Who/what inferred this link.
        #[arg(long, default_value = "reviewer")]
        inferred_by: String,
        // Skip validation that the target exists — presumably for linking to
        // not-yet-ingested items; TODO(review): confirm in `cmd_link`.
        #[arg(long)]
        no_check_target: bool,
        // Emit machine-readable JSON output.
        #[arg(long)]
        json: bool,
    },
}
2644
// Actions for the `vela entity` subcommand (dispatched via `cmd_entity`).
// `//` comments (not `///`) are deliberate: clap turns doc comments into
// CLI help text, which would alter `--help` output.
#[derive(Subcommand)]
enum EntityAction {
    // Resolve entities in a frontier against external sources — exact
    // behavior defined in `cmd_entity`.
    Resolve {
        // Positional path to the frontier.
        frontier: PathBuf,
        // Force re-resolution — presumably overrides cached/previous results;
        // TODO(review): confirm in `cmd_entity`.
        #[arg(long)]
        force: bool,
        // Emit machine-readable JSON output.
        #[arg(long)]
        json: bool,
    },
    // List known entities. Note: takes no frontier path, unlike `Resolve`.
    List {
        // Emit machine-readable JSON output.
        #[arg(long)]
        json: bool,
    },
}
2667
// Actions for the `vela finding` subcommand, handled inline in the
// `Commands::Finding` arm of `run_command`.
// `//` comments (not `///`) are deliberate: clap turns doc comments into
// CLI help text, which would alter `--help` output.
#[derive(Subcommand)]
enum FindingCommands {
    // Add a new finding; the dispatch arm builds a
    // `state::FindingDraftOptions` and calls `state::add_finding`.
    Add {
        // Positional path to the frontier.
        frontier: PathBuf,
        // The finding's assertion text (becomes `FindingDraftOptions::text`).
        #[arg(long)]
        assertion: String,
        // Assertion type (`--type`; `r#type` because `type` is a keyword).
        // Validated against `bundle::VALID_ASSERTION_TYPES` at dispatch.
        #[arg(long, default_value = "mechanism")]
        r#type: String,
        // Human-readable source description.
        #[arg(long, default_value = "manual finding")]
        source: String,
        // Provenance source type; validated against
        // `bundle::VALID_PROVENANCE_SOURCE_TYPES` at dispatch.
        #[arg(long, default_value = "expert_assertion")]
        source_type: String,
        // Author of the finding (required).
        #[arg(long)]
        author: String,
        // Initial confidence score.
        #[arg(long, default_value = "0.3")]
        confidence: f64,
        // Evidence type; validated against `bundle::VALID_EVIDENCE_TYPES`
        // at dispatch.
        #[arg(long, default_value = "theoretical")]
        evidence_type: String,
        // Entity list, parsed by `parse_entities` into (name, type) pairs;
        // each type is checked against `bundle::VALID_ENTITY_TYPES`.
        // Exact input syntax is defined by `parse_entities`.
        #[arg(long, default_value = "")]
        entities: String,
        // Mark the entity list as human-reviewed.
        #[arg(long)]
        entities_reviewed: bool,
        // Repeatable evidence spans, parsed by `parse_evidence_spans`.
        #[arg(long)]
        evidence_span: Vec<String>,
        // Flag the finding as a knowledge gap.
        #[arg(long)]
        gap: bool,
        // Flag the finding as negative space.
        #[arg(long)]
        negative_space: bool,
        // Optional bibliographic metadata for the source.
        #[arg(long)]
        doi: Option<String>,
        #[arg(long)]
        pmid: Option<String>,
        #[arg(long)]
        year: Option<i32>,
        #[arg(long)]
        journal: Option<String>,
        #[arg(long)]
        url: Option<String>,
        // Semicolon-separated author list; split on ';' at dispatch.
        #[arg(long)]
        source_authors: Option<String>,
        // Free-text experimental conditions.
        #[arg(long)]
        conditions_text: Option<String>,
        // Semicolon-separated species list; split on ';' at dispatch.
        #[arg(long)]
        species: Option<String>,
        // Evidence-context flags, passed through to the draft options.
        #[arg(long)]
        in_vivo: bool,
        #[arg(long)]
        in_vitro: bool,
        #[arg(long)]
        human_data: bool,
        #[arg(long)]
        clinical_trial: bool,
        // Emit machine-readable JSON output.
        #[arg(long)]
        json: bool,
        // Apply the change (vs. a preview/dry report — confirm semantics in
        // `state::add_finding`).
        #[arg(long)]
        apply: bool,
    },
    // Replace an existing finding with a new one; dispatch calls
    // `state::supersede_finding(old_id, reason, draft)`.
    Supersede {
        // Positional path to the frontier.
        frontier: PathBuf,
        // Positional id of the finding being superseded.
        old_id: String,
        // Replacement assertion text.
        #[arg(long)]
        assertion: String,
        // Same enum-validated fields as `Add` (see above).
        #[arg(long, default_value = "mechanism")]
        r#type: String,
        #[arg(long, default_value = "manual finding")]
        source: String,
        #[arg(long, default_value = "expert_assertion")]
        source_type: String,
        #[arg(long)]
        author: String,
        // Why the old finding is superseded (required).
        #[arg(long)]
        reason: String,
        // NOTE: defaults differ from `Add` (0.5 / "experimental").
        #[arg(long, default_value = "0.5")]
        confidence: f64,
        #[arg(long, default_value = "experimental")]
        evidence_type: String,
        #[arg(long, default_value = "")]
        entities: String,
        // Bibliographic metadata, as on `Add`.
        #[arg(long)]
        doi: Option<String>,
        #[arg(long)]
        pmid: Option<String>,
        #[arg(long)]
        year: Option<i32>,
        #[arg(long)]
        journal: Option<String>,
        #[arg(long)]
        url: Option<String>,
        // Semicolon-separated; split on ';' at dispatch.
        #[arg(long)]
        source_authors: Option<String>,
        #[arg(long)]
        conditions_text: Option<String>,
        // Semicolon-separated; split on ';' at dispatch.
        #[arg(long)]
        species: Option<String>,
        #[arg(long)]
        in_vivo: bool,
        #[arg(long)]
        in_vitro: bool,
        #[arg(long)]
        human_data: bool,
        #[arg(long)]
        clinical_trial: bool,
        #[arg(long)]
        json: bool,
        // Unlike `Add`, Supersede exposes no entities_reviewed /
        // evidence_span / gap / negative_space flags; the dispatch arm
        // hard-codes them (false / empty) in the draft options.
        #[arg(long)]
        apply: bool,
    },
    // Set the causal claim (and optional evidence grade) on a finding;
    // dispatch validates against `bundle::VALID_CAUSAL_CLAIMS` and
    // `bundle::VALID_CAUSAL_EVIDENCE_GRADES`, then calls `state::set_causal`.
    CausalSet {
        // Positional path to the frontier.
        frontier: PathBuf,
        // Positional id of the target finding.
        finding_id: String,
        // Causal claim; must be in `bundle::VALID_CAUSAL_CLAIMS`.
        #[arg(long)]
        claim: String,
        // Optional evidence grade; must be in
        // `bundle::VALID_CAUSAL_EVIDENCE_GRADES` when given.
        #[arg(long)]
        grade: Option<String>,
        // Actor recorded for the change.
        #[arg(long)]
        actor: String,
        // Reason recorded for the change.
        #[arg(long)]
        reason: String,
        // Emit machine-readable JSON output.
        #[arg(long)]
        json: bool,
    },
}
2858
// Actions for the `vela proposals` subcommand (dispatched via
// `cmd_proposals`).
// `//` comments (not `///`) are deliberate: clap turns doc comments into
// CLI help text, which would alter `--help` output.
#[derive(Subcommand)]
enum ProposalAction {
    // List proposals in a frontier, optionally filtered by status.
    List {
        // Positional path to the frontier.
        frontier: PathBuf,
        // Optional status filter.
        #[arg(long)]
        status: Option<String>,
        // Emit machine-readable JSON output.
        #[arg(long)]
        json: bool,
    },
    // Show a single proposal.
    Show {
        // Positional path to the frontier.
        frontier: PathBuf,
        // Positional proposal identifier.
        proposal_id: String,
        #[arg(long)]
        json: bool,
    },
    // Preview the effect of applying a proposal (cf. `proposals::
    // preview_at_path` used by the `vela diff` command for `vpr_*` ids).
    Preview {
        frontier: PathBuf,
        proposal_id: String,
        // Reviewer identity recorded for the preview.
        #[arg(long, default_value = "reviewer:preview")]
        reviewer: String,
        #[arg(long)]
        json: bool,
    },
    // Import proposals from an external file into a frontier.
    Import {
        frontier: PathBuf,
        // Positional path to the proposals source file.
        source: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Validate a proposals file without touching a frontier (no
    // frontier argument, unlike the other variants).
    Validate {
        source: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Export proposals from a frontier to a file, optionally filtered
    // by status.
    Export {
        frontier: PathBuf,
        // Positional output path.
        output: PathBuf,
        #[arg(long)]
        status: Option<String>,
        #[arg(long)]
        json: bool,
    },
    // Accept a proposal; reviewer and reason are both required.
    Accept {
        frontier: PathBuf,
        proposal_id: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        json: bool,
    },
    // Reject a proposal; same required fields as `Accept`.
    Reject {
        frontier: PathBuf,
        proposal_id: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        json: bool,
    },
}
2930
// Actions for the `vela source-adapter` subcommand (dispatched via the
// async `cmd_source_adapter`).
// `//` comments (not `///`) are deliberate: clap turns doc comments into
// CLI help text, which would alter `--help` output.
#[derive(Subcommand)]
enum SourceAdapterAction {
    // Run a named source adapter against a frontier.
    Run {
        // Positional path to the frontier.
        frontier: PathBuf,
        // Positional name of the adapter to run.
        adapter: String,
        // Actor recorded for the run.
        #[arg(long)]
        actor: String,
        // Repeatable entries; exposed on the CLI as `--entry` (singular)
        // while the Rust field is plural.
        #[arg(long = "entry")]
        entries: Vec<String>,
        // Optional priority hint for the run.
        #[arg(long)]
        priority: Option<String>,
        // Include entries that would normally be excluded.
        #[arg(long)]
        include_excluded: bool,
        // Allow the run to succeed with partial results.
        #[arg(long)]
        allow_partial: bool,
        // Compute the run without writing changes.
        #[arg(long)]
        dry_run: bool,
        // Optional directory of pre-fetched inputs.
        #[arg(long)]
        input_dir: Option<PathBuf>,
        // Also apply artifacts produced by the adapter.
        #[arg(long)]
        apply_artifacts: bool,
        // Emit machine-readable JSON output.
        #[arg(long)]
        json: bool,
    },
}
2968
// Actions for the `vela runtime-adapter` subcommand (dispatched via
// `cmd_runtime_adapter`; synchronous, unlike `SourceAdapterAction`).
// `//` comments (not `///`) are deliberate: clap turns doc comments into
// CLI help text, which would alter `--help` output.
#[derive(Subcommand)]
enum RuntimeAdapterAction {
    // Run a named runtime adapter with a single required input file.
    Run {
        // Positional path to the frontier.
        frontier: PathBuf,
        // Positional name of the adapter to run.
        adapter: String,
        // Required input file for the adapter (contrast with
        // `SourceAdapterAction::Run`, which takes repeatable `--entry`s).
        #[arg(long)]
        input: PathBuf,
        // Actor recorded for the run.
        #[arg(long)]
        actor: String,
        // Compute the run without writing changes.
        #[arg(long)]
        dry_run: bool,
        // Also apply artifacts produced by the adapter.
        #[arg(long)]
        apply_artifacts: bool,
        // Emit machine-readable JSON output.
        #[arg(long)]
        json: bool,
    },
}
2994
// Actions for the `vela bridge-kit` subcommand (dispatched via the async
// `cmd_bridge_kit`).
// `//` comments (not `///`) are deliberate: clap turns doc comments into
// CLI help text, which would alter `--help` output.
#[derive(Subcommand)]
enum BridgeKitAction {
    // Validate a bridge-kit source file.
    Validate {
        // Positional path to the source file to validate.
        source: PathBuf,
        // Emit machine-readable JSON output.
        #[arg(long)]
        json: bool,
    },
    // Verify the provenance of a packet file.
    VerifyProvenance {
        // Positional path to the packet to verify.
        packet: PathBuf,
        // Emit machine-readable JSON output.
        #[arg(long)]
        json: bool,
    },
}
3019
3020pub async fn run_command() {
3021 dotenvy::dotenv().ok();
3022
3023 match Cli::parse().command {
3024 Commands::Scout {
3025 folder,
3026 frontier,
3027 backend,
3028 dry_run,
3029 json,
3030 } => {
3031 cmd_scout(&folder, &frontier, backend.as_deref(), dry_run, json).await;
3032 }
3033 Commands::CompileNotes {
3034 vault,
3035 frontier,
3036 backend,
3037 max_files,
3038 max_items_per_category,
3039 dry_run,
3040 json,
3041 } => {
3042 cmd_compile_notes(
3043 &vault,
3044 &frontier,
3045 backend.as_deref(),
3046 max_files,
3047 max_items_per_category,
3048 dry_run,
3049 json,
3050 )
3051 .await;
3052 }
3053 Commands::CompileCode {
3054 root,
3055 frontier,
3056 backend,
3057 max_files,
3058 dry_run,
3059 json,
3060 } => {
3061 cmd_compile_code(
3062 &root,
3063 &frontier,
3064 backend.as_deref(),
3065 max_files,
3066 dry_run,
3067 json,
3068 )
3069 .await;
3070 }
3071 Commands::CompileData {
3072 root,
3073 frontier,
3074 backend,
3075 sample_rows,
3076 dry_run,
3077 json,
3078 } => {
3079 cmd_compile_data(
3080 &root,
3081 &frontier,
3082 backend.as_deref(),
3083 sample_rows,
3084 dry_run,
3085 json,
3086 )
3087 .await;
3088 }
3089 Commands::ReviewPending {
3090 frontier,
3091 backend,
3092 max_proposals,
3093 batch_size,
3094 dry_run,
3095 json,
3096 } => {
3097 cmd_review_pending(
3098 &frontier,
3099 backend.as_deref(),
3100 max_proposals,
3101 batch_size,
3102 dry_run,
3103 json,
3104 )
3105 .await;
3106 }
3107 Commands::FindTensions {
3108 frontier,
3109 backend,
3110 max_findings,
3111 dry_run,
3112 json,
3113 } => {
3114 cmd_find_tensions(&frontier, backend.as_deref(), max_findings, dry_run, json).await;
3115 }
3116 Commands::PlanExperiments {
3117 frontier,
3118 backend,
3119 max_findings,
3120 dry_run,
3121 json,
3122 } => {
3123 cmd_plan_experiments(&frontier, backend.as_deref(), max_findings, dry_run, json).await;
3124 }
3125 Commands::Check {
3126 source,
3127 schema,
3128 stats,
3129 conformance,
3130 conformance_dir,
3131 all,
3132 schema_only,
3133 strict,
3134 fix,
3135 json,
3136 } => cmd_check(
3137 source.as_deref(),
3138 schema,
3139 stats,
3140 conformance,
3141 &conformance_dir,
3142 all,
3143 schema_only,
3144 strict,
3145 fix,
3146 json,
3147 ),
3148 Commands::Integrity { frontier, json } => cmd_integrity(&frontier, json),
3149 Commands::Impact {
3150 frontier,
3151 finding_id,
3152 depth,
3153 json,
3154 } => cmd_impact(&frontier, &finding_id, depth, json),
3155 Commands::Discord {
3156 frontier,
3157 json,
3158 kind,
3159 } => cmd_discord(&frontier, json, kind.as_deref()),
3160 Commands::Normalize {
3161 source,
3162 out,
3163 write,
3164 dry_run,
3165 rewrite_ids,
3166 id_map,
3167 resync_provenance,
3168 json,
3169 } => cmd_normalize(
3170 &source,
3171 out.as_deref(),
3172 write,
3173 dry_run,
3174 rewrite_ids,
3175 id_map.as_deref(),
3176 resync_provenance,
3177 json,
3178 ),
3179 Commands::Proof {
3180 frontier,
3181 out,
3182 template,
3183 gold,
3184 record_proof_state,
3185 json,
3186 } => cmd_proof(
3187 &frontier,
3188 &out,
3189 &template,
3190 gold.as_deref(),
3191 record_proof_state,
3192 json,
3193 ),
3194 Commands::Repo { action } => cmd_repo(action),
3195 Commands::Serve {
3196 frontier,
3197 frontiers,
3198 backend,
3199 http,
3200 setup,
3201 check_tools,
3202 json,
3203 workbench,
3204 } => {
3205 if setup {
3206 cmd_mcp_setup(frontier.as_deref(), frontiers.as_deref());
3207 } else if check_tools {
3208 let source =
3209 serve::ProjectSource::from_args(frontier.as_deref(), frontiers.as_deref());
3210 match serve::check_tools(source) {
3211 Ok(report) => {
3212 if json {
3213 println!(
3214 "{}",
3215 serde_json::to_string_pretty(&report)
3216 .expect("failed to serialize tool check report")
3217 );
3218 } else {
3219 print_tool_check_report(&report);
3220 }
3221 }
3222 Err(e) => fail(&format!("Tool check failed: {e}")),
3223 }
3224 } else {
3225 let source =
3226 serve::ProjectSource::from_args(frontier.as_deref(), frontiers.as_deref());
3227 let resolved_port = if workbench {
3229 Some(http.unwrap_or(3848))
3230 } else {
3231 http
3232 };
3233 if let Some(port) = resolved_port {
3234 serve::run_http(source, backend.as_deref(), port, workbench).await;
3235 } else {
3236 serve::run(source, backend.as_deref()).await;
3237 }
3238 }
3239 }
3240 Commands::Status { frontier, json } => cmd_status(&frontier, json),
3241 Commands::Log {
3242 frontier,
3243 limit,
3244 kind,
3245 json,
3246 } => cmd_log(&frontier, limit, kind.as_deref(), json),
3247 Commands::Inbox {
3248 frontier,
3249 kind,
3250 limit,
3251 json,
3252 } => cmd_inbox(&frontier, kind.as_deref(), limit, json),
3253 Commands::Ask {
3254 frontier,
3255 question,
3256 json,
3257 } => cmd_ask(&frontier, &question.join(" "), json),
3258 Commands::Stats { frontier, json } => {
3259 if json {
3260 print_stats_json(&frontier);
3261 } else {
3262 cmd_stats(&frontier);
3263 }
3264 }
3265 Commands::Search {
3266 source,
3267 query,
3268 entity,
3269 r#type,
3270 all,
3271 limit,
3272 json,
3273 } => cmd_search(
3274 source.as_deref(),
3275 &query,
3276 entity.as_deref(),
3277 r#type.as_deref(),
3278 all.as_deref(),
3279 limit,
3280 json,
3281 ),
3282 Commands::Tensions {
3283 source,
3284 both_high,
3285 cross_domain,
3286 top,
3287 json,
3288 } => cmd_tensions(&source, both_high, cross_domain, top, json),
3289 Commands::Gaps { action } => cmd_gaps(action),
3290 Commands::Bridge {
3291 inputs,
3292 novelty,
3293 top,
3294 } => cmd_bridge(&inputs, novelty, top).await,
3295 Commands::Export {
3296 frontier,
3297 format,
3298 output,
3299 } => export::run(&frontier, &format, output.as_deref()),
3300 Commands::Packet { action } => cmd_packet(action),
3301 Commands::Verify { path, json } => cmd_verify(&path, json),
3302 Commands::Bench {
3303 frontier,
3304 gold,
3305 candidate,
3306 sources,
3307 threshold,
3308 report,
3309 entity_gold,
3310 link_gold,
3311 suite,
3312 suite_ready,
3313 min_f1,
3314 min_precision,
3315 min_recall,
3316 no_thresholds,
3317 json,
3318 } => {
3319 if let Some(cand) = candidate.clone() {
3324 let Some(g) = gold.clone() else {
3325 eprintln!(
3326 "{} `vela bench --candidate <…>` requires `--gold <…>`",
3327 style::err_prefix()
3328 );
3329 std::process::exit(2);
3330 };
3331 cmd_agent_bench(
3332 &g,
3333 &cand,
3334 sources.as_deref(),
3335 threshold,
3336 report.as_deref(),
3337 json,
3338 );
3339 } else {
3340 cmd_bench(BenchArgs {
3341 frontier,
3342 gold,
3343 entity_gold,
3344 link_gold,
3345 suite,
3346 suite_ready,
3347 min_f1,
3348 min_precision,
3349 min_recall,
3350 no_thresholds,
3351 json,
3352 });
3353 }
3354 }
3355 Commands::Conformance { dir } => {
3356 let _ = conformance::run(&dir);
3357 }
3358 Commands::Version => println!("vela 0.36.0"),
3359 Commands::Sign { action } => cmd_sign(action),
3360 Commands::Actor { action } => cmd_actor(action),
3361 Commands::Federation { action } => cmd_federation(action),
3362 Commands::Causal { action } => cmd_causal(action),
3363 Commands::Frontier { action } => cmd_frontier(action),
3364 Commands::Queue { action } => cmd_queue(action),
3365 Commands::Registry { action } => cmd_registry(action),
3366 Commands::Init {
3367 path,
3368 name,
3369 template,
3370 no_git,
3371 json,
3372 } => cmd_init(&path, &name, &template, !no_git, json),
3373 Commands::Quickstart {
3374 path,
3375 name,
3376 reviewer,
3377 assertion,
3378 keys_out,
3379 json,
3380 } => cmd_quickstart(
3381 &path,
3382 &name,
3383 &reviewer,
3384 assertion.as_deref(),
3385 keys_out.as_deref(),
3386 json,
3387 ),
3388 Commands::Lock { path, check, json } => cmd_lock(&path, check, json),
3389 Commands::Doc { path, out, json } => cmd_doc(&path, out.as_deref(), json),
3390 Commands::Import { frontier, into } => cmd_import(&frontier, into.as_deref()),
3391 Commands::Diff {
3392 target,
3393 frontier_b,
3394 frontier,
3395 reviewer,
3396 json,
3397 quiet,
3398 } => {
3399 if target.starts_with("vpr_") {
3404 let frontier_root = frontier
3405 .clone()
3406 .or_else(|| frontier_b.clone())
3407 .unwrap_or_else(|| std::path::PathBuf::from("."));
3408 let preview = proposals::preview_at_path(&frontier_root, &target, &reviewer)
3409 .unwrap_or_else(|e| fail_return(&e));
3410 let payload = json!({
3411 "ok": true,
3412 "command": "diff.proposal",
3413 "frontier": frontier_root.display().to_string(),
3414 "proposal_id": target,
3415 "preview": preview,
3416 });
3417 if json {
3418 println!(
3419 "{}",
3420 serde_json::to_string_pretty(&payload)
3421 .expect("failed to serialize diff preview")
3422 );
3423 } else {
3424 println!("vela diff · proposal preview");
3425 println!(" proposal: {}", target);
3426 println!(" kind: {}", preview.kind);
3427 println!(
3428 " findings: {} -> {}",
3429 preview.findings_before, preview.findings_after
3430 );
3431 println!(
3432 " artifacts: {} -> {}",
3433 preview.artifacts_before, preview.artifacts_after
3434 );
3435 println!(
3436 " events: {} -> {}",
3437 preview.events_before, preview.events_after
3438 );
3439 if !preview.changed_findings.is_empty() {
3440 println!(
3441 " findings changed: {}",
3442 preview.changed_findings.join(", ")
3443 );
3444 }
3445 }
3446 } else {
3447 let frontier_a = std::path::PathBuf::from(&target);
3448 let b = frontier_b.unwrap_or_else(|| {
3449 fail_return(
3450 "diff: two-frontier mode needs a second positional path; for proposal preview pass a `vpr_*` id",
3451 )
3452 });
3453 diff::run(&frontier_a, &b, json, quiet);
3454 }
3455 }
3456 Commands::Proposals { action } => cmd_proposals(action),
3457 Commands::ArtifactToState {
3458 frontier,
3459 packet,
3460 actor,
3461 apply_artifacts,
3462 json,
3463 } => cmd_artifact_to_state(&frontier, &packet, &actor, apply_artifacts, json),
3464 Commands::BridgeKit { action } => cmd_bridge_kit(action).await,
3465 Commands::SourceAdapter { action } => cmd_source_adapter(action).await,
3466 Commands::RuntimeAdapter { action } => cmd_runtime_adapter(action),
3467 Commands::Link { action } => cmd_link(action),
3468 Commands::Workbench {
3469 path,
3470 port,
3471 no_open,
3472 } => {
3473 if let Err(e) = crate::workbench::run(path, port, !no_open).await {
3474 fail(&e);
3475 }
3476 }
3477 Commands::Bridges { action } => cmd_bridges(action),
3478 Commands::Entity { action } => cmd_entity(action),
3479 Commands::Finding { command } => match command {
3480 FindingCommands::Add {
3481 frontier,
3482 assertion,
3483 r#type,
3484 source,
3485 source_type,
3486 author,
3487 confidence,
3488 evidence_type,
3489 entities,
3490 entities_reviewed,
3491 evidence_span,
3492 gap,
3493 negative_space,
3494 doi,
3495 pmid,
3496 year,
3497 journal,
3498 url,
3499 source_authors,
3500 conditions_text,
3501 species,
3502 in_vivo,
3503 in_vitro,
3504 human_data,
3505 clinical_trial,
3506 json,
3507 apply,
3508 } => {
3509 validate_enum_arg("--type", &r#type, bundle::VALID_ASSERTION_TYPES);
3510 validate_enum_arg(
3511 "--evidence-type",
3512 &evidence_type,
3513 bundle::VALID_EVIDENCE_TYPES,
3514 );
3515 validate_enum_arg(
3516 "--source-type",
3517 &source_type,
3518 bundle::VALID_PROVENANCE_SOURCE_TYPES,
3519 );
3520 let parsed_entities = parse_entities(&entities);
3521 let parsed_evidence_spans = parse_evidence_spans(&evidence_span);
3522 for (name, etype) in &parsed_entities {
3523 if !bundle::VALID_ENTITY_TYPES.contains(&etype.as_str()) {
3524 fail(&format!(
3525 "invalid entity type '{}' for '{}'. Valid: {}",
3526 etype,
3527 name,
3528 bundle::VALID_ENTITY_TYPES.join(", "),
3529 ));
3530 }
3531 }
3532 let parsed_source_authors = source_authors
3533 .map(|s| {
3534 s.split(';')
3535 .map(|a| a.trim().to_string())
3536 .filter(|a| !a.is_empty())
3537 .collect()
3538 })
3539 .unwrap_or_default();
3540 let parsed_species = species
3541 .map(|s| {
3542 s.split(';')
3543 .map(|a| a.trim().to_string())
3544 .filter(|a| !a.is_empty())
3545 .collect()
3546 })
3547 .unwrap_or_default();
3548 let report = state::add_finding(
3549 &frontier,
3550 state::FindingDraftOptions {
3551 text: assertion,
3552 assertion_type: r#type,
3553 source,
3554 source_type,
3555 author,
3556 confidence,
3557 evidence_type,
3558 entities: parsed_entities,
3559 doi,
3560 pmid,
3561 year,
3562 journal,
3563 url,
3564 source_authors: parsed_source_authors,
3565 conditions_text,
3566 species: parsed_species,
3567 in_vivo,
3568 in_vitro,
3569 human_data,
3570 clinical_trial,
3571 entities_reviewed,
3572 evidence_spans: parsed_evidence_spans,
3573 gap,
3574 negative_space,
3575 },
3576 apply,
3577 )
3578 .unwrap_or_else(|e| fail_return(&e));
3579 print_state_report(&report, json);
3580 }
3581 FindingCommands::Supersede {
3582 frontier,
3583 old_id,
3584 assertion,
3585 r#type,
3586 source,
3587 source_type,
3588 author,
3589 reason,
3590 confidence,
3591 evidence_type,
3592 entities,
3593 doi,
3594 pmid,
3595 year,
3596 journal,
3597 url,
3598 source_authors,
3599 conditions_text,
3600 species,
3601 in_vivo,
3602 in_vitro,
3603 human_data,
3604 clinical_trial,
3605 json,
3606 apply,
3607 } => {
3608 validate_enum_arg("--type", &r#type, bundle::VALID_ASSERTION_TYPES);
3609 validate_enum_arg(
3610 "--evidence-type",
3611 &evidence_type,
3612 bundle::VALID_EVIDENCE_TYPES,
3613 );
3614 validate_enum_arg(
3615 "--source-type",
3616 &source_type,
3617 bundle::VALID_PROVENANCE_SOURCE_TYPES,
3618 );
3619 let parsed_entities = parse_entities(&entities);
3620 for (name, etype) in &parsed_entities {
3621 if !bundle::VALID_ENTITY_TYPES.contains(&etype.as_str()) {
3622 fail(&format!(
3623 "invalid entity type '{}' for '{}'. Valid: {}",
3624 etype,
3625 name,
3626 bundle::VALID_ENTITY_TYPES.join(", "),
3627 ));
3628 }
3629 }
3630 let parsed_source_authors = source_authors
3631 .map(|s| {
3632 s.split(';')
3633 .map(|a| a.trim().to_string())
3634 .filter(|a| !a.is_empty())
3635 .collect()
3636 })
3637 .unwrap_or_default();
3638 let parsed_species = species
3639 .map(|s| {
3640 s.split(';')
3641 .map(|a| a.trim().to_string())
3642 .filter(|a| !a.is_empty())
3643 .collect()
3644 })
3645 .unwrap_or_default();
3646 let report = state::supersede_finding(
3647 &frontier,
3648 &old_id,
3649 &reason,
3650 state::FindingDraftOptions {
3651 text: assertion,
3652 assertion_type: r#type,
3653 source,
3654 source_type,
3655 author,
3656 confidence,
3657 evidence_type,
3658 entities: parsed_entities,
3659 doi,
3660 pmid,
3661 year,
3662 journal,
3663 url,
3664 source_authors: parsed_source_authors,
3665 conditions_text,
3666 species: parsed_species,
3667 in_vivo,
3668 in_vitro,
3669 human_data,
3670 clinical_trial,
3671 entities_reviewed: false,
3672 evidence_spans: Vec::new(),
3673 gap: false,
3674 negative_space: false,
3675 },
3676 apply,
3677 )
3678 .unwrap_or_else(|e| fail_return(&e));
3679 print_state_report(&report, json);
3680 }
3681 FindingCommands::CausalSet {
3682 frontier,
3683 finding_id,
3684 claim,
3685 grade,
3686 actor,
3687 reason,
3688 json,
3689 } => {
3690 if !bundle::VALID_CAUSAL_CLAIMS.contains(&claim.as_str()) {
3691 fail(&format!(
3692 "invalid --claim '{claim}'; valid: {:?}",
3693 bundle::VALID_CAUSAL_CLAIMS
3694 ));
3695 }
3696 if let Some(g) = grade.as_deref()
3697 && !bundle::VALID_CAUSAL_EVIDENCE_GRADES.contains(&g)
3698 {
3699 fail(&format!(
3700 "invalid --grade '{g}'; valid: {:?}",
3701 bundle::VALID_CAUSAL_EVIDENCE_GRADES
3702 ));
3703 }
3704 let report = state::set_causal(
3705 &frontier,
3706 &finding_id,
3707 &claim,
3708 grade.as_deref(),
3709 &actor,
3710 &reason,
3711 )
3712 .unwrap_or_else(|e| fail_return(&e));
3713 print_state_report(&report, json);
3714 }
3715 },
3716 Commands::Review {
3717 frontier,
3718 finding_id,
3719 status,
3720 reason,
3721 reviewer,
3722 apply,
3723 json,
3724 } => {
3725 let status = status.unwrap_or_else(|| fail_return("--status is required for review"));
3726 let reason = reason.unwrap_or_else(|| fail_return("--reason is required for review"));
3727 let report = state::review_finding(
3728 &frontier,
3729 &finding_id,
3730 state::ReviewOptions {
3731 status,
3732 reason,
3733 reviewer,
3734 },
3735 apply,
3736 )
3737 .unwrap_or_else(|e| fail_return(&e));
3738 print_state_report(&report, json);
3739 }
3740 Commands::Note {
3741 frontier,
3742 finding_id,
3743 text,
3744 author,
3745 apply,
3746 json,
3747 } => {
3748 let report = state::add_note(&frontier, &finding_id, &text, &author, apply)
3749 .unwrap_or_else(|e| fail_return(&e));
3750 print_state_report(&report, json);
3751 }
3752 Commands::Caveat {
3753 frontier,
3754 finding_id,
3755 text,
3756 author,
3757 apply,
3758 json,
3759 } => {
3760 let report = state::caveat_finding(&frontier, &finding_id, &text, &author, apply)
3761 .unwrap_or_else(|e| fail_return(&e));
3762 print_state_report(&report, json);
3763 }
3764 Commands::Revise {
3765 frontier,
3766 finding_id,
3767 confidence,
3768 reason,
3769 reviewer,
3770 apply,
3771 json,
3772 } => {
3773 let report = state::revise_confidence(
3774 &frontier,
3775 &finding_id,
3776 state::ReviseOptions {
3777 confidence,
3778 reason,
3779 reviewer,
3780 },
3781 apply,
3782 )
3783 .unwrap_or_else(|e| fail_return(&e));
3784 print_state_report(&report, json);
3785 }
3786 Commands::Reject {
3787 frontier,
3788 finding_id,
3789 reason,
3790 reviewer,
3791 apply,
3792 json,
3793 } => {
3794 let report = state::reject_finding(&frontier, &finding_id, &reviewer, &reason, apply)
3795 .unwrap_or_else(|e| fail_return(&e));
3796 print_state_report(&report, json);
3797 }
3798 Commands::History {
3799 frontier,
3800 finding_id,
3801 json,
3802 as_of,
3803 } => {
3804 let payload = state::history_as_of(&frontier, &finding_id, as_of.as_deref())
3805 .unwrap_or_else(|e| fail_return(&e));
3806 if json {
3807 println!(
3808 "{}",
3809 serde_json::to_string_pretty(&payload)
3810 .expect("failed to serialize history response")
3811 );
3812 } else {
3813 print_history(&payload);
3814 }
3815 }
3816 Commands::ImportEvents { source, into, json } => {
3817 let report =
3818 review::import_review_events(&source, &into).unwrap_or_else(|e| fail_return(&e));
3819 if json {
3820 println!(
3821 "{}",
3822 serde_json::to_string_pretty(&json!({
3823 "ok": true,
3824 "command": "import-events",
3825 "source": report.source,
3826 "target": into.display().to_string(),
3827 "summary": {
3828 "imported": report.imported,
3829 "new": report.new,
3830 "duplicate": report.duplicate,
3831 "canonical_events_imported": report.events_imported,
3832 "canonical_events_new": report.events_new,
3833 "canonical_events_duplicate": report.events_duplicate,
3834 }
3835 }))
3836 .expect("failed to serialize import-events response")
3837 );
3838 } else {
3839 println!("{report}");
3840 }
3841 }
3842 Commands::Retract {
3843 source,
3844 finding_id,
3845 reason,
3846 reviewer,
3847 apply,
3848 json,
3849 } => {
3850 let report = state::retract_finding(&source, &finding_id, &reviewer, &reason, apply)
3851 .unwrap_or_else(|e| fail_return(&e));
3852 print_state_report(&report, json);
3853 }
3854 Commands::LocatorRepair {
3855 frontier,
3856 atom_id,
3857 locator,
3858 reviewer,
3859 reason,
3860 apply,
3861 json,
3862 } => {
3863 cmd_locator_repair(
3864 &frontier,
3865 &atom_id,
3866 locator.as_deref(),
3867 &reviewer,
3868 &reason,
3869 apply,
3870 json,
3871 );
3872 }
3873 Commands::SourceFetch {
3874 identifier,
3875 cache,
3876 out,
3877 refresh,
3878 json,
3879 } => {
3880 cmd_source_fetch(&identifier, cache.as_deref(), out.as_deref(), refresh, json).await;
3881 }
3882 Commands::SpanRepair {
3883 frontier,
3884 finding_id,
3885 section,
3886 text,
3887 reviewer,
3888 reason,
3889 apply,
3890 json,
3891 } => {
3892 cmd_span_repair(
3893 &frontier,
3894 &finding_id,
3895 §ion,
3896 &text,
3897 &reviewer,
3898 &reason,
3899 apply,
3900 json,
3901 );
3902 }
3903 Commands::ProofAdd {
3904 frontier,
3905 target_finding,
3906 tool,
3907 tool_version,
3908 script_path,
3909 name,
3910 reviewer,
3911 reason,
3912 json,
3913 } => {
3914 cmd_proof_add(
3915 &frontier,
3916 &target_finding,
3917 &tool,
3918 &tool_version,
3919 &script_path,
3920 &name,
3921 &reviewer,
3922 &reason,
3923 json,
3924 );
3925 }
3926 Commands::EntityAdd {
3927 frontier,
3928 finding_id,
3929 entity,
3930 entity_type,
3931 reviewer,
3932 reason,
3933 apply,
3934 json,
3935 } => {
3936 let report = state::add_finding_entity(
3937 &frontier,
3938 &finding_id,
3939 &entity,
3940 &entity_type,
3941 &reviewer,
3942 &reason,
3943 apply,
3944 )
3945 .unwrap_or_else(|e| fail_return(&e));
3946 print_state_report(&report, json);
3947 }
3948 Commands::EntityResolve {
3949 frontier,
3950 finding_id,
3951 entity,
3952 source,
3953 id,
3954 confidence,
3955 matched_name,
3956 resolution_method,
3957 reviewer,
3958 reason,
3959 apply,
3960 json,
3961 } => {
3962 cmd_entity_resolve(
3963 &frontier,
3964 &finding_id,
3965 &entity,
3966 &source,
3967 &id,
3968 confidence,
3969 matched_name.as_deref(),
3970 &resolution_method,
3971 &reviewer,
3972 &reason,
3973 apply,
3974 json,
3975 );
3976 }
3977 Commands::Propagate {
3978 frontier,
3979 retract,
3980 reduce_confidence,
3981 to,
3982 output,
3983 } => cmd_propagate(&frontier, retract, reduce_confidence, to, output.as_deref()),
3984 Commands::Replicate {
3985 frontier,
3986 target,
3987 outcome,
3988 by,
3989 conditions,
3990 source_title,
3991 doi,
3992 pmid,
3993 sample_size,
3994 note,
3995 previous_attempt,
3996 no_cascade,
3997 json,
3998 } => cmd_replicate(
3999 &frontier,
4000 &target,
4001 &outcome,
4002 &by,
4003 &conditions,
4004 &source_title,
4005 doi.as_deref(),
4006 pmid.as_deref(),
4007 sample_size.as_deref(),
4008 ¬e,
4009 previous_attempt.as_deref(),
4010 no_cascade,
4011 json,
4012 ),
4013 Commands::Replications {
4014 frontier,
4015 target,
4016 json,
4017 } => cmd_replications(&frontier, target.as_deref(), json),
4018 Commands::DatasetAdd {
4019 frontier,
4020 name,
4021 version,
4022 content_hash,
4023 url,
4024 license,
4025 source_title,
4026 doi,
4027 row_count,
4028 json,
4029 } => cmd_dataset_add(
4030 &frontier,
4031 &name,
4032 version.as_deref(),
4033 &content_hash,
4034 url.as_deref(),
4035 license.as_deref(),
4036 &source_title,
4037 doi.as_deref(),
4038 row_count,
4039 json,
4040 ),
4041 Commands::Datasets { frontier, json } => cmd_datasets(&frontier, json),
4042 Commands::CodeAdd {
4043 frontier,
4044 language,
4045 repo_url,
4046 commit,
4047 path,
4048 content_hash,
4049 line_start,
4050 line_end,
4051 entry_point,
4052 json,
4053 } => cmd_code_add(
4054 &frontier,
4055 &language,
4056 repo_url.as_deref(),
4057 commit.as_deref(),
4058 &path,
4059 &content_hash,
4060 line_start,
4061 line_end,
4062 entry_point.as_deref(),
4063 json,
4064 ),
4065 Commands::CodeArtifacts { frontier, json } => cmd_code_artifacts(&frontier, json),
4066 Commands::ArtifactAdd {
4067 frontier,
4068 kind,
4069 name,
4070 file,
4071 url,
4072 content_hash,
4073 media_type,
4074 license,
4075 source_title,
4076 source_url,
4077 doi,
4078 target,
4079 metadata,
4080 access_tier,
4081 deposited_by,
4082 reason,
4083 json,
4084 } => cmd_artifact_add(
4085 &frontier,
4086 &kind,
4087 &name,
4088 file.as_deref(),
4089 url.as_deref(),
4090 content_hash.as_deref(),
4091 media_type.as_deref(),
4092 license.as_deref(),
4093 source_title.as_deref(),
4094 source_url.as_deref(),
4095 doi.as_deref(),
4096 target,
4097 metadata,
4098 &access_tier,
4099 &deposited_by,
4100 &reason,
4101 json,
4102 ),
4103 Commands::Artifacts {
4104 frontier,
4105 target,
4106 json,
4107 } => cmd_artifacts(&frontier, target.as_deref(), json),
4108 Commands::ArtifactAudit { frontier, json } => cmd_artifact_audit(&frontier, json),
4109 Commands::DecisionBrief { frontier, json } => cmd_decision_brief(&frontier, json),
4110 Commands::TrialSummary { frontier, json } => cmd_trial_summary(&frontier, json),
4111 Commands::SourceVerification { frontier, json } => cmd_source_verification(&frontier, json),
4112 Commands::SourceIngestPlan { frontier, json } => cmd_source_ingest_plan(&frontier, json),
4113 Commands::ClinicalTrialImport {
4114 frontier,
4115 nct_id,
4116 input_json,
4117 target,
4118 deposited_by,
4119 reason,
4120 license,
4121 json,
4122 } => {
4123 cmd_clinical_trial_import(
4124 &frontier,
4125 &nct_id,
4126 input_json.as_deref(),
4127 target,
4128 &deposited_by,
4129 &reason,
4130 &license,
4131 json,
4132 )
4133 .await
4134 }
4135 Commands::NegativeResultAdd {
4136 frontier,
4137 kind,
4138 deposited_by,
4139 reason,
4140 conditions_text,
4141 notes,
4142 target,
4143 endpoint,
4144 intervention,
4145 comparator,
4146 population,
4147 n_enrolled,
4148 power,
4149 ci_lower,
4150 ci_upper,
4151 effect_size_threshold,
4152 registry_id,
4153 reagent,
4154 observation,
4155 attempts,
4156 source_title,
4157 doi,
4158 url,
4159 year,
4160 json,
4161 } => cmd_negative_result_add(
4162 &frontier,
4163 &kind,
4164 &deposited_by,
4165 &reason,
4166 &conditions_text,
4167 ¬es,
4168 target,
4169 endpoint.as_deref(),
4170 intervention.as_deref(),
4171 comparator.as_deref(),
4172 population.as_deref(),
4173 n_enrolled,
4174 power,
4175 ci_lower,
4176 ci_upper,
4177 effect_size_threshold,
4178 registry_id.as_deref(),
4179 reagent.as_deref(),
4180 observation.as_deref(),
4181 attempts,
4182 &source_title,
4183 doi.as_deref(),
4184 url.as_deref(),
4185 year,
4186 json,
4187 ),
4188 Commands::NegativeResults {
4189 frontier,
4190 target,
4191 json,
4192 } => cmd_negative_results(&frontier, target.as_deref(), json),
4193 Commands::TrajectoryCreate {
4194 frontier,
4195 deposited_by,
4196 reason,
4197 target,
4198 notes,
4199 json,
4200 } => cmd_trajectory_create(&frontier, &deposited_by, &reason, target, ¬es, json),
4201 Commands::TrajectoryStep {
4202 frontier,
4203 trajectory_id,
4204 kind,
4205 description,
4206 actor,
4207 reason,
4208 reference,
4209 json,
4210 } => cmd_trajectory_step(
4211 &frontier,
4212 &trajectory_id,
4213 &kind,
4214 &description,
4215 &actor,
4216 &reason,
4217 reference,
4218 json,
4219 ),
4220 Commands::Trajectories {
4221 frontier,
4222 target,
4223 json,
4224 } => cmd_trajectories(&frontier, target.as_deref(), json),
4225 Commands::TierSet {
4226 frontier,
4227 object_type,
4228 object_id,
4229 tier,
4230 actor,
4231 reason,
4232 json,
4233 } => cmd_tier_set(
4234 &frontier,
4235 &object_type,
4236 &object_id,
4237 &tier,
4238 &actor,
4239 &reason,
4240 json,
4241 ),
4242 Commands::Predict {
4243 frontier,
4244 by,
4245 claim,
4246 criterion,
4247 resolves_by,
4248 confidence,
4249 target,
4250 outcome,
4251 conditions,
4252 json,
4253 } => cmd_predict(
4254 &frontier,
4255 &by,
4256 &claim,
4257 &criterion,
4258 resolves_by.as_deref(),
4259 confidence,
4260 &target,
4261 &outcome,
4262 &conditions,
4263 json,
4264 ),
4265 Commands::Resolve {
4266 frontier,
4267 prediction,
4268 outcome,
4269 matched,
4270 by,
4271 confidence,
4272 source_title,
4273 doi,
4274 json,
4275 } => cmd_resolve(
4276 &frontier,
4277 &prediction,
4278 &outcome,
4279 matched,
4280 &by,
4281 confidence,
4282 &source_title,
4283 doi.as_deref(),
4284 json,
4285 ),
4286 Commands::Predictions {
4287 frontier,
4288 by,
4289 open,
4290 json,
4291 } => cmd_predictions(&frontier, by.as_deref(), open, json),
4292 Commands::Calibration {
4293 frontier,
4294 actor,
4295 json,
4296 } => cmd_calibration(&frontier, actor.as_deref(), json),
4297 Commands::PredictionsExpire {
4298 frontier,
4299 now,
4300 dry_run,
4301 json,
4302 } => cmd_predictions_expire(&frontier, now.as_deref(), dry_run, json),
4303 Commands::Consensus {
4304 frontier,
4305 target,
4306 weighting,
4307 causal_claim,
4308 causal_grade_min,
4309 json,
4310 } => cmd_consensus(
4311 &frontier,
4312 &target,
4313 &weighting,
4314 causal_claim.as_deref(),
4315 causal_grade_min.as_deref(),
4316 json,
4317 ),
4318
4319 Commands::Ingest {
4322 path,
4323 frontier,
4324 backend,
4325 actor,
4326 dry_run,
4327 json,
4328 } => {
4329 cmd_ingest(
4330 &path,
4331 &frontier,
4332 backend.as_deref(),
4333 actor.as_deref(),
4334 dry_run,
4335 json,
4336 )
4337 .await
4338 }
4339
4340 Commands::Propose {
4341 frontier,
4342 finding_id,
4343 status,
4344 reason,
4345 reviewer,
4346 apply,
4347 json,
4348 } => {
4349 let options = state::ReviewOptions {
4352 status: status.clone(),
4353 reason: reason.clone(),
4354 reviewer: reviewer.clone(),
4355 };
4356 let report = state::review_finding(&frontier, &finding_id, options, apply)
4357 .unwrap_or_else(|e| fail_return(&e));
4358 print_state_report(&report, json);
4359 }
4360
4361 Commands::Accept {
4362 frontier,
4363 proposal_id,
4364 reviewer,
4365 reason,
4366 json,
4367 } => {
4368 let event_id = proposals::accept_at_path(&frontier, &proposal_id, &reviewer, &reason)
4370 .unwrap_or_else(|e| fail_return(&e));
4371 let payload = json!({
4372 "ok": true,
4373 "command": "accept",
4374 "frontier": frontier.display().to_string(),
4375 "proposal_id": proposal_id,
4376 "reviewer": reviewer,
4377 "applied_event_id": event_id,
4378 });
4379 if json {
4380 println!(
4381 "{}",
4382 serde_json::to_string_pretty(&payload)
4383 .expect("failed to serialize accept response")
4384 );
4385 } else {
4386 println!(
4387 "{} accepted and applied proposal {}",
4388 style::ok("ok"),
4389 proposal_id
4390 );
4391 println!(" event: {}", event_id);
4392 }
4393 }
4394
4395 Commands::Attest {
4396 frontier,
4397 event,
4398 attester,
4399 scope_note,
4400 proof_id,
4401 signature,
4402 key,
4403 json,
4404 } => {
4405 if let Some(target_event_id) = event {
4409 let attester_id = attester.unwrap_or_else(|| {
4410 fail_return("attest: --attester is required in per-event mode")
4411 });
4412 let scope = scope_note.unwrap_or_else(|| {
4413 fail_return("attest: --scope-note is required in per-event mode")
4414 });
4415 let attestation_event_id = state::record_attestation(
4416 &frontier,
4417 &target_event_id,
4418 &attester_id,
4419 &scope,
4420 proof_id.as_deref(),
4421 signature.as_deref(),
4422 )
4423 .unwrap_or_else(|e| fail_return(&e));
4424 if json {
4425 let payload = json!({
4426 "ok": true,
4427 "command": "attest.event",
4428 "frontier": frontier.display().to_string(),
4429 "target_event_id": target_event_id,
4430 "attestation_event_id": attestation_event_id,
4431 "attester_id": attester_id,
4432 });
4433 println!(
4434 "{}",
4435 serde_json::to_string_pretty(&payload)
4436 .expect("failed to serialize attest.event response")
4437 );
4438 } else {
4439 println!(
4440 "{} attested {} by {} ({})",
4441 style::ok("ok"),
4442 target_event_id,
4443 attester_id,
4444 attestation_event_id
4445 );
4446 }
4447 return;
4448 }
4449 let key_path = key.unwrap_or_else(|| {
4451 fail_return(
4452 "attest: --key is required in frontier-wide mode (or pass --event for per-event mode)",
4453 )
4454 });
4455 let count =
4456 sign::sign_frontier(&frontier, &key_path).unwrap_or_else(|e| fail_return(&e));
4457 let payload = json!({
4458 "ok": true,
4459 "command": "attest",
4460 "frontier": frontier.display().to_string(),
4461 "private_key": key_path.display().to_string(),
4462 "signed": count,
4463 });
4464 if json {
4465 println!(
4466 "{}",
4467 serde_json::to_string_pretty(&payload)
4468 .expect("failed to serialize attest response")
4469 );
4470 } else {
4471 println!(
4472 "{} {count} findings in {}",
4473 style::ok("attested"),
4474 frontier.display()
4475 );
4476 }
4477 }
4478
4479 Commands::Lineage {
4480 frontier,
4481 finding_id,
4482 as_of,
4483 json,
4484 } => {
4485 let payload = state::history_as_of(&frontier, &finding_id, as_of.as_deref())
4487 .unwrap_or_else(|e| fail_return(&e));
4488 if json {
4489 println!(
4490 "{}",
4491 serde_json::to_string_pretty(&payload)
4492 .expect("failed to serialize lineage response")
4493 );
4494 } else {
4495 print_history(&payload);
4496 }
4497 }
4498
4499 Commands::Carina { action } => cmd_carina(action),
4500
4501 Commands::Atlas { action } => cmd_atlas(action).await,
4502
4503 Commands::Constellation { action } => cmd_constellation(action).await,
4504 }
4505}
4506
4507async fn cmd_atlas(action: AtlasAction) {
4512 match action {
4513 AtlasAction::Init {
4514 name,
4515 frontiers,
4516 domain,
4517 scope_note,
4518 atlases_root,
4519 json,
4520 } => match ATLAS_INIT_HANDLER.get() {
4521 Some(handler) => {
4522 handler(atlases_root, name, domain, scope_note, frontiers, json).await;
4523 }
4524 None => fail("vela atlas init: handler not registered (built without vela-atlas)"),
4525 },
4526 AtlasAction::Materialize {
4527 name,
4528 atlases_root,
4529 json,
4530 } => match ATLAS_MATERIALIZE_HANDLER.get() {
4531 Some(handler) => handler(atlases_root, name, json).await,
4532 None => fail("vela atlas materialize: handler not registered"),
4533 },
4534 AtlasAction::Serve {
4535 name,
4536 atlases_root,
4537 port,
4538 no_open,
4539 } => {
4540 match ATLAS_SERVE_HANDLER.get() {
4544 Some(handler) => handler(atlases_root, name, port, !no_open).await,
4545 None => fail("vela atlas serve: handler not registered"),
4546 }
4547 }
4548 AtlasAction::Update {
4549 name,
4550 add_frontier,
4551 remove_vfr_id,
4552 atlases_root,
4553 json,
4554 } => match ATLAS_UPDATE_HANDLER.get() {
4555 Some(handler) => {
4556 handler(atlases_root, name, add_frontier, remove_vfr_id, json).await;
4557 }
4558 None => fail("vela atlas update: handler not registered"),
4559 },
4560 }
4561}
4562
4563async fn cmd_constellation(action: ConstellationAction) {
4567 match action {
4568 ConstellationAction::Init {
4569 name,
4570 atlases,
4571 scope_note,
4572 constellations_root,
4573 json,
4574 } => match CONSTELLATION_INIT_HANDLER.get() {
4575 Some(handler) => {
4576 handler(constellations_root, name, scope_note, atlases, json).await;
4577 }
4578 None => fail(
4579 "vela constellation init: handler not registered (built without vela-constellation)",
4580 ),
4581 },
4582 ConstellationAction::Materialize {
4583 name,
4584 constellations_root,
4585 json,
4586 } => match CONSTELLATION_MATERIALIZE_HANDLER.get() {
4587 Some(handler) => handler(constellations_root, name, json).await,
4588 None => fail("vela constellation materialize: handler not registered"),
4589 },
4590 ConstellationAction::Serve {
4591 name,
4592 constellations_root,
4593 port,
4594 no_open,
4595 } => match CONSTELLATION_SERVE_HANDLER.get() {
4596 Some(handler) => handler(constellations_root, name, port, !no_open).await,
4597 None => fail("vela constellation serve: handler not registered"),
4598 },
4599 }
4600}
4601
4602fn cmd_carina(action: CarinaAction) {
4605 match action {
4606 CarinaAction::List { json } => {
4607 if json {
4608 println!(
4609 "{}",
4610 serde_json::to_string_pretty(&json!({
4611 "ok": true,
4612 "command": "carina.list",
4613 "primitives": carina_validate::PRIMITIVE_NAMES,
4614 }))
4615 .expect("failed to serialize carina.list")
4616 );
4617 } else {
4618 println!("Carina primitives bundled with this build:");
4619 for name in carina_validate::PRIMITIVE_NAMES {
4620 println!(" · {name}");
4621 }
4622 }
4623 }
4624 CarinaAction::Schema { primitive } => match carina_validate::schema_text(&primitive) {
4625 Some(text) => print!("{text}"),
4626 None => fail(&format!("carina: unknown primitive '{primitive}'")),
4627 },
4628 CarinaAction::Validate {
4629 path,
4630 primitive,
4631 json,
4632 } => {
4633 let text = std::fs::read_to_string(&path)
4634 .unwrap_or_else(|e| fail_return(&format!("read {}: {e}", path.display())));
4635 let value: Value = serde_json::from_str(&text)
4636 .unwrap_or_else(|e| fail_return(&format!("parse {}: {e}", path.display())));
4637 type CarinaValidateOutcome = Result<Option<&'static str>, Vec<String>>;
4643 let mut report: Vec<(String, CarinaValidateOutcome)> = Vec::new();
4644 if value.get("primitives").and_then(Value::as_object).is_some() && primitive.is_none() {
4645 let primitives = value.get("primitives").and_then(Value::as_object).unwrap();
4646 for (key, child) in primitives {
4647 let outcome = carina_validate::validate(key, child)
4648 .map(|()| carina_validate::detect_primitive(child));
4649 report.push((key.clone(), outcome));
4650 }
4651 } else {
4652 let outcome = match primitive.as_deref() {
4653 Some(name) => carina_validate::validate(name, &value).map(|()| {
4654 carina_validate::PRIMITIVE_NAMES
4655 .iter()
4656 .copied()
4657 .find(|p| *p == name)
4658 }),
4659 None => carina_validate::validate_auto(&value).map(Some),
4660 };
4661 let label = primitive.clone().unwrap_or_else(|| "<auto>".to_string());
4662 report.push((label, outcome));
4663 }
4664
4665 let total = report.len();
4666 let pass = report.iter().filter(|(_, r)| r.is_ok()).count();
4667 let fail = total - pass;
4668
4669 if json {
4670 let entries: Vec<Value> = report
4671 .iter()
4672 .map(|(label, r)| match r {
4673 Ok(name) => json!({
4674 "key": label,
4675 "primitive": name,
4676 "ok": true,
4677 }),
4678 Err(errs) => json!({
4679 "key": label,
4680 "ok": false,
4681 "errors": errs,
4682 }),
4683 })
4684 .collect();
4685 println!(
4686 "{}",
4687 serde_json::to_string_pretty(&json!({
4688 "ok": fail == 0,
4689 "command": "carina.validate",
4690 "file": path.display().to_string(),
4691 "total": total,
4692 "passed": pass,
4693 "failed": fail,
4694 "entries": entries,
4695 }))
4696 .expect("failed to serialize carina.validate")
4697 );
4698 } else {
4699 for (label, r) in &report {
4700 match r {
4701 Ok(Some(name)) => println!(" {} {label} (as {name})", style::ok("ok")),
4702 Ok(None) => println!(" {} {label}", style::ok("ok")),
4703 Err(errs) => {
4704 println!(" {} {label}", style::lost("fail"));
4705 for e in errs {
4706 println!(" {e}");
4707 }
4708 }
4709 }
4710 }
4711 println!();
4712 if fail == 0 {
4713 println!("{} {pass}/{total} valid", style::ok("carina.validate"));
4714 } else {
4715 println!(
4716 "{} {pass}/{total} valid · {fail} failed",
4717 style::lost("carina.validate")
4718 );
4719 }
4720 }
4721
4722 if fail > 0 {
4723 std::process::exit(1);
4724 }
4725 }
4726 }
4727}
4728
/// Deposit a formal-proof script as a content-addressed artifact attached to
/// an existing finding (`vf_*`).
///
/// The script file is hashed (SHA-256) to build both its storage locator and
/// a deterministic Carina Proof id (`vpf_*`); the constructed Proof primitive
/// is validated against the bundled `proof.schema.json` before the artifact
/// is appended to the frontier via `state::add_artifact`.
///
/// `frontier` — frontier bundle path; `target_finding` — must be a `vf_*` id;
/// `tool`/`tool_version` — proof assistant used; `script_path` — local proof
/// script; `name` — artifact display name; `reviewer`/`reason` — recorded on
/// the state event; `json_output` — machine-readable vs human output.
#[allow(clippy::too_many_arguments)]
fn cmd_proof_add(
    frontier: &Path,
    target_finding: &str,
    tool: &str,
    tool_version: &str,
    script_path: &Path,
    name: &str,
    reviewer: &str,
    reason: &str,
    json_output: bool,
) {
    use std::collections::BTreeMap;

    // Validate inputs before touching the filesystem.
    if !target_finding.starts_with("vf_") {
        fail(&format!(
            "--target-finding must be a vf_* finding id; got `{target_finding}`"
        ));
    }
    // Accepted tool names mirror the enum in proof.schema.json.
    let valid_tools = [
        "lean4", "coq", "isabelle", "agda", "metamath", "rocq", "other",
    ];
    if !valid_tools.contains(&tool) {
        fail(&format!(
            "--tool `{tool}` not in {valid_tools:?}; see embedded/carina-schemas/proof.schema.json"
        ));
    }

    // Content-address the script: its SHA-256 digest becomes the locator.
    let script_bytes = std::fs::read(script_path)
        .unwrap_or_else(|e| fail_return(&format!("read {}: {e}", script_path.display())));
    let script_hash_hex = hex::encode(Sha256::digest(&script_bytes));
    let script_locator = format!("sha256:{script_hash_hex}");

    // Deterministic proof id: hash of locator|tool|version|target, truncated
    // to 16 hex chars — re-depositing the same script yields the same vpf id.
    let vpf_preimage = format!("{script_locator}|{tool}|{tool_version}|{target_finding}");
    let vpf_id = format!(
        "vpf_{}",
        &hex::encode(Sha256::digest(vpf_preimage.as_bytes()))[..16]
    );

    let verified_at = chrono::Utc::now().to_rfc3339();
    // NOTE(review): `verifier_output_hash` is a 64-zero placeholder — the
    // verifier's actual output is not hashed here; presumably filled in by a
    // later verification step. Confirm before treating it as meaningful.
    let proof_primitive = json!({
        "schema": "carina.proof.v0.3",
        "id": vpf_id,
        "tool": tool,
        "tool_version": tool_version,
        "script_locator": script_locator,
        "verifier_output_hash": format!("sha256:{}", "0".repeat(64)),
        "verified_at": verified_at,
        "target_finding_id": target_finding,
    });
    // Sanity-check the primitive we just built against the bundled schema so
    // a schema drift fails loudly at deposit time.
    if let Err(errs) = carina_validate::validate("proof", &proof_primitive) {
        fail(&format!(
            "constructed Proof primitive does not validate against proof.schema.json:\n - {}",
            errs.join("\n - ")
        ));
    }

    // Carry the proof details on the artifact as carina_* metadata keys so
    // they survive round-trips through the artifact store.
    let mut metadata: BTreeMap<String, Value> = BTreeMap::new();
    metadata.insert(
        "carina_kind".to_string(),
        Value::String("proof_script".to_string()),
    );
    metadata.insert(
        "carina_proof_tool".to_string(),
        Value::String(tool.to_string()),
    );
    metadata.insert(
        "carina_proof_tool_version".to_string(),
        Value::String(tool_version.to_string()),
    );
    metadata.insert("carina_proof_id".to_string(), Value::String(vpf_id.clone()));
    metadata.insert(
        "carina_proof_target_finding".to_string(),
        Value::String(target_finding.to_string()),
    );

    // Best-effort media type per tool; "other" gets none.
    let media_type = match tool {
        "lean4" | "rocq" => Some("text/x-lean".to_string()),
        "coq" => Some("text/x-coq".to_string()),
        "isabelle" => Some("text/x-isabelle".to_string()),
        "agda" => Some("text/x-agda".to_string()),
        "metamath" => Some("text/x-metamath".to_string()),
        _ => None,
    };

    // Minimal provenance: a proof script is treated as a code-repository
    // source with a synthesized title; most bibliographic fields are empty.
    let provenance = crate::bundle::Provenance {
        source_type: "code_repository".to_string(),
        doi: None,
        pmid: None,
        pmc: None,
        openalex_id: None,
        url: None,
        title: format!("Proof script for {target_finding} ({tool} {tool_version})"),
        authors: Vec::new(),
        year: None,
        journal: None,
        license: Some("Apache-2.0 OR MIT".to_string()),
        publisher: None,
        funders: Vec::new(),
        extraction: crate::bundle::Extraction::default(),
        review: None,
        citation_count: None,
    };

    // Artifact id is content-addressed from kind/name/hash/locator.
    let artifact_id = crate::bundle::Artifact::content_address(
        "source_file",
        name,
        &format!("sha256:{script_hash_hex}"),
        None,
        Some(&script_path.display().to_string()),
    );

    // "pointer" storage mode: the frontier records the locator + hash, not
    // the script bytes themselves.
    let artifact = crate::bundle::Artifact {
        id: artifact_id.clone(),
        kind: "source_file".to_string(),
        name: name.to_string(),
        content_hash: format!("sha256:{script_hash_hex}"),
        size_bytes: Some(script_bytes.len() as u64),
        media_type,
        storage_mode: "pointer".to_string(),
        locator: Some(script_path.display().to_string()),
        source_url: None,
        license: Some("Apache-2.0 OR MIT".to_string()),
        target_findings: vec![target_finding.to_string()],
        source_id: None,
        provenance,
        metadata,
        review_state: None,
        retracted: false,
        access_tier: crate::access_tier::AccessTier::default(),
        created: verified_at.clone(),
    };

    // Record the artifact through the event-sourced state layer.
    let report = state::add_artifact(frontier, artifact, reviewer, reason)
        .unwrap_or_else(|e| fail_return(&e));

    let payload = json!({
        "ok": true,
        "command": "proof-add",
        "frontier": frontier.display().to_string(),
        "target_finding": target_finding,
        "tool": tool,
        "tool_version": tool_version,
        "script_path": script_path.display().to_string(),
        "script_locator": script_locator,
        "size_bytes": script_bytes.len(),
        "vpf_id": vpf_id,
        "va_id": artifact_id,
        "applied_event_id": report.applied_event_id,
        "verified_at": verified_at,
        "reviewer": reviewer,
    });

    if json_output {
        println!(
            "{}",
            serde_json::to_string_pretty(&payload).expect("failed to serialize proof-add response")
        );
    } else {
        println!(
            "{} proof artifact deposited for {target_finding}",
            style::ok("ok")
        );
        println!(" vpf_id: {vpf_id}");
        println!(" va_id: {artifact_id}");
        println!(" locator: {script_locator}");
        println!(" tool: {tool} {tool_version}");
        if let Some(eid) = &report.applied_event_id {
            println!(" event: {eid}");
        }
    }
}
4927
4928fn cmd_consensus(
4931 frontier: &Path,
4932 target: &str,
4933 weighting_str: &str,
4934 causal_claim: Option<&str>,
4935 causal_grade_min: Option<&str>,
4936 json: bool,
4937) {
4938 use crate::bundle::{CausalClaim, CausalEvidenceGrade};
4939
4940 if !target.starts_with("vf_") {
4941 fail(&format!("target `{target}` is not a vf_ finding id"));
4942 }
4943 let scheme =
4944 crate::aggregate::WeightingScheme::parse(weighting_str).unwrap_or_else(|e| fail_return(&e));
4945
4946 let parsed_claim = match causal_claim {
4947 None => None,
4948 Some("correlation") => Some(CausalClaim::Correlation),
4949 Some("mediation") => Some(CausalClaim::Mediation),
4950 Some("intervention") => Some(CausalClaim::Intervention),
4951 Some(other) => fail_return(&format!(
4952 "invalid --causal-claim '{other}'; valid: correlation | mediation | intervention"
4953 )),
4954 };
4955 let parsed_grade = match causal_grade_min {
4956 None => None,
4957 Some("theoretical") => Some(CausalEvidenceGrade::Theoretical),
4958 Some("observational") => Some(CausalEvidenceGrade::Observational),
4959 Some("quasi_experimental") => Some(CausalEvidenceGrade::QuasiExperimental),
4960 Some("rct") => Some(CausalEvidenceGrade::Rct),
4961 Some(other) => fail_return(&format!(
4962 "invalid --causal-grade-min '{other}'; valid: theoretical | observational | quasi_experimental | rct"
4963 )),
4964 };
4965 let filter = crate::aggregate::AggregateFilter {
4966 causal_claim: parsed_claim,
4967 causal_grade_min: parsed_grade,
4968 };
4969 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
4970
4971 let result = crate::aggregate::consensus_for_with_filter(&project, target, scheme, &filter)
4972 .unwrap_or_else(|| fail_return(&format!("target `{target}` not in frontier")));
4973
4974 if json {
4975 println!(
4976 "{}",
4977 serde_json::to_string_pretty(&result).expect("serialize consensus")
4978 );
4979 return;
4980 }
4981
4982 println!();
4983 println!(
4984 " {}",
4985 format!(
4986 "VELA · CONSENSUS · {} ({})",
4987 result.target, result.weighting
4988 )
4989 .to_uppercase()
4990 .dimmed()
4991 );
4992 println!(" {}", style::tick_row(60));
4993 println!(
4994 " target: {}",
4995 truncate(&result.target_assertion, 80)
4996 );
4997 println!(" similar findings: {}", result.n_findings);
4998 println!(
4999 " consensus: {:.3} ({:.3} – {:.3} 95% credible)",
5000 result.consensus_confidence, result.credible_interval_lo, result.credible_interval_hi
5001 );
5002 println!();
5003 println!(" constituents (sorted by weight):");
5004 let mut sorted = result.constituents.clone();
5005 sorted.sort_by(|a, b| {
5006 b.weight
5007 .partial_cmp(&a.weight)
5008 .unwrap_or(std::cmp::Ordering::Equal)
5009 });
5010 for c in sorted.iter().take(10) {
5011 let repls = if c.n_replications > 0 {
5012 format!(" ({}r {}f)", c.n_replicated, c.n_failed_replications)
5013 } else {
5014 String::new()
5015 };
5016 println!(
5017 " · w={:.2} raw={:.2} adj={:.2}{}",
5018 c.weight, c.raw_score, c.adjusted_score, repls
5019 );
5020 println!(" {}", truncate(&c.assertion_text, 88));
5021 }
5022 if result.constituents.len() > 10 {
5023 println!(" ... ({} more)", result.constituents.len() - 10);
5024 }
5025}
5026
5027fn parse_expected_outcome(s: &str) -> Result<crate::bundle::ExpectedOutcome, String> {
5033 let trimmed = s.trim();
5034 if trimmed.eq_ignore_ascii_case("affirmed") {
5035 return Ok(crate::bundle::ExpectedOutcome::Affirmed);
5036 }
5037 if trimmed.eq_ignore_ascii_case("falsified") {
5038 return Ok(crate::bundle::ExpectedOutcome::Falsified);
5039 }
5040 if let Some(rest) = trimmed.strip_prefix("cat:") {
5041 return Ok(crate::bundle::ExpectedOutcome::Categorical {
5042 value: rest.to_string(),
5043 });
5044 }
5045 if let Some(rest) = trimmed.strip_prefix("quant:") {
5046 let (vt, units) = rest.split_once(' ').unwrap_or((rest, ""));
5047 let (val_s, tol_s) = vt
5048 .split_once('±')
5049 .or_else(|| vt.split_once("+/-"))
5050 .ok_or_else(|| format!("expected `quant:VALUE±TOL UNITS`, got `quant:{rest}`"))?;
5051 let value: f64 = val_s
5052 .parse()
5053 .map_err(|e| format!("bad quant value `{val_s}`: {e}"))?;
5054 let tolerance: f64 = tol_s
5055 .parse()
5056 .map_err(|e| format!("bad quant tolerance `{tol_s}`: {e}"))?;
5057 return Ok(crate::bundle::ExpectedOutcome::Quantitative {
5058 value,
5059 tolerance,
5060 units: units.to_string(),
5061 });
5062 }
5063 Err(format!(
5064 "unknown outcome `{s}`; expected one of: affirmed | falsified | quant:V±T units | cat:label"
5065 ))
5066}
5067
5068#[allow(clippy::too_many_arguments)]
5070fn cmd_predict(
5071 frontier: &Path,
5072 by: &str,
5073 claim: &str,
5074 criterion: &str,
5075 resolves_by: Option<&str>,
5076 confidence: f64,
5077 target_csv: &str,
5078 outcome: &str,
5079 conditions_text: &str,
5080 json: bool,
5081) {
5082 if !(0.0..=1.0).contains(&confidence) {
5083 fail(&format!("confidence must be in [0, 1]; got {confidence}"));
5084 }
5085 let expected = parse_expected_outcome(outcome).unwrap_or_else(|e| fail_return(&e));
5086
5087 let mut project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
5088
5089 let targets: Vec<String> = target_csv
5090 .split(',')
5091 .map(|s| s.trim().to_string())
5092 .filter(|s| !s.is_empty())
5093 .collect();
5094 for t in &targets {
5095 if !t.starts_with("vf_") {
5096 fail(&format!("target `{t}` is not a vf_ id"));
5097 }
5098 if !project.findings.iter().any(|f| f.id == *t) {
5099 fail(&format!("target `{t}` not present in frontier"));
5100 }
5101 }
5102
5103 let lower = conditions_text.to_lowercase();
5104 let conditions = crate::bundle::Conditions {
5105 text: conditions_text.to_string(),
5106 species_verified: Vec::new(),
5107 species_unverified: Vec::new(),
5108 in_vitro: lower.contains("in vitro"),
5109 in_vivo: lower.contains("in vivo"),
5110 human_data: lower.contains("human") || lower.contains("clinical"),
5111 clinical_trial: lower.contains("clinical trial") || lower.contains("phase "),
5112 concentration_range: None,
5113 duration: None,
5114 age_group: None,
5115 cell_type: None,
5116 };
5117
5118 let prediction = crate::bundle::Prediction::new(
5119 claim.to_string(),
5120 targets,
5121 None,
5122 resolves_by.map(|s| s.to_string()),
5123 criterion.to_string(),
5124 expected,
5125 by.to_string(),
5126 confidence,
5127 conditions,
5128 );
5129
5130 if project.predictions.iter().any(|p| p.id == prediction.id) {
5131 if json {
5132 println!(
5133 "{}",
5134 serde_json::to_string_pretty(&json!({
5135 "ok": false,
5136 "command": "predict",
5137 "reason": "prediction_already_exists",
5138 "id": prediction.id,
5139 }))
5140 .expect("serialize")
5141 );
5142 } else {
5143 println!(
5144 "{} prediction {} already exists in {}; skipping.",
5145 style::warn("predict"),
5146 prediction.id,
5147 frontier.display()
5148 );
5149 }
5150 return;
5151 }
5152
5153 let new_id = prediction.id.clone();
5154 project.predictions.push(prediction);
5155 repo::save_to_path(frontier, &project).unwrap_or_else(|e| fail_return(&e));
5156
5157 if json {
5158 println!(
5159 "{}",
5160 serde_json::to_string_pretty(&json!({
5161 "ok": true,
5162 "command": "predict",
5163 "id": new_id,
5164 "made_by": by,
5165 "confidence": confidence,
5166 "frontier": frontier.display().to_string(),
5167 }))
5168 .expect("serialize predict result")
5169 );
5170 } else {
5171 println!();
5172 println!(
5173 " {}",
5174 format!("VELA · PREDICT · {}", new_id)
5175 .to_uppercase()
5176 .dimmed()
5177 );
5178 println!(" {}", style::tick_row(60));
5179 println!(" by: {by}");
5180 println!(" confidence: {confidence:.3}");
5181 if let Some(d) = resolves_by {
5182 println!(" resolves by: {d}");
5183 }
5184 println!(" outcome: {outcome}");
5185 println!(" claim: {}", truncate(claim, 88));
5186 println!();
5187 println!(
5188 " {} prediction recorded in {}",
5189 style::ok("ok"),
5190 frontier.display()
5191 );
5192 }
5193}
5194
/// Record the resolution of an existing prediction (`vpred_*`).
///
/// Validates the prediction id and `confidence` ∈ [0, 1], checks the
/// prediction exists in the frontier, wraps `source_title` (when non-empty)
/// into a minimal Evidence record, appends a `Resolution`, and saves. A
/// resolution whose derived id already exists is reported and skipped, so
/// re-runs are idempotent.
#[allow(clippy::too_many_arguments)]
fn cmd_resolve(
    frontier: &Path,
    prediction_id: &str,
    actual_outcome: &str,
    matched: bool,
    by: &str,
    confidence: f64,
    source_title: &str,
    doi: Option<&str>,
    json: bool,
) {
    // Input validation before any I/O.
    if !prediction_id.starts_with("vpred_") {
        fail(&format!("prediction `{prediction_id}` is not a vpred_ id"));
    }
    if !(0.0..=1.0).contains(&confidence) {
        fail(&format!("confidence must be in [0, 1]; got {confidence}"));
    }
    let mut project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
    if !project.predictions.iter().any(|p| p.id == prediction_id) {
        fail(&format!(
            "prediction `{prediction_id}` not present in frontier"
        ));
    }

    // Minimal evidence record for the resolution; the source title (when
    // given) becomes a single evidence span, everything else is left unset.
    let evidence = crate::bundle::Evidence {
        evidence_type: "experimental".to_string(),
        model_system: String::new(),
        species: None,
        method: "prediction_resolution".to_string(),
        sample_size: None,
        effect_size: None,
        p_value: None,
        replicated: false,
        replication_count: None,
        evidence_spans: if source_title.is_empty() {
            Vec::new()
        } else {
            vec![serde_json::json!({"text": source_title})]
        },
    };

    // NOTE(review): `--doi` is accepted but deliberately discarded here —
    // presumably Evidence/Resolution have no DOI field yet. Confirm before
    // relying on the flag having any effect.
    let _ = doi; let resolution = crate::bundle::Resolution::new(
        prediction_id.to_string(),
        actual_outcome.to_string(),
        matched,
        by.to_string(),
        evidence,
        confidence,
    );

    // Resolution ids are derived from content, so a duplicate id means this
    // exact resolution was already recorded.
    if project.resolutions.iter().any(|r| r.id == resolution.id) {
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "ok": false,
                    "command": "resolve",
                    "reason": "resolution_already_exists",
                    "id": resolution.id,
                }))
                .expect("serialize")
            );
        } else {
            println!(
                "{} resolution {} already exists in {}; skipping.",
                style::warn("resolve"),
                resolution.id,
                frontier.display()
            );
        }
        return;
    }

    let new_id = resolution.id.clone();
    project.resolutions.push(resolution);
    repo::save_to_path(frontier, &project).unwrap_or_else(|e| fail_return(&e));

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "resolve",
                "id": new_id,
                "prediction": prediction_id,
                "matched": matched,
                "frontier": frontier.display().to_string(),
            }))
            .expect("serialize resolve result")
        );
    } else {
        println!();
        println!(
            " {}",
            format!("VELA · RESOLVE · {}", new_id)
                .to_uppercase()
                .dimmed()
        );
        println!(" {}", style::tick_row(60));
        println!(" prediction: {prediction_id}");
        println!(
            " matched: {}",
            if matched {
                style::ok("yes")
            } else {
                style::lost("no")
            }
        );
        println!(" by: {by}");
        println!(" outcome: {}", truncate(actual_outcome, 80));
        println!();
        println!(
            " {} resolution recorded in {}",
            style::ok("ok"),
            frontier.display()
        );
    }
}
5321
5322fn cmd_predictions(frontier: &Path, by: Option<&str>, open: bool, json: bool) {
5324 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
5325
5326 let resolved_ids: std::collections::HashSet<&str> = project
5327 .resolutions
5328 .iter()
5329 .map(|r| r.prediction_id.as_str())
5330 .collect();
5331
5332 let mut filtered: Vec<&crate::bundle::Prediction> = project
5333 .predictions
5334 .iter()
5335 .filter(|p| by.is_none_or(|b| p.made_by == b))
5336 .filter(|p| !open || !resolved_ids.contains(p.id.as_str()))
5337 .collect();
5338 filtered.sort_by(|a, b| {
5339 a.resolves_by
5340 .as_deref()
5341 .unwrap_or("9999")
5342 .cmp(b.resolves_by.as_deref().unwrap_or("9999"))
5343 });
5344
5345 if json {
5346 let payload: Vec<serde_json::Value> = filtered
5347 .iter()
5348 .map(|p| {
5349 json!({
5350 "id": p.id,
5351 "claim_text": p.claim_text,
5352 "made_by": p.made_by,
5353 "confidence": p.confidence,
5354 "predicted_at": p.predicted_at,
5355 "resolves_by": p.resolves_by,
5356 "expected_outcome": p.expected_outcome,
5357 "resolved": resolved_ids.contains(p.id.as_str()),
5358 })
5359 })
5360 .collect();
5361 println!(
5362 "{}",
5363 serde_json::to_string_pretty(&json!({
5364 "ok": true,
5365 "command": "predictions",
5366 "frontier": frontier.display().to_string(),
5367 "count": payload.len(),
5368 "predictions": payload,
5369 }))
5370 .expect("serialize predictions")
5371 );
5372 return;
5373 }
5374
5375 println!();
5376 println!(
5377 " {}",
5378 format!("VELA · PREDICTIONS · {}", frontier.display())
5379 .to_uppercase()
5380 .dimmed()
5381 );
5382 println!(" {}", style::tick_row(60));
5383 if filtered.is_empty() {
5384 println!(" (no predictions matching filters)");
5385 return;
5386 }
5387 for p in &filtered {
5388 let resolved = resolved_ids.contains(p.id.as_str());
5389 let chip = if resolved {
5390 style::ok("resolved")
5391 } else {
5392 style::warn("open")
5393 };
5394 let deadline = p.resolves_by.as_deref().unwrap_or("(no deadline)");
5395 println!(
5396 " · {} {} by {} → {}",
5397 p.id.dimmed(),
5398 chip,
5399 p.made_by,
5400 deadline,
5401 );
5402 println!(" claim: {}", truncate(&p.claim_text, 90));
5403 println!(" confidence: {:.2}", p.confidence);
5404 }
5405}
5406
5407fn cmd_predictions_expire(frontier: &Path, now_override: Option<&str>, dry_run: bool, json: bool) {
5412 use chrono::DateTime;
5413
5414 let now_dt = match now_override {
5415 Some(s) => DateTime::parse_from_rfc3339(s)
5416 .map(|dt| dt.with_timezone(&chrono::Utc))
5417 .unwrap_or_else(|e| fail_return(&format!("invalid --now '{s}': {e}"))),
5418 None => chrono::Utc::now(),
5419 };
5420
5421 let mut project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
5422 if dry_run {
5423 let mut probe = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
5425 let report = crate::calibration::expire_overdue_predictions(&mut probe, now_dt);
5426 if json {
5427 println!(
5428 "{}",
5429 serde_json::to_string_pretty(&json!({
5430 "ok": true,
5431 "command": "predictions.expire",
5432 "dry_run": true,
5433 "report": report,
5434 }))
5435 .expect("serialize predictions.expire (dry-run)")
5436 );
5437 } else {
5438 println!(
5439 "{} dry-run @ {}: {} would expire, {} already expired, {} resolved, {} still open",
5440 style::ok("ok"),
5441 report.now,
5442 report.newly_expired.len(),
5443 report.already_expired.len(),
5444 report.already_resolved.len(),
5445 report.still_open.len(),
5446 );
5447 for id in &report.newly_expired {
5448 println!(" · {id}");
5449 }
5450 }
5451 return;
5452 }
5453
5454 let report = crate::calibration::expire_overdue_predictions(&mut project, now_dt);
5455 repo::save_to_path(frontier, &project).unwrap_or_else(|e| fail_return(&e));
5456
5457 if json {
5458 println!(
5459 "{}",
5460 serde_json::to_string_pretty(&json!({
5461 "ok": true,
5462 "command": "predictions.expire",
5463 "report": report,
5464 }))
5465 .expect("serialize predictions.expire")
5466 );
5467 } else {
5468 println!(
5469 "{} @ {}: {} newly expired, {} already expired, {} resolved, {} still open",
5470 style::ok("expired"),
5471 report.now,
5472 report.newly_expired.len(),
5473 report.already_expired.len(),
5474 report.already_resolved.len(),
5475 report.still_open.len(),
5476 );
5477 for id in &report.newly_expired {
5478 println!(" · {id}");
5479 }
5480 }
5481}
5482
/// Print per-actor calibration records (hit rate, Brier score, log score).
///
/// With `actor`, only that actor's record is shown (empty output when the
/// actor has no record); otherwise one record per actor. `json` emits a
/// machine-readable envelope and returns early.
fn cmd_calibration(frontier: &Path, actor: Option<&str>, json: bool) {
    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
    let records = match actor {
        Some(a) => {
            crate::calibration::calibration_for_actor(a, &project.predictions, &project.resolutions)
                .map(|r| vec![r])
                .unwrap_or_default()
        }
        None => crate::calibration::calibration_records(&project.predictions, &project.resolutions),
    };

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "calibration",
                "frontier": frontier.display().to_string(),
                "filter_actor": actor,
                "records": records,
            }))
            .expect("serialize calibration")
        );
        return;
    }

    println!();
    println!(
        " {}",
        format!("VELA · CALIBRATION · {}", frontier.display())
            .to_uppercase()
            .dimmed()
    );
    println!(" {}", style::tick_row(60));
    if records.is_empty() {
        println!(" (no calibration records)");
        return;
    }
    for r in &records {
        println!(" · {}", r.actor);
        println!(
            " predictions: {} resolved: {} hits: {}",
            r.n_predictions, r.n_resolved, r.n_hit
        );
        // Each score is optional; print "n/a" when absent.
        match r.hit_rate {
            Some(h) => println!(" hit rate: {:.1}%", h * 100.0),
            None => println!(" hit rate: n/a"),
        }
        match r.brier_score {
            Some(b) => println!(
                " brier: {:.4} (lower is better; 0.25 = chance)",
                b
            ),
            None => println!(" brier: n/a"),
        }
        match r.log_score {
            Some(l) => println!(
                " log score: {:.4} (higher is better; 0 = perfect)",
                l
            ),
            None => println!(" log score: n/a"),
        }
    }
}
5547
/// Register a dataset in the frontier with manual-curation provenance.
///
/// The dataset id comes from `Dataset::new` (presumably derived from the
/// supplied fields — TODO confirm against bundle::Dataset). If an entry
/// with the same id already exists, a skip notice is printed and the repo
/// is left untouched (this is a no-op, not an error exit).
#[allow(clippy::too_many_arguments)]
fn cmd_dataset_add(
    frontier: &Path,
    name: &str,
    version: Option<&str>,
    content_hash: &str,
    url: Option<&str>,
    license: Option<&str>,
    source_title: &str,
    doi: Option<&str>,
    row_count: Option<u64>,
    json: bool,
) {
    let mut project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));

    // Provenance marks this as a manually curated data release, stamped
    // with this binary's version and the current UTC timestamp.
    let provenance = crate::bundle::Provenance {
        source_type: "data_release".to_string(),
        doi: doi.map(|s| s.to_string()),
        pmid: None,
        pmc: None,
        openalex_id: None,
        url: url.map(|s| s.to_string()),
        title: source_title.to_string(),
        authors: Vec::new(),
        year: None,
        journal: None,
        license: license.map(|s| s.to_string()),
        publisher: None,
        funders: Vec::new(),
        extraction: crate::bundle::Extraction {
            method: "manual_curation".to_string(),
            model: None,
            model_version: None,
            extracted_at: chrono::Utc::now().to_rfc3339(),
            extractor_version: env!("CARGO_PKG_VERSION").to_string(),
        },
        review: None,
        citation_count: None,
    };

    let mut dataset = crate::bundle::Dataset::new(
        name.to_string(),
        version.map(|s| s.to_string()),
        content_hash.to_string(),
        url.map(|s| s.to_string()),
        license.map(|s| s.to_string()),
        provenance,
    );
    dataset.row_count = row_count;

    // Duplicate id: report and bail out without modifying the repo.
    if project.datasets.iter().any(|d| d.id == dataset.id) {
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "ok": false,
                    "command": "dataset.add",
                    "reason": "dataset_already_exists",
                    "id": dataset.id,
                }))
                .expect("serialize")
            );
        } else {
            println!(
                "{} dataset {} already exists in {}; skipping.",
                style::warn("dataset"),
                dataset.id,
                frontier.display()
            );
        }
        return;
    }

    let new_id = dataset.id.clone();
    project.datasets.push(dataset);
    repo::save_to_path(frontier, &project).unwrap_or_else(|e| fail_return(&e));

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "dataset.add",
                "id": new_id,
                "name": name,
                "version": version,
                "frontier": frontier.display().to_string(),
            }))
            .expect("failed to serialize dataset.add result")
        );
    } else {
        println!();
        println!(
            " {}",
            format!("VELA · DATASET · {}", new_id)
                .to_uppercase()
                .dimmed()
        );
        println!(" {}", style::tick_row(60));
        println!(" name: {name}");
        if let Some(v) = version {
            println!(" version: {v}");
        }
        println!(" content_hash: {content_hash}");
        if let Some(u) = url {
            println!(" url: {u}");
        }
        println!(" source: {source_title}");
        println!();
        println!(
            " {} dataset recorded in {}",
            style::ok("ok"),
            frontier.display()
        );
    }
}
5665
/// Deposit a negative result into the frontier.
///
/// `kind` selects which of the optional flags are required:
/// - `"registered_trial"`: endpoint, intervention, comparator, population,
///   n-enrolled, power, ci-lower and ci-upper (effect-size threshold and
///   registry id remain optional);
/// - `"exploratory"`: reagent, observation and attempts.
/// Any other kind value aborts, as does a missing required flag (via
/// `fail_return`). Validation happens before any state is touched.
#[allow(clippy::too_many_arguments)]
fn cmd_negative_result_add(
    frontier: &Path,
    kind: &str,
    deposited_by: &str,
    reason: &str,
    conditions_text: &str,
    notes: &str,
    targets: Vec<String>,
    endpoint: Option<&str>,
    intervention: Option<&str>,
    comparator: Option<&str>,
    population: Option<&str>,
    n_enrolled: Option<u32>,
    power: Option<f64>,
    ci_lower: Option<f64>,
    ci_upper: Option<f64>,
    effect_size_threshold: Option<f64>,
    registry_id: Option<&str>,
    reagent: Option<&str>,
    observation: Option<&str>,
    attempts: Option<u32>,
    source_title: &str,
    doi: Option<&str>,
    url: Option<&str>,
    year: Option<i32>,
    json: bool,
) {
    // Build the kind-specific payload first so flag errors surface before
    // anything is written.
    let nr_kind = match kind {
        "registered_trial" => {
            let endpoint =
                endpoint.unwrap_or_else(|| fail_return("--endpoint required for registered_trial"));
            let intervention = intervention
                .unwrap_or_else(|| fail_return("--intervention required for registered_trial"));
            let comparator = comparator
                .unwrap_or_else(|| fail_return("--comparator required for registered_trial"));
            let population = population
                .unwrap_or_else(|| fail_return("--population required for registered_trial"));
            let n_enrolled = n_enrolled
                .unwrap_or_else(|| fail_return("--n-enrolled required for registered_trial"));
            let power =
                power.unwrap_or_else(|| fail_return("--power required for registered_trial"));
            let ci_lower =
                ci_lower.unwrap_or_else(|| fail_return("--ci-lower required for registered_trial"));
            let ci_upper =
                ci_upper.unwrap_or_else(|| fail_return("--ci-upper required for registered_trial"));
            crate::bundle::NegativeResultKind::RegisteredTrial {
                endpoint: endpoint.to_string(),
                intervention: intervention.to_string(),
                comparator: comparator.to_string(),
                population: population.to_string(),
                n_enrolled,
                power,
                effect_size_ci: (ci_lower, ci_upper),
                effect_size_threshold,
                registry_id: registry_id.map(|s| s.to_string()),
            }
        }
        "exploratory" => {
            let reagent =
                reagent.unwrap_or_else(|| fail_return("--reagent required for exploratory"));
            let observation = observation
                .unwrap_or_else(|| fail_return("--observation required for exploratory"));
            let attempts =
                attempts.unwrap_or_else(|| fail_return("--attempts required for exploratory"));
            crate::bundle::NegativeResultKind::Exploratory {
                reagent: reagent.to_string(),
                observation: observation.to_string(),
                attempts,
            }
        }
        other => fail_return(&format!(
            "--kind must be 'registered_trial' or 'exploratory', got '{other}'"
        )),
    };

    // Only the free-text conditions and the clinical_trial flag are filled
    // here; the remaining condition fields default to empty/false/None.
    let conditions = crate::bundle::Conditions {
        text: conditions_text.to_string(),
        species_verified: Vec::new(),
        species_unverified: Vec::new(),
        in_vitro: false,
        in_vivo: false,
        human_data: false,
        clinical_trial: matches!(kind, "registered_trial"),
        concentration_range: None,
        duration: None,
        age_group: None,
        cell_type: None,
    };

    // Provenance source_type depends on the kind: trial records come from a
    // clinical trial, exploratory ones from a lab notebook.
    let provenance = crate::bundle::Provenance {
        source_type: if matches!(kind, "registered_trial") {
            "clinical_trial".to_string()
        } else {
            "lab_notebook".to_string()
        },
        doi: doi.map(|s| s.to_string()),
        pmid: None,
        pmc: None,
        openalex_id: None,
        url: url.map(|s| s.to_string()),
        title: source_title.to_string(),
        authors: Vec::new(),
        year,
        journal: None,
        license: None,
        publisher: None,
        funders: Vec::new(),
        extraction: crate::bundle::Extraction {
            method: "manual_curation".to_string(),
            model: None,
            model_version: None,
            extracted_at: chrono::Utc::now().to_rfc3339(),
            extractor_version: env!("CARGO_PKG_VERSION").to_string(),
        },
        review: None,
        citation_count: None,
    };

    let report = state::add_negative_result(
        frontier,
        nr_kind,
        targets,
        deposited_by,
        conditions,
        provenance,
        notes,
        reason,
    )
    .unwrap_or_else(|e| fail_return(&e));

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&report).expect("serialize report")
        );
    } else {
        println!();
        println!(
            " {}",
            format!("VELA · NEGATIVE-RESULT · {}", report.finding_id)
                .to_uppercase()
                .dimmed()
        );
        println!(" {}", style::tick_row(60));
        println!(" kind: {kind}");
        println!(" deposited_by: {deposited_by}");
        if let Some(ev) = &report.applied_event_id {
            println!(" event: {ev}");
        }
        println!(
            " {} negative_result deposited in {}",
            style::ok("ok"),
            frontier.display()
        );
    }
}
5828
/// List negative results, optionally filtered to those targeting a finding.
///
/// `target` keeps only results whose `target_findings` contains the given
/// finding id; `None` keeps everything. `json` emits an envelope and
/// returns early.
fn cmd_negative_results(frontier: &Path, target: Option<&str>, json: bool) {
    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
    let filtered: Vec<&crate::bundle::NegativeResult> = project
        .negative_results
        .iter()
        .filter(|nr| {
            target
                .map(|t| nr.target_findings.iter().any(|f| f == t))
                .unwrap_or(true)
        })
        .collect();

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "negative_results",
                "frontier": frontier.display().to_string(),
                "count": filtered.len(),
                "negative_results": filtered,
            }))
            .expect("serialize negative_results")
        );
        return;
    }

    if filtered.is_empty() {
        println!(" no negative_results in {}", frontier.display());
        return;
    }

    println!();
    println!(
        " {} ({})",
        "VELA · NEGATIVE RESULTS".dimmed(),
        filtered.len()
    );
    println!(" {}", style::tick_row(60));
    for nr in &filtered {
        // One-line label summarizing the kind-specific payload.
        let kind_label = match &nr.kind {
            crate::bundle::NegativeResultKind::RegisteredTrial {
                endpoint, power, ..
            } => format!("trial · {endpoint} · power {power:.2}"),
            crate::bundle::NegativeResultKind::Exploratory {
                reagent, attempts, ..
            } => format!("exploratory · {reagent} · {attempts} attempts"),
        };
        let retracted = if nr.retracted { " [retracted]" } else { "" };
        let review = nr
            .review_state
            .as_ref()
            .map(|s| format!(" [{s:?}]"))
            .unwrap_or_default();
        println!(" {}{}{}", nr.id, retracted, review);
        println!(" {kind_label}");
        if !nr.target_findings.is_empty() {
            println!(" targets: {}", nr.target_findings.join(", "));
        }
    }
    println!();
}
5893
5894#[allow(clippy::too_many_arguments)]
5896fn cmd_tier_set(
5897 frontier: &Path,
5898 object_type: &str,
5899 object_id: &str,
5900 tier: &str,
5901 actor: &str,
5902 reason: &str,
5903 json: bool,
5904) {
5905 let parsed_tier =
5906 crate::access_tier::AccessTier::parse(tier).unwrap_or_else(|e| fail_return(&e));
5907 let report = state::set_tier(frontier, object_type, object_id, parsed_tier, actor, reason)
5908 .unwrap_or_else(|e| fail_return(&e));
5909
5910 if json {
5911 println!(
5912 "{}",
5913 serde_json::to_string_pretty(&report).expect("serialize tier-set report")
5914 );
5915 } else {
5916 println!();
5917 println!(
5918 " {}",
5919 format!("VELA · TIER · {}", object_id)
5920 .to_uppercase()
5921 .dimmed()
5922 );
5923 println!(" {}", style::tick_row(60));
5924 println!(" object_type: {object_type}");
5925 println!(" new_tier: {}", parsed_tier.canonical());
5926 println!(" actor: {actor}");
5927 if let Some(ev) = &report.applied_event_id {
5928 println!(" event: {ev}");
5929 }
5930 println!(" {} tier set in {}", style::ok("ok"), frontier.display());
5931 }
5932}
5933
5934#[allow(clippy::too_many_arguments)]
5936fn cmd_trajectory_create(
5937 frontier: &Path,
5938 deposited_by: &str,
5939 reason: &str,
5940 targets: Vec<String>,
5941 notes: &str,
5942 json: bool,
5943) {
5944 let report = state::create_trajectory(frontier, targets, deposited_by, notes, reason)
5945 .unwrap_or_else(|e| fail_return(&e));
5946
5947 if json {
5948 println!(
5949 "{}",
5950 serde_json::to_string_pretty(&report).expect("serialize trajectory report")
5951 );
5952 } else {
5953 println!();
5954 println!(
5955 " {}",
5956 format!("VELA · TRAJECTORY · {}", report.finding_id)
5957 .to_uppercase()
5958 .dimmed()
5959 );
5960 println!(" {}", style::tick_row(60));
5961 println!(" deposited_by: {deposited_by}");
5962 if let Some(ev) = &report.applied_event_id {
5963 println!(" event: {ev}");
5964 }
5965 println!(
5966 " {} trajectory opened in {}",
5967 style::ok("ok"),
5968 frontier.display()
5969 );
5970 }
5971}
5972
/// Append a step to an existing trajectory.
///
/// `kind` must be one of hypothesis|tried|ruled_out|observed|refined; any
/// other value aborts via `fail_return` before state is touched.
#[allow(clippy::too_many_arguments)]
fn cmd_trajectory_step(
    frontier: &Path,
    trajectory_id: &str,
    kind: &str,
    description: &str,
    actor: &str,
    reason: &str,
    references: Vec<String>,
    json: bool,
) {
    // Map the CLI flag onto the enum; reject unknown kinds up front.
    let parsed_kind = match kind {
        "hypothesis" => crate::bundle::TrajectoryStepKind::Hypothesis,
        "tried" => crate::bundle::TrajectoryStepKind::Tried,
        "ruled_out" => crate::bundle::TrajectoryStepKind::RuledOut,
        "observed" => crate::bundle::TrajectoryStepKind::Observed,
        "refined" => crate::bundle::TrajectoryStepKind::Refined,
        other => fail_return(&format!(
            "--kind must be one of hypothesis|tried|ruled_out|observed|refined, got '{other}'"
        )),
    };
    let report = state::append_trajectory_step(
        frontier,
        trajectory_id,
        parsed_kind,
        description,
        actor,
        references,
        reason,
    )
    .unwrap_or_else(|e| fail_return(&e));

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&report).expect("serialize step report")
        );
    } else {
        println!();
        println!(
            " {}",
            format!("VELA · STEP · {}", report.finding_id)
                .to_uppercase()
                .dimmed()
        );
        println!(" {}", style::tick_row(60));
        println!(" trajectory: {trajectory_id}");
        println!(" kind: {kind}");
        println!(" actor: {actor}");
        println!(
            " {} step appended in {}",
            style::ok("ok"),
            frontier.display()
        );
    }
}
6030
/// List trajectories, optionally filtered to those targeting a finding.
///
/// `target` keeps only trajectories whose `target_findings` contains the
/// given finding id. `json` emits an envelope and returns early; otherwise
/// each trajectory is printed with its steps (descriptions truncated to
/// the first 80 chars).
fn cmd_trajectories(frontier: &Path, target: Option<&str>, json: bool) {
    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
    let filtered: Vec<&crate::bundle::Trajectory> = project
        .trajectories
        .iter()
        .filter(|t| {
            target
                .map(|tg| t.target_findings.iter().any(|f| f == tg))
                .unwrap_or(true)
        })
        .collect();

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "trajectories",
                "frontier": frontier.display().to_string(),
                "count": filtered.len(),
                "trajectories": filtered,
            }))
            .expect("serialize trajectories")
        );
        return;
    }

    if filtered.is_empty() {
        println!(" no trajectories in {}", frontier.display());
        return;
    }

    println!();
    println!(" {} ({})", "VELA · TRAJECTORIES".dimmed(), filtered.len());
    println!(" {}", style::tick_row(60));
    for t in &filtered {
        let retracted = if t.retracted { " [retracted]" } else { "" };
        let review = t
            .review_state
            .as_ref()
            .map(|s| format!(" [{s:?}]"))
            .unwrap_or_default();
        println!(" {}{}{}", t.id, retracted, review);
        println!(
            " {} step(s){}",
            t.steps.len(),
            if t.target_findings.is_empty() {
                String::new()
            } else {
                format!(" · targets: {}", t.target_findings.join(", "))
            }
        );
        for step in &t.steps {
            // Human-readable label for the step kind.
            let label = match step.kind {
                crate::bundle::TrajectoryStepKind::Hypothesis => "hypothesis",
                crate::bundle::TrajectoryStepKind::Tried => "tried",
                crate::bundle::TrajectoryStepKind::RuledOut => "ruled_out",
                crate::bundle::TrajectoryStepKind::Observed => "observed",
                crate::bundle::TrajectoryStepKind::Refined => "refined",
            };
            let preview: String = step.description.chars().take(80).collect();
            println!(" [{label}] {preview}");
        }
    }
    println!();
}
6098
/// List all datasets registered in the frontier.
///
/// `json` emits an envelope and returns early; otherwise each dataset is
/// printed with its id, name, optional version suffix, optional URL, and
/// content hash (URL/hash truncated to 80 chars).
fn cmd_datasets(frontier: &Path, json: bool) {
    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "datasets",
                "frontier": frontier.display().to_string(),
                "count": project.datasets.len(),
                "datasets": project.datasets,
            }))
            .expect("serialize datasets")
        );
        return;
    }
    println!();
    println!(
        " {}",
        format!("VELA · DATASETS · {}", frontier.display())
            .to_uppercase()
            .dimmed()
    );
    println!(" {}", style::tick_row(60));
    if project.datasets.is_empty() {
        println!(" (no datasets registered)");
        return;
    }
    for ds in &project.datasets {
        // "@version" suffix, empty when the dataset has no version.
        let v = ds
            .version
            .as_deref()
            .map(|s| format!("@{s}"))
            .unwrap_or_default();
        println!(" · {} {}{}", ds.id.dimmed(), ds.name, v);
        if let Some(u) = &ds.url {
            println!(" url: {}", truncate(u, 80));
        }
        println!(" hash: {}", truncate(&ds.content_hash, 80));
    }
}
6141
/// Register a code artifact (file/region of a repo) in the frontier.
///
/// The artifact id comes from `CodeArtifact::new` (presumably derived from
/// the supplied fields — TODO confirm). A duplicate id prints a skip
/// notice and leaves the repo untouched.
#[allow(clippy::too_many_arguments)]
fn cmd_code_add(
    frontier: &Path,
    language: &str,
    repo_url: Option<&str>,
    commit: Option<&str>,
    path: &str,
    content_hash: &str,
    line_start: Option<u32>,
    line_end: Option<u32>,
    entry_point: Option<&str>,
    json: bool,
) {
    let mut project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));

    // Normalize the optional line span: a lone start becomes a one-line
    // range. NOTE(review): a lone --line-end is silently discarded here —
    // confirm that is intended rather than an error or (end, end).
    let line_range = match (line_start, line_end) {
        (Some(a), Some(b)) => Some((a, b)),
        (Some(a), None) => Some((a, a)),
        _ => None,
    };

    let artifact = crate::bundle::CodeArtifact::new(
        language.to_string(),
        repo_url.map(|s| s.to_string()),
        commit.map(|s| s.to_string()),
        path.to_string(),
        line_range,
        content_hash.to_string(),
        entry_point.map(|s| s.to_string()),
    );

    // Duplicate id: report and bail out without modifying the repo.
    if project.code_artifacts.iter().any(|c| c.id == artifact.id) {
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "ok": false,
                    "command": "code.add",
                    "reason": "artifact_already_exists",
                    "id": artifact.id,
                }))
                .expect("serialize")
            );
        } else {
            println!(
                "{} code artifact {} already exists in {}; skipping.",
                style::warn("code"),
                artifact.id,
                frontier.display()
            );
        }
        return;
    }

    let new_id = artifact.id.clone();
    project.code_artifacts.push(artifact);
    repo::save_to_path(frontier, &project).unwrap_or_else(|e| fail_return(&e));

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "code.add",
                "id": new_id,
                "language": language,
                "path": path,
                "frontier": frontier.display().to_string(),
            }))
            .expect("failed to serialize code.add result")
        );
    } else {
        println!();
        println!(
            " {}",
            format!("VELA · CODE · {}", new_id).to_uppercase().dimmed()
        );
        println!(" {}", style::tick_row(60));
        println!(" language: {language}");
        if let Some(r) = repo_url {
            println!(" repo: {r}");
        }
        if let Some(c) = commit {
            println!(" commit: {c}");
        }
        println!(" path: {path}");
        if let Some((a, b)) = line_range {
            println!(" lines: {a}-{b}");
        }
        println!(" content_hash: {content_hash}");
        println!();
        println!(
            " {} code artifact recorded in {}",
            style::ok("ok"),
            frontier.display()
        );
    }
}
6241
/// List all code artifacts registered in the frontier.
///
/// `json` emits an envelope and returns early; otherwise each artifact is
/// printed with its id, language, path (with optional `:start-end` line
/// suffix), repo URL, and git commit when present.
fn cmd_code_artifacts(frontier: &Path, json: bool) {
    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "code-artifacts",
                "frontier": frontier.display().to_string(),
                "count": project.code_artifacts.len(),
                "code_artifacts": project.code_artifacts,
            }))
            .expect("serialize code-artifacts")
        );
        return;
    }
    println!();
    println!(
        " {}",
        format!("VELA · CODE · {}", frontier.display())
            .to_uppercase()
            .dimmed()
    );
    println!(" {}", style::tick_row(60));
    if project.code_artifacts.is_empty() {
        println!(" (no code artifacts registered)");
        return;
    }
    for c in &project.code_artifacts {
        // ":start-end" suffix, empty when the artifact has no line range.
        let lr = c
            .line_range
            .map(|(a, b)| format!(":{a}-{b}"))
            .unwrap_or_default();
        println!(" · {} {} {}{}", c.id.dimmed(), c.language, c.path, lr);
        if let Some(r) = &c.repo_url {
            println!(" repo: {}", truncate(r, 80));
        }
        if let Some(g) = &c.git_commit {
            println!(" commit: {g}");
        }
    }
}
6285
6286fn sha256_for_bytes(bytes: &[u8]) -> String {
6287 format!("sha256:{}", hex::encode(Sha256::digest(bytes)))
6288}
6289
/// Return the bare hex portion of a content hash, tolerating input that
/// lacks the `sha256:` prefix (the input is returned unchanged then).
fn sha256_hex_part(content_hash: &str) -> &str {
    match content_hash.strip_prefix("sha256:") {
        Some(hex) => hex,
        None => content_hash,
    }
}
6293
/// Store `bytes` as a content-addressed blob inside a vela repo and return
/// its repo-relative locator, or `None` when `frontier` is not a vela repo
/// (caller then falls back to another storage mode).
///
/// Blobs live at `.vela/artifact-blobs/sha256/<hex>`. The file is only
/// written when absent — the path is hash-addressed, so an existing file
/// is assumed to already hold these bytes. Directory-creation or write
/// failures abort via `fail`.
fn artifact_blob_locator(frontier: &Path, content_hash: &str, bytes: &[u8]) -> Option<String> {
    let Ok(repo::VelaSource::VelaRepo(root)) = repo::detect(frontier) else {
        return None;
    };
    let hex = sha256_hex_part(content_hash);
    let rel = format!(".vela/artifact-blobs/sha256/{hex}");
    let path = root.join(&rel);
    if let Some(parent) = path.parent() {
        std::fs::create_dir_all(parent).unwrap_or_else(|e| {
            fail(&format!(
                "Failed to create artifact blob directory {}: {e}",
                parent.display()
            ))
        });
    }
    if !path.is_file() {
        std::fs::write(&path, bytes)
            .unwrap_or_else(|e| fail(&format!("Failed to write artifact blob: {e}")));
    }
    Some(rel)
}
6315
6316fn parse_metadata_pairs(pairs: Vec<String>) -> BTreeMap<String, Value> {
6317 let mut out = BTreeMap::new();
6318 for pair in pairs {
6319 let Some((key, value)) = pair.split_once('=') else {
6320 fail(&format!("--metadata must be key=value, got {pair:?}"));
6321 };
6322 let key = key.trim();
6323 if key.is_empty() {
6324 fail("--metadata key must be non-empty");
6325 }
6326 out.insert(key.to_string(), Value::String(value.trim().to_string()));
6327 }
6328 out
6329}
6330
/// Map an artifact kind flag to its provenance `source_type` string.
///
/// Anything unrecognized (including `registry_record`) is treated as a
/// generic database record.
fn artifact_source_type(kind: &str) -> &'static str {
    match kind {
        "clinical_trial_record" => "clinical_trial",
        "protocol" => "clinical_trial",
        "dataset" => "data_release",
        "model_output" => "model_output",
        "lab_file" => "lab_notebook",
        _ => "database_record",
    }
}
6341
6342fn artifact_provenance(
6343 kind: &str,
6344 title: &str,
6345 url: Option<&str>,
6346 doi: Option<&str>,
6347 license: Option<&str>,
6348) -> crate::bundle::Provenance {
6349 crate::bundle::Provenance {
6350 source_type: artifact_source_type(kind).to_string(),
6351 doi: doi.map(str::to_string),
6352 pmid: None,
6353 pmc: None,
6354 openalex_id: None,
6355 url: url.map(str::to_string),
6356 title: title.to_string(),
6357 authors: Vec::new(),
6358 year: None,
6359 journal: None,
6360 license: license.map(str::to_string),
6361 publisher: None,
6362 funders: Vec::new(),
6363 extraction: crate::bundle::Extraction {
6364 method: "artifact_deposit".to_string(),
6365 model: None,
6366 model_version: None,
6367 extracted_at: chrono::Utc::now().to_rfc3339(),
6368 extractor_version: env!("CARGO_PKG_VERSION").to_string(),
6369 },
6370 review: None,
6371 citation_count: None,
6372 }
6373}
6374
/// Deposit an artifact into the frontier, choosing a storage mode.
///
/// Storage mode resolution, in order:
/// - `--file` in a vela repo → "local_blob" (bytes copied under
///   `.vela/artifact-blobs/`);
/// - `--file` elsewhere → "local_file" (locator is the given path);
/// - no file but `--url` → "remote";
/// - neither → "pointer" with no locator.
/// When a file is given, its SHA-256 is computed; an explicit
/// `--content-hash` that disagrees aborts. Without a file,
/// `--content-hash` is required.
#[allow(clippy::too_many_arguments)]
fn cmd_artifact_add(
    frontier: &Path,
    kind: &str,
    name: &str,
    file: Option<&Path>,
    url: Option<&str>,
    content_hash: Option<&str>,
    media_type: Option<&str>,
    license: Option<&str>,
    source_title: Option<&str>,
    source_url: Option<&str>,
    doi: Option<&str>,
    target: Vec<String>,
    metadata: Vec<String>,
    access_tier: &str,
    deposited_by: &str,
    reason: &str,
    json_out: bool,
) {
    let tier =
        crate::access_tier::AccessTier::parse(access_tier).unwrap_or_else(|e| fail_return(&e));
    let mut size_bytes = None;
    let mut storage_mode = "pointer".to_string();
    let mut locator = url.map(str::to_string);
    let mut computed_hash = content_hash.map(str::to_string);

    if let Some(path) = file {
        let bytes = std::fs::read(path)
            .unwrap_or_else(|e| fail(&format!("Failed to read artifact file: {e}")));
        let actual_hash = sha256_for_bytes(&bytes);
        // Verify a caller-supplied hash against the actual file bytes
        // (case-insensitive hex comparison, `sha256:` prefix optional).
        if let Some(expected) = content_hash {
            let expected_hex = sha256_hex_part(expected);
            let actual_hex = sha256_hex_part(&actual_hash);
            if !expected_hex.eq_ignore_ascii_case(actual_hex) {
                fail(&format!(
                    "--content-hash does not match file bytes: expected {expected}, got {actual_hash}"
                ));
            }
        }
        size_bytes = Some(bytes.len() as u64);
        computed_hash = Some(actual_hash.clone());
        // Prefer a content-addressed blob inside the repo; fall back to
        // pointing at the original file path.
        if let Some(rel) = artifact_blob_locator(frontier, &actual_hash, &bytes) {
            storage_mode = "local_blob".to_string();
            locator = Some(rel);
        } else {
            storage_mode = "local_file".to_string();
            locator = Some(path.display().to_string());
        }
    }

    let Some(content_hash) = computed_hash else {
        fail("Provide --content-hash unless --file is present.");
    };
    let content_hash_for_print = content_hash.clone();
    if file.is_none() && url.is_some() {
        storage_mode = "remote".to_string();
    }

    // Provenance falls back to the artifact's own url/name when no
    // explicit source fields were supplied.
    let source_url_effective = source_url.or(url);
    let source_title = source_title.unwrap_or(name);
    let provenance = artifact_provenance(kind, source_title, source_url_effective, doi, license);
    let metadata = parse_metadata_pairs(metadata);
    let artifact = crate::bundle::Artifact::new(
        kind.to_string(),
        name.to_string(),
        content_hash,
        size_bytes,
        media_type.map(str::to_string),
        storage_mode,
        locator,
        source_url_effective.map(str::to_string),
        license.map(str::to_string),
        target,
        provenance,
        metadata,
        tier,
    )
    .unwrap_or_else(|e| fail_return(&e));

    let artifact_id = artifact.id.clone();
    let report = state::add_artifact(frontier, artifact, deposited_by, reason)
        .unwrap_or_else(|e| fail_return(&e));

    if json_out {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "artifact.add",
                "id": artifact_id,
                "frontier": frontier.display().to_string(),
                "event": report.applied_event_id,
            }))
            .expect("serialize artifact.add")
        );
    } else {
        println!();
        println!(
            " {}",
            format!("VELA · ARTIFACT · {}", artifact_id)
                .to_uppercase()
                .dimmed()
        );
        println!(" {}", style::tick_row(60));
        println!(" kind: {kind}");
        println!(" name: {name}");
        println!(" hash: {content_hash_for_print}");
        println!(
            " {} artifact recorded in {}",
            style::ok("ok"),
            frontier.display()
        );
    }
}
6490
/// List artifacts, optionally filtered to those targeting a finding.
///
/// `target` keeps only artifacts whose `target_findings` contains the
/// given finding id; `None` keeps everything. `json_out` emits an envelope
/// and returns early.
fn cmd_artifacts(frontier: &Path, target: Option<&str>, json_out: bool) {
    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
    let filtered: Vec<&crate::bundle::Artifact> = project
        .artifacts
        .iter()
        .filter(|artifact| {
            target
                .map(|t| artifact.target_findings.iter().any(|f| f == t))
                .unwrap_or(true)
        })
        .collect();

    if json_out {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "artifacts",
                "frontier": frontier.display().to_string(),
                "count": filtered.len(),
                "artifacts": filtered,
            }))
            .expect("serialize artifacts")
        );
        return;
    }

    println!();
    println!(
        " {}",
        format!("VELA · ARTIFACTS · {}", frontier.display())
            .to_uppercase()
            .dimmed()
    );
    println!(" {}", style::tick_row(60));
    if filtered.is_empty() {
        println!(" (no artifacts registered)");
        return;
    }
    for artifact in filtered {
        println!(
            " · {} {} · {}",
            artifact.id.dimmed(),
            artifact.kind,
            artifact.name
        );
        if let Some(locator) = &artifact.locator {
            println!(" locator: {}", truncate(locator, 88));
        }
        if !artifact.target_findings.is_empty() {
            println!(" targets: {}", artifact.target_findings.join(", "));
        }
    }
}
6545
/// Audit the frontier's artifacts and report the result.
///
/// Exits with status 1 when the audit fails, in both output modes; on
/// success the summary (counts, blob bytes, per-kind tallies) is printed
/// and the process continues normally.
fn cmd_artifact_audit(frontier: &Path, json_out: bool) {
    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
    let audit = crate::artifact_audit::audit_artifacts(frontier, &project);
    if json_out {
        println!(
            "{}",
            serde_json::to_string_pretty(&audit).expect("serialize artifact audit")
        );
        // Even in JSON mode a failed audit sets a non-zero exit code.
        if !audit.ok {
            std::process::exit(1);
        }
        return;
    }

    println!();
    println!(
        " {}",
        format!("VELA · ARTIFACT AUDIT · {}", frontier.display())
            .to_uppercase()
            .dimmed()
    );
    println!(" {}", style::tick_row(60));
    println!(" artifacts: {}", audit.artifact_count);
    println!(" checked local blobs: {}", audit.checked_local_blobs);
    println!(" local blob bytes: {}", audit.local_blob_bytes);
    if !audit.by_kind.is_empty() {
        let kinds = audit
            .by_kind
            .iter()
            .map(|(kind, count)| format!("{kind}:{count}"))
            .collect::<Vec<_>>()
            .join(", ");
        println!(" kinds: {kinds}");
    }
    if audit.ok {
        println!(" {} artifact audit passed.", style::ok("ok"));
        return;
    }
    // Failure: list every issue, then exit non-zero.
    for issue in &audit.issues {
        println!(
            " {} {} {}: {}",
            style::lost("invalid"),
            issue.id,
            issue.field,
            issue.message
        );
    }
    std::process::exit(1);
}
6595
/// Print the project's decision brief (one entry per question).
///
/// Exits with status 1 when the projection fails to load, in both output
/// modes. On success each question is printed with its answer, caveat,
/// supporting/tension/gap findings, artifacts, and the
/// "what would change this answer" note (wrapped at 82 columns).
fn cmd_decision_brief(frontier: &Path, json_out: bool) {
    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
    let report = decision::load_decision_brief(frontier, &project);
    if json_out {
        println!(
            "{}",
            serde_json::to_string_pretty(&report).expect("serialize decision brief report")
        );
        if !report.ok {
            std::process::exit(1);
        }
        return;
    }
    println!();
    println!(
        " {}",
        format!("VELA · DECISION BRIEF · {}", project.project.name)
            .to_uppercase()
            .dimmed()
    );
    println!(" {}", style::tick_row(60));
    if !report.ok {
        print_projection_issues(&report.issues, report.error.as_deref());
        std::process::exit(1);
    }
    // Invariant: report.ok implies a projection is present.
    let brief = report
        .projection
        .as_ref()
        .expect("ok decision report carries projection");
    for question in &brief.questions {
        println!(" · {} · {}", question.id.dimmed(), question.title);
        println!(" answer: {}", wrap_line(&question.short_answer, 82));
        println!(" caveat: {}", wrap_line(&question.caveat, 82));
        println!(" support: {}", question.supporting_findings.join(", "));
        if !question.tension_findings.is_empty() {
            println!(" tensions: {}", question.tension_findings.join(", "));
        }
        if !question.gap_findings.is_empty() {
            println!(" gaps: {}", question.gap_findings.join(", "));
        }
        if !question.artifact_ids.is_empty() {
            println!(" artifacts: {}", question.artifact_ids.join(", "));
        }
        println!(
            " would change: {}",
            wrap_line(&question.what_would_change_this_answer, 82)
        );
    }
}
6645
/// `vela trial-summary`: renders the trial-outcomes projection for the
/// frontier, as pretty JSON (`json_out`) or a console listing. Exits 1
/// when the projection is missing or invalid.
fn cmd_trial_summary(frontier: &Path, json_out: bool) {
    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
    let report = decision::load_trial_outcomes(frontier, &project);
    if json_out {
        println!(
            "{}",
            serde_json::to_string_pretty(&report).expect("serialize trial summary report")
        );
        // Non-zero exit lets scripts detect a failed projection even in JSON mode.
        if !report.ok {
            std::process::exit(1);
        }
        return;
    }
    println!();
    println!(
        " {}",
        format!("VELA · TRIAL SUMMARY · {}", project.project.name)
            .to_uppercase()
            .dimmed()
    );
    println!(" {}", style::tick_row(60));
    if !report.ok {
        print_projection_issues(&report.issues, report.error.as_deref());
        std::process::exit(1);
    }
    // report.ok guarantees the projection payload is present.
    let outcomes = report
        .projection
        .as_ref()
        .expect("ok trial report carries projection");
    for row in &outcomes.rows {
        println!(" · {} · {} ({})", row.id.dimmed(), row.program, row.drug);
        println!(" population: {}", wrap_line(&row.population, 82));
        println!(" endpoint: {}", wrap_line(&row.primary_endpoint, 82));
        println!(" cognition: {}", wrap_line(&row.cognitive_result, 82));
        println!(" biomarker: {}", wrap_line(&row.biomarker_result, 82));
        println!(" risk: {}", wrap_line(&row.aria_or_safety_result, 82));
        println!(" status: {}", wrap_line(&row.regulatory_status, 82));
        // Cross-references are printed only when present.
        if !row.finding_ids.is_empty() {
            println!(" findings: {}", row.finding_ids.join(", "));
        }
        if !row.artifact_ids.is_empty() {
            println!(" artifacts: {}", row.artifact_ids.join(", "));
        }
    }
}
6691
/// `vela source-verification`: renders the source-verification projection
/// (per-source agency/status records) as pretty JSON or a console report.
/// Exits 1 when the projection is missing or invalid.
fn cmd_source_verification(frontier: &Path, json_out: bool) {
    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
    let report = decision::load_source_verification(frontier, &project);
    if json_out {
        println!(
            "{}",
            serde_json::to_string_pretty(&report).expect("serialize source verification report")
        );
        if !report.ok {
            std::process::exit(1);
        }
        return;
    }
    println!();
    println!(
        " {}",
        format!("VELA · SOURCE VERIFICATION · {}", project.project.name)
            .to_uppercase()
            .dimmed()
    );
    println!(" {}", style::tick_row(60));
    if !report.ok {
        print_projection_issues(&report.issues, report.error.as_deref());
        std::process::exit(1);
    }
    // report.ok guarantees the projection payload is present.
    let verification = report
        .projection
        .as_ref()
        .expect("ok source verification report carries projection");
    println!(" verified_at: {}", verification.verified_at);
    for source in &verification.sources {
        println!(" · {} · {}", source.id.dimmed(), source.title);
        println!(" agency: {}", source.agency);
        // Long URLs are truncated for display rather than wrapped.
        println!(" url: {}", truncate(&source.url, 88));
        println!(" status: {}", wrap_line(&source.current_status, 82));
    }
}
6729
/// `vela source-ingest-plan`: renders the source-ingest-plan projection
/// (a prioritized list of sources to ingest) as pretty JSON or a console
/// report. Exits 1 when the projection is missing or invalid.
fn cmd_source_ingest_plan(frontier: &Path, json_out: bool) {
    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
    let report = decision::load_source_ingest_plan(frontier, &project);
    if json_out {
        println!(
            "{}",
            serde_json::to_string_pretty(&report).expect("serialize source ingest plan report")
        );
        if !report.ok {
            std::process::exit(1);
        }
        return;
    }
    println!();
    println!(
        " {}",
        format!("VELA · SOURCE INGEST PLAN · {}", project.project.name)
            .to_uppercase()
            .dimmed()
    );
    println!(" {}", style::tick_row(60));
    if !report.ok {
        print_projection_issues(&report.issues, report.error.as_deref());
        std::process::exit(1);
    }
    // report.ok guarantees the projection payload is present.
    let plan = report
        .projection
        .as_ref()
        .expect("ok source ingest plan report carries projection");
    println!(" verified_at: {}", plan.verified_at);
    println!(" entries: {}", plan.entries.len());
    for entry in &plan.entries {
        println!(
            " · {} · {} · {} · {}",
            entry.id.dimmed(),
            entry.category,
            entry.priority,
            entry.ingest_status
        );
        println!(" name: {}", wrap_line(&entry.name, 82));
        println!(" locator: {}", truncate(&entry.locator, 88));
        println!(" use: {}", wrap_line(&entry.target_use, 82));
        // Optional cross-references, printed only when present.
        if let Some(id) = &entry.current_frontier_artifact_id {
            println!(" artifact: {id}");
        }
        if !entry.target_findings.is_empty() {
            println!(" findings: {}", entry.target_findings.join(", "));
        }
    }
}
6780
6781fn print_projection_issues(issues: &[decision::ProjectionIssue], error: Option<&str>) {
6782 if let Some(error) = error {
6783 println!(" {} {error}", style::lost("unavailable"));
6784 }
6785 for issue in issues {
6786 println!(
6787 " {} {}: {}",
6788 style::lost("invalid"),
6789 issue.path,
6790 issue.message
6791 );
6792 }
6793}
6794
/// Soft-wraps `text` so no line exceeds `max_chars` (counted in chars,
/// not bytes). Text that already fits is returned untouched; otherwise it
/// is re-flowed on whitespace, with continuation lines prefixed by a
/// space. A single word longer than `max_chars` is never split.
fn wrap_line(text: &str, max_chars: usize) -> String {
    if text.chars().count() <= max_chars {
        return text.to_string();
    }
    let mut wrapped = String::new();
    let mut current = 0usize;
    for word in text.split_whitespace() {
        let len = word.chars().count();
        // A word fits if the line is empty, or if it plus a separating
        // space stays within the budget.
        let fits = current == 0 || current + 1 + len <= max_chars;
        if fits {
            if current > 0 {
                wrapped.push(' ');
                current += 1;
            }
            wrapped.push_str(word);
            current += len;
        } else {
            wrapped.push('\n');
            wrapped.push_str(" ");
            wrapped.push_str(word);
            current = len;
        }
    }
    wrapped
}
6819
6820fn clinical_str<'a>(study: &'a Value, pointer: &str) -> Option<&'a str> {
6821 study.pointer(pointer).and_then(Value::as_str)
6822}
6823
6824fn clinical_string_array(study: &Value, pointer: &str) -> Vec<String> {
6825 study
6826 .pointer(pointer)
6827 .and_then(Value::as_array)
6828 .map(|items| {
6829 items
6830 .iter()
6831 .filter_map(Value::as_str)
6832 .map(str::to_string)
6833 .collect()
6834 })
6835 .unwrap_or_default()
6836}
6837
6838fn clinical_named_array(study: &Value, pointer: &str, field: &str) -> Vec<String> {
6839 study
6840 .pointer(pointer)
6841 .and_then(Value::as_array)
6842 .map(|items| {
6843 items
6844 .iter()
6845 .filter_map(|item| item.get(field).and_then(Value::as_str))
6846 .map(str::to_string)
6847 .collect()
6848 })
6849 .unwrap_or_default()
6850}
6851
6852fn insert_string_vec_metadata(
6853 metadata: &mut BTreeMap<String, Value>,
6854 key: &str,
6855 values: Vec<String>,
6856) {
6857 if values.is_empty() {
6858 return;
6859 }
6860 metadata.insert(
6861 key.to_string(),
6862 Value::Array(values.into_iter().map(Value::String).collect()),
6863 );
6864}
6865
/// `vela clinical-trial-import`: imports a ClinicalTrials.gov study record
/// as a `clinical_trial_record` frontier artifact.
///
/// The record comes either from `input_json` (a previously downloaded v2
/// API payload) or, when that is `None`, from a live fetch of the v2 API.
/// The JSON is canonicalized and content-hashed, stored as a local blob
/// when possible (falling back to the remote API URL as locator),
/// decorated with selected study metadata, and appended to frontier state
/// via `state::add_artifact`.
async fn cmd_clinical_trial_import(
    frontier: &Path,
    nct_id: &str,
    input_json: Option<&Path>,
    target: Vec<String>,
    deposited_by: &str,
    reason: &str,
    license: &str,
    json_out: bool,
) {
    let api_url = format!("https://clinicaltrials.gov/api/v2/studies/{nct_id}");
    let raw = if let Some(path) = input_json {
        std::fs::read_to_string(path)
            .unwrap_or_else(|e| fail(&format!("Failed to read ClinicalTrials.gov JSON: {e}")))
    } else {
        let response = reqwest::get(&api_url).await.unwrap_or_else(|e| {
            fail(&format!(
                "Failed to fetch ClinicalTrials.gov record {nct_id}: {e}"
            ))
        });
        // Treat HTTP error statuses (e.g. unknown NCT ids) as fetch failures.
        let response = response.error_for_status().unwrap_or_else(|e| {
            fail(&format!(
                "Failed to fetch ClinicalTrials.gov record {nct_id}: {e}"
            ))
        });
        response.text().await.unwrap_or_else(|e| {
            fail(&format!(
                "Failed to read ClinicalTrials.gov record {nct_id}: {e}"
            ))
        })
    };
    let study: Value = serde_json::from_str(&raw)
        .unwrap_or_else(|e| fail(&format!("Failed to parse ClinicalTrials.gov JSON: {e}")));
    // Hash the canonical form so the same study always yields the same hash,
    // independent of key order or whitespace in the raw payload.
    let canonical_bytes = crate::canonical::to_canonical_bytes(&study)
        .unwrap_or_else(|e| fail(&format!("Failed to canonicalize trial JSON: {e}")));
    let content_hash = sha256_for_bytes(&canonical_bytes);
    // Prefer a local blob under .vela/; fall back to the remote API URL.
    let locator = artifact_blob_locator(frontier, &content_hash, &canonical_bytes)
        .unwrap_or_else(|| api_url.clone());
    let storage_mode = if locator.starts_with(".vela/") {
        "local_blob"
    } else {
        "remote"
    };

    // Prefer the NCT id embedded in the payload over the user-supplied one.
    let parsed_nct = clinical_str(&study, "/protocolSection/identificationModule/nctId")
        .unwrap_or(nct_id)
        .to_string();
    // Title preference: brief title, then official title, then the NCT id.
    let title = clinical_str(&study, "/protocolSection/identificationModule/briefTitle")
        .or_else(|| {
            clinical_str(
                &study,
                "/protocolSection/identificationModule/officialTitle",
            )
        })
        .unwrap_or(nct_id);
    let public_url = format!("https://clinicaltrials.gov/study/{parsed_nct}");
    let mut metadata = BTreeMap::new();
    metadata.insert("nct_id".to_string(), Value::String(parsed_nct.clone()));
    metadata.insert(
        "source_api".to_string(),
        Value::String("clinicaltrials.gov-v2".to_string()),
    );
    metadata.insert(
        "retrieved_at".to_string(),
        Value::String(chrono::Utc::now().to_rfc3339()),
    );
    // Scalar status/date fields, copied only when present in the payload.
    for (key, pointer) in [
        (
            "overall_status",
            "/protocolSection/statusModule/overallStatus",
        ),
        (
            "start_date",
            "/protocolSection/statusModule/startDateStruct/date",
        ),
        (
            "completion_date",
            "/protocolSection/statusModule/completionDateStruct/date",
        ),
    ] {
        if let Some(value) = clinical_str(&study, pointer) {
            metadata.insert(key.to_string(), Value::String(value.to_string()));
        }
    }
    // List-valued metadata; the helper skips insertion when the list is empty.
    insert_string_vec_metadata(
        &mut metadata,
        "phases",
        clinical_string_array(&study, "/protocolSection/designModule/phases"),
    );
    insert_string_vec_metadata(
        &mut metadata,
        "conditions",
        clinical_string_array(&study, "/protocolSection/conditionsModule/conditions"),
    );
    insert_string_vec_metadata(
        &mut metadata,
        "interventions",
        clinical_named_array(
            &study,
            "/protocolSection/armsInterventionsModule/interventions",
            "name",
        ),
    );
    insert_string_vec_metadata(
        &mut metadata,
        "primary_outcomes",
        clinical_named_array(
            &study,
            "/protocolSection/outcomesModule/primaryOutcomes",
            "measure",
        ),
    );
    if let Some(has_results) = study.get("hasResults").and_then(Value::as_bool) {
        metadata.insert("has_results".to_string(), Value::Bool(has_results));
    }

    let provenance = artifact_provenance(
        "clinical_trial_record",
        title,
        Some(&public_url),
        None,
        Some(license),
    );
    let artifact = crate::bundle::Artifact::new(
        "clinical_trial_record",
        title.to_string(),
        content_hash,
        Some(canonical_bytes.len() as u64),
        Some("application/json".to_string()),
        storage_mode.to_string(),
        Some(locator),
        Some(public_url.clone()),
        Some(license.to_string()),
        target,
        provenance,
        metadata,
        crate::access_tier::AccessTier::Public,
    )
    .unwrap_or_else(|e| fail_return(&e));
    let artifact_id = artifact.id.clone();
    let report = state::add_artifact(frontier, artifact, deposited_by, reason)
        .unwrap_or_else(|e| fail_return(&e));

    if json_out {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "clinical-trial-import",
                "nct_id": parsed_nct,
                "id": artifact_id,
                "frontier": frontier.display().to_string(),
                "event": report.applied_event_id,
                "source_url": public_url,
            }))
            .expect("serialize clinical-trial-import")
        );
    } else {
        println!();
        println!(
            " {}",
            format!("VELA · CLINICAL TRIAL · {}", artifact_id)
                .to_uppercase()
                .dimmed()
        );
        println!(" {}", style::tick_row(60));
        println!(" nct_id: {parsed_nct}");
        println!(" title: {}", truncate(title, 96));
        println!(" source: {public_url}");
        println!(
            " {} trial record imported into {}",
            style::ok("ok"),
            frontier.display()
        );
    }
}
7042
/// `vela replicate`: records a replication attempt against an existing
/// finding and (unless `no_cascade`) propagates the outcome to dependent
/// findings via the correction-propagation engine.
///
/// Validates `outcome` against `VALID_REPLICATION_OUTCOMES` and requires
/// `target` to be a `vf_`-prefixed finding id present in the frontier.
/// The attempt is deduplicated by replication id: an existing id is
/// reported and skipped without modifying state.
#[allow(clippy::too_many_arguments)]
fn cmd_replicate(
    frontier: &Path,
    target: &str,
    outcome: &str,
    attempted_by: &str,
    conditions_text: &str,
    source_title: &str,
    doi: Option<&str>,
    pmid: Option<&str>,
    sample_size: Option<&str>,
    note: &str,
    previous_attempt: Option<&str>,
    no_cascade: bool,
    json: bool,
) {
    if !crate::bundle::VALID_REPLICATION_OUTCOMES.contains(&outcome) {
        fail(&format!(
            "invalid outcome '{outcome}'; valid: {:?}",
            crate::bundle::VALID_REPLICATION_OUTCOMES
        ));
    }
    if !target.starts_with("vf_") {
        fail(&format!("target '{target}' is not a vf_ finding id"));
    }

    let mut project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));

    if !project.findings.iter().any(|f| f.id == target) {
        fail(&format!(
            "target finding '{target}' not present in frontier '{}'",
            frontier.display()
        ));
    }

    // Heuristically classify the experimental context from the free-text
    // conditions via keyword matching; no structured parsing is attempted.
    let lower = conditions_text.to_lowercase();
    let conditions = crate::bundle::Conditions {
        text: conditions_text.to_string(),
        species_verified: Vec::new(),
        species_unverified: Vec::new(),
        in_vitro: lower.contains("in vitro") || lower.contains("ipsc"),
        in_vivo: lower.contains("in vivo") || lower.contains("mouse") || lower.contains("rat"),
        human_data: lower.contains("human")
            || lower.contains("clinical")
            || lower.contains("patient"),
        clinical_trial: lower.contains("clinical trial") || lower.contains("phase "),
        concentration_range: None,
        duration: None,
        age_group: None,
        cell_type: None,
    };

    let evidence = crate::bundle::Evidence {
        evidence_type: "experimental".to_string(),
        model_system: String::new(),
        species: None,
        method: "replication_attempt".to_string(),
        sample_size: sample_size.map(|s| s.to_string()),
        effect_size: None,
        p_value: None,
        replicated: outcome == "replicated",
        replication_count: None,
        evidence_spans: Vec::new(),
    };

    let provenance = crate::bundle::Provenance {
        source_type: "published_paper".to_string(),
        doi: doi.map(|s| s.to_string()),
        pmid: pmid.map(|s| s.to_string()),
        pmc: None,
        openalex_id: None,
        url: None,
        title: source_title.to_string(),
        authors: Vec::new(),
        year: None,
        journal: None,
        license: None,
        publisher: None,
        funders: Vec::new(),
        extraction: crate::bundle::Extraction {
            method: "manual_curation".to_string(),
            model: None,
            model_version: None,
            extracted_at: chrono::Utc::now().to_rfc3339(),
            extractor_version: env!("CARGO_PKG_VERSION").to_string(),
        },
        review: None,
        citation_count: None,
    };

    let mut rep = crate::bundle::Replication::new(
        target.to_string(),
        attempted_by.to_string(),
        outcome.to_string(),
        evidence,
        conditions,
        provenance,
        note.to_string(),
    );
    rep.previous_attempt = previous_attempt.map(|s| s.to_string());

    // Idempotency: a replication with the same derived id is not re-added.
    if project.replications.iter().any(|r| r.id == rep.id) {
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "ok": false,
                    "command": "replicate",
                    "reason": "replication_already_exists",
                    "id": rep.id,
                }))
                .expect("serialize")
            );
        } else {
            println!(
                "{} replication {} already exists in {}; skipping.",
                style::warn("replicate"),
                rep.id,
                frontier.display()
            );
        }
        return;
    }

    let new_id = rep.id.clone();
    project.replications.push(rep);

    // Unless the caller opted out, cascade the outcome to dependents;
    // the cascade's events join the review log and stats are recomputed
    // before the project is saved.
    let cascade_result = if no_cascade {
        None
    } else {
        let result = propagate::propagate_correction(
            &mut project,
            target,
            propagate::PropagationAction::ReplicationOutcome {
                outcome: outcome.to_string(),
                vrep_id: new_id.clone(),
            },
        );
        project.review_events.extend(result.events.clone());
        project::recompute_stats(&mut project);
        Some(result)
    };

    repo::save_to_path(frontier, &project).unwrap_or_else(|e| fail_return(&e));

    if json {
        let cascade_json = cascade_result.as_ref().map(|r| {
            json!({
                "affected": r.affected,
                "events": r.events.len(),
            })
        });
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "replicate",
                "id": new_id,
                "target": target,
                "outcome": outcome,
                "attempted_by": attempted_by,
                "cascade": cascade_json,
                "frontier": frontier.display().to_string(),
            }))
            .expect("failed to serialize replicate result")
        );
    } else {
        println!();
        println!(
            " {}",
            format!("VELA · REPLICATE · {}", new_id)
                .to_uppercase()
                .dimmed()
        );
        println!(" {}", style::tick_row(60));
        println!(" target: {target}");
        println!(" outcome: {outcome}");
        println!(" attempted by: {attempted_by}");
        println!(" conditions: {conditions_text}");
        println!(" source: {source_title}");
        if let Some(d) = doi {
            println!(" doi: {d}");
        }
        println!();
        println!(
            " {} replication recorded in {}",
            style::ok("ok"),
            frontier.display()
        );
        if let Some(result) = cascade_result {
            println!(
                " {} cascade: {} dependent(s) flagged, {} review event(s) recorded",
                style::ok("ok"),
                result.affected,
                result.events.len()
            );
        } else {
            println!(" {} cascade skipped (--no-cascade)", style::warn("info"));
        }
    }
}
7264
/// `vela replications`: lists recorded replication attempts, optionally
/// filtered to a single target finding id, as JSON or a console list.
fn cmd_replications(frontier: &Path, target: Option<&str>, json: bool) {
    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
    // No filter (target = None) lists every replication.
    let filtered: Vec<&crate::bundle::Replication> = project
        .replications
        .iter()
        .filter(|r| target.is_none_or(|t| r.target_finding == t))
        .collect();

    if json {
        let payload = json!({
            "ok": true,
            "command": "replications",
            "frontier": frontier.display().to_string(),
            "filter_target": target,
            "count": filtered.len(),
            "replications": filtered,
        });
        println!(
            "{}",
            serde_json::to_string_pretty(&payload).expect("failed to serialize replications list")
        );
        return;
    }

    println!();
    let header = match target {
        Some(t) => format!("VELA · REPLICATIONS · {t}"),
        None => format!("VELA · REPLICATIONS · {}", frontier.display()),
    };
    println!(" {}", header.to_uppercase().dimmed());
    println!(" {}", style::tick_row(60));
    if filtered.is_empty() {
        println!(" (no replications recorded)");
        return;
    }
    for rep in &filtered {
        // Color the outcome chip by severity; unrecognized outcomes stay uncolored.
        let outcome_chip = match rep.outcome.as_str() {
            "replicated" => style::ok(&rep.outcome),
            "failed" => style::lost(&rep.outcome),
            "partial" => style::warn(&rep.outcome),
            _ => rep.outcome.clone().normal().to_string(),
        };
        println!(
            " · {} {} by {}",
            rep.id.dimmed(),
            outcome_chip,
            rep.attempted_by
        );
        println!(" target: {}", rep.target_finding);
        // Optional detail lines, shown only when non-empty.
        if !rep.conditions.text.is_empty() {
            println!(" conditions: {}", truncate(&rep.conditions.text, 80));
        }
        if !rep.provenance.title.is_empty() {
            println!(" source: {}", truncate(&rep.provenance.title, 80));
        }
    }
}
7323
/// `vela ingest`: single entry point that dispatches a path or URI to the
/// appropriate compiler/importer.
///
/// - `doi:` / `pmid:` / `nct:` URIs → metadata fetch only (no state write).
/// - Files: `.pdf` → scout, `.md`/`.markdown` → compile-notes,
///   `.csv`/`.tsv` → compile-data, `.json` → artifact-to-state.
/// - Directories: counts handlable file types at the top level and runs
///   each matching pipeline in sequence; a folder with none falls back to
///   compile-code.
async fn cmd_ingest(
    path: &str,
    frontier: &Path,
    backend: Option<&str>,
    actor: Option<&str>,
    dry_run: bool,
    json: bool,
) {
    let lowered = path.trim().to_lowercase();
    if lowered.starts_with("doi:") || lowered.starts_with("pmid:") || lowered.starts_with("nct:") {
        cmd_source_fetch(path.trim(), None, None, false, json).await;
        // Advisory goes to stderr so --json stdout stays parseable.
        if !json {
            eprintln!();
            eprintln!(
                " vela ingest · note: doi:/pmid:/nct: URIs only fetch metadata; no frontier state was written."
            );
            eprintln!(
                " next: turn this paper into a proposal with `vela finding add {} --assertion '...' --author 'reviewer:you' --apply`",
                frontier.display()
            );
        }
        return;
    }

    let p = std::path::PathBuf::from(path);
    if !p.exists() {
        fail(&format!(
            "ingest: path '{path}' does not exist (and is not a doi:/pmid:/nct: URI)"
        ));
    }

    let ext = p
        .extension()
        .and_then(|s| s.to_str())
        .map(|s| s.to_ascii_lowercase());

    if p.is_file() {
        // Single file: route by extension.
        match ext.as_deref() {
            Some("pdf") => {
                cmd_scout(&p, frontier, backend, dry_run, json).await;
            }
            Some("md") | Some("markdown") => {
                cmd_compile_notes(&p, frontier, backend, None, None, dry_run, json).await;
            }
            Some("csv") | Some("tsv") => {
                cmd_compile_data(&p, frontier, backend, None, dry_run, json).await;
            }
            Some("json") => {
                let actor_id = actor.unwrap_or("agent:vela-ingest-bot");
                cmd_artifact_to_state(frontier, &p, actor_id, false, json);
            }
            other => {
                fail(&format!(
                    "ingest: unsupported file type '{}' (expected .pdf, .md, .csv, .tsv, .json, or a doi:/pmid:/nct: URI)",
                    other.unwrap_or("(none)")
                ));
            }
        }
        return;
    }

    if p.is_dir() {
        // First pass: count handlable file types (top level only; not recursive).
        let mut pdf_count = 0usize;
        let mut md_count = 0usize;
        let mut data_count = 0usize;
        let mut json_count = 0usize;
        let mut unhandled_exts: std::collections::BTreeSet<String> =
            std::collections::BTreeSet::new();
        if let Ok(entries) = std::fs::read_dir(&p) {
            for entry in entries.flatten() {
                let path = entry.path();
                if !path.is_file() {
                    continue;
                }
                if let Some(name) = entry.file_name().to_str()
                    && let Some(dot) = name.rfind('.')
                {
                    let ext = name[dot + 1..].to_ascii_lowercase();
                    match ext.as_str() {
                        "pdf" => pdf_count += 1,
                        "md" | "markdown" => md_count += 1,
                        "csv" | "tsv" => data_count += 1,
                        "json" => json_count += 1,
                        other => {
                            // Dotfiles (".gitignore" etc.) are not reported as unhandled.
                            if !name.starts_with('.') {
                                unhandled_exts.insert(other.to_string());
                            }
                        }
                    }
                }
            }
        }

        let dispatched_types = (pdf_count > 0) as usize
            + (md_count > 0) as usize
            + (data_count > 0) as usize
            + (json_count > 0) as usize;

        // Nothing recognizable: treat the folder as a code tree.
        if dispatched_types == 0 {
            cmd_compile_code(&p, frontier, backend, None, dry_run, json).await;
            return;
        }

        if dispatched_types > 1 {
            eprintln!(
                " vela ingest · folder has multiple handlable types; running each in sequence"
            );
            eprintln!(
                " pdf:{pdf_count} md:{md_count} csv/tsv:{data_count} json:{json_count}"
            );
        }

        // Second pass: run each matching pipeline over the folder.
        if pdf_count > 0 {
            cmd_scout(&p, frontier, backend, dry_run, json).await;
        }
        if md_count > 0 {
            cmd_compile_notes(&p, frontier, backend, None, None, dry_run, json).await;
        }
        if data_count > 0 {
            cmd_compile_data(&p, frontier, backend, None, dry_run, json).await;
        }
        if json_count > 0 {
            let actor_id = actor.unwrap_or("agent:vela-ingest-bot");
            // JSON artifacts are imported one file at a time.
            if let Ok(entries) = std::fs::read_dir(&p) {
                for entry in entries.flatten() {
                    let path = entry.path();
                    if path.is_file()
                        && path
                            .extension()
                            .and_then(|s| s.to_str())
                            .map(|s| s.eq_ignore_ascii_case("json"))
                            .unwrap_or(false)
                    {
                        cmd_artifact_to_state(frontier, &path, actor_id, false, json);
                    }
                }
            }
        }

        if !unhandled_exts.is_empty() {
            let kinds: Vec<String> = unhandled_exts.into_iter().collect();
            eprintln!(
                " vela ingest · skipped {} file extension(s) with no handler: {}",
                kinds.len(),
                kinds.join(", ")
            );
        }
        return;
    }

    fail(&format!(
        "ingest: path '{path}' is neither a file nor a directory"
    ));
}
7523
7524#[allow(clippy::too_many_arguments)]
7525async fn cmd_compile_data(
7527 root: &Path,
7528 frontier: &Path,
7529 backend: Option<&str>,
7530 sample_rows: Option<usize>,
7531 dry_run: bool,
7532 json_out: bool,
7533) {
7534 match DATASETS_HANDLER.get() {
7535 Some(handler) => {
7536 handler(
7537 root.to_path_buf(),
7538 frontier.to_path_buf(),
7539 backend.map(String::from),
7540 sample_rows,
7541 dry_run,
7542 json_out,
7543 )
7544 .await;
7545 }
7546 None => {
7547 eprintln!(
7548 "{} `vela compile-data` requires the vela CLI binary; the library is unwired without a registered datasets handler.",
7549 style::err_prefix()
7550 );
7551 std::process::exit(1);
7552 }
7553 }
7554}
7555
7556async fn cmd_review_pending(
7559 frontier: &Path,
7560 backend: Option<&str>,
7561 max_proposals: Option<usize>,
7562 batch_size: usize,
7563 dry_run: bool,
7564 json_out: bool,
7565) {
7566 match REVIEWER_HANDLER.get() {
7567 Some(handler) => {
7568 handler(
7569 frontier.to_path_buf(),
7570 backend.map(String::from),
7571 max_proposals,
7572 batch_size,
7573 dry_run,
7574 json_out,
7575 )
7576 .await;
7577 }
7578 None => {
7579 eprintln!(
7580 "{} `vela review-pending` requires the vela CLI binary; the library is unwired without a registered reviewer handler.",
7581 style::err_prefix()
7582 );
7583 std::process::exit(1);
7584 }
7585 }
7586}
7587
7588async fn cmd_find_tensions(
7591 frontier: &Path,
7592 backend: Option<&str>,
7593 max_findings: Option<usize>,
7594 dry_run: bool,
7595 json_out: bool,
7596) {
7597 match TENSIONS_HANDLER.get() {
7598 Some(handler) => {
7599 handler(
7600 frontier.to_path_buf(),
7601 backend.map(String::from),
7602 max_findings,
7603 dry_run,
7604 json_out,
7605 )
7606 .await;
7607 }
7608 None => {
7609 eprintln!(
7610 "{} `vela find-tensions` requires the vela CLI binary; the library is unwired without a registered tensions handler.",
7611 style::err_prefix()
7612 );
7613 std::process::exit(1);
7614 }
7615 }
7616}
7617
7618async fn cmd_plan_experiments(
7621 frontier: &Path,
7622 backend: Option<&str>,
7623 max_findings: Option<usize>,
7624 dry_run: bool,
7625 json_out: bool,
7626) {
7627 match EXPERIMENTS_HANDLER.get() {
7628 Some(handler) => {
7629 handler(
7630 frontier.to_path_buf(),
7631 backend.map(String::from),
7632 max_findings,
7633 dry_run,
7634 json_out,
7635 )
7636 .await;
7637 }
7638 None => {
7639 eprintln!(
7640 "{} `vela plan-experiments` requires the vela CLI binary; the library is unwired without a registered experiments handler.",
7641 style::err_prefix()
7642 );
7643 std::process::exit(1);
7644 }
7645 }
7646}
7647
7648async fn cmd_compile_code(
7651 root: &Path,
7652 frontier: &Path,
7653 backend: Option<&str>,
7654 max_files: Option<usize>,
7655 dry_run: bool,
7656 json_out: bool,
7657) {
7658 match CODE_HANDLER.get() {
7659 Some(handler) => {
7660 handler(
7661 root.to_path_buf(),
7662 frontier.to_path_buf(),
7663 backend.map(String::from),
7664 max_files,
7665 dry_run,
7666 json_out,
7667 )
7668 .await;
7669 }
7670 None => {
7671 eprintln!(
7672 "{} `vela compile-code` requires the vela CLI binary; the library is unwired without a registered code handler.",
7673 style::err_prefix()
7674 );
7675 std::process::exit(1);
7676 }
7677 }
7678}
7679
7680async fn cmd_compile_notes(
7685 vault: &Path,
7686 frontier: &Path,
7687 backend: Option<&str>,
7688 max_files: Option<usize>,
7689 max_items_per_category: Option<usize>,
7690 dry_run: bool,
7691 json_out: bool,
7692) {
7693 match NOTES_HANDLER.get() {
7694 Some(handler) => {
7695 handler(
7696 vault.to_path_buf(),
7697 frontier.to_path_buf(),
7698 backend.map(String::from),
7699 max_files,
7700 max_items_per_category,
7701 dry_run,
7702 json_out,
7703 )
7704 .await;
7705 }
7706 None => {
7707 eprintln!(
7708 "{} `vela compile-notes` requires the vela CLI binary; the library is unwired without a registered notes handler.",
7709 style::err_prefix()
7710 );
7711 std::process::exit(1);
7712 }
7713 }
7714}
7715
7716async fn cmd_scout(
7723 folder: &Path,
7724 frontier: &Path,
7725 backend: Option<&str>,
7726 dry_run: bool,
7727 json_out: bool,
7728) {
7729 match SCOUT_HANDLER.get() {
7730 Some(handler) => {
7731 handler(
7732 folder.to_path_buf(),
7733 frontier.to_path_buf(),
7734 backend.map(String::from),
7735 dry_run,
7736 json_out,
7737 )
7738 .await;
7739 }
7740 None => {
7741 eprintln!(
7742 "{} `vela scout` requires the vela CLI binary; the library is unwired without a registered scout handler.",
7743 style::err_prefix()
7744 );
7745 std::process::exit(1);
7746 }
7747 }
7748}
7749
/// Best-effort audit for files that look like private key material or
/// credentials, used by `vela check --strict` before publishing.
///
/// A file is flagged when its extension is a known key format (`key`,
/// `pem`, `p12`, `pfx`) or its lowercased name contains `private`,
/// `secret`, or `credential`. Public-key artifacts (`*.pub`, `*.pubkey`,
/// `public.key`) are explicitly exempt, and common build/VCS directories
/// (`.git`, `target`, `node_modules`, `dist`, `build`) are skipped.
/// Unreadable directories and non-UTF-8 file names are silently ignored
/// (this is a best-effort scan, not a guarantee).
///
/// Returns the flagged paths sorted for deterministic output.
///
/// Note: the previous `#[allow(clippy::too_many_arguments)]` here was
/// stray — this function takes a single argument.
pub fn scan_for_sensitive_paths(root: &Path) -> Vec<PathBuf> {
    let mut hits: Vec<PathBuf> = Vec::new();
    let skip_dirs: &[&str] = &[".git", "target", "node_modules", "dist", "build"];
    let bad_exts: &[&str] = &["key", "pem", "p12", "pfx"];
    let bad_substrings: &[&str] = &["private", "secret", "credential"];
    // Iterative DFS via an explicit stack; avoids recursion on deep trees.
    let mut stack: Vec<PathBuf> = vec![root.to_path_buf()];
    while let Some(dir) = stack.pop() {
        let Ok(entries) = std::fs::read_dir(&dir) else {
            continue;
        };
        for entry in entries.flatten() {
            let path = entry.path();
            let Some(name) = path.file_name().and_then(|n| n.to_str()) else {
                continue;
            };
            let lower = name.to_lowercase();
            if path.is_dir() {
                if skip_dirs.contains(&name) {
                    continue;
                }
                stack.push(path);
                continue;
            }
            // Public keys are safe to publish; skip the well-known shapes.
            if lower.ends_with(".pub") || lower.ends_with(".pubkey") || lower == "public.key" {
                continue;
            }
            let ext = path
                .extension()
                .and_then(|e| e.to_str())
                .map(str::to_lowercase)
                .unwrap_or_default();
            if bad_exts.contains(&ext.as_str()) || bad_substrings.iter().any(|s| lower.contains(s))
            {
                hits.push(path);
            }
        }
    }
    hits.sort();
    hits
}
7811
/// Entry point for `vela check`: runs schema validation, methodology and
/// frontier-graph lints, event replay, signal analysis, and the conformance
/// suite against a frontier source, depending on which flags were passed.
///
/// Flag interplay (as implemented below):
/// - `json_output` short-circuits everything else and emits a single JSON
///   report via `check_json_payload`, exiting non-zero when `ok != true`.
/// - `strict` first runs a sensitive-path audit on the source tree and later
///   tightens the exit conditions of the stats pass.
/// - When no individual check flag is set, the full suite runs (`run_all`).
/// - `fix` is currently accepted but unused (see `let _ = fix;` at the end).
fn cmd_check(
    source: Option<&Path>,
    schema: bool,
    stats: bool,
    conformance_flag: bool,
    conformance_dir: &Path,
    all: bool,
    schema_only: bool,
    strict: bool,
    fix: bool,
    json_output: bool,
) {
    // JSON mode: delegate to the structured report and mirror its `ok`
    // field in the process exit code; no human-readable output is printed.
    if json_output {
        let Some(src) = source else {
            fail("--json requires a frontier source");
        };
        let payload = check_json_payload(src, schema_only, strict);
        println!(
            "{}",
            serde_json::to_string_pretty(&payload).expect("failed to serialize check report")
        );
        if payload.get("ok").and_then(Value::as_bool) != Some(true) {
            std::process::exit(1);
        }
        return;
    }

    // Strict mode: refuse to continue when key material is found under the
    // source tree (threat model A17) so secrets never reach a public repo.
    if strict && let Some(src) = source {
        let hits = scan_for_sensitive_paths(src);
        if !hits.is_empty() {
            eprintln!(
                "{} secret-audit: {} sensitive path(s) found under {}",
                style::err_prefix(),
                hits.len(),
                src.display()
            );
            for hit in &hits {
                eprintln!("  - {}", hit.display());
            }
            eprintln!(
                "  hint: add `keys/` and `*.key` to .gitignore so these never reach a public repo (see THREAT_MODEL.md A17)"
            );
            std::process::exit(1);
        }
    }

    // With no specific check selected (and not --all), run everything.
    let run_all = all || (!schema && !stats && !conformance_flag && !schema_only);
    if run_all || schema || schema_only {
        let Some(src) = source else {
            fail("check requires a frontier source");
        };
        validate::run(src);
    }
    // Stats pass: lint, replay the event log, verify signatures, and analyze
    // signals. In strict mode a non-empty review queue or a not-ready proof
    // state is also fatal.
    if !schema_only && (run_all || stats) {
        let Some(src) = source else {
            fail("--stats requires a frontier source");
        };
        let frontier = load_frontier_or_fail(src);
        let report = lint::lint(&frontier, None, None);
        lint::print_report(&report);
        let replay_report = events::replay_report(&frontier);
        println!("event replay: {}", replay_report.status);
        if !replay_report.conflicts.is_empty() {
            for conflict in &replay_report.conflicts {
                println!("  - {conflict}");
            }
        }
        // Signature summary is only printed when at least one finding is
        // signed; verification errors are silently ignored here.
        if let Ok(signature_report) = sign::verify_frontier_data(&frontier, None)
            && signature_report.signed > 0
        {
            println!(
                "Signatures: {} valid / {} invalid / {} unsigned",
                signature_report.valid, signature_report.invalid, signature_report.unsigned
            );
        }
        let signal_report = signals::analyze(&frontier, &[]);
        print_signal_summary(&signal_report, strict);
        if !replay_report.ok
            || (strict
                && (!signal_report.review_queue.is_empty()
                    || signal_report.proof_readiness.status != "ready"))
        {
            std::process::exit(1);
        }
    }
    // Conformance: runs when explicitly requested even if the directory is
    // missing (conformance::run will then surface the error); in run_all
    // mode a missing directory is only a skip notice.
    if run_all || conformance_flag {
        if conformance_flag || conformance_dir.is_dir() {
            conformance::run(conformance_dir);
        } else {
            eprintln!(
                "  conformance: skipped ({} not present; pass --conformance-dir <path> to point at the source repo's tests/conformance)",
                conformance_dir.display()
            );
        }
    }
    let _ = fix; // accepted for CLI compatibility; no auto-fix path exists yet
}
7928
/// Build the structured JSON report for `vela check --json`.
///
/// Aggregates, in order: schema validation, methodology and frontier-graph
/// lints (skipped when `schema_only`), event-log replay, state integrity,
/// derivable-but-unmaterialized source/evidence/condition records, reviewer
/// identity checks on accepted proposals, and signal analysis. The final
/// `ok` flag requires zero errors, and in `strict` mode additionally zero
/// warnings and zero strict-check blockers.
fn check_json_payload(src: &Path, schema_only: bool, strict: bool) -> Value {
    let report = validate::validate(src);
    // Load failures are tolerated: downstream sections degrade to defaults
    // and mark themselves "skipped" when `loaded` is None.
    let loaded = repo::load_from_path(src).ok();
    let (method_report, graph_report) = if schema_only {
        (None, None)
    } else if let Some(frontier) = loaded.as_ref() {
        (
            Some(lint::lint(frontier, None, None)),
            Some(lint::lint_frontier(frontier)),
        )
    } else {
        (None, None)
    };
    let source_hash = hash_path(src).unwrap_or_else(|_| "unavailable".to_string());
    // Flat list of diagnostic objects; every section below appends to it.
    let mut diagnostics = Vec::new();
    diagnostics.extend(report.errors.iter().map(|e| {
        json!({
            "severity": "error",
            "rule_id": "schema",
            "finding_id": null,
            "file": &e.file,
            "field_path": null,
            "message": &e.error,
            "suggestion": schema_error_suggestion(&e.error),
            "fixable": schema_error_fix(&e.error),
            "normalize_action": schema_error_action(&e.error),
        })
    }));
    for (check_id, lint_report) in [
        ("methodology", method_report.as_ref()),
        ("frontier_graph", graph_report.as_ref()),
    ] {
        if let Some(lint_report) = lint_report {
            diagnostics.extend(lint_report.diagnostics.iter().map(|d| {
                json!({
                    "severity": d.severity.to_string(),
                    "rule_id": &d.rule_id,
                    "check": check_id,
                    "finding_id": &d.finding_id,
                    "field_path": null,
                    "message": &d.message,
                    "suggestion": &d.suggestion,
                    "fixable": false,
                    "normalize_action": null,
                })
            }));
        }
    }
    let method_errors = method_report.as_ref().map_or(0, |r| r.errors);
    let method_warnings = method_report.as_ref().map_or(0, |r| r.warnings);
    let method_infos = method_report.as_ref().map_or(0, |r| r.infos);
    let graph_errors = graph_report.as_ref().map_or(0, |r| r.errors);
    let graph_warnings = graph_report.as_ref().map_or(0, |r| r.warnings);
    let graph_infos = graph_report.as_ref().map_or(0, |r| r.infos);
    let replay_report = loaded.as_ref().map(events::replay_report);
    // NOTE(review): these branches look inverted relative to the other
    // checks — `schema_only` analyzes the in-memory frontier while the full
    // run re-reads from `src` — and the check entry further down reports
    // state_integrity as skipped whenever `schema_only` is set even though
    // a report is computed here. Confirm the intended pairing.
    let state_integrity_report = if schema_only {
        loaded.as_ref().map(state_integrity::analyze)
    } else {
        state_integrity::analyze_path(src).ok()
    };
    if let Some(replay) = replay_report.as_ref()
        && !replay.ok
    {
        diagnostics.extend(replay.conflicts.iter().map(|conflict| {
            json!({
                "severity": "error",
                "rule_id": "event_replay",
                "check": "events",
                "finding_id": null,
                "field_path": null,
                "message": conflict,
                "suggestion": "Inspect canonical state events and repair the frontier event log before proof export.",
                "fixable": false,
                "normalize_action": null,
            })
        }));
    }
    // Replay failure counts as a single error regardless of conflict count.
    let event_errors = replay_report
        .as_ref()
        .map_or(0, |replay| usize::from(!replay.ok));
    let state_integrity_errors = state_integrity_report
        .as_ref()
        .map_or(0, |report| report.structural_errors.len());
    let (source_registry, evidence_atoms, conditions, proposal_summary, proof_state) = loaded
        .as_ref()
        .map(|frontier| {
            (
                sources::source_summary(frontier),
                sources::evidence_summary(frontier),
                sources::condition_summary(frontier),
                proposals::summary(frontier),
                proposals::proof_state_json(&frontier.proof_state),
            )
        })
        .unwrap_or_else(|| {
            (
                sources::SourceRegistrySummary::default(),
                sources::EvidenceAtomSummary::default(),
                sources::ConditionSummary::default(),
                proposals::ProposalSummary::default(),
                Value::Null,
            )
        });
    let signature_report = loaded
        .as_ref()
        .and_then(|frontier| sign::verify_frontier_data(frontier, None).ok());
    // Compare the derivable projection against materialized state and warn
    // about anything `vela normalize` could materialize but hasn't.
    if let Some(frontier) = loaded.as_ref()
        && !schema_only
    {
        let projection = sources::derive_projection(frontier);
        let existing_sources = frontier
            .sources
            .iter()
            .map(|source| source.id.as_str())
            .collect::<std::collections::BTreeSet<_>>();
        let existing_atoms = frontier
            .evidence_atoms
            .iter()
            .map(|atom| atom.id.as_str())
            .collect::<std::collections::BTreeSet<_>>();
        let existing_conditions = frontier
            .condition_records
            .iter()
            .map(|record| record.id.as_str())
            .collect::<std::collections::BTreeSet<_>>();
        for source in projection
            .sources
            .iter()
            .filter(|source| !existing_sources.contains(source.id.as_str()))
        {
            diagnostics.push(json!({
                "severity": "warning",
                "rule_id": "missing_source_record",
                "check": "source_registry",
                "finding_id": source.finding_ids.first(),
                "field_path": "sources",
                "message": format!("Source record {} is derivable but not materialized in frontier state.", source.id),
                "suggestion": "Run `vela normalize` to materialize source records before proof export.",
                "fixable": true,
                "normalize_action": "materialize_source_record",
            }));
        }
        for atom in projection
            .evidence_atoms
            .iter()
            .filter(|atom| !existing_atoms.contains(atom.id.as_str()))
        {
            diagnostics.push(json!({
                "severity": "warning",
                "rule_id": "missing_evidence_atom",
                "check": "evidence_atoms",
                "finding_id": atom.finding_id,
                "field_path": "evidence_atoms",
                "message": format!("Evidence atom {} is derivable but not materialized in frontier state.", atom.id),
                "suggestion": "Run `vela normalize` to materialize evidence atoms before proof export.",
                "fixable": true,
                "normalize_action": "materialize_evidence_atom",
            }));
        }
        // Locator gaps are warnings only; they cannot be auto-fixed.
        for atom in projection
            .evidence_atoms
            .iter()
            .filter(|atom| atom.locator.is_none())
        {
            diagnostics.push(json!({
                "severity": "warning",
                "rule_id": "missing_evidence_locator",
                "check": "evidence_atoms",
                "finding_id": atom.finding_id,
                "field_path": "evidence_atoms[].locator",
                "message": format!("Evidence atom {} has no source locator.", atom.id),
                "suggestion": "Add or verify evidence spans, table rows, pages, sections, or run locators.",
                "fixable": false,
                "normalize_action": null,
            }));
        }
        for condition in projection
            .condition_records
            .iter()
            .filter(|condition| !existing_conditions.contains(condition.id.as_str()))
        {
            diagnostics.push(json!({
                "severity": "warning",
                "rule_id": "condition_record_missing",
                "check": "conditions",
                "finding_id": condition.finding_id,
                "field_path": "condition_records",
                "message": format!("Condition record {} is derivable but not materialized in frontier state.", condition.id),
                "suggestion": "Run `vela normalize` to materialize condition boundaries before proof export.",
                "fixable": true,
                "normalize_action": "materialize_condition_record",
            }));
        }
        // Accepted/applied proposals must carry a real reviewer identity;
        // missing or placeholder reviewers are hard errors.
        for proposal in frontier.proposals.iter().filter(|proposal| {
            matches!(proposal.status.as_str(), "accepted" | "applied")
                && proposal
                    .reviewed_by
                    .as_deref()
                    .is_none_or(proposals::is_placeholder_reviewer)
        }) {
            diagnostics.push(json!({
                "severity": "error",
                "rule_id": "reviewer_identity_missing",
                "check": "proposals",
                "finding_id": proposal.target.id,
                "field_path": "proposals[].reviewed_by",
                "message": format!("Accepted or applied proposal {} uses a missing or placeholder reviewer identity.", proposal.id),
                "suggestion": "Accept the proposal with a stable named reviewer id before strict proof use.",
                "fixable": false,
                "normalize_action": null,
            }));
        }
    }
    // Signal analysis sees the full diagnostics list assembled above.
    let signal_report = loaded
        .as_ref()
        .map(|frontier| signals::analyze(frontier, &diagnostics))
        .unwrap_or_else(empty_signal_report);
    let errors =
        report.errors.len() + method_errors + graph_errors + event_errors + state_integrity_errors;
    let warnings = method_warnings + graph_warnings + signal_report.proof_readiness.warnings;
    let infos = method_infos + graph_infos;
    let strict_blockers = signal_report
        .signals
        .iter()
        .filter(|signal| signal.blocks.iter().any(|block| block == "strict_check"))
        .count();
    let fixable = diagnostics
        .iter()
        .filter(|d| d.get("fixable").and_then(Value::as_bool).unwrap_or(false))
        .count();
    // Strict mode also fails on warnings and strict-check blockers.
    let ok = errors == 0 && (!strict || (warnings == 0 && strict_blockers == 0));

    json!({
        "ok": ok,
        "command": "check",
        "schema_version": project::VELA_SCHEMA_VERSION,
        "source": {
            "path": src.display().to_string(),
            "hash": format!("sha256:{source_hash}"),
        },
        "summary": {
            "status": if ok { "pass" } else { "fail" },
            "checked_findings": report.total_files,
            "valid_findings": report.valid,
            "invalid_findings": report.invalid,
            "errors": errors,
            "warnings": warnings,
            "info": infos,
            "fixable": fixable,
            "strict": strict,
            "schema_only": schema_only,
        },
        "checks": [
            {
                "id": "schema",
                "status": if report.invalid == 0 { "pass" } else { "fail" },
                "checked": report.total_files,
                "failed": report.invalid,
                "errors": report.errors.iter().map(|e| json!({
                    "file": e.file,
                    "message": e.error,
                })).collect::<Vec<_>>(),
            },
            {
                "id": "methodology",
                "status": if method_errors == 0 { "pass" } else { "fail" },
                "checked": method_report.as_ref().map_or(0, |r| r.findings_checked),
                "failed": method_errors,
                "warnings": method_warnings,
                "info": method_infos,
                "skipped": schema_only,
            },
            {
                "id": "frontier_graph",
                "status": if graph_errors == 0 { "pass" } else { "fail" },
                "checked": graph_report.as_ref().map_or(0, |r| r.findings_checked),
                "failed": graph_errors,
                "warnings": graph_warnings,
                "info": graph_infos,
                "skipped": schema_only,
            },
            {
                "id": "signals",
                "status": if strict_blockers == 0 { "pass" } else { "fail" },
                "checked": signal_report.signals.len(),
                "failed": strict_blockers,
                "warnings": signal_report.proof_readiness.warnings,
                "skipped": loaded.is_none(),
                "blockers": signal_report.signals.iter()
                    .filter(|s| s.blocks.iter().any(|b| b == "strict_check"))
                    .map(|s| json!({
                        "id": s.id,
                        "kind": s.kind,
                        "severity": s.severity,
                        "reason": s.reason,
                    }))
                    .collect::<Vec<_>>(),
            },
            {
                "id": "events",
                "status": if replay_report.as_ref().is_none_or(|replay| replay.ok) { "pass" } else { "fail" },
                "checked": replay_report.as_ref().map_or(0, |replay| replay.event_log.count),
                "failed": event_errors,
                "skipped": schema_only || loaded.is_none(),
            },
            {
                "id": "state_integrity",
                "status": if state_integrity_report.as_ref().is_none_or(|report| report.status != "fail") { "pass" } else { "fail" },
                "checked": state_integrity_report.as_ref().map_or(0, |report| report.summary.get("events").copied().unwrap_or_default()),
                "failed": state_integrity_errors,
                "skipped": schema_only || loaded.is_none(),
            }
        ],
        "event_log": replay_report.as_ref().map(|replay| &replay.event_log),
        "replay": replay_report,
        "state_integrity": state_integrity_report,
        "source_registry": source_registry,
        "evidence_atoms": evidence_atoms,
        "conditions": conditions,
        "proposals": proposal_summary,
        "proof_state": proof_state,
        "signatures": signature_report,
        "diagnostics": diagnostics,
        "signals": signal_report.signals,
        "review_queue": signal_report.review_queue,
        "proof_readiness": signal_report.proof_readiness,
        "repair_plan": build_repair_plan(&diagnostics),
    })
}
8258
#[allow(clippy::too_many_arguments)]
/// Entry point for `vela normalize`: canonicalizes a frontier in place
/// (`--write`), to a new path (`--out`), or as a dry run (neither flag).
///
/// Steps, in order: validate flag combinations, refuse packet directories
/// and frontiers that already carry substantive canonical events, normalize
/// findings, recompute confidence, optionally resync provenance, optionally
/// rewrite finding IDs to their content addresses (updating links and
/// optionally dumping the old→new map to `--id-map`), materialize derivable
/// source/evidence/condition records, then write and/or report.
fn cmd_normalize(
    source: &Path,
    out: Option<&Path>,
    write: bool,
    dry_run: bool,
    rewrite_ids: bool,
    id_map: Option<&Path>,
    resync_provenance: bool,
    json_output: bool,
) {
    // Mutually exclusive / dependent flag checks; `fail` exits the process.
    if write && out.is_some() {
        fail("Use either --write or --out, not both.");
    }
    if dry_run && (write || out.is_some()) {
        fail("--dry-run cannot be combined with --write or --out.");
    }
    if id_map.is_some() && !rewrite_ids {
        fail("--id-map requires --rewrite-ids.");
    }

    let detected = repo::detect(source).unwrap_or_else(|e| {
        eprintln!("{e}");
        std::process::exit(1);
    });
    // Proof packets are immutable snapshots; they must never be normalized.
    if matches!(detected, repo::VelaSource::PacketDir(_)) {
        fail(
            "Cannot normalize a proof packet directory. Export a new packet from frontier state instead.",
        );
    }
    let mut frontier = repo::load(&detected).unwrap_or_else(|e| fail_return(&e));
    // Any event beyond the initial "frontier.created" means reviewed history
    // exists; rewriting such a frontier would invalidate its audit trail,
    // so only dry runs are allowed past this point.
    let has_substantive_events = frontier
        .events
        .iter()
        .any(|event| event.kind != "frontier.created");
    if has_substantive_events && (write || out.is_some()) {
        fail(
            "Refusing to normalize a frontier with canonical events. Normalize before proposal-backed writes, or create a new reviewed transition for the intended change.",
        );
    }
    let source_hash = hash_path(source).unwrap_or_else(|_| "unavailable".to_string());
    let before_stats = serde_json::to_value(&frontier.stats).unwrap_or(Value::Null);
    let (entity_type_fixes, entity_name_fixes) =
        normalize::normalize_findings(&mut frontier.findings);
    let confidence_updates =
        bundle::recompute_all_confidence(&mut frontier.findings, &frontier.replications);
    let provenance_resync_count = if resync_provenance {
        sources::resync_provenance_from_sources(&mut frontier)
    } else {
        0
    };
    // Snapshot counts before materialization so the deltas below reflect
    // only what this run added.
    let before_source_count = frontier.sources.len();
    let before_evidence_atom_count = frontier.evidence_atoms.len();
    let before_condition_record_count = frontier.condition_records.len();

    let mut id_rewrites = Vec::new();
    if rewrite_ids {
        // Map each finding whose id no longer matches its content address
        // to the recomputed address.
        let mut id_map_values = std::collections::BTreeMap::<String, String>::new();
        for finding in &frontier.findings {
            let expected =
                bundle::FindingBundle::content_address(&finding.assertion, &finding.provenance);
            if expected != finding.id {
                id_map_values.insert(finding.id.clone(), expected);
            }
        }
        // Two findings collapsing onto one address would silently merge
        // them; abort instead.
        let new_ids = id_map_values
            .values()
            .map(String::as_str)
            .collect::<std::collections::HashSet<_>>();
        if new_ids.len() != id_map_values.len() {
            fail("Refusing to rewrite IDs because two findings map to the same content address.");
        }
        for finding in &mut frontier.findings {
            if let Some(new_id) = id_map_values.get(&finding.id) {
                id_rewrites.push(json!({"old": finding.id, "new": new_id}));
                finding.previous_version = Some(finding.id.clone());
                finding.id = new_id.clone();
            }
        }
        // Second pass: retarget links that pointed at the old ids.
        for finding in &mut frontier.findings {
            for link in &mut finding.links {
                if let Some(new_target) = id_map_values.get(&link.target) {
                    link.target = new_target.clone();
                }
            }
        }
        if let Some(path) = id_map {
            std::fs::write(
                path,
                serde_json::to_string_pretty(&id_map_values)
                    .expect("failed to serialize normalize id map"),
            )
            .unwrap_or_else(|e| fail(&format!("Failed to write {}: {e}", path.display())));
        }
    }

    sources::materialize_project(&mut frontier);
    let source_records_materialized = frontier.sources.len().saturating_sub(before_source_count);
    let evidence_atoms_materialized = frontier
        .evidence_atoms
        .len()
        .saturating_sub(before_evidence_atom_count);
    let condition_records_materialized = frontier
        .condition_records
        .len()
        .saturating_sub(before_condition_record_count);
    let after_stats = serde_json::to_value(&frontier.stats).unwrap_or(Value::Null);
    let id_rewrite_count = id_rewrites.len();
    // `wrote_to` doubles as the dry-run indicator: None means nothing was
    // persisted.
    let wrote_to = if write {
        repo::save(&detected, &frontier).unwrap_or_else(|e| fail(&e));
        Some(source.display().to_string())
    } else if let Some(out_path) = out {
        repo::save_to_path(out_path, &frontier).unwrap_or_else(|e| fail(&e));
        Some(out_path.display().to_string())
    } else {
        None
    };
    let wrote = wrote_to.is_some();
    let planned_changes = entity_type_fixes
        + entity_name_fixes
        + confidence_updates
        + id_rewrite_count
        + source_records_materialized
        + evidence_atoms_materialized
        + condition_records_materialized
        + provenance_resync_count;
    let payload = json!({
        "ok": true,
        "command": "normalize",
        "schema_version": project::VELA_SCHEMA_VERSION,
        "source": {
            "path": source.display().to_string(),
            "hash": format!("sha256:{source_hash}"),
        },
        "dry_run": wrote_to.is_none(),
        "wrote_to": wrote_to,
        "summary": {
            "planned": planned_changes,
            "safe": planned_changes,
            "unsafe": 0,
            "applied": if wrote { planned_changes } else { 0 },
        },
        "changes": {
            "entity_type_fixes": entity_type_fixes,
            "entity_name_fixes": entity_name_fixes,
            "confidence_updates": confidence_updates,
            "id_rewrites": id_rewrite_count,
            "source_records_materialized": source_records_materialized,
            "evidence_atoms_materialized": evidence_atoms_materialized,
            "condition_records_materialized": condition_records_materialized,
            "provenance_resyncs": provenance_resync_count,
            "stats_changed": before_stats != after_stats,
        },
        "id_rewrites": id_rewrites,
        "repair_plan": if wrote { Vec::<Value>::new() } else {
            vec![json!({
                "action": "apply_normalization",
                "command": "vela normalize <frontier> --out frontier.normalized.json"
            })]
        },
    });
    if json_output {
        println!(
            "{}",
            serde_json::to_string_pretty(&payload).expect("failed to serialize normalize report")
        );
    } else if let Some(path) = payload.get("wrote_to").and_then(Value::as_str) {
        println!("{} normalized frontier written to {path}", style::ok("ok"));
        println!(
            "  entity type fixes: {}, entity name fixes: {}, confidence updates: {}, id rewrites: {}",
            entity_type_fixes, entity_name_fixes, confidence_updates, id_rewrite_count
        );
    } else {
        println!("normalize dry run for {}", source.display());
        println!(
            "  would apply entity type fixes: {}, entity name fixes: {}, confidence updates: {}, id rewrites: {}",
            entity_type_fixes, entity_name_fixes, confidence_updates, id_rewrite_count
        );
    }
}
8446
/// Entry point for `vela proof`: exports a proof packet from a frontier,
/// optionally runs a gold-standard benchmark against it, validates the
/// packet, records the export in proposal/proof state, and (when
/// `record_proof_state`) persists the updated frontier back to disk.
///
/// Currently only the "bbb-alzheimer" template is supported; any other
/// value is rejected up front. All failures exit the process via `fail`.
fn cmd_proof(
    frontier: &Path,
    out: &Path,
    template: &str,
    gold: Option<&Path>,
    record_proof_state: bool,
    json_output: bool,
) {
    if template != "bbb-alzheimer" {
        fail(&format!(
            "Unsupported proof template '{template}'. Supported: bbb-alzheimer"
        ));
    }
    let mut loaded = load_frontier_or_fail(frontier);
    let source_hash = hash_path_or_fail(frontier);
    let export_record = export::export_packet_with_source(&loaded, Some(frontier), out)
        .unwrap_or_else(|e| fail(&e));
    // Optional benchmark: its summary is written into the packet and a
    // non-ok result aborts the whole proof export.
    let benchmark_summary = gold.map(|gold_path| {
        let summary = benchmark::run_suite(gold_path).unwrap_or_else(|e| {
            fail(&format!(
                "Failed to run proof benchmark '{}': {e}",
                gold_path.display()
            ))
        });
        append_packet_json_file(out, "benchmark-summary.json", &summary).unwrap_or_else(|e| {
            fail(&format!("Failed to write benchmark summary: {e}"));
        });
        if summary.get("ok").and_then(Value::as_bool) != Some(true) {
            fail(&format!(
                "Proof benchmark failed for {}",
                gold_path.display()
            ));
        }
        summary
    });
    let validation_summary = packet::validate(out).unwrap_or_else(|e| {
        fail(&format!("Proof packet validation failed: {e}"));
    });
    // Record the export hashes in the in-memory frontier; this only reaches
    // disk when `record_proof_state` is set below.
    proposals::record_proof_export(
        &mut loaded,
        proposals::ProofPacketRecord {
            generated_at: export_record.generated_at.clone(),
            snapshot_hash: export_record.snapshot_hash.clone(),
            event_log_hash: export_record.event_log_hash.clone(),
            packet_manifest_hash: export_record.packet_manifest_hash.clone(),
        },
    );
    project::recompute_stats(&mut loaded);
    if record_proof_state {
        repo::save_to_path(frontier, &loaded).unwrap_or_else(|e| fail(&e));
    }
    let signal_report = signals::analyze(&loaded, &[]);
    if json_output {
        let payload = json!({
            "ok": true,
            "command": "proof",
            "schema_version": project::VELA_SCHEMA_VERSION,
            "recorded_proof_state": record_proof_state,
            "frontier": {
                "name": &loaded.project.name,
                "source": frontier.display().to_string(),
                "hash": format!("sha256:{source_hash}"),
            },
            "template": template,
            "gold": gold.map(|p| p.display().to_string()),
            "benchmark": benchmark_summary,
            "output": out.display().to_string(),
            "packet": {
                "manifest_path": out.join("manifest.json").display().to_string(),
            },
            "validation": {
                "status": "ok",
                "summary": validation_summary,
            },
            "proposals": proposals::summary(&loaded),
            "proof_state": loaded.proof_state,
            "signals": signal_report.signals,
            "review_queue": signal_report.review_queue,
            "proof_readiness": signal_report.proof_readiness,
            "trace_path": out.join("proof-trace.json").display().to_string(),
        });
        println!(
            "{}",
            serde_json::to_string_pretty(&payload).expect("failed to serialize proof response")
        );
    } else {
        println!("vela proof");
        println!("  source: {}", frontier.display());
        println!("  template: {template}");
        println!("  output: {}", out.display());
        println!("  trace: {}", out.join("proof-trace.json").display());
        println!(
            "  proof state: {}",
            if record_proof_state {
                "recorded"
            } else {
                "not recorded"
            }
        );
        println!();
        println!("{validation_summary}");
    }
}
8550
/// Entry point for `vela status`: summarizes a frontier — pending
/// proposals, causal-identification audit, replication outcomes, and
/// federation sync/conflict history — as JSON or a formatted dashboard.
fn cmd_status(path: &Path, json: bool) {
    let project = repo::load_from_path(path).unwrap_or_else(|e| fail_return(&e));

    // Tally pending proposals, grouped by kind for the inbox summary.
    let mut pending_total = 0usize;
    let mut pending_by_kind: std::collections::BTreeMap<String, usize> =
        std::collections::BTreeMap::new();
    for p in &project.proposals {
        if p.status == "pending_review" {
            pending_total += 1;
            *pending_by_kind.entry(p.kind.clone()).or_insert(0) += 1;
        }
    }

    let audit = crate::causal_reasoning::audit_frontier(&project);
    let audit_summary = crate::causal_reasoning::summarize_audit(&audit);

    // Scan the event log for the most recent federation sync and conflict
    // (timestamps compared directly; assumed lexicographically ordered,
    // e.g. ISO-8601 — TODO confirm against the event schema).
    let mut last_sync: Option<&crate::events::StateEvent> = None;
    let mut last_conflict: Option<&crate::events::StateEvent> = None;
    let mut total_conflicts = 0usize;
    for e in &project.events {
        match e.kind.as_str() {
            "frontier.synced_with_peer" => {
                if last_sync
                    .map(|prev| e.timestamp > prev.timestamp)
                    .unwrap_or(true)
                {
                    last_sync = Some(e);
                }
            }
            "frontier.conflict_detected" => {
                total_conflicts += 1;
                if last_conflict
                    .map(|prev| e.timestamp > prev.timestamp)
                    .unwrap_or(true)
                {
                    last_conflict = Some(e);
                }
            }
            _ => {}
        }
    }

    // Replication roll-up: distinct findings with at least one successful
    // replication, plus a count of outright failures.
    let mut targets_with_success = std::collections::HashSet::new();
    let mut failed_replications = 0usize;
    for r in &project.replications {
        if r.outcome == "replicated" {
            targets_with_success.insert(r.target_finding.clone());
        } else if r.outcome == "failed" {
            failed_replications += 1;
        }
    }

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "status",
                "frontier": frontier_label(&project),
                "vfr_id": project.frontier_id(),
                "findings": project.findings.len(),
                "events": project.events.len(),
                "actors": project.actors.len(),
                "peers": project.peers.len(),
                "inbox": {
                    "pending_total": pending_total,
                    "pending_by_kind": pending_by_kind,
                },
                "causal_audit": {
                    "identified": audit_summary.identified,
                    "conditional": audit_summary.conditional,
                    "underidentified": audit_summary.underidentified,
                    "underdetermined": audit_summary.underdetermined,
                },
                "replications": {
                    "total": project.replications.len(),
                    "findings_with_success": targets_with_success.len(),
                    "failed": failed_replications,
                },
                "federation": {
                    "peers": project.peers.len(),
                    "last_sync": last_sync.map(|e| e.timestamp.clone()),
                    "last_conflict": last_conflict.map(|e| e.timestamp.clone()),
                    "total_conflicts": total_conflicts,
                },
            }))
            .expect("serialize status")
        );
        return;
    }

    // Human-readable dashboard.
    println!();
    println!(
        "  {}",
        format!("VELA · STATUS · {}", path.display())
            .to_uppercase()
            .dimmed()
    );
    println!("  {}", style::tick_row(60));
    println!();
    println!("  frontier: {}", frontier_label(&project));
    println!("  vfr_id: {}", project.frontier_id());
    println!(
        "  findings: {}  events: {}  peers: {}  actors: {}",
        project.findings.len(),
        project.events.len(),
        project.peers.len(),
        project.actors.len(),
    );
    println!();
    if pending_total > 0 {
        println!(
            "  {} {pending_total} pending proposals",
            style::warn("inbox")
        );
        for (k, n) in &pending_by_kind {
            println!("      · {n:>3} {k}");
        }
    } else {
        println!("  {} inbox clean", style::ok("ok"));
    }
    println!();
    // Audit chip: red when anything is underidentified, amber when only
    // conditional results exist, green when everything is identified.
    if audit_summary.underidentified > 0 || audit_summary.conditional > 0 {
        let chip = if audit_summary.underidentified > 0 {
            style::lost("audit")
        } else {
            style::warn("audit")
        };
        println!(
            "  {} identified {} · conditional {} · underidentified {} · underdetermined {}",
            chip,
            audit_summary.identified,
            audit_summary.conditional,
            audit_summary.underidentified,
            audit_summary.underdetermined,
        );
        if audit_summary.underidentified > 0 {
            println!(
                "    next: vela causal audit {} --problems-only",
                path.display()
            );
        }
    } else if audit_summary.underdetermined == 0 {
        println!(
            "  {} causal audit: all {} identified",
            style::ok("ok"),
            audit_summary.identified
        );
    } else {
        println!(
            "  {} causal audit: {} identified, {} ungraded",
            style::warn("audit"),
            audit_summary.identified,
            audit_summary.underdetermined,
        );
    }
    println!();
    if !project.replications.is_empty() {
        println!(
            "  {} {} records · {} findings replicated · {} failed",
            style::ok("replications"),
            project.replications.len(),
            targets_with_success.len(),
            failed_replications,
        );
    }
    if project.peers.is_empty() {
        println!(
            "  {} no federation peers registered",
            style::warn("federation")
        );
    } else {
        let last = last_sync
            .map(|e| fmt_timestamp(&e.timestamp))
            .unwrap_or_else(|| "never".to_string());
        let chip = if total_conflicts > 0 {
            style::warn("federation")
        } else {
            style::ok("federation")
        };
        println!(
            "  {} {} peer(s) · last sync {} · {} conflict events",
            chip,
            project.peers.len(),
            last,
            total_conflicts,
        );
    }
    println!();
}
8748
8749fn cmd_log(path: &Path, limit: usize, kind_filter: Option<&str>, json: bool) {
8751 let project = repo::load_from_path(path).unwrap_or_else(|e| fail_return(&e));
8752 let mut events: Vec<&crate::events::StateEvent> = project
8753 .events
8754 .iter()
8755 .filter(|e| match kind_filter {
8756 Some(k) => e.kind.contains(k),
8757 None => true,
8758 })
8759 .collect();
8760 events.sort_by(|a, b| b.timestamp.cmp(&a.timestamp));
8761 events.truncate(limit);
8762
8763 if json {
8764 let payload: Vec<_> = events
8765 .iter()
8766 .map(|e| {
8767 json!({
8768 "id": e.id,
8769 "kind": e.kind,
8770 "actor": e.actor.id,
8771 "target": &e.target.id,
8772 "target_type": &e.target.r#type,
8773 "timestamp": e.timestamp,
8774 "reason": e.reason,
8775 })
8776 })
8777 .collect();
8778 println!(
8779 "{}",
8780 serde_json::to_string_pretty(&json!({
8781 "ok": true,
8782 "command": "log",
8783 "events": payload,
8784 }))
8785 .expect("serialize log")
8786 );
8787 return;
8788 }
8789
8790 println!();
8791 println!(
8792 " {}",
8793 format!("VELA · LOG · {} (latest {})", path.display(), events.len())
8794 .to_uppercase()
8795 .dimmed()
8796 );
8797 println!(" {}", style::tick_row(60));
8798 if events.is_empty() {
8799 println!(" (no events)");
8800 return;
8801 }
8802 for e in &events {
8803 let when = fmt_timestamp(&e.timestamp);
8804 let target_short = if e.target.id.len() > 22 {
8805 format!("{}…", &e.target.id[..21])
8806 } else {
8807 e.target.id.clone()
8808 };
8809 let reason: String = e.reason.chars().take(70).collect();
8810 println!(
8811 " {:<19} {:<32} {:<24} {}",
8812 when, e.kind, target_short, reason
8813 );
8814 }
8815 println!();
8816}
8817
8818fn cmd_inbox(path: &Path, kind_filter: Option<&str>, limit: usize, json: bool) {
8820 let project = repo::load_from_path(path).unwrap_or_else(|e| fail_return(&e));
8821
8822 let mut score_map: std::collections::HashMap<String, (f64, f64, f64, f64)> =
8825 std::collections::HashMap::new();
8826 for p in &project.proposals {
8827 if p.kind != "finding.note" {
8828 continue;
8829 }
8830 if p.actor.id != "agent:reviewer-agent" {
8831 continue;
8832 }
8833 let reason = &p.reason;
8834 let Some(target) = reason.split_whitespace().find(|s| s.starts_with("vpr_")) else {
8835 continue;
8836 };
8837 let text = p.payload.get("text").and_then(|v| v.as_str()).unwrap_or("");
8838 let extract = |k: &str| -> f64 {
8839 let pat = format!("{k} ");
8840 text.find(&pat)
8841 .and_then(|idx| text[idx + pat.len()..].split_whitespace().next())
8842 .and_then(|t| t.parse::<f64>().ok())
8843 .unwrap_or(0.0)
8844 };
8845 score_map.insert(
8846 target.to_string(),
8847 (
8848 extract("plausibility"),
8849 extract("evidence"),
8850 extract("scope"),
8851 extract("duplicate-risk"),
8852 ),
8853 );
8854 }
8855
8856 let mut pending: Vec<&crate::proposals::StateProposal> = project
8857 .proposals
8858 .iter()
8859 .filter(|p| {
8860 p.status == "pending_review"
8861 && match kind_filter {
8862 Some(k) => p.kind.contains(k),
8863 None => true,
8864 }
8865 })
8866 .collect();
8867 pending.sort_by(|a, b| {
8869 let sa = score_map
8870 .get(&a.id)
8871 .map(|(p, e, s, d)| 0.4 * p + 0.3 * e + 0.2 * s - 0.3 * d);
8872 let sb = score_map
8873 .get(&b.id)
8874 .map(|(p, e, s, d)| 0.4 * p + 0.3 * e + 0.2 * s - 0.3 * d);
8875 sb.partial_cmp(&sa).unwrap_or(std::cmp::Ordering::Equal)
8876 });
8877 pending.truncate(limit);
8878
8879 if json {
8880 let payload: Vec<_> = pending
8881 .iter()
8882 .map(|p| {
8883 let assertion_text = p
8884 .payload
8885 .get("finding")
8886 .and_then(|f| f.get("assertion"))
8887 .and_then(|a| a.get("text"))
8888 .and_then(|t| t.as_str());
8889 let assertion_type = p
8890 .payload
8891 .get("finding")
8892 .and_then(|f| f.get("assertion"))
8893 .and_then(|a| a.get("type"))
8894 .and_then(|t| t.as_str());
8895 let composite = score_map
8896 .get(&p.id)
8897 .map(|(pl, e, s, d)| 0.4 * pl + 0.3 * e + 0.2 * s - 0.3 * d);
8898 json!({
8899 "proposal_id": p.id,
8900 "kind": p.kind,
8901 "actor": p.actor,
8902 "reason": p.reason,
8903 "assertion_text": assertion_text,
8904 "assertion_type": assertion_type,
8905 "reviewer_composite": composite,
8906 })
8907 })
8908 .collect();
8909 println!(
8910 "{}",
8911 serde_json::to_string_pretty(&json!({
8912 "ok": true,
8913 "command": "inbox",
8914 "shown": pending.len(),
8915 "proposals": payload,
8916 }))
8917 .expect("serialize inbox")
8918 );
8919 return;
8920 }
8921
8922 println!();
8923 println!(
8924 " {}",
8925 format!(
8926 "VELA · INBOX · {} ({} pending shown)",
8927 path.display(),
8928 pending.len()
8929 )
8930 .to_uppercase()
8931 .dimmed()
8932 );
8933 println!(" {}", style::tick_row(60));
8934 if pending.is_empty() {
8935 println!(" (inbox clean)");
8936 return;
8937 }
8938 for p in &pending {
8939 let assertion_text = p
8940 .payload
8941 .get("finding")
8942 .and_then(|f| f.get("assertion"))
8943 .and_then(|a| a.get("text"))
8944 .and_then(|t| t.as_str())
8945 .unwrap_or("");
8946 let assertion_type = p
8947 .payload
8948 .get("finding")
8949 .and_then(|f| f.get("assertion"))
8950 .and_then(|a| a.get("type"))
8951 .and_then(|t| t.as_str())
8952 .unwrap_or("");
8953 let composite = score_map
8954 .get(&p.id)
8955 .map(|(pl, e, s, d)| 0.4 * pl + 0.3 * e + 0.2 * s - 0.3 * d);
8956 let score_str = composite
8957 .map(|c| format!("[{:.2}]", c))
8958 .unwrap_or_else(|| "[—] ".to_string());
8959 let kind_short = if p.kind.len() > 12 {
8960 format!("{}…", &p.kind[..11])
8961 } else {
8962 p.kind.clone()
8963 };
8964 let summary: String = if !assertion_text.is_empty() {
8965 assertion_text.chars().take(80).collect()
8966 } else {
8967 p.reason.chars().take(80).collect()
8968 };
8969 println!(
8970 " {} {} {:<13} {:<18} {}",
8971 score_str, p.id, kind_short, assertion_type, summary
8972 );
8973 }
8974 println!();
8975}
8976
8977fn cmd_ask(path: &Path, question: &str, json: bool) {
8982 let project = repo::load_from_path(path).unwrap_or_else(|e| fail_return(&e));
8983
8984 if question.trim().is_empty() {
8985 use std::io::{BufRead, Write};
8987 println!();
8988 println!(
8989 " {}",
8990 format!("VELA · ASK · {}", path.display())
8991 .to_uppercase()
8992 .dimmed()
8993 );
8994 println!(" {}", style::tick_row(60));
8995 println!(" Ask a question. Type `exit` to quit.");
8996 println!(" Examples:");
8997 println!(" · what's pending?");
8998 println!(" · what's underidentified?");
8999 println!(" · how many findings?");
9000 println!(" · what changed recently?");
9001 println!(" · who has what calibration?");
9002 println!();
9003 let stdin = std::io::stdin();
9004 let mut stdout = std::io::stdout();
9005 loop {
9006 print!(" ask> ");
9007 stdout.flush().ok();
9008 let mut line = String::new();
9009 if stdin.lock().read_line(&mut line).is_err() {
9010 break;
9011 }
9012 let q = line.trim();
9013 if q.is_empty() {
9014 continue;
9015 }
9016 if matches!(q, "exit" | "quit" | "q") {
9017 break;
9018 }
9019 answer(&project, q, false);
9020 }
9021 return;
9022 }
9023
9024 answer(&project, question, json);
9025}
9026
/// Route a free-text question `q` about `project` to a canned report.
///
/// Matching is keyword-based on the lowercased question; the first matching
/// branch prints its report — a machine-readable object when `json` is set,
/// otherwise a short human summary — and returns. Unrecognized questions get
/// a hint listing the supported topics.
fn answer(project: &crate::project::Project, q: &str, json: bool) {
    let lower = q.to_lowercase();

    // Review queue: anything that smells like "what's waiting for me?".
    if lower.contains("pending")
        || lower.contains("inbox")
        || lower.contains("queue")
        || lower.contains("to review")
    {
        let pending: Vec<&crate::proposals::StateProposal> = project
            .proposals
            .iter()
            .filter(|p| p.status == "pending_review")
            .collect();
        // BTreeMap so the per-kind breakdown prints in stable, sorted order.
        let mut by_kind: std::collections::BTreeMap<String, usize> = Default::default();
        for p in &pending {
            *by_kind.entry(p.kind.clone()).or_insert(0) += 1;
        }
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "answer": "pending",
                    "total": pending.len(),
                    "by_kind": by_kind,
                }))
                .unwrap()
            );
        } else {
            println!(" {} pending proposals.", pending.len());
            for (k, n) in &by_kind {
                println!(" · {n:>3} {k}");
            }
            if pending.is_empty() {
                println!(" Inbox is clean.");
            } else {
                println!(" Run `vela inbox <frontier>` to triage.");
            }
        }
        return;
    }

    // Causal identifiability audit over the frontier's findings.
    if lower.contains("underident")
        || lower.contains("audit")
        || lower.contains("identif")
        || lower.contains("causal")
    {
        let entries = crate::causal_reasoning::audit_frontier(project);
        let summary = crate::causal_reasoning::summarize_audit(&entries);
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "answer": "audit",
                    "summary": {
                        "identified": summary.identified,
                        "conditional": summary.conditional,
                        "underidentified": summary.underidentified,
                        "underdetermined": summary.underdetermined,
                    },
                }))
                .unwrap()
            );
        } else {
            println!(
                " Causal audit: {} identified · {} conditional · {} underidentified · {} underdetermined.",
                summary.identified,
                summary.conditional,
                summary.underidentified,
                summary.underdetermined,
            );
            if summary.underidentified > 0 {
                println!(
                    " The {} underidentified findings are concrete review items:",
                    summary.underidentified
                );
                // Show at most 8 concrete items so the answer stays scannable.
                for e in entries
                    .iter()
                    .filter(|e| {
                        matches!(
                            e.verdict,
                            crate::causal_reasoning::Identifiability::Underidentified
                        )
                    })
                    .take(8)
                {
                    let txt: String = e.assertion_text.chars().take(70).collect();
                    println!(" · {} {}", e.finding_id, txt);
                }
            }
        }
        return;
    }

    // Recent activity on the event log.
    if lower.contains("recent")
        || lower.contains("changed")
        || lower.contains("latest")
        || lower.contains("happen")
    {
        let mut events: Vec<&crate::events::StateEvent> = project.events.iter().collect();
        // RFC 3339 timestamps compare correctly as strings; newest first.
        events.sort_by(|a, b| b.timestamp.cmp(&a.timestamp));
        events.truncate(8);
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "answer": "recent_events",
                    "events": events.iter().map(|e| json!({
                        "id": e.id, "kind": e.kind, "timestamp": e.timestamp,
                        "actor": e.actor.id, "target": e.target.id,
                    })).collect::<Vec<_>>(),
                }))
                .unwrap()
            );
        } else {
            println!(" Most recent {} events:", events.len());
            for e in &events {
                let when = fmt_timestamp(&e.timestamp);
                println!(" · {when} {:<28} {}", e.kind, e.target.id);
            }
        }
        return;
    }

    // Headline counts across the frontier's collections.
    if lower.starts_with("how many") || lower.contains("count") || lower.contains("total") {
        let n = project.findings.len();
        let evs = project.events.len();
        let peers = project.peers.len();
        let actors = project.actors.len();
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "answer": "counts",
                    "findings": n,
                    "events": evs,
                    "peers": peers,
                    "actors": actors,
                    "replications": project.replications.len(),
                    "predictions": project.predictions.len(),
                }))
                .unwrap()
            );
        } else {
            println!(" {n} findings · {evs} events · {actors} actors · {peers} peers.");
            println!(
                " {} replications · {} predictions · {} datasets · {} code artifacts.",
                project.replications.len(),
                project.predictions.len(),
                project.datasets.len(),
                project.code_artifacts.len(),
            );
        }
        return;
    }

    // Prediction calibration ledger (per-actor Brier scores).
    if lower.contains("calibration") || lower.contains("brier") || lower.contains("predict") {
        let records =
            crate::calibration::calibration_records(&project.predictions, &project.resolutions);
        if json {
            println!("{}", serde_json::to_string_pretty(&records).unwrap());
        } else if records.is_empty() {
            println!(" No predictions yet. The calibration ledger is empty.");
        } else {
            println!(" Calibration over {} actor(s):", records.len());
            for r in &records {
                // Brier score may be absent when nothing has resolved yet.
                let brier = r
                    .brier_score
                    .map(|b| format!("{:.3}", b))
                    .unwrap_or_else(|| "—".into());
                println!(
                    " · {:<28} predictions {} · resolved {} · expired {} · Brier {}",
                    r.actor, r.n_predictions, r.n_resolved, r.n_expired, brier
                );
            }
        }
        return;
    }

    // Federation: registered peers plus conflict events on the log.
    if lower.contains("peer")
        || lower.contains("federat")
        || lower.contains("sync")
        || lower.contains("conflict")
    {
        let mut total_conflicts = 0usize;
        for e in &project.events {
            if e.kind == "frontier.conflict_detected" {
                total_conflicts += 1;
            }
        }
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "answer": "federation",
                    "peers": project.peers.iter().map(|p| &p.id).collect::<Vec<_>>(),
                    "total_conflicts": total_conflicts,
                }))
                .unwrap()
            );
        } else {
            println!(" {} peer(s) registered:", project.peers.len());
            for p in &project.peers {
                println!(" · {:<24} {}", p.id, p.url);
            }
            println!(" {total_conflicts} conflict events on the canonical log.");
        }
        return;
    }

    // Fallback: the question matched no known topic.
    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "answer": "unknown_question",
                "question": q,
                "hint": "Try: pending, audit, recent, how many, calibration, peers."
            }))
            .unwrap()
        );
    } else {
        println!(" Don't know how to route that question yet.");
        println!(" Try: pending · audit · recent · how many · calibration · peers");
    }
}
9258
9259fn frontier_label(p: &crate::project::Project) -> String {
9260 if p.project.name.trim().is_empty() {
9261 "(unnamed)".to_string()
9262 } else {
9263 p.project.name.clone()
9264 }
9265}
9266
9267fn fmt_timestamp(ts: &str) -> String {
9268 chrono::DateTime::parse_from_rfc3339(ts)
9271 .map(|dt| dt.format("%m-%d %H:%M").to_string())
9272 .unwrap_or_else(|_| ts.chars().take(16).collect())
9273}
9274
9275fn cmd_stats(path: &Path) {
9276 let frontier = load_frontier_or_fail(path);
9277 let s = &frontier.stats;
9278 println!();
9279 println!(" {}", "FRONTIER · V0.36.0".dimmed());
9280 println!(" {}", frontier.project.name.bold());
9281 println!(" {}", style::tick_row(60));
9282 println!(" id: {}", frontier.frontier_id());
9283 println!(" compiled: {}", frontier.project.compiled_at);
9284 println!(" papers: {}", frontier.project.papers_processed);
9285 println!(" findings: {}", s.findings);
9286 println!(" links: {}", s.links);
9287 println!(" replicated: {}", s.replicated);
9288 println!(" avg confidence: {}", s.avg_confidence);
9289 println!(" gaps: {}", s.gaps);
9290 println!(" contested: {}", s.contested);
9291 println!(" reviewed: {}", s.human_reviewed);
9292 println!(" proposals: {}", s.proposal_count);
9293 println!(
9294 " recorded proof: {}",
9295 frontier.proof_state.latest_packet.status
9296 );
9297 if frontier.proof_state.latest_packet.status != "never_exported" {
9298 println!(
9299 " proof note: recorded frontier metadata; packet files are checked by `vela packet validate`"
9300 );
9301 }
9302 if !s.categories.is_empty() {
9303 println!();
9304 println!(" {}", "categories".dimmed());
9305 let mut categories = s.categories.iter().collect::<Vec<_>>();
9306 categories.sort_by(|a, b| b.1.cmp(a.1));
9307 for (category, count) in categories {
9308 println!(" {category}: {}", count);
9309 }
9310 }
9311 println!();
9312 println!(" {}", style::tick_row(60));
9313 println!();
9314}
9315
/// Dispatch the `vela proposals <action>` subcommands.
///
/// Every arm follows the same shape: run the operation (exiting via
/// `fail_return` on error), build a JSON payload, then print either the
/// payload (`--json`) or a short human-readable summary. `Validate`
/// additionally exits with status 1 when any proposal fails validation.
fn cmd_proposals(action: ProposalAction) {
    match action {
        // List proposals, optionally filtered by status.
        ProposalAction::List {
            frontier,
            status,
            json,
        } => {
            let frontier_state =
                repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let proposals_list = proposals::list(&frontier_state, status.as_deref());
            let payload = json!({
                "ok": true,
                "command": "proposals.list",
                "frontier": frontier_state.project.name,
                "status_filter": status,
                "summary": proposals::summary(&frontier_state),
                "proposals": proposals_list,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposals list")
                );
            } else {
                println!("vela proposals list");
                println!(" frontier: {}", frontier_state.project.name);
                // `proposals_list` was moved into the payload; read the
                // count back out of the JSON value.
                println!(
                    " proposals: {}",
                    payload["proposals"].as_array().map_or(0, Vec::len)
                );
            }
        }
        // Show a single proposal by id.
        ProposalAction::Show {
            frontier,
            proposal_id,
            json,
        } => {
            let frontier_state =
                repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let proposal =
                proposals::show(&frontier_state, &proposal_id).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "proposals.show",
                "frontier": frontier_state.project.name,
                "proposal": proposal,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal show")
                );
            } else {
                println!("vela proposals show");
                println!(" frontier: {}", frontier_state.project.name);
                println!(" proposal: {}", proposal_id);
                println!(" kind: {}", proposal.kind);
                println!(" status: {}", proposal.status);
            }
        }
        // Dry-run a proposal: report the before/after deltas without applying.
        ProposalAction::Preview {
            frontier,
            proposal_id,
            reviewer,
            json,
        } => {
            let preview = proposals::preview_at_path(&frontier, &proposal_id, &reviewer)
                .unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "proposals.preview",
                "frontier": frontier.display().to_string(),
                "preview": preview,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal preview")
                );
            } else {
                println!("vela proposals preview");
                println!(" proposal: {}", proposal_id);
                println!(" kind: {}", preview.kind);
                println!(
                    " findings: {} -> {}",
                    preview.findings_before, preview.findings_after
                );
                println!(
                    " artifacts: {} -> {}",
                    preview.artifacts_before, preview.artifacts_after
                );
                println!(
                    " events: {} -> {}",
                    preview.events_before, preview.events_after
                );
                // Only print change lists when there is something to show.
                if !preview.changed_findings.is_empty() {
                    println!(
                        " findings changed: {}",
                        preview.changed_findings.join(", ")
                    );
                }
                if !preview.changed_artifacts.is_empty() {
                    println!(
                        " artifacts changed: {}",
                        preview.changed_artifacts.join(", ")
                    );
                }
                if !preview.event_kinds.is_empty() {
                    println!(" event kinds: {}", preview.event_kinds.join(", "));
                }
                println!(" event: {}", preview.applied_event_id);
            }
        }
        // Import proposals from an external file into the frontier.
        ProposalAction::Import {
            frontier,
            source,
            json,
        } => {
            let report =
                proposals::import_from_path(&frontier, &source).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "proposals.import",
                "frontier": frontier.display().to_string(),
                "source": source.display().to_string(),
                "summary": {
                    "imported": report.imported,
                    "applied": report.applied,
                    "rejected": report.rejected,
                    "duplicates": report.duplicates,
                },
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal import")
                );
            } else {
                println!(
                    "Imported {} proposals into {}",
                    report.imported, report.wrote_to
                );
            }
        }
        // Validate a proposals file without touching any frontier.
        ProposalAction::Validate { source, json } => {
            let report = proposals::validate_source(&source).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": report.ok,
                "command": "proposals.validate",
                "source": source.display().to_string(),
                "summary": {
                    "checked": report.checked,
                    "valid": report.valid,
                    "invalid": report.invalid,
                },
                "proposal_ids": report.proposal_ids,
                "errors": report.errors,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal validation")
                );
            } else if report.ok {
                println!("{} validated {} proposals", style::ok("ok"), report.valid);
            } else {
                println!(
                    "{} validated {} proposals, {} invalid",
                    style::lost("lost"),
                    report.valid,
                    report.invalid
                );
                for error in &report.errors {
                    println!(" · {error}");
                }
                // Non-zero exit so CI pipelines see the validation failure.
                std::process::exit(1);
            }
        }
        // Export (optionally status-filtered) proposals to a file.
        ProposalAction::Export {
            frontier,
            output,
            status,
            json,
        } => {
            let count = proposals::export_to_path(&frontier, &output, status.as_deref())
                .unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "proposals.export",
                "frontier": frontier.display().to_string(),
                "output": output.display().to_string(),
                "status": status,
                "exported": count,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal export")
                );
            } else {
                println!("sealed · {count} proposals · {}", output.display());
            }
        }
        // Accept a proposal: applies it and records the resulting event.
        ProposalAction::Accept {
            frontier,
            proposal_id,
            reviewer,
            reason,
            json,
        } => {
            let event_id = proposals::accept_at_path(&frontier, &proposal_id, &reviewer, &reason)
                .unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "proposals.accept",
                "frontier": frontier.display().to_string(),
                "proposal_id": proposal_id,
                "reviewer": reviewer,
                "applied_event_id": event_id,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal accept")
                );
            } else {
                println!(
                    "{} accepted and applied proposal {}",
                    style::ok("ok"),
                    proposal_id
                );
                println!(" event: {}", event_id);
            }
        }
        // Reject a proposal; no event id is produced for rejections.
        ProposalAction::Reject {
            frontier,
            proposal_id,
            reviewer,
            reason,
            json,
        } => {
            proposals::reject_at_path(&frontier, &proposal_id, &reviewer, &reason)
                .unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "proposals.reject",
                "frontier": frontier.display().to_string(),
                "proposal_id": proposal_id,
                "reviewer": reviewer,
                "status": "rejected",
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal reject")
                );
            } else {
                println!(
                    "{} rejected proposal {}",
                    style::warn("rejected"),
                    proposal_id
                );
            }
        }
    }
}
9590
9591fn cmd_artifact_to_state(
9592 frontier: &Path,
9593 packet: &Path,
9594 actor: &str,
9595 apply_artifacts: bool,
9596 json: bool,
9597) {
9598 let report =
9599 crate::artifact_to_state::import_packet_at_path(frontier, packet, actor, apply_artifacts)
9600 .unwrap_or_else(|e| fail_return(&e));
9601 if json {
9602 println!(
9603 "{}",
9604 serde_json::to_string_pretty(&report)
9605 .expect("failed to serialize artifact-to-state report")
9606 );
9607 } else {
9608 println!("vela artifact-to-state");
9609 println!(" packet: {}", report.packet_id);
9610 println!(" frontier: {}", report.frontier);
9611 println!(" artifact proposals: {}", report.artifact_proposals);
9612 println!(" finding proposals: {}", report.finding_proposals);
9613 println!(" gap proposals: {}", report.gap_proposals);
9614 println!(
9615 " applied artifact events: {}",
9616 report.applied_artifact_events
9617 );
9618 println!(
9619 " pending truth proposals: {}",
9620 report.pending_truth_proposals
9621 );
9622 }
9623}
9624
/// Dispatch the `vela bridge-kit <action>` subcommands.
///
/// `Validate` checks a source of artifact packets and exits with status 1 if
/// any packet is invalid. `VerifyProvenance` resolves every DOI/PMID cited by
/// one packet against its registry and exits 1 when any identifier fails to
/// resolve; identifiers skipped because a service was unreachable or the
/// prefix was unrecognized are not treated as failures.
async fn cmd_bridge_kit(action: BridgeKitAction) {
    match action {
        BridgeKitAction::Validate { source, json } => {
            let report = crate::artifact_to_state::validate_bridge_kit_path(&source);
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&report)
                        .expect("failed to serialize bridge-kit validation report")
                );
            } else {
                println!("vela bridge-kit validate");
                println!(" source: {}", report.source);
                println!(" packets: {}", report.packet_count);
                println!(" valid: {}", report.valid_packet_count);
                println!(" invalid: {}", report.invalid_packet_count);
                for packet in &report.packets {
                    if packet.ok {
                        println!(
                            " ok: {} · {} artifacts · {} claims · {} needs",
                            packet
                                .packet_id
                                .as_deref()
                                .unwrap_or("packet id unavailable"),
                            packet.artifact_count,
                            packet.candidate_claim_count,
                            packet.open_need_count
                        );
                    } else {
                        println!(" invalid: {} · {}", packet.path, packet.errors.join("; "));
                    }
                }
                for error in &report.errors {
                    println!(" error: {error}");
                }
            }
            // Non-zero exit so scripts/CI notice the validation failure.
            if !report.ok {
                std::process::exit(1);
            }
        }
        BridgeKitAction::VerifyProvenance { packet, json } => {
            let report = verify_packet_provenance(&packet).await;
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&report)
                        .expect("failed to serialize provenance verification report")
                );
            } else {
                println!("vela bridge-kit verify-provenance");
                println!(" packet: {}", report.packet);
                println!(" identifiers: {}", report.identifiers.len());
                println!(" resolved: {}", report.resolved_count);
                println!(" unresolved: {}", report.unresolved_count);
                println!(" skipped: {}", report.skipped_count);
                for entry in &report.identifiers {
                    // Fixed-width status tag keeps the listing columns aligned.
                    let status = match entry.status.as_str() {
                        "resolved" => "ok ",
                        "unresolved" => "FAIL",
                        "skipped" => "skip",
                        _ => "? ",
                    };
                    println!(
                        " {} {} ({})",
                        status,
                        entry.identifier,
                        entry.note.as_deref().unwrap_or(entry.kind.as_str())
                    );
                }
            }
            // Only hard resolution failures are fatal; skips are tolerated.
            if report.unresolved_count > 0 {
                std::process::exit(1);
            }
        }
    }
}
9701
/// Aggregate result of `vela bridge-kit verify-provenance` for one packet.
#[derive(Debug, Clone, Serialize)]
struct ProvenanceVerificationReport {
    // Machine-readable command name ("bridge-kit.verify-provenance").
    command: String,
    // Display path of the packet file that was checked.
    packet: String,
    // Per-identifier verification outcomes, in sorted identifier order.
    identifiers: Vec<ProvenanceVerificationEntry>,
    // Count of entries whose status is "resolved".
    resolved_count: usize,
    // Count of entries whose status is "unresolved".
    unresolved_count: usize,
    // Count of entries that were skipped (unknown prefix or registry unreachable).
    skipped_count: usize,
}
9711
/// Outcome of resolving one identifier (DOI or PMID) against its registry.
#[derive(Debug, Clone, Serialize)]
struct ProvenanceVerificationEntry {
    // Normalized identifier, e.g. "doi:10.1000/xyz" or "pmid:12345".
    identifier: String,
    // Identifier family: "doi", "pmid", or "unknown".
    kind: String,
    // One of "resolved", "unresolved", or "skipped".
    status: String,
    // Optional human-readable detail; omitted from JSON when absent.
    #[serde(skip_serializing_if = "Option::is_none")]
    note: Option<String>,
}
9720
/// Collect every DOI/PMID referenced by the packet at `packet_path` and probe
/// each against its public registry (Crossref for DOIs, NCBI eutils for PMIDs).
///
/// Exits the process (via `fail_return`) if the packet cannot be read, parsed,
/// or validated. Identifiers are gathered from artifact locators and from
/// candidate claims' source refs, deduplicated, and probed in sorted order.
async fn verify_packet_provenance(packet_path: &Path) -> ProvenanceVerificationReport {
    use crate::artifact_to_state::ArtifactPacket;
    let raw = std::fs::read_to_string(packet_path)
        .unwrap_or_else(|e| fail_return(&format!("read packet: {e}")));
    let parsed: ArtifactPacket =
        serde_json::from_str(&raw).unwrap_or_else(|e| fail_return(&format!("parse packet: {e}")));
    let packet = parsed
        .validate()
        .unwrap_or_else(|e| fail_return(&format!("validate packet: {e}")));

    // BTreeSet dedups identifiers and gives a deterministic probe order.
    let mut candidates: std::collections::BTreeSet<String> = std::collections::BTreeSet::new();
    for artifact in &packet.artifacts {
        if let Some(ident) = extract_identifier(&artifact.locator) {
            candidates.insert(ident);
        }
    }
    for claim in &packet.candidate_claims {
        for source_ref in &claim.source_refs {
            if let Some(ident) = extract_identifier(source_ref) {
                candidates.insert(ident);
            }
        }
    }

    // One shared client; 15s timeout so a dead registry can't hang the command.
    let client = reqwest::Client::builder()
        .user_agent("vela/0.108 (+https://github.com/vela-science/vela)")
        .timeout(std::time::Duration::from_secs(15))
        .build()
        .unwrap_or_else(|e| fail_return(&format!("build http client: {e}")));

    let mut entries: Vec<ProvenanceVerificationEntry> = Vec::new();
    let mut resolved = 0usize;
    let mut unresolved = 0usize;
    let mut skipped = 0usize;
    for candidate in &candidates {
        // Route by prefix; anything unrecognized is recorded as skipped.
        let entry = if let Some(doi) = candidate.strip_prefix("doi:") {
            verify_doi(&client, doi).await
        } else if let Some(pmid) = candidate.strip_prefix("pmid:") {
            verify_pmid(&client, pmid).await
        } else {
            ProvenanceVerificationEntry {
                identifier: candidate.clone(),
                kind: "unknown".to_string(),
                status: "skipped".to_string(),
                note: Some("no recognized identifier prefix".to_string()),
            }
        };
        // Tally by the status string the verifier functions produced.
        match entry.status.as_str() {
            "resolved" => resolved += 1,
            "unresolved" => unresolved += 1,
            _ => skipped += 1,
        }
        entries.push(entry);
    }

    ProvenanceVerificationReport {
        command: "bridge-kit.verify-provenance".to_string(),
        packet: packet_path.display().to_string(),
        identifiers: entries,
        resolved_count: resolved,
        unresolved_count: unresolved,
        skipped_count: skipped,
    }
}
9790
/// Normalize a free-form source reference into a canonical identifier.
///
/// Recognizes, in order:
/// - already-prefixed `doi:` / `pmid:` identifiers (returned unchanged),
/// - doi.org and dx.doi.org resolver URLs over http or https → `doi:<suffix>`,
/// - pubmed.ncbi.nlm.nih.gov URLs → `pmid:<id>` (trailing slash stripped),
/// - bare DOIs starting with the `10.` registrant prefix → `doi:<doi>`.
///
/// Returns `None` for empty input or anything unrecognized.
fn extract_identifier(s: &str) -> Option<String> {
    let trimmed = s.trim();
    if trimmed.is_empty() {
        return None;
    }
    if trimmed.starts_with("doi:") || trimmed.starts_with("pmid:") {
        return Some(trimmed.to_string());
    }
    // DOI resolver URLs. dx.doi.org is the legacy resolver host; accept it
    // over both schemes (the `http://dx.doi.org/` variant was previously
    // missed even though both schemes were accepted for doi.org).
    for prefix in [
        "https://doi.org/",
        "http://doi.org/",
        "https://dx.doi.org/",
        "http://dx.doi.org/",
    ] {
        if let Some(rest) = trimmed.strip_prefix(prefix) {
            return Some(format!("doi:{rest}"));
        }
    }
    for prefix in [
        "https://pubmed.ncbi.nlm.nih.gov/",
        "http://pubmed.ncbi.nlm.nih.gov/",
    ] {
        if let Some(rest) = trimmed.strip_prefix(prefix) {
            // PubMed URLs commonly carry a trailing slash after the id.
            let pmid = rest.trim_end_matches('/');
            return Some(format!("pmid:{pmid}"));
        }
    }
    // Bare DOI: all DOIs start with "10." and contain a prefix/suffix slash;
    // rejecting anything with a space avoids matching prose.
    if trimmed.starts_with("10.") && trimmed.contains('/') && !trimmed.contains(' ') {
        return Some(format!("doi:{trimmed}"));
    }
    None
}
9826
9827async fn verify_doi(client: &reqwest::Client, doi: &str) -> ProvenanceVerificationEntry {
9828 let url = format!("https://api.crossref.org/works/{doi}");
9829 match client.get(&url).send().await {
9830 Ok(resp) if resp.status().is_success() => ProvenanceVerificationEntry {
9831 identifier: format!("doi:{doi}"),
9832 kind: "doi".to_string(),
9833 status: "resolved".to_string(),
9834 note: None,
9835 },
9836 Ok(resp) => ProvenanceVerificationEntry {
9837 identifier: format!("doi:{doi}"),
9838 kind: "doi".to_string(),
9839 status: "unresolved".to_string(),
9840 note: Some(format!("crossref returned {}", resp.status())),
9841 },
9842 Err(e) => ProvenanceVerificationEntry {
9843 identifier: format!("doi:{doi}"),
9844 kind: "doi".to_string(),
9845 status: "skipped".to_string(),
9846 note: Some(format!("crossref unreachable: {e}")),
9847 },
9848 }
9849}
9850
9851async fn verify_pmid(client: &reqwest::Client, pmid: &str) -> ProvenanceVerificationEntry {
9852 let url = format!(
9853 "https://eutils.ncbi.nlm.nih.gov/entrez/eutils/esummary.fcgi?db=pubmed&id={pmid}&retmode=json"
9854 );
9855 match client.get(&url).send().await {
9856 Ok(resp) if resp.status().is_success() => {
9857 let body: serde_json::Value = resp.json().await.unwrap_or(serde_json::Value::Null);
9860 let result = body.get("result");
9861 let uids = result
9862 .and_then(|r| r.get("uids"))
9863 .and_then(|u| u.as_array());
9864 let resolved = uids.is_some_and(|a| !a.is_empty());
9865 if resolved {
9866 ProvenanceVerificationEntry {
9867 identifier: format!("pmid:{pmid}"),
9868 kind: "pmid".to_string(),
9869 status: "resolved".to_string(),
9870 note: None,
9871 }
9872 } else {
9873 ProvenanceVerificationEntry {
9874 identifier: format!("pmid:{pmid}"),
9875 kind: "pmid".to_string(),
9876 status: "unresolved".to_string(),
9877 note: Some("eutils returned empty uids".to_string()),
9878 }
9879 }
9880 }
9881 Ok(resp) => ProvenanceVerificationEntry {
9882 identifier: format!("pmid:{pmid}"),
9883 kind: "pmid".to_string(),
9884 status: "unresolved".to_string(),
9885 note: Some(format!("eutils returned {}", resp.status())),
9886 },
9887 Err(e) => ProvenanceVerificationEntry {
9888 identifier: format!("pmid:{pmid}"),
9889 kind: "pmid".to_string(),
9890 status: "skipped".to_string(),
9891 note: Some(format!("eutils unreachable: {e}")),
9892 },
9893 }
9894}
9895
/// Dispatch the `vela source-adapter <action>` subcommands.
///
/// `Run` forwards its CLI flags into `SourceAdapterRunOptions`, executes the
/// adapter (exiting via `fail_return` on error), and prints the resulting
/// report — pretty JSON when `--json` is set, otherwise a line-per-field
/// human summary.
async fn cmd_source_adapter(action: SourceAdapterAction) {
    match action {
        SourceAdapterAction::Run {
            frontier,
            adapter,
            actor,
            entries,
            priority,
            include_excluded,
            allow_partial,
            dry_run,
            input_dir,
            apply_artifacts,
            json,
        } => {
            // CLI fields map one-to-one onto the options struct (field-init
            // shorthand), so the adapter layer sees exactly the user's flags.
            let report = crate::source_adapters::run(
                &frontier,
                crate::source_adapters::SourceAdapterRunOptions {
                    adapter,
                    actor,
                    entries,
                    priority,
                    include_excluded,
                    allow_partial,
                    dry_run,
                    input_dir,
                    apply_artifacts,
                },
            )
            .await
            .unwrap_or_else(|e| fail_return(&e));
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&report)
                        .expect("failed to serialize source adapter report")
                );
            } else {
                println!("vela source-adapter run");
                println!(" adapter: {}", report.adapter);
                println!(" run: {}", report.run_id);
                println!(" frontier: {}", report.frontier);
                println!(" selected entries: {}", report.selected_entries);
                println!(" fetched records: {}", report.fetched_records);
                println!(" changed records: {}", report.changed_records);
                println!(" unchanged records: {}", report.unchanged_records);
                println!(" failed records: {}", report.failed_records.len());
                // A packet id only exists when the run produced a packet.
                if let Some(packet_id) = report.packet_id {
                    println!(" packet: {packet_id}");
                }
                println!(" artifact proposals: {}", report.artifact_proposals);
                println!(" review note proposals: {}", report.review_note_proposals);
                println!(" applied events: {}", report.applied_event_ids.len());
            }
        }
    }
}
9953
/// Dispatch the `vela runtime-adapter <action>` subcommands.
///
/// `Run` forwards its CLI flags into `RuntimeAdapterRunOptions`, executes the
/// adapter (exiting via `fail_return` on error), and prints the resulting
/// report — pretty JSON when `--json` is set, otherwise a human summary.
fn cmd_runtime_adapter(action: RuntimeAdapterAction) {
    match action {
        RuntimeAdapterAction::Run {
            frontier,
            adapter,
            input,
            actor,
            dry_run,
            apply_artifacts,
            json,
        } => {
            // CLI fields map one-to-one onto the options struct.
            let report = crate::runtime_adapters::run(
                &frontier,
                crate::runtime_adapters::RuntimeAdapterRunOptions {
                    adapter,
                    input,
                    actor,
                    dry_run,
                    apply_artifacts,
                },
            )
            .unwrap_or_else(|e| fail_return(&e));
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&report)
                        .expect("failed to serialize runtime adapter report")
                );
            } else {
                println!("vela runtime-adapter run");
                println!(" adapter: {}", report.adapter);
                println!(" run: {}", report.run_id);
                println!(" frontier: {}", report.frontier);
                // A packet id only exists when the run produced a packet.
                if let Some(packet_id) = report.packet_id {
                    println!(" packet: {packet_id}");
                }
                println!(" artifact proposals: {}", report.artifact_proposals);
                println!(" finding proposals: {}", report.finding_proposals);
                println!(" gap proposals: {}", report.gap_proposals);
                println!(" review note proposals: {}", report.review_note_proposals);
                println!(
                    " applied artifact events: {}",
                    report.applied_artifact_events
                );
                println!(
                    " pending truth proposals: {}",
                    report.pending_truth_proposals
                );
            }
        }
    }
}
10006
10007fn cmd_sign(action: SignAction) {
10008 match action {
10009 SignAction::GenerateKeypair { out, json } => {
10010 let public_key = sign::generate_keypair(&out).unwrap_or_else(|e| fail_return(&e));
10011 let payload = json!({
10012 "ok": true,
10013 "command": "sign.generate-keypair",
10014 "output_dir": out.display().to_string(),
10015 "public_key": public_key,
10016 });
10017 if json {
10018 println!(
10019 "{}",
10020 serde_json::to_string_pretty(&payload)
10021 .expect("failed to serialize sign.generate-keypair")
10022 );
10023 } else {
10024 println!("{} keypair · {}", style::ok("generated"), out.display());
10025 println!(" public key: {public_key}");
10026 }
10027 }
10028 SignAction::Apply {
10029 frontier,
10030 private_key,
10031 json,
10032 } => {
10033 let count =
10034 sign::sign_frontier(&frontier, &private_key).unwrap_or_else(|e| fail_return(&e));
10035 let payload = json!({
10036 "ok": true,
10037 "command": "sign.apply",
10038 "frontier": frontier.display().to_string(),
10039 "private_key": private_key.display().to_string(),
10040 "signed": count,
10041 });
10042 if json {
10043 println!(
10044 "{}",
10045 serde_json::to_string_pretty(&payload).expect("failed to serialize sign.apply")
10046 );
10047 } else {
10048 println!(
10049 "{} {count} findings in {}",
10050 style::ok("signed"),
10051 frontier.display()
10052 );
10053 }
10054 }
10055 SignAction::Verify {
10056 frontier,
10057 public_key,
10058 json,
10059 } => {
10060 let report = sign::verify_frontier(&frontier, public_key.as_deref())
10061 .unwrap_or_else(|e| fail_return(&e));
10062 if json {
10063 println!(
10064 "{}",
10065 serde_json::to_string_pretty(&report).expect("failed to serialize sign.verify")
10066 );
10067 } else {
10068 println!();
10069 println!(
10070 " {}",
10071 format!("VELA · SIGN · VERIFY · {}", frontier.display())
10072 .to_uppercase()
10073 .dimmed()
10074 );
10075 println!(" {}", style::tick_row(60));
10076 println!(" total findings: {}", report.total_findings);
10077 println!(" signed: {}", report.signed);
10078 println!(" unsigned: {}", report.unsigned);
10079 println!(" valid: {}", report.valid);
10080 println!(" invalid: {}", report.invalid);
10081 if report.findings_with_threshold > 0 {
10082 println!(" with threshold: {}", report.findings_with_threshold);
10083 println!(" jointly accepted: {}", report.jointly_accepted);
10084 }
10085 }
10086 }
10087 SignAction::ThresholdSet {
10088 frontier,
10089 finding_id,
10090 to,
10091 json,
10092 } => {
10093 if to == 0 {
10094 fail("--to must be >= 1");
10095 }
10096 let mut project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
10097 let Some(idx) = project.findings.iter().position(|f| f.id == finding_id) else {
10098 fail(&format!("finding '{finding_id}' not present in frontier"));
10099 };
10100 project.findings[idx].flags.signature_threshold = Some(to);
10101 sign::refresh_jointly_accepted(&mut project);
10105 let met = project.findings[idx].flags.jointly_accepted;
10106 repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));
10107
10108 if json {
10109 println!(
10110 "{}",
10111 serde_json::to_string_pretty(&json!({
10112 "ok": true,
10113 "command": "sign.threshold-set",
10114 "finding_id": finding_id,
10115 "threshold": to,
10116 "jointly_accepted": met,
10117 "frontier": frontier.display().to_string(),
10118 }))
10119 .expect("failed to serialize sign.threshold-set")
10120 );
10121 } else {
10122 println!(
10123 "{} signature_threshold={to} on {finding_id} ({})",
10124 style::ok("set"),
10125 if met {
10126 "jointly accepted"
10127 } else {
10128 "awaiting signatures"
10129 }
10130 );
10131 }
10132 }
10133 }
10134}
10135
10136fn cmd_actor(action: ActorAction) {
10137 match action {
10138 ActorAction::Add {
10139 frontier,
10140 id,
10141 pubkey,
10142 tier,
10143 orcid,
10144 clearance,
10145 json,
10146 } => {
10147 let trimmed = pubkey.trim();
10149 if trimmed.len() != 64 || hex::decode(trimmed).is_err() {
10150 fail("Public key must be 64 hex characters (32-byte Ed25519 pubkey).");
10151 }
10152 let orcid_normalized = orcid
10154 .as_deref()
10155 .map(|s| sign::validate_orcid(s).unwrap_or_else(|e| fail_return(&e)));
10156 let clearance: Option<crate::access_tier::AccessTier> = clearance.as_deref().map(|s| {
10159 crate::access_tier::AccessTier::parse(s).unwrap_or_else(|e| fail_return(&e))
10160 });
10161
10162 let mut project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
10163 if project.actors.iter().any(|actor| actor.id == id) {
10164 fail(&format!(
10165 "Actor '{id}' already registered in this frontier."
10166 ));
10167 }
10168 project.actors.push(sign::ActorRecord {
10169 id: id.clone(),
10170 public_key: trimmed.to_string(),
10171 algorithm: "ed25519".to_string(),
10172 created_at: chrono::Utc::now().to_rfc3339(),
10173 tier: tier.clone(),
10174 orcid: orcid_normalized.clone(),
10175 access_clearance: clearance,
10176 });
10177 repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));
10178 let payload = json!({
10179 "ok": true,
10180 "command": "actor.add",
10181 "frontier": frontier.display().to_string(),
10182 "actor_id": id,
10183 "public_key": trimmed,
10184 "tier": tier,
10185 "orcid": orcid_normalized,
10186 "registered_count": project.actors.len(),
10187 });
10188 if json {
10189 println!(
10190 "{}",
10191 serde_json::to_string_pretty(&payload).expect("failed to serialize actor.add")
10192 );
10193 } else {
10194 let tier_suffix = tier
10195 .as_deref()
10196 .map_or_else(String::new, |t| format!(" tier={t}"));
10197 println!(
10198 "{} actor {} (pubkey {}{tier_suffix})",
10199 style::ok("registered"),
10200 id,
10201 &trimmed[..16]
10202 );
10203 }
10204 }
10205 ActorAction::List { frontier, json } => {
10206 let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
10207 if json {
10208 let payload = json!({
10209 "ok": true,
10210 "command": "actor.list",
10211 "frontier": frontier.display().to_string(),
10212 "actors": project.actors,
10213 });
10214 println!(
10215 "{}",
10216 serde_json::to_string_pretty(&payload).expect("failed to serialize actor.list")
10217 );
10218 } else {
10219 println!();
10220 println!(
10221 " {}",
10222 format!("VELA · ACTOR · LIST · {}", frontier.display())
10223 .to_uppercase()
10224 .dimmed()
10225 );
10226 println!(" {}", style::tick_row(60));
10227 if project.actors.is_empty() {
10228 println!(" (no actors registered)");
10229 } else {
10230 for actor in &project.actors {
10231 println!(
10232 " {:<28} {}… registered {}",
10233 actor.id,
10234 &actor.public_key[..16],
10235 actor.created_at
10236 );
10237 }
10238 }
10239 }
10240 }
10241 }
10242}
10243
/// Handle the `vela causal` subcommands: audit the identifiability of every
/// finding, identify a single cause→effect query, dump the causal graph, and
/// answer counterfactual `do(x := v)` queries against a frontier.
fn cmd_causal(action: CausalAction) {
    use crate::causal_reasoning;

    match action {
        CausalAction::Audit {
            frontier,
            problems_only,
            json,
        } => {
            let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            // One audit entry per finding; --problems-only keeps just the
            // entries flagged for reviewer attention.
            let mut entries = causal_reasoning::audit_frontier(&project);
            if problems_only {
                entries.retain(|e| e.verdict.needs_reviewer_attention());
            }
            // Summary counts are computed over the (possibly filtered) entries.
            let summary = causal_reasoning::summarize_audit(&entries);

            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&json!({
                        "ok": true,
                        "command": "causal.audit",
                        "frontier": frontier.display().to_string(),
                        "summary": summary,
                        "entries": entries,
                    }))
                    .expect("serialize causal.audit")
                );
                return;
            }

            println!();
            println!(
                "  {}",
                format!("VELA · CAUSAL · AUDIT · {}", frontier.display())
                    .to_uppercase()
                    .dimmed()
            );
            println!("  {}", style::tick_row(60));
            println!(
                " total: {} identified: {} conditional: {} underidentified: {} underdetermined: {}",
                summary.total,
                summary.identified,
                summary.conditional,
                summary.underidentified,
                summary.underdetermined,
            );
            if entries.is_empty() {
                println!("  (no entries to report)");
                return;
            }
            for e in &entries {
                // Colored status chip per identifiability verdict.
                let chip = match e.verdict {
                    crate::causal_reasoning::Identifiability::Identified => style::ok("identified"),
                    crate::causal_reasoning::Identifiability::Conditional => {
                        style::warn("conditional")
                    }
                    crate::causal_reasoning::Identifiability::Underidentified => {
                        style::lost("underidentified")
                    }
                    crate::causal_reasoning::Identifiability::Underdetermined => {
                        style::warn("underdetermined")
                    }
                };
                // Optional claim/grade render as lowercased Debug text, or "none".
                let claim = e
                    .causal_claim
                    .map_or("none".to_string(), |c| format!("{c:?}").to_lowercase());
                let grade = e
                    .causal_evidence_grade
                    .map_or("none".to_string(), |g| format!("{g:?}").to_lowercase());
                println!();
                println!("  {chip} {} ({}/{})", e.finding_id, claim, grade);
                let assertion_short: String = e.assertion_text.chars().take(78).collect();
                println!("    {assertion_short}");
                println!("    {} {}", style::ok("why:"), e.rationale);
                // Show remediation advice for anything not cleanly identified.
                if e.verdict.needs_reviewer_attention()
                    || matches!(
                        e.verdict,
                        crate::causal_reasoning::Identifiability::Underdetermined
                    )
                {
                    println!("    {} {}", style::ok("fix:"), e.remediation);
                }
            }
        }
        CausalAction::Effect {
            frontier,
            source,
            on: target,
            json,
        } => {
            use crate::causal_graph::{CausalEffectVerdict, identify_effect};

            let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let verdict = identify_effect(&project, &source, &target);

            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&json!({
                        "ok": true,
                        "command": "causal.effect",
                        "frontier": frontier.display().to_string(),
                        "source": source,
                        "target": target,
                        "verdict": verdict,
                    }))
                    .expect("serialize causal.effect")
                );
                return;
            }

            println!();
            println!(
                "  {}",
                format!("VELA · CAUSAL · EFFECT · {} → {}", source, target)
                    .to_uppercase()
                    .dimmed()
            );
            println!("  {}", style::tick_row(60));
            match verdict {
                CausalEffectVerdict::Identified {
                    adjustment_set,
                    back_door_paths_considered,
                } => {
                    // Empty adjustment set means no back-door path needed blocking.
                    if adjustment_set.is_empty() {
                        println!(
                            "  {} no back-door adjustment needed",
                            style::ok("identified")
                        );
                    } else {
                        println!("  {} identified by adjusting on:", style::ok("identified"));
                        for z in &adjustment_set {
                            println!("    · {z}");
                        }
                    }
                    println!(
                        "  back-door paths considered: {}",
                        back_door_paths_considered
                    );
                }
                CausalEffectVerdict::IdentifiedByFrontDoor { mediator_set } => {
                    println!(
                        "  {} identified via front-door criterion (Pearl 1995 §3.3)",
                        style::ok("identified")
                    );
                    println!("  mediators that intercept all directed paths:");
                    for m in &mediator_set {
                        println!("    · {m}");
                    }
                    println!(
                        "  applies when source-target confounders are unobserved but the mediator chain is."
                    );
                }
                CausalEffectVerdict::NoCausalPath { reason } => {
                    println!("  {} no causal path: {reason}", style::warn("no_path"));
                }
                CausalEffectVerdict::Underidentified {
                    unblocked_back_door_paths,
                    candidates_tried,
                } => {
                    println!(
                        "  {} no observational adjustment set found ({} candidates tried)",
                        style::lost("underidentified"),
                        candidates_tried
                    );
                    // Only a sample of the open paths is shown (first 5).
                    println!("  open back-door paths:");
                    for path in unblocked_back_door_paths.iter().take(5) {
                        println!("    · {}", path.join(" — "));
                    }
                    println!(
                        "  remediation: either intervene experimentally on {source}, or extend the link graph to make a confounder observable."
                    );
                }
                CausalEffectVerdict::UnknownNode { which } => {
                    fail(&which);
                }
            }
            println!();
        }
        CausalAction::Graph {
            frontier,
            node,
            json,
        } => {
            use crate::causal_graph::CausalGraph;
            let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let graph = CausalGraph::from_project(&project);

            // With --node, restrict output to that single node (after a
            // membership check); otherwise walk every finding in the frontier.
            let nodes: Vec<&str> = if let Some(n) = node.as_deref() {
                if !graph.contains(n) {
                    fail(&format!("node not in frontier: {n}"));
                }
                vec![n]
            } else {
                project.findings.iter().map(|f| f.id.as_str()).collect()
            };

            if json {
                let payload: Vec<_> = nodes
                    .iter()
                    .map(|n| {
                        let parents: Vec<&str> = graph.parents_of(n).collect();
                        let children: Vec<&str> = graph.children_of(n).collect();
                        json!({
                            "node": n,
                            "parents": parents,
                            "children": children,
                        })
                    })
                    .collect();
                println!(
                    "{}",
                    serde_json::to_string_pretty(&json!({
                        "ok": true,
                        "command": "causal.graph",
                        "node_count": graph.node_count(),
                        "edge_count": graph.edge_count(),
                        "nodes": payload,
                    }))
                    .expect("serialize causal.graph")
                );
                return;
            }

            println!();
            println!(
                "  {}",
                format!("VELA · CAUSAL · GRAPH · {}", frontier.display())
                    .to_uppercase()
                    .dimmed()
            );
            println!("  {}", style::tick_row(60));
            println!(
                "  {} nodes · {} edges",
                graph.node_count(),
                graph.edge_count()
            );
            println!();
            for n in &nodes {
                let parents: Vec<&str> = graph.parents_of(n).collect();
                let children: Vec<&str> = graph.children_of(n).collect();
                // When listing the whole frontier, skip isolated nodes; a
                // single explicitly-requested node is always printed.
                if parents.is_empty() && children.is_empty() && nodes.len() > 1 {
                    continue; }
                println!("  {n}");
                if !parents.is_empty() {
                    println!("    parents: {}", parents.join(", "));
                }
                if !children.is_empty() {
                    println!("    children: {}", children.join(", "));
                }
            }
        }
        CausalAction::Counterfactual {
            frontier,
            intervene_on,
            set_to,
            target,
            json,
        } => {
            use crate::counterfactual::{
                CounterfactualQuery, CounterfactualVerdict, answer_counterfactual,
            };

            let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let query = CounterfactualQuery {
                intervene_on: intervene_on.clone(),
                set_to,
                target: target.clone(),
            };
            let verdict = answer_counterfactual(&project, &query);

            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&json!({
                        "ok": true,
                        "command": "causal.counterfactual",
                        "frontier": frontier.display().to_string(),
                        "query": query,
                        "verdict": verdict,
                    }))
                    .expect("serialize causal.counterfactual")
                );
                return;
            }

            println!();
            println!(
                "  {}",
                format!(
                    "VELA · CAUSAL · COUNTERFACTUAL · do({intervene_on} := {set_to:.3}) → {target}"
                )
                .to_uppercase()
                .dimmed()
            );
            println!("  {}", style::tick_row(72));
            match verdict {
                CounterfactualVerdict::Resolved {
                    factual,
                    counterfactual,
                    delta,
                    paths_used,
                } => {
                    println!(
                        "  {} factual: {factual:.3} counterfactual: {counterfactual:.3} delta: {delta:+.3}",
                        style::ok("resolved")
                    );
                    println!(
                        "  twin-network propagation through {} causal path(s):",
                        paths_used.len()
                    );
                    // Only a sample of the contributing paths is shown (first 5).
                    for p in paths_used.iter().take(5) {
                        println!("    · {}", p.join(" → "));
                    }
                    println!(
                        "  reading: \"if {intervene_on}'s confidence had been {set_to:.3} \
                         instead of factual, {target}'s confidence would shift by {delta:+.3}.\""
                    );
                }
                CounterfactualVerdict::MechanismUnspecified { unspecified_edges } => {
                    println!(
                        "  {} causal path exists but {} edge(s) lack a mechanism annotation",
                        style::warn("mechanism_unspecified"),
                        unspecified_edges.len()
                    );
                    for (parent, child) in unspecified_edges.iter().take(8) {
                        println!("    · {parent} → {child}");
                    }
                    println!(
                        "  remediation: annotate one of the link mechanisms (linear / monotonic / threshold / saturating)."
                    );
                }
                CounterfactualVerdict::NoCausalPath { factual } => {
                    // No directed path: the intervention cannot move the target.
                    println!(
                        "  {} no directed path from {intervene_on} to {target}; counterfactual = factual = {factual:.3}",
                        style::warn("no_path")
                    );
                }
                CounterfactualVerdict::UnknownNode { which } => {
                    fail(&format!("node not in frontier: {which}"));
                }
                CounterfactualVerdict::InvalidIntervention { reason } => {
                    fail(&reason);
                }
            }
            println!();
        }
    }
}
10598
10599fn cmd_bridges(action: BridgesAction) {
10602 use crate::bridge::{Bridge, BridgeStatus, derive_bridges};
10603 use std::collections::HashMap;
10604
10605 fn bridges_dir(frontier: &Path) -> PathBuf {
10606 frontier.join(".vela/bridges")
10607 }
10608
10609 fn load_bridge(frontier: &Path, id: &str) -> Result<Bridge, String> {
10610 let path = bridges_dir(frontier).join(format!("{id}.json"));
10611 if !path.is_file() {
10612 return Err(format!("bridge not found: {id}"));
10613 }
10614 let data = std::fs::read_to_string(&path).map_err(|e| format!("read {id}: {e}"))?;
10615 serde_json::from_str(&data).map_err(|e| format!("parse {id}: {e}"))
10616 }
10617
10618 fn save_bridge(frontier: &Path, b: &Bridge) -> Result<(), String> {
10619 let dir = bridges_dir(frontier);
10620 std::fs::create_dir_all(&dir).map_err(|e| format!("mkdir bridges/: {e}"))?;
10621 let path = dir.join(format!("{}.json", b.id));
10622 let data = serde_json::to_string_pretty(b).map_err(|e| format!("serialize bridge: {e}"))?;
10623 std::fs::write(&path, format!("{data}\n")).map_err(|e| format!("write bridge: {e}"))
10624 }
10625
10626 fn default_reviewer_id() -> String {
10629 std::env::var("VELA_REVIEWER_ID").unwrap_or_else(|_| "reviewer:will-blair".to_string())
10630 }
10631
10632 fn emit_bridge_reviewed_event(
10643 frontier: &Path,
10644 bridge_id: &str,
10645 status: &str,
10646 reviewer_id: &str,
10647 note: Option<&str>,
10648 ) -> Result<(), String> {
10649 let mut payload = serde_json::json!({
10650 "bridge_id": bridge_id,
10651 "status": status,
10652 });
10653 if let Some(n) = note
10654 && !n.trim().is_empty()
10655 {
10656 payload["note"] = serde_json::Value::String(n.to_string());
10657 }
10658 let known_ids: Vec<String> = list_bridges(frontier)
10660 .unwrap_or_default()
10661 .into_iter()
10662 .map(|b| b.id)
10663 .collect();
10664 crate::events::validate_bridge_reviewed_against_state(&payload, &known_ids)?;
10665 let event = crate::events::new_bridge_reviewed_event(
10666 bridge_id,
10667 reviewer_id,
10668 "human",
10669 &format!("Bridge {status} by {reviewer_id}"),
10670 payload,
10671 Vec::new(),
10672 );
10673 let events_dir = frontier.join(".vela/events");
10674 std::fs::create_dir_all(&events_dir).map_err(|e| format!("mkdir .vela/events: {e}"))?;
10675 let event_path = events_dir.join(format!("{}.json", event.id));
10676 let data =
10677 serde_json::to_string_pretty(&event).map_err(|e| format!("serialize event: {e}"))?;
10678 std::fs::write(&event_path, format!("{data}\n")).map_err(|e| format!("write event: {e}"))
10679 }
10680
10681 fn list_bridges(frontier: &Path) -> Result<Vec<Bridge>, String> {
10682 let dir = bridges_dir(frontier);
10683 if !dir.is_dir() {
10684 return Ok(Vec::new());
10685 }
10686 let mut out = Vec::new();
10687 for entry in std::fs::read_dir(&dir).map_err(|e| format!("read bridges/: {e}"))? {
10688 let entry = entry.map_err(|e| format!("read entry: {e}"))?;
10689 let path = entry.path();
10690 if path.extension().and_then(|s| s.to_str()) != Some("json") {
10691 continue;
10692 }
10693 let data = std::fs::read_to_string(&path).map_err(|e| format!("read {path:?}: {e}"))?;
10694 let b: Bridge =
10695 serde_json::from_str(&data).map_err(|e| format!("parse {path:?}: {e}"))?;
10696 out.push(b);
10697 }
10698 out.sort_by(|a, b| {
10699 b.finding_refs
10700 .len()
10701 .cmp(&a.finding_refs.len())
10702 .then(a.entity_name.cmp(&b.entity_name))
10703 });
10704 Ok(out)
10705 }
10706
10707 match action {
10708 BridgesAction::Derive {
10709 frontier_a,
10710 label_a,
10711 frontier_b,
10712 label_b,
10713 json,
10714 } => {
10715 let a = repo::load_from_path(&frontier_a).unwrap_or_else(|e| fail_return(&e));
10716 let b = repo::load_from_path(&frontier_b).unwrap_or_else(|e| fail_return(&e));
10717 let now = chrono::Utc::now().to_rfc3339();
10718 let new_bridges =
10719 derive_bridges(&[(label_a.as_str(), &a), (label_b.as_str(), &b)], &now);
10720
10721 let existing = list_bridges(&frontier_a).unwrap_or_default();
10725 let existing_by_id: HashMap<String, Bridge> =
10726 existing.iter().map(|b| (b.id.clone(), b.clone())).collect();
10727 let mut written = 0;
10728 let mut preserved = 0;
10729 let mut new_ids = Vec::new();
10730 for mut bridge in new_bridges {
10731 if let Some(prev) = existing_by_id.get(&bridge.id)
10732 && prev.status != BridgeStatus::Derived
10733 {
10734 bridge.status = prev.status;
10736 bridge.derived_at = prev.derived_at.clone();
10737 preserved += 1;
10738 }
10739 save_bridge(&frontier_a, &bridge).unwrap_or_else(|e| fail_return(&e));
10740 new_ids.push(bridge.id.clone());
10741 written += 1;
10742 }
10743
10744 if json {
10745 println!(
10746 "{}",
10747 serde_json::to_string_pretty(&json!({
10748 "ok": true,
10749 "command": "bridges.derive",
10750 "frontier_a": frontier_a.display().to_string(),
10751 "frontier_b": frontier_b.display().to_string(),
10752 "bridges_written": written,
10753 "reviewer_judgments_preserved": preserved,
10754 "ids": new_ids,
10755 }))
10756 .expect("serialize bridges.derive")
10757 );
10758 return;
10759 }
10760
10761 println!();
10762 println!(
10763 " {}",
10764 format!("VELA · BRIDGES · DERIVE · {} ↔ {}", label_a, label_b)
10765 .to_uppercase()
10766 .dimmed()
10767 );
10768 println!(" {}", style::tick_row(60));
10769 println!(" {} {} bridge(s) materialized", style::ok("ok"), written);
10770 if preserved > 0 {
10771 println!(
10772 " {} {} reviewer judgment(s) preserved",
10773 style::ok("kept"),
10774 preserved
10775 );
10776 }
10777 for id in new_ids.iter().take(10) {
10778 println!(" · {id}");
10779 }
10780 if new_ids.len() > 10 {
10781 println!(" … and {} more", new_ids.len() - 10);
10782 }
10783 println!();
10784 }
10785 BridgesAction::List {
10786 frontier,
10787 status,
10788 json,
10789 } => {
10790 let mut bridges = list_bridges(&frontier).unwrap_or_else(|e| fail_return(&e));
10791 if let Some(s) = status.as_deref() {
10792 let want = match s.to_lowercase().as_str() {
10793 "derived" => BridgeStatus::Derived,
10794 "confirmed" => BridgeStatus::Confirmed,
10795 "refuted" => BridgeStatus::Refuted,
10796 other => fail_return(&format!(
10797 "unknown bridge status '{other}' (try derived|confirmed|refuted)"
10798 )),
10799 };
10800 bridges.retain(|b| b.status == want);
10801 }
10802 if json {
10803 println!(
10804 "{}",
10805 serde_json::to_string_pretty(&json!({
10806 "ok": true,
10807 "command": "bridges.list",
10808 "frontier": frontier.display().to_string(),
10809 "count": bridges.len(),
10810 "bridges": bridges,
10811 }))
10812 .expect("serialize bridges.list")
10813 );
10814 return;
10815 }
10816 println!();
10817 println!(
10818 " {}",
10819 format!("VELA · BRIDGES · LIST · {}", frontier.display())
10820 .to_uppercase()
10821 .dimmed()
10822 );
10823 println!(" {}", style::tick_row(60));
10824 println!(" {} bridge(s)", bridges.len());
10825 for b in &bridges {
10826 let chip = match b.status {
10827 BridgeStatus::Derived => style::warn("derived"),
10828 BridgeStatus::Confirmed => style::ok("confirmed"),
10829 BridgeStatus::Refuted => style::lost("refuted"),
10830 };
10831 println!();
10832 println!(
10833 " {chip} {} {} ↔ findings:{}",
10834 b.id,
10835 b.entity_name,
10836 b.finding_refs.len()
10837 );
10838 println!(" frontiers: {}", b.frontiers.join(", "));
10839 if let Some(t) = &b.tension {
10840 println!(" tension: {t}");
10841 }
10842 }
10843 println!();
10844 }
10845 BridgesAction::Show {
10846 frontier,
10847 bridge_id,
10848 json,
10849 } => {
10850 let b = load_bridge(&frontier, &bridge_id).unwrap_or_else(|e| fail_return(&e));
10851 if json {
10852 println!("{}", serde_json::to_string_pretty(&b).expect("serialize"));
10853 return;
10854 }
10855 println!();
10856 println!(
10857 " {}",
10858 format!("VELA · BRIDGES · SHOW · {}", b.id)
10859 .to_uppercase()
10860 .dimmed()
10861 );
10862 println!(" {}", style::tick_row(60));
10863 println!(" entity: {}", b.entity_name);
10864 println!(" status: {:?}", b.status);
10865 println!(" frontiers: {}", b.frontiers.join(", "));
10866 if !b.frontier_ids.is_empty() {
10867 println!(" frontier_ids: {}", b.frontier_ids.join(", "));
10868 }
10869 if let Some(t) = &b.tension {
10870 println!(" tension: {t}");
10871 }
10872 println!(" derived_at: {}", b.derived_at);
10873 println!(" finding refs ({}):", b.finding_refs.len());
10874 for r in &b.finding_refs {
10875 let dir = r.direction.as_deref().unwrap_or("—");
10876 let truncated: String = r.assertion_text.chars().take(72).collect();
10877 println!(
10878 " · [{}] {} (conf={:.2}, dir={})",
10879 r.frontier, r.finding_id, r.confidence, dir
10880 );
10881 println!(" {truncated}");
10882 }
10883 println!();
10884 }
10885 BridgesAction::Confirm {
10886 frontier,
10887 bridge_id,
10888 reviewer,
10889 note,
10890 json,
10891 } => {
10892 let mut b = load_bridge(&frontier, &bridge_id).unwrap_or_else(|e| fail_return(&e));
10893 let reviewer_id = reviewer.unwrap_or_else(default_reviewer_id);
10894 b.status = BridgeStatus::Confirmed;
10895 save_bridge(&frontier, &b).unwrap_or_else(|e| fail_return(&e));
10896 let _ = emit_bridge_reviewed_event(
10900 &frontier,
10901 &bridge_id,
10902 "confirmed",
10903 &reviewer_id,
10904 note.as_deref(),
10905 );
10906 if json {
10907 println!("{}", serde_json::to_string_pretty(&b).expect("serialize"));
10908 return;
10909 }
10910 println!();
10911 println!(" {} {} now confirmed", style::ok("confirmed"), b.id);
10912 println!();
10913 }
10914 BridgesAction::Refute {
10915 frontier,
10916 bridge_id,
10917 reviewer,
10918 note,
10919 json,
10920 } => {
10921 let mut b = load_bridge(&frontier, &bridge_id).unwrap_or_else(|e| fail_return(&e));
10922 let reviewer_id = reviewer.unwrap_or_else(default_reviewer_id);
10923 b.status = BridgeStatus::Refuted;
10924 save_bridge(&frontier, &b).unwrap_or_else(|e| fail_return(&e));
10925 let _ = emit_bridge_reviewed_event(
10926 &frontier,
10927 &bridge_id,
10928 "refuted",
10929 &reviewer_id,
10930 note.as_deref(),
10931 );
10932 if json {
10933 println!("{}", serde_json::to_string_pretty(&b).expect("serialize"));
10934 return;
10935 }
10936 println!();
10937 println!(" {} {} now refuted", style::lost("refuted"), b.id);
10938 println!();
10939 }
10940 }
10941}
10942
10943fn cmd_federation(action: FederationAction) {
10945 use crate::federation::PeerHub;
10946
10947 match action {
10948 FederationAction::PeerAdd {
10949 frontier,
10950 id,
10951 url,
10952 pubkey,
10953 note,
10954 json,
10955 } => {
10956 let peer = PeerHub {
10957 id: id.clone(),
10958 url: url.clone(),
10959 public_key: pubkey.trim().to_string(),
10960 added_at: chrono::Utc::now().to_rfc3339(),
10961 note: note.clone(),
10962 };
10963 peer.validate().unwrap_or_else(|e| fail_return(&e));
10964
10965 let mut project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
10966 if project.peers.iter().any(|p| p.id == id) {
10967 fail(&format!("peer '{id}' already in registry"));
10968 }
10969 project.peers.push(peer.clone());
10970 repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));
10971
10972 if json {
10973 println!(
10974 "{}",
10975 serde_json::to_string_pretty(&json!({
10976 "ok": true,
10977 "command": "federation.peer-add",
10978 "frontier": frontier.display().to_string(),
10979 "peer": peer,
10980 "registered_count": project.peers.len(),
10981 }))
10982 .expect("serialize federation.peer-add")
10983 );
10984 } else {
10985 println!(
10986 "{} peer {} (pubkey {}…) at {}",
10987 style::ok("registered"),
10988 id,
10989 &peer.public_key[..16],
10990 peer.url
10991 );
10992 }
10993 }
10994 FederationAction::PeerList { frontier, json } => {
10995 let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
10996 if json {
10997 println!(
10998 "{}",
10999 serde_json::to_string_pretty(&json!({
11000 "ok": true,
11001 "command": "federation.peer-list",
11002 "frontier": frontier.display().to_string(),
11003 "peers": project.peers,
11004 }))
11005 .expect("serialize federation.peer-list")
11006 );
11007 } else {
11008 println!();
11009 println!(
11010 " {}",
11011 format!("VELA · FEDERATION · PEERS · {}", frontier.display())
11012 .to_uppercase()
11013 .dimmed()
11014 );
11015 println!(" {}", style::tick_row(60));
11016 if project.peers.is_empty() {
11017 println!(" (no peers registered)");
11018 } else {
11019 for p in &project.peers {
11020 let note_suffix = if p.note.is_empty() {
11021 String::new()
11022 } else {
11023 format!(" · {}", p.note)
11024 };
11025 println!(
11026 " {:<24} {} {}…{note_suffix}",
11027 p.id,
11028 p.url,
11029 &p.public_key[..16]
11030 );
11031 }
11032 }
11033 }
11034 }
11035 FederationAction::Sync {
11036 frontier,
11037 peer_id,
11038 url,
11039 via_hub,
11040 vfr_id,
11041 allow_cross_vfr,
11042 dry_run,
11043 json,
11044 } => {
11045 use crate::federation::{self, DiscoveryResult};
11046
11047 let mut project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
11048 let Some(peer) = project.peers.iter().find(|p| p.id == peer_id).cloned() else {
11049 fail(&format!(
11050 "peer '{peer_id}' not in registry; run `vela federation peer add` first"
11051 ));
11052 };
11053 let local_frontier_id = project.frontier_id();
11054
11055 if via_hub
11062 && let Some(target) = vfr_id.as_deref()
11063 && target != local_frontier_id
11064 && !allow_cross_vfr
11065 {
11066 fail(&format!(
11067 "cross-vfr sync refused: --vfr-id {target} differs from local frontier_id {local_frontier_id}. \
11068 Pass --allow-cross-vfr to opt in (every peer-side finding will be recorded as a \
11069 missing_locally conflict). Or omit --vfr-id to default to the local frontier id."
11070 ));
11071 }
11072
11073 #[derive(Debug)]
11075 enum SyncOutcome {
11076 Resolved(crate::project::Project, String), BrokenLocator(String, String, u16), UnverifiedEntry(String, String), EntryNotFound(String, u16),
11080 }
11081
11082 let outcome = if via_hub {
11083 let target_vfr = vfr_id.clone().unwrap_or_else(|| local_frontier_id.clone());
11084 match federation::discover_peer_frontier(
11085 &peer.url,
11086 &target_vfr,
11087 Some(&peer.public_key),
11088 ) {
11089 DiscoveryResult::Resolved(p) => {
11090 let src =
11091 format!("{}/entries/{}", peer.url.trim_end_matches('/'), target_vfr);
11092 SyncOutcome::Resolved(p, src)
11093 }
11094 DiscoveryResult::BrokenLocator {
11095 vfr_id,
11096 locator,
11097 status,
11098 } => SyncOutcome::BrokenLocator(vfr_id, locator, status),
11099 DiscoveryResult::UnverifiedEntry { vfr_id, reason } => {
11100 SyncOutcome::UnverifiedEntry(vfr_id, reason)
11101 }
11102 DiscoveryResult::EntryNotFound { vfr_id, status } => {
11103 SyncOutcome::EntryNotFound(vfr_id, status)
11104 }
11105 DiscoveryResult::Unreachable { url, error } => {
11106 fail(&format!("peer hub unreachable ({url}): {error}"));
11107 }
11108 }
11109 } else {
11110 let resolved_url = url.unwrap_or_else(|| {
11111 let base = peer.url.trim_end_matches('/');
11112 format!("{base}/manifest/{local_frontier_id}.json")
11113 });
11114 match federation::fetch_peer_frontier(&resolved_url) {
11115 Ok(p) => SyncOutcome::Resolved(p, resolved_url),
11116 Err(e) => fail(&format!("direct fetch failed: {e}")),
11117 }
11118 };
11119
11120 let peer_source: String;
11123 let peer_state = match outcome {
11124 SyncOutcome::Resolved(p, src) => {
11125 if !json {
11126 println!(" · resolved via {src}");
11127 }
11128 peer_source = src;
11129 p
11130 }
11131 SyncOutcome::BrokenLocator(vfr, locator, status) => {
11132 if dry_run {
11133 if json {
11134 println!(
11135 "{}",
11136 serde_json::to_string_pretty(&json!({
11137 "ok": true,
11138 "command": "federation.sync",
11139 "dry_run": true,
11140 "outcome": "broken_locator",
11141 "vfr_id": vfr,
11142 "locator": locator,
11143 "http_status": status,
11144 }))
11145 .expect("serialize")
11146 );
11147 } else {
11148 println!(
11149 "{} dry-run: peer entry resolved but locator dead",
11150 style::warn("broken_locator")
11151 );
11152 println!(" vfr_id: {vfr}");
11153 println!(" locator: {locator} (HTTP {status})");
11154 }
11155 return;
11156 }
11157 let report = federation::record_locator_failure(
11158 &mut project,
11159 &peer_id,
11160 &vfr,
11161 &locator,
11162 status,
11163 );
11164 repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));
11165 if json {
11166 println!(
11167 "{}",
11168 serde_json::to_string_pretty(&json!({
11169 "ok": true,
11170 "command": "federation.sync",
11171 "outcome": "broken_locator",
11172 "report": report,
11173 }))
11174 .expect("serialize")
11175 );
11176 } else {
11177 println!(
11178 "{} sync recorded broken-locator conflict against {peer_id}",
11179 style::warn("broken_locator")
11180 );
11181 println!(" vfr_id: {vfr}");
11182 println!(" locator: {locator} (HTTP {status})");
11183 println!(" events appended: {}", report.events_appended);
11184 }
11185 return;
11186 }
11187 SyncOutcome::UnverifiedEntry(vfr, reason) => {
11188 if dry_run {
11189 if json {
11190 println!(
11191 "{}",
11192 serde_json::to_string_pretty(&json!({
11193 "ok": true,
11194 "command": "federation.sync",
11195 "dry_run": true,
11196 "outcome": "unverified_peer_entry",
11197 "vfr_id": vfr,
11198 "reason": reason,
11199 }))
11200 .expect("serialize")
11201 );
11202 } else {
11203 println!(
11204 "{} dry-run: peer entry signature did not verify",
11205 style::lost("unverified_peer_entry")
11206 );
11207 println!(" vfr_id: {vfr}");
11208 println!(" reason: {reason}");
11209 }
11210 return;
11211 }
11212 let report =
11213 federation::record_unverified_entry(&mut project, &peer_id, &vfr, &reason);
11214 repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));
11215 if json {
11216 println!(
11217 "{}",
11218 serde_json::to_string_pretty(&json!({
11219 "ok": true,
11220 "command": "federation.sync",
11221 "outcome": "unverified_peer_entry",
11222 "report": report,
11223 }))
11224 .expect("serialize")
11225 );
11226 } else {
11227 println!(
11228 "{} sync halted; peer's registry entry signature did not verify",
11229 style::lost("unverified_peer_entry")
11230 );
11231 println!(" vfr_id: {vfr}");
11232 println!(" reason: {reason}");
11233 }
11234 return;
11235 }
11236 SyncOutcome::EntryNotFound(vfr, status) => {
11237 if json {
11238 println!(
11239 "{}",
11240 serde_json::to_string_pretty(&json!({
11241 "ok": false,
11242 "command": "federation.sync",
11243 "outcome": "entry_not_found",
11244 "vfr_id": vfr,
11245 "http_status": status,
11246 }))
11247 .expect("serialize")
11248 );
11249 } else {
11250 println!(
11251 "{} peer's hub does not publish vfr_id {vfr} (HTTP {status})",
11252 style::warn("entry_not_found")
11253 );
11254 }
11255 return;
11256 }
11257 };
11258
11259 if dry_run {
11260 let conflicts = federation::diff_frontiers(&project, &peer_state);
11261 if json {
11262 println!(
11263 "{}",
11264 serde_json::to_string_pretty(&json!({
11265 "ok": true,
11266 "command": "federation.sync",
11267 "dry_run": true,
11268 "peer_id": peer_id,
11269 "peer_source": peer_source,
11270 "conflicts": conflicts,
11271 }))
11272 .expect("serialize federation.sync (dry-run)")
11273 );
11274 } else {
11275 println!(
11276 "{} dry-run vs {peer_id} ({}): {} conflict(s)",
11277 style::ok("ok"),
11278 peer_source,
11279 conflicts.len()
11280 );
11281 for c in &conflicts {
11282 println!(" · {} {} {}", c.kind.as_str(), c.finding_id, c.detail);
11283 }
11284 }
11285 return;
11286 }
11287
11288 let report = federation::sync_with_peer(&mut project, &peer_id, &peer_state);
11289 repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));
11290
11291 if json {
11292 println!(
11293 "{}",
11294 serde_json::to_string_pretty(&json!({
11295 "ok": true,
11296 "command": "federation.sync",
11297 "peer_id": peer_id,
11298 "peer_source": peer_source,
11299 "report": report,
11300 }))
11301 .expect("serialize federation.sync")
11302 );
11303 } else {
11304 println!(
11305 "{} synced with {} ({})",
11306 style::ok("ok"),
11307 peer_id,
11308 peer_source
11309 );
11310 println!(
11311 " our: {}",
11312 &report.our_snapshot_hash[..16.min(report.our_snapshot_hash.len())]
11313 );
11314 println!(
11315 " peer: {}",
11316 &report.peer_snapshot_hash[..16.min(report.peer_snapshot_hash.len())]
11317 );
11318 println!(
11319 " conflicts: {} events appended: {}",
11320 report.conflicts.len(),
11321 report.events_appended
11322 );
11323 for c in &report.conflicts {
11324 println!(" · {} {} {}", c.kind.as_str(), c.finding_id, c.detail);
11325 }
11326 }
11327 }
11328 FederationAction::PushResolution {
11329 frontier,
11330 conflict_event_id,
11331 to,
11332 key,
11333 vfr_id,
11334 json,
11335 } => {
11336 cmd_federation_push_resolution(frontier, conflict_event_id, to, key, vfr_id, json);
11337 }
11338 FederationAction::PeerRemove { frontier, id, json } => {
11339 let mut project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
11340 let before = project.peers.len();
11341 project.peers.retain(|p| p.id != id);
11342 if project.peers.len() == before {
11343 fail(&format!("peer '{id}' not found in registry"));
11344 }
11345 repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));
11346
11347 if json {
11348 println!(
11349 "{}",
11350 serde_json::to_string_pretty(&json!({
11351 "ok": true,
11352 "command": "federation.peer-remove",
11353 "frontier": frontier.display().to_string(),
11354 "removed": id,
11355 "remaining": project.peers.len(),
11356 }))
11357 .expect("serialize federation.peer-remove")
11358 );
11359 } else {
11360 println!(
11361 "{} peer {} ({} remaining)",
11362 style::ok("removed"),
11363 id,
11364 project.peers.len()
11365 );
11366 }
11367 }
11368 }
11369}
11370
11371fn cmd_federation_push_resolution(
11383 frontier: PathBuf,
11384 conflict_event_id: String,
11385 to: String,
11386 key: Option<PathBuf>,
11387 vfr_id: Option<String>,
11388 json: bool,
11389) {
11390 use crate::canonical;
11391 use crate::sign;
11392
11393 let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
11394
11395 let Some(peer) = project.peers.iter().find(|p| p.id == to).cloned() else {
11396 fail(&format!(
11397 "peer '{to}' not in registry; run `vela federation peer-add` first"
11398 ));
11399 };
11400
11401 let Some(resolution) = project
11403 .events
11404 .iter()
11405 .find(|e| {
11406 e.kind == "frontier.conflict_resolved"
11407 && e.payload.get("conflict_event_id").and_then(|v| v.as_str())
11408 == Some(conflict_event_id.as_str())
11409 })
11410 .cloned()
11411 else {
11412 fail(&format!(
11413 "no frontier.conflict_resolved event paired with conflict {conflict_event_id} in {}",
11414 frontier.display()
11415 ));
11416 };
11417
11418 let actor_id = resolution.actor.id.clone();
11421 let Some(actor) = project.actors.iter().find(|a| a.id == actor_id) else {
11422 fail(&format!(
11423 "resolution event's actor.id ({actor_id}) is not in the frontier's actor registry; \
11424 register the reviewer with `vela actor add` before pushing"
11425 ));
11426 };
11427
11428 let key_path = key.unwrap_or_else(|| {
11431 let home = std::env::var("HOME").unwrap_or_default();
11432 let base = PathBuf::from(home)
11433 .join(".config")
11434 .join("vela")
11435 .join("keys");
11436 let safe_id = actor.id.replace([':', '/'], "_");
11437 let by_actor = base.join(format!("{safe_id}.key"));
11438 if by_actor.exists() {
11439 by_actor
11440 } else {
11441 base.join("private.key")
11442 }
11443 });
11444
11445 let signing_key = sign::load_signing_key_from_path(&key_path).unwrap_or_else(|e| {
11446 fail_return(&format!(
11447 "load private key from {}: {e}",
11448 key_path.display()
11449 ))
11450 });
11451 let pubkey_hex = sign::pubkey_hex(&signing_key);
11452 if !pubkey_hex.eq_ignore_ascii_case(&actor.public_key) {
11453 fail(&format!(
11454 "private key at {} does not match actor {}'s registered public key. \
11455 Loaded pubkey {}, expected {}.",
11456 key_path.display(),
11457 actor.id,
11458 &pubkey_hex[..16],
11459 &actor.public_key[..16]
11460 ));
11461 }
11462
11463 let signature_hex = sign::sign_event(&resolution, &signing_key)
11466 .unwrap_or_else(|e| fail_return(&format!("sign event: {e}")));
11467
11468 let mut body = resolution.clone();
11473 body.signature = None;
11474 let body_value =
11475 serde_json::to_value(&body).unwrap_or_else(|e| fail_return(&format!("serialize: {e}")));
11476 let _canonical_check = canonical::to_canonical_bytes(&body_value)
11477 .unwrap_or_else(|e| fail_return(&format!("canonicalize: {e}")));
11478
11479 let target_vfr = vfr_id.unwrap_or_else(|| project.frontier_id());
11480 let url = format!(
11481 "{}/entries/{}/events",
11482 peer.url.trim_end_matches('/'),
11483 target_vfr
11484 );
11485
11486 let url_owned = url.clone();
11488 let pubkey_owned = pubkey_hex.clone();
11489 let signature_owned = signature_hex.clone();
11490 let body_owned = body_value.clone();
11491 let response: Result<(u16, String), String> = std::thread::spawn(move || {
11492 let client = reqwest::blocking::Client::new();
11493 let resp = client
11494 .post(&url_owned)
11495 .header("X-Vela-Signer-Pubkey", &pubkey_owned)
11496 .header("X-Vela-Signature", &signature_owned)
11497 .json(&body_owned)
11498 .send()
11499 .map_err(|e| format!("HTTP POST {url_owned}: {e}"))?;
11500 let status = resp.status().as_u16();
11501 let text = resp.text().unwrap_or_default();
11502 Ok((status, text))
11503 })
11504 .join()
11505 .map_err(|_| "push thread panicked".to_string())
11506 .unwrap_or_else(|e| fail_return(&e));
11507
11508 let (status, text) = response.unwrap_or_else(|e| fail_return(&e));
11509 let parsed: serde_json::Value =
11510 serde_json::from_str(&text).unwrap_or_else(|_| json!({ "raw": text }));
11511
11512 let accepted = matches!(status, 200..=202);
11513 if json {
11514 println!(
11515 "{}",
11516 serde_json::to_string_pretty(&json!({
11517 "ok": accepted,
11518 "command": "federation.push-resolution",
11519 "frontier": frontier.display().to_string(),
11520 "peer_id": to,
11521 "url": url,
11522 "conflict_event_id": conflict_event_id,
11523 "event_id": resolution.id,
11524 "actor_id": actor.id,
11525 "http_status": status,
11526 "response": parsed,
11527 }))
11528 .expect("serialize federation.push-resolution")
11529 );
11530 } else if accepted {
11531 println!(
11532 "{} resolution {} pushed to {} (HTTP {})",
11533 style::ok("ok"),
11534 &resolution.id[..16.min(resolution.id.len())],
11535 to,
11536 status
11537 );
11538 println!(" url: {url}");
11539 println!(" signer: {} (actor {})", &pubkey_hex[..16], actor.id);
11540 } else {
11541 println!("{} push refused (HTTP {})", style::lost("rejected"), status);
11542 println!(" url: {url}");
11543 println!(" response: {text}");
11544 std::process::exit(1);
11545 }
11546}
11547
/// CLI dispatch for `vela queue …`: inspect, clear, or batch-sign the
/// offline action queue.
fn cmd_queue(action: QueueAction) {
    use crate::queue;
    match action {
        QueueAction::List { queue_file, json } => {
            // Fall back to the user-level default queue file when no
            // explicit path was given on the command line.
            let path = queue_file.unwrap_or_else(queue::default_queue_path);
            let q = queue::load(&path).unwrap_or_else(|e| fail_return(&e));
            if json {
                let payload = json!({
                    "ok": true,
                    "command": "queue.list",
                    "queue_file": path.display().to_string(),
                    "schema": q.schema,
                    "actions": q.actions,
                });
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload).expect("failed to serialize queue.list")
                );
            } else {
                println!();
                println!(
                    "  {}",
                    format!("VELA · QUEUE · LIST · {}", path.display())
                        .to_uppercase()
                        .dimmed()
                );
                println!("  {}", style::tick_row(60));
                if q.actions.is_empty() {
                    println!("    (queue is empty)");
                } else {
                    // One indexed row per queued action.
                    for (idx, action) in q.actions.iter().enumerate() {
                        println!(
                            "    [{idx}] {} → {} queued {}",
                            action.kind,
                            action.frontier.display(),
                            action.queued_at
                        );
                    }
                }
            }
        }
        QueueAction::Clear { queue_file, json } => {
            let path = queue_file.unwrap_or_else(queue::default_queue_path);
            // `clear` reports how many queued actions were dropped.
            let dropped = queue::clear(&path).unwrap_or_else(|e| fail_return(&e));
            if json {
                let payload = json!({
                    "ok": true,
                    "command": "queue.clear",
                    "queue_file": path.display().to_string(),
                    "dropped": dropped,
                });
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize queue.clear")
                );
            } else {
                println!("{} dropped {dropped} queued action(s)", style::ok("ok"));
            }
        }
        QueueAction::Sign {
            actor,
            key,
            queue_file,
            yes_to_all,
            json,
        } => {
            let path = queue_file.unwrap_or_else(queue::default_queue_path);
            let q = queue::load(&path).unwrap_or_else(|e| fail_return(&e));
            if q.actions.is_empty() {
                // Nothing to sign — compact JSON on this early-exit path.
                if json {
                    println!("{}", json!({"ok": true, "signed": 0, "remaining": 0}));
                } else {
                    println!("{} queue is empty", style::ok("ok"));
                }
                return;
            }
            // The key file is expected to hold the hex-encoded private key
            // (whitespace-trimmed before parsing).
            let key_hex = std::fs::read_to_string(&key)
                .map(|s| s.trim().to_string())
                .unwrap_or_else(|e| fail_return(&format!("read key {}: {e}", key.display())));
            let signing_key = parse_signing_key(&key_hex);
            let mut signed_count = 0usize;
            // Actions that are skipped (declined) or fail to apply stay in
            // `remaining` and are written back to the queue afterwards.
            let mut remaining = Vec::new();
            for action in q.actions.iter() {
                // Per-action interactive confirmation unless --yes-to-all.
                if !yes_to_all && !confirm_action(action) {
                    remaining.push(action.clone());
                    continue;
                }
                match sign_and_apply(&signing_key, &actor, action) {
                    Ok(report) => {
                        signed_count += 1;
                        if !json {
                            println!(
                                "{} {} on {} → {}",
                                style::ok("signed"),
                                action.kind,
                                action.frontier.display(),
                                report
                            );
                        }
                    }
                    Err(error) => {
                        // A failed action is retained for a later retry.
                        remaining.push(action.clone());
                        if !json {
                            eprintln!(
                                "{} {} on {}: {error}",
                                style::warn("failed"),
                                action.kind,
                                action.frontier.display()
                            );
                        }
                    }
                }
            }
            // Persist whatever was not signed back to the queue file.
            queue::replace_actions(&path, remaining.clone()).unwrap_or_else(|e| fail_return(&e));
            if json {
                let payload = json!({
                    "ok": true,
                    "command": "queue.sign",
                    "signed": signed_count,
                    "remaining": remaining.len(),
                });
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload).expect("failed to serialize queue.sign")
                );
            } else {
                println!(
                    "{} signed {signed_count} action(s); {} remaining in queue",
                    style::ok("ok"),
                    remaining.len()
                );
            }
        }
    }
}
11689
11690fn parse_signing_key(hex_str: &str) -> ed25519_dalek::SigningKey {
11691 let bytes = hex::decode(hex_str)
11692 .unwrap_or_else(|e| fail_return(&format!("invalid private-key hex: {e}")));
11693 let key_bytes: [u8; 32] = bytes
11694 .try_into()
11695 .unwrap_or_else(|_| fail_return("private key must be 32 bytes"));
11696 ed25519_dalek::SigningKey::from_bytes(&key_bytes)
11697}
11698
11699fn confirm_action(action: &crate::queue::QueuedAction) -> bool {
11700 use std::io::{self, BufRead, Write};
11701 let mut stdout = io::stdout().lock();
11702 let _ = writeln!(
11703 stdout,
11704 " sign {} on {}? [y/N] ",
11705 action.kind,
11706 action.frontier.display()
11707 );
11708 let _ = stdout.flush();
11709 drop(stdout);
11710 let stdin = io::stdin();
11711 let mut line = String::new();
11712 if stdin.lock().read_line(&mut line).is_err() {
11713 return false;
11714 }
11715 matches!(line.trim().to_lowercase().as_str(), "y" | "yes")
11716}
11717
11718fn sign_and_apply(
11723 signing_key: &ed25519_dalek::SigningKey,
11724 actor: &str,
11725 action: &crate::queue::QueuedAction,
11726) -> Result<String, String> {
11727 use crate::events::StateTarget;
11728 use crate::proposals;
11729 let args = &action.args;
11730 match action.kind.as_str() {
11731 "propose_review" | "propose_note" | "propose_revise_confidence" | "propose_retract" => {
11732 let kind = match action.kind.as_str() {
11733 "propose_review" => "finding.review",
11734 "propose_note" => "finding.note",
11735 "propose_revise_confidence" => "finding.confidence_revise",
11736 "propose_retract" => "finding.retract",
11737 _ => unreachable!(),
11738 };
11739 let target_id = args
11740 .get("target_finding_id")
11741 .and_then(Value::as_str)
11742 .ok_or("target_finding_id missing")?;
11743 let reason = args
11744 .get("reason")
11745 .and_then(Value::as_str)
11746 .ok_or("reason missing")?;
11747 let payload = match action.kind.as_str() {
11748 "propose_review" => {
11749 let status = args
11750 .get("status")
11751 .and_then(Value::as_str)
11752 .ok_or("status missing")?;
11753 json!({"status": status})
11754 }
11755 "propose_note" => {
11756 let text = args
11757 .get("text")
11758 .and_then(Value::as_str)
11759 .ok_or("text missing")?;
11760 json!({"text": text})
11761 }
11762 "propose_revise_confidence" => {
11763 let new_score = args
11764 .get("new_score")
11765 .and_then(Value::as_f64)
11766 .ok_or("new_score missing")?;
11767 json!({"new_score": new_score})
11768 }
11769 "propose_retract" => json!({}),
11770 _ => unreachable!(),
11771 };
11772 let created_at = args
11773 .get("created_at")
11774 .and_then(Value::as_str)
11775 .map(String::from)
11776 .unwrap_or_else(|| chrono::Utc::now().to_rfc3339());
11777 let mut proposal = proposals::new_proposal(
11778 kind,
11779 StateTarget {
11780 r#type: "finding".to_string(),
11781 id: target_id.to_string(),
11782 },
11783 actor,
11784 "human",
11785 reason,
11786 payload,
11787 Vec::new(),
11788 Vec::new(),
11789 );
11790 proposal.created_at = created_at;
11791 proposal.id = proposals::proposal_id(&proposal);
11792 let _signature = crate::sign::sign_proposal(&proposal, signing_key)?;
11796 let result = proposals::create_or_apply(&action.frontier, proposal, false)
11797 .map_err(|e| format!("create_or_apply: {e}"))?;
11798 Ok(format!("proposal {}", result.proposal_id))
11799 }
11800 "accept_proposal" | "reject_proposal" => {
11801 let proposal_id = args
11802 .get("proposal_id")
11803 .and_then(Value::as_str)
11804 .ok_or("proposal_id missing")?;
11805 let reason = args
11806 .get("reason")
11807 .and_then(Value::as_str)
11808 .ok_or("reason missing")?;
11809 let timestamp = args
11810 .get("timestamp")
11811 .and_then(Value::as_str)
11812 .map(String::from)
11813 .unwrap_or_else(|| chrono::Utc::now().to_rfc3339());
11814 let preimage = json!({
11816 "action": if action.kind == "accept_proposal" { "accept" } else { "reject" },
11817 "proposal_id": proposal_id,
11818 "reviewer_id": actor,
11819 "reason": reason,
11820 "timestamp": timestamp,
11821 });
11822 let bytes = crate::canonical::to_canonical_bytes(&preimage)?;
11823 use ed25519_dalek::Signer;
11824 let _signature = hex::encode(signing_key.sign(&bytes).to_bytes());
11825 if action.kind == "accept_proposal" {
11826 let event_id =
11827 crate::proposals::accept_at_path(&action.frontier, proposal_id, actor, reason)
11828 .map_err(|e| format!("accept_at_path: {e}"))?;
11829 Ok(format!("event {event_id}"))
11830 } else {
11831 crate::proposals::reject_at_path(&action.frontier, proposal_id, actor, reason)
11832 .map_err(|e| format!("reject_at_path: {e}"))?;
11833 Ok(format!("rejected {proposal_id}"))
11834 }
11835 }
11836 other => Err(format!("unsupported queued action kind '{other}'")),
11837 }
11838}
11839
11840fn cmd_entity(action: EntityAction) {
11852 use crate::entity_resolve;
11853 match action {
11854 EntityAction::Resolve {
11855 frontier,
11856 force,
11857 json,
11858 } => {
11859 let mut p = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
11860 let report = entity_resolve::resolve_frontier(&mut p, force);
11861 repo::save_to_path(&frontier, &p).unwrap_or_else(|e| fail_return(&e));
11862 if json {
11863 println!(
11864 "{}",
11865 serde_json::to_string_pretty(&serde_json::json!({
11866 "ok": true,
11867 "command": "entity.resolve",
11868 "frontier_path": frontier.display().to_string(),
11869 "report": report,
11870 }))
11871 .expect("serialize")
11872 );
11873 } else {
11874 println!(
11875 "{} resolved {} of {} entities ({} already, {} unresolved) across {} findings",
11876 style::ok("entity"),
11877 report.resolved,
11878 report.total_entities,
11879 report.already_resolved,
11880 report.unresolved_count,
11881 report.findings_touched,
11882 );
11883 let unresolved_summary: std::collections::BTreeSet<&str> = report
11884 .per_finding
11885 .iter()
11886 .flat_map(|f| f.unresolved.iter().map(String::as_str))
11887 .collect();
11888 if !unresolved_summary.is_empty() {
11889 let take = unresolved_summary.iter().take(8).collect::<Vec<_>>();
11890 println!(
11891 " unresolved (first {}): {}",
11892 take.len(),
11893 take.iter().copied().cloned().collect::<Vec<_>>().join(", ")
11894 );
11895 }
11896 }
11897 }
11898 EntityAction::List { json } => {
11899 let entries: Vec<serde_json::Value> = entity_resolve::iter_bundled()
11900 .map(|(name, etype, source, id)| {
11901 serde_json::json!({
11902 "canonical_name": name,
11903 "entity_type": etype,
11904 "source": source,
11905 "id": id,
11906 })
11907 })
11908 .collect();
11909 if json {
11910 println!(
11911 "{}",
11912 serde_json::to_string_pretty(&serde_json::json!({
11913 "ok": true,
11914 "command": "entity.list",
11915 "count": entries.len(),
11916 "entries": entries,
11917 }))
11918 .expect("serialize")
11919 );
11920 } else {
11921 println!("{} {} bundled entries", style::ok("entity"), entries.len());
11922 for e in &entries {
11923 println!(
11924 " {:32} {:18} {} {}",
11925 e["canonical_name"].as_str().unwrap_or("?"),
11926 e["entity_type"].as_str().unwrap_or("?"),
11927 e["source"].as_str().unwrap_or("?"),
11928 e["id"].as_str().unwrap_or("?"),
11929 );
11930 }
11931 }
11932 }
11933 }
11934}
11935
/// CLI dispatch for `vela link …`. Currently only `link add`, which
/// records a typed link from one finding to a local or cross-frontier
/// target, optionally probing the remote dep for a stale/missing target.
fn cmd_link(action: LinkAction) {
    use crate::bundle::{Link, LinkRef};
    match action {
        LinkAction::Add {
            frontier,
            from,
            to,
            r#type,
            note,
            inferred_by,
            no_check_target,
            json,
        } => {
            // Validate enum-ish CLI arguments before touching the frontier.
            validate_enum_arg("--type", &r#type, bundle::VALID_LINK_TYPES);
            if !["compiler", "reviewer", "author"].contains(&inferred_by.as_str()) {
                fail(&format!(
                    "invalid --inferred-by '{inferred_by}'. Valid: compiler, reviewer, author"
                ));
            }
            // `--to` accepts vf_<hex> (local) or vf_<hex>@vfr_<hex> (cross-frontier).
            let parsed = LinkRef::parse(&to).unwrap_or_else(|e| {
                fail(&format!(
                    "invalid --to '{to}': {e}. Expected vf_<hex> or vf_<hex>@vfr_<hex>"
                ))
            });
            let mut p = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            // The source finding must exist; remember its index so the link
            // can be pushed after the checks below.
            let source_idx = p
                .findings
                .iter()
                .position(|f| f.id == from)
                .unwrap_or_else(|| {
                    fail_return(&format!("--from finding '{from}' not in frontier"))
                });
            // Local targets must already be present in this frontier.
            if let LinkRef::Local { vf_id } = &parsed
                && !p.findings.iter().any(|f| &f.id == vf_id)
            {
                fail(&format!(
                    "local --to target '{vf_id}' not in frontier; add the target finding first"
                ));
            }
            // Cross-frontier targets require a previously declared dep.
            if let LinkRef::Cross { vfr_id, .. } = &parsed
                && p.dep_for_vfr(vfr_id).is_none()
            {
                fail(&format!(
                    "cross-frontier --to references vfr_id '{vfr_id}' but no matching dep is declared. Run `vela frontier add-dep {vfr_id} --locator <url> --snapshot <hash>` first."
                ));
            }

            // Best-effort remote probe: when the dep has an HTTP(S) locator
            // and --no-check-target was not passed, fetch the dep frontier
            // and warn if the target finding is superseded or absent.
            // Network/parse failures are silently ignored — this check is
            // advisory only and must never block the link.
            let mut target_warning: Option<String> = None;
            if let LinkRef::Cross {
                vfr_id: target_vfr,
                vf_id: target_vf,
            } = &parsed
                && !no_check_target
                && let Some(dep) = p.dep_for_vfr(target_vfr)
                && let Some(locator) = dep.locator.as_deref()
                && (locator.starts_with("http://") || locator.starts_with("https://"))
            {
                // 15 s timeout keeps an unreachable dep from hanging the CLI.
                let client = reqwest::blocking::Client::builder()
                    .timeout(std::time::Duration::from_secs(15))
                    .build()
                    .ok();
                if let Some(client) = client
                    && let Ok(resp) = client.get(locator).send()
                    && resp.status().is_success()
                    && let Ok(dep_project) = resp.json::<crate::project::Project>()
                {
                    if let Some(target_finding) =
                        dep_project.findings.iter().find(|f| &f.id == target_vf)
                    {
                        if target_finding.flags.superseded {
                            target_warning = Some(format!(
                                "warn · cross-frontier target '{target_vf}' in '{target_vfr}' has flags.superseded = true. \
You may be linking to outdated wording. Pull --transitive and inspect the supersedes chain to find the current finding. \
Use --no-check-target to skip this check."
                            ));
                        }
                    } else {
                        target_warning = Some(format!(
                            "warn · cross-frontier target '{target_vf}' not found in dep '{target_vfr}' (fetched from {locator}). \
The target may have been removed or never existed in the pinned snapshot."
                        ));
                    }
                }
            }

            // Append the link, refresh derived stats, and persist.
            let now = chrono::Utc::now().to_rfc3339_opts(chrono::SecondsFormat::Secs, true);
            let link = Link {
                target: to.clone(),
                link_type: r#type.clone(),
                note: note.clone(),
                inferred_by: inferred_by.clone(),
                created_at: now,
                mechanism: None,
            };
            p.findings[source_idx].links.push(link);
            project::recompute_stats(&mut p);
            repo::save_to_path(&frontier, &p).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "link.add",
                "frontier": frontier.display().to_string(),
                "from": from,
                "to": to,
                "type": r#type,
                "cross_frontier": parsed.is_cross_frontier(),
            });
            if json {
                // Attach the advisory warning (if any) to the JSON payload.
                let mut p2 = payload.clone();
                if let Some(w) = &target_warning
                    && let serde_json::Value::Object(m) = &mut p2
                {
                    m.insert(
                        "target_warning".to_string(),
                        serde_json::Value::String(w.clone()),
                    );
                }
                println!(
                    "{}",
                    serde_json::to_string_pretty(&p2).expect("failed to serialize link.add")
                );
            } else {
                println!(
                    "{} {} --[{}]--> {}{}",
                    style::ok("link"),
                    from,
                    r#type,
                    to,
                    if parsed.is_cross_frontier() {
                        " (cross-frontier)"
                    } else {
                        ""
                    }
                );
                if let Some(w) = target_warning {
                    println!("    {w}");
                }
            }
        }
    }
}
12081
12082fn cmd_frontier(action: FrontierAction) {
12083 use crate::project::ProjectDependency;
12084 use crate::repo;
12085 match action {
12086 FrontierAction::New {
12087 path,
12088 name,
12089 description,
12090 force,
12091 json,
12092 } => {
12093 if path.exists() && !force {
12094 fail(&format!(
12095 "{} already exists; pass --force to overwrite",
12096 path.display()
12097 ));
12098 }
12099 let now = chrono::Utc::now().to_rfc3339_opts(chrono::SecondsFormat::Secs, true);
12100 let project = project::Project {
12101 vela_version: project::VELA_SCHEMA_VERSION.to_string(),
12102 schema: project::VELA_SCHEMA_URL.to_string(),
12103 frontier_id: None,
12104 project: project::ProjectMeta {
12105 name: name.clone(),
12106 description: description.clone(),
12107 compiled_at: now,
12108 compiler: project::VELA_COMPILER_VERSION.to_string(),
12109 papers_processed: 0,
12110 errors: 0,
12111 dependencies: Vec::new(),
12112 },
12113 stats: project::ProjectStats::default(),
12114 findings: Vec::new(),
12115 sources: Vec::new(),
12116 evidence_atoms: Vec::new(),
12117 condition_records: Vec::new(),
12118 review_events: Vec::new(),
12119 confidence_updates: Vec::new(),
12120 events: Vec::new(),
12121 proposals: Vec::new(),
12122 proof_state: proposals::ProofState::default(),
12123 signatures: Vec::new(),
12124 actors: Vec::new(),
12125 replications: Vec::new(),
12126 datasets: Vec::new(),
12127 code_artifacts: Vec::new(),
12128 artifacts: Vec::new(),
12129 predictions: Vec::new(),
12130 resolutions: Vec::new(),
12131 peers: Vec::new(),
12132 negative_results: Vec::new(),
12133 trajectories: Vec::new(),
12134 };
12135 repo::save_to_path(&path, &project).unwrap_or_else(|e| fail_return(&e));
12136 let payload = json!({
12137 "ok": true,
12138 "command": "frontier.new",
12139 "path": path.display().to_string(),
12140 "name": name,
12141 "schema": project::VELA_SCHEMA_URL,
12142 "vela_version": env!("CARGO_PKG_VERSION"),
12143 "next_steps": [
12144 "vela finding add <path> --assertion '...' --author 'reviewer:you' --apply",
12145 "vela sign generate-keypair --out keys",
12146 "vela actor add <path> reviewer:you --pubkey \"$(cat keys/public.key)\"",
12147 "vela registry publish <path> --owner reviewer:you --key keys/private.key --locator <url> --to https://vela-hub.fly.dev",
12148 ],
12149 });
12150 if json {
12151 println!(
12152 "{}",
12153 serde_json::to_string_pretty(&payload)
12154 .expect("failed to serialize frontier.new")
12155 );
12156 } else {
12157 println!(
12158 "{} scaffolded frontier '{name}' at {}",
12159 style::ok("frontier"),
12160 path.display()
12161 );
12162 println!(" next steps:");
12163 println!(
12164 " 1. vela finding add {} --assertion '...' --author 'reviewer:you' --apply",
12165 path.display()
12166 );
12167 println!(" 2. vela sign generate-keypair --out keys");
12168 println!(
12169 " 3. vela actor add {} reviewer:you --pubkey \"$(cat keys/public.key)\"",
12170 path.display()
12171 );
12172 println!(
12173 " 4. vela registry publish {} --owner reviewer:you --key keys/private.key --locator <url> --to https://vela-hub.fly.dev",
12174 path.display()
12175 );
12176 }
12177 }
12178 FrontierAction::Materialize { frontier, json } => {
12179 let payload = frontier_repo::materialize(&frontier).unwrap_or_else(|e| fail_return(&e));
12180 if json {
12181 println!(
12182 "{}",
12183 serde_json::to_string_pretty(&payload)
12184 .expect("failed to serialize frontier materialize")
12185 );
12186 } else {
12187 println!(
12188 "{} materialized frontier repo at {}",
12189 style::ok("frontier"),
12190 frontier.display()
12191 );
12192 }
12193 }
12194 FrontierAction::AddDep {
12195 frontier,
12196 vfr_id,
12197 locator,
12198 snapshot,
12199 name,
12200 json,
12201 } => {
12202 let mut p = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
12203 if p.project
12204 .dependencies
12205 .iter()
12206 .any(|d| d.vfr_id.as_deref() == Some(&vfr_id))
12207 {
12208 fail(&format!(
12209 "cross-frontier dependency '{vfr_id}' already declared; remove it first via `vela frontier remove-dep`"
12210 ));
12211 }
12212 let dep = ProjectDependency {
12213 name: name.unwrap_or_else(|| vfr_id.clone()),
12214 source: "vela.hub".into(),
12215 version: None,
12216 pinned_hash: None,
12217 vfr_id: Some(vfr_id.clone()),
12218 locator: Some(locator.clone()),
12219 pinned_snapshot_hash: Some(snapshot.clone()),
12220 };
12221 p.project.dependencies.push(dep);
12222 repo::save_to_path(&frontier, &p).unwrap_or_else(|e| fail_return(&e));
12223 let payload = json!({
12224 "ok": true,
12225 "command": "frontier.add-dep",
12226 "frontier": frontier.display().to_string(),
12227 "vfr_id": vfr_id,
12228 "locator": locator,
12229 "pinned_snapshot_hash": snapshot,
12230 "declared_count": p.project.dependencies.len(),
12231 });
12232 if json {
12233 println!(
12234 "{}",
12235 serde_json::to_string_pretty(&payload)
12236 .expect("failed to serialize frontier.add-dep")
12237 );
12238 } else {
12239 println!(
12240 "{} declared cross-frontier dep {vfr_id}",
12241 style::ok("frontier")
12242 );
12243 println!(" locator: {locator}");
12244 println!(" snapshot: {snapshot}");
12245 }
12246 }
12247 FrontierAction::ListDeps { frontier, json } => {
12248 let p = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
12249 let deps: Vec<&ProjectDependency> = p.project.dependencies.iter().collect();
12250 if json {
12251 let payload = json!({
12252 "ok": true,
12253 "command": "frontier.list-deps",
12254 "frontier": frontier.display().to_string(),
12255 "count": deps.len(),
12256 "dependencies": deps,
12257 });
12258 println!(
12259 "{}",
12260 serde_json::to_string_pretty(&payload)
12261 .expect("failed to serialize frontier.list-deps")
12262 );
12263 } else {
12264 println!();
12265 println!(
12266 " {}",
12267 format!("VELA · FRONTIER · LIST-DEPS · {}", frontier.display())
12268 .to_uppercase()
12269 .dimmed()
12270 );
12271 println!(" {}", style::tick_row(60));
12272 if deps.is_empty() {
12273 println!(" (no dependencies declared)");
12274 } else {
12275 for d in &deps {
12276 let kind = if d.is_cross_frontier() {
12277 "cross-frontier"
12278 } else {
12279 "compile-time"
12280 };
12281 println!(" · {} [{kind}]", d.name);
12282 if let Some(v) = &d.vfr_id {
12283 println!(" vfr_id: {v}");
12284 }
12285 if let Some(l) = &d.locator {
12286 println!(" locator: {l}");
12287 }
12288 if let Some(s) = &d.pinned_snapshot_hash {
12289 println!(" snapshot: {s}");
12290 }
12291 }
12292 }
12293 }
12294 }
12295 FrontierAction::RemoveDep {
12296 frontier,
12297 vfr_id,
12298 json,
12299 } => {
12300 let mut p = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
12301 for f in &p.findings {
12303 for l in &f.links {
12304 if let Ok(crate::bundle::LinkRef::Cross { vfr_id: ref v, .. }) =
12305 crate::bundle::LinkRef::parse(&l.target)
12306 && v == &vfr_id
12307 {
12308 fail(&format!(
12309 "cannot remove dep '{vfr_id}': finding {} still links to it via {}",
12310 f.id, l.target
12311 ));
12312 }
12313 }
12314 }
12315 let before = p.project.dependencies.len();
12316 p.project
12317 .dependencies
12318 .retain(|d| d.vfr_id.as_deref() != Some(&vfr_id));
12319 let removed = before - p.project.dependencies.len();
12320 if removed == 0 {
12321 fail(&format!("no cross-frontier dependency '{vfr_id}' found"));
12322 }
12323 repo::save_to_path(&frontier, &p).unwrap_or_else(|e| fail_return(&e));
12324 let payload = json!({
12325 "ok": true,
12326 "command": "frontier.remove-dep",
12327 "frontier": frontier.display().to_string(),
12328 "vfr_id": vfr_id,
12329 "removed": removed,
12330 });
12331 if json {
12332 println!(
12333 "{}",
12334 serde_json::to_string_pretty(&payload)
12335 .expect("failed to serialize frontier.remove-dep")
12336 );
12337 } else {
12338 println!(
12339 "{} removed cross-frontier dep {vfr_id}",
12340 style::ok("frontier")
12341 );
12342 }
12343 }
12344 FrontierAction::RefreshDeps {
12345 frontier,
12346 from,
12347 dry_run,
12348 json,
12349 } => {
12350 let mut p = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
12351 let cross_deps: Vec<String> = p
12352 .project
12353 .dependencies
12354 .iter()
12355 .filter_map(|d| d.vfr_id.clone())
12356 .collect();
12357 if cross_deps.is_empty() {
12358 if json {
12359 println!(
12360 "{}",
12361 serde_json::to_string_pretty(&json!({
12362 "ok": true,
12363 "command": "frontier.refresh-deps",
12364 "frontier": frontier.display().to_string(),
12365 "from": from,
12366 "dry_run": dry_run,
12367 "deps": [],
12368 "summary": { "total": 0, "refreshed": 0, "unchanged": 0, "missing": 0, "unreachable": 0 },
12369 })).expect("serialize")
12370 );
12371 } else {
12372 println!(
12373 "{} no cross-frontier deps declared in {}",
12374 style::ok("frontier"),
12375 frontier.display()
12376 );
12377 }
12378 return;
12379 }
12380 let client = reqwest::blocking::Client::builder()
12381 .timeout(std::time::Duration::from_secs(20))
12382 .build()
12383 .unwrap_or_else(|e| fail_return(&format!("http client init failed: {e}")));
12384 let base = from.trim_end_matches('/');
12385 #[derive(serde::Deserialize)]
12386 struct HubEntry {
12387 latest_snapshot_hash: String,
12388 }
12389 let mut per_dep: Vec<serde_json::Value> = Vec::new();
12390 let (mut refreshed, mut unchanged, mut missing, mut unreachable) =
12391 (0u32, 0u32, 0u32, 0u32);
12392 for vfr in &cross_deps {
12393 let url = format!("{base}/entries/{vfr}");
12394 let resp = client.get(&url).send();
12395 let outcome = match resp {
12396 Ok(r) if r.status().as_u16() == 404 => {
12397 missing += 1;
12398 json!({ "vfr_id": vfr, "status": "missing", "url": url })
12399 }
12400 Ok(r) if !r.status().is_success() => {
12401 unreachable += 1;
12402 json!({ "vfr_id": vfr, "status": "unreachable", "http_status": r.status().as_u16() })
12403 }
12404 Err(e) => {
12405 unreachable += 1;
12406 json!({ "vfr_id": vfr, "status": "unreachable", "error": e.to_string() })
12407 }
12408 Ok(r) => match r.json::<HubEntry>() {
12409 Err(e) => {
12410 unreachable += 1;
12411 json!({ "vfr_id": vfr, "status": "unreachable", "error": format!("invalid hub response: {e}") })
12412 }
12413 Ok(entry) => {
12414 match p
12416 .project
12417 .dependencies
12418 .iter()
12419 .position(|d| d.vfr_id.as_deref() == Some(vfr.as_str()))
12420 {
12421 None => {
12422 unreachable += 1;
12423 json!({ "vfr_id": vfr, "status": "unreachable", "error": "dep disappeared mid-scan" })
12424 }
12425 Some(idx) => {
12426 let local_pin =
12427 p.project.dependencies[idx].pinned_snapshot_hash.clone();
12428 let new_pin = entry.latest_snapshot_hash;
12429 if local_pin.as_deref() == Some(new_pin.as_str()) {
12430 unchanged += 1;
12431 json!({ "vfr_id": vfr, "status": "unchanged", "snapshot": new_pin })
12432 } else {
12433 if !dry_run {
12434 p.project.dependencies[idx].pinned_snapshot_hash =
12435 Some(new_pin.clone());
12436 }
12437 refreshed += 1;
12438 json!({
12439 "vfr_id": vfr,
12440 "status": "refreshed",
12441 "old_snapshot": local_pin,
12442 "new_snapshot": new_pin,
12443 })
12444 }
12445 }
12446 }
12447 }
12448 },
12449 };
12450 per_dep.push(outcome);
12451 }
12452 if !dry_run && refreshed > 0 {
12453 repo::save_to_path(&frontier, &p).unwrap_or_else(|e| fail_return(&e));
12454 }
12455 let payload = json!({
12456 "ok": true,
12457 "command": "frontier.refresh-deps",
12458 "frontier": frontier.display().to_string(),
12459 "from": from,
12460 "dry_run": dry_run,
12461 "deps": per_dep,
12462 "summary": {
12463 "total": cross_deps.len(),
12464 "refreshed": refreshed,
12465 "unchanged": unchanged,
12466 "missing": missing,
12467 "unreachable": unreachable,
12468 },
12469 });
12470 if json {
12471 println!(
12472 "{}",
12473 serde_json::to_string_pretty(&payload)
12474 .expect("failed to serialize frontier.refresh-deps")
12475 );
12476 } else {
12477 let mode = if dry_run { " (dry-run)" } else { "" };
12478 println!(
12479 "{} refresh-deps{mode} · {} total · {refreshed} refreshed · {unchanged} unchanged · {missing} missing · {unreachable} unreachable",
12480 style::ok("frontier"),
12481 cross_deps.len()
12482 );
12483 for d in &per_dep {
12484 let vfr = d["vfr_id"].as_str().unwrap_or("?");
12485 let status = d["status"].as_str().unwrap_or("?");
12486 match status {
12487 "refreshed" => println!(
12488 " {vfr} refreshed {} → {}",
12489 d["old_snapshot"]
12490 .as_str()
12491 .unwrap_or("(none)")
12492 .chars()
12493 .take(16)
12494 .collect::<String>(),
12495 d["new_snapshot"]
12496 .as_str()
12497 .unwrap_or("?")
12498 .chars()
12499 .take(16)
12500 .collect::<String>(),
12501 ),
12502 "unchanged" => println!(" {vfr} unchanged"),
12503 "missing" => println!(" {vfr} missing on hub"),
12504 _ => println!(" {vfr} unreachable"),
12505 }
12506 }
12507 }
12508 }
12509 FrontierAction::Diff {
12510 frontier,
12511 since,
12512 week,
12513 json,
12514 } => cmd_frontier_diff(&frontier, since.as_deref(), week.as_deref(), json),
12515 }
12516}
12517
12518fn cmd_repo(action: RepoAction) {
12519 match action {
12520 RepoAction::Status { frontier, json } => {
12521 let payload = frontier_repo::repo_status(&frontier).unwrap_or_else(|e| fail_return(&e));
12522 if json {
12523 println!(
12524 "{}",
12525 serde_json::to_string_pretty(&payload)
12526 .expect("failed to serialize repo status")
12527 );
12528 } else {
12529 let summary = payload.get("summary").unwrap_or(&Value::Null);
12530 let freshness = payload.get("freshness").unwrap_or(&Value::Null);
12531 println!("vela repo status");
12532 println!(" frontier: {}", frontier.display());
12533 println!(
12534 " events: {}",
12535 summary
12536 .get("accepted_events")
12537 .and_then(Value::as_u64)
12538 .unwrap_or_default()
12539 );
12540 println!(
12541 " open proposals: {}",
12542 summary
12543 .get("open_proposals")
12544 .and_then(Value::as_u64)
12545 .unwrap_or_default()
12546 );
12547 println!(
12548 " state: {}",
12549 freshness
12550 .get("materialized_state")
12551 .and_then(Value::as_str)
12552 .unwrap_or("unknown")
12553 );
12554 println!(
12555 " proof: {}",
12556 freshness
12557 .get("proof")
12558 .and_then(Value::as_str)
12559 .unwrap_or("unknown")
12560 );
12561 }
12562 }
12563 RepoAction::Doctor { frontier, json } => {
12564 let payload = frontier_repo::repo_doctor(&frontier).unwrap_or_else(|e| fail_return(&e));
12565 if json {
12566 println!(
12567 "{}",
12568 serde_json::to_string_pretty(&payload)
12569 .expect("failed to serialize repo doctor")
12570 );
12571 } else {
12572 let ok = payload.get("ok").and_then(Value::as_bool) == Some(true);
12573 let issues = payload
12574 .get("issues")
12575 .and_then(Value::as_array)
12576 .map_or(0, Vec::len);
12577 println!("vela repo doctor");
12578 println!(" frontier: {}", frontier.display());
12579 println!(" status: {}", if ok { "ok" } else { "needs attention" });
12580 println!(" issues: {issues}");
12581 }
12582 }
12583 }
12584}
12585
12586fn cmd_proof_verify(frontier: &Path, json_output: bool) {
12587 let payload = frontier_repo::proof_verify(frontier).unwrap_or_else(|e| fail_return(&e));
12588 if json_output {
12589 println!(
12590 "{}",
12591 serde_json::to_string_pretty(&payload).expect("failed to serialize proof verify")
12592 );
12593 if payload.get("ok").and_then(Value::as_bool) != Some(true) {
12594 std::process::exit(1);
12595 }
12596 } else {
12597 let ok = payload.get("ok").and_then(Value::as_bool) == Some(true);
12598 println!("vela proof verify");
12599 println!(" frontier: {}", frontier.display());
12600 println!(" status: {}", if ok { "ok" } else { "failed" });
12601 if let Some(issues) = payload.get("issues").and_then(Value::as_array) {
12602 for issue in issues {
12603 if let Some(message) = issue.get("message").and_then(Value::as_str) {
12604 println!(" issue: {message}");
12605 }
12606 }
12607 }
12608 if !ok {
12609 std::process::exit(1);
12610 }
12611 }
12612}
12613
12614fn cmd_proof_explain(frontier: &Path) {
12615 let text = frontier_repo::proof_explain(frontier).unwrap_or_else(|e| fail_return(&e));
12616 print!("{text}");
12617}
12618
/// `vela frontier diff`: report findings added/updated within a time window.
///
/// The window is either `--since <rfc3339>` .. now, or an ISO `--week`
/// (defaulting to the current week). Emits a JSON payload with `--json`,
/// otherwise a human-readable summary.
fn cmd_frontier_diff(frontier: &Path, since: Option<&str>, week: Option<&str>, json: bool) {
    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));

    let now = chrono::Utc::now();
    // Resolve the half-open [start, end) window. `--since` takes precedence
    // over `--week`; the week label is only kept in week mode (for display
    // and the JSON `iso_week` field).
    let (window_start, window_end, week_label): (
        chrono::DateTime<chrono::Utc>,
        chrono::DateTime<chrono::Utc>,
        Option<String>,
    ) = if let Some(s) = since {
        let parsed = chrono::DateTime::parse_from_rfc3339(s)
            .map(|d| d.with_timezone(&chrono::Utc))
            .unwrap_or_else(|e| fail_return(&format!("invalid --since timestamp '{s}': {e}")));
        (parsed, now, None)
    } else {
        let key = week
            .map(str::to_owned)
            .unwrap_or_else(|| iso_week_key_for(now.date_naive()));
        let (start, end) = iso_week_bounds(&key)
            .unwrap_or_else(|e| fail_return(&format!("invalid --week '{key}': {e}")));
        (start, end, Some(key))
    };

    // Bucket findings: created in-window ("added"), merely touched in-window
    // ("updated"), plus a running count of everything created before the
    // window's end ("cumulative").
    let mut added: Vec<&crate::bundle::FindingBundle> = Vec::new();
    let mut updated: Vec<&crate::bundle::FindingBundle> = Vec::new();
    let mut new_contradictions: Vec<&crate::bundle::FindingBundle> = Vec::new();
    let mut cumulative: usize = 0;

    for f in &project.findings {
        // Unparseable timestamps are silently treated as outside the window.
        let created = chrono::DateTime::parse_from_rfc3339(&f.created)
            .map(|d| d.with_timezone(&chrono::Utc))
            .ok();
        let updated_ts = f
            .updated
            .as_deref()
            .and_then(|u| chrono::DateTime::parse_from_rfc3339(u).ok())
            .map(|d| d.with_timezone(&chrono::Utc));

        if let Some(c) = created
            && c < window_end
        {
            cumulative += 1;
        }

        if let Some(c) = created
            && c >= window_start
            && c < window_end
        {
            added.push(f);
            // Contested findings and explicit "tension" assertions count as
            // new contradictions for this window.
            let is_tension = f.flags.contested || f.assertion.assertion_type == "tension";
            if is_tension {
                new_contradictions.push(f);
            }
            // A finding added in-window is never also reported as "updated".
            continue;
        }
        if let Some(u) = updated_ts
            && u >= window_start
            && u < window_end
        {
            updated.push(f);
        }
    }

    // Compact JSON projection shared by all three finding lists.
    let summary_for = |list: &[&crate::bundle::FindingBundle]| -> Vec<serde_json::Value> {
        list.iter()
            .map(|f| {
                json!({
                    "id": f.id,
                    "assertion": f.assertion.text,
                    "evidence_type": f.evidence.evidence_type,
                    "confidence": f.confidence.score,
                    "doi": f.provenance.doi,
                    "pmid": f.provenance.pmid,
                })
            })
            .collect()
    };

    let payload = json!({
        "ok": true,
        "command": "frontier.diff",
        "frontier": frontier.display().to_string(),
        "frontier_id": project.frontier_id,
        "window": {
            "start": window_start.to_rfc3339_opts(chrono::SecondsFormat::Secs, true),
            "end": window_end.to_rfc3339_opts(chrono::SecondsFormat::Secs, true),
            "iso_week": week_label,
        },
        "totals": {
            "added": added.len(),
            "updated": updated.len(),
            "new_contradictions": new_contradictions.len(),
            "cumulative_claims": cumulative,
        },
        "added": summary_for(&added),
        "updated": summary_for(&updated),
        "new_contradictions": summary_for(&new_contradictions),
    });

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&payload).expect("failed to serialize frontier.diff")
        );
        return;
    }

    // Human-readable rendering below.
    let label = week_label
        .clone()
        .unwrap_or_else(|| format!("since {}", window_start.format("%Y-%m-%d %H:%M UTC")));
    println!();
    println!(
        " {}",
        format!("VELA · FRONTIER · DIFF · {label}")
            .to_uppercase()
            .dimmed()
    );
    println!(" {}", style::tick_row(60));
    println!(
        " range: {} → {}",
        window_start.format("%Y-%m-%d %H:%M"),
        window_end.format("%Y-%m-%d %H:%M")
    );
    println!(" added: {}", added.len());
    println!(" updated: {}", updated.len());
    println!(" contradictions: {}", new_contradictions.len());
    println!(" cumulative: {cumulative}");
    if added.is_empty() && updated.is_empty() {
        println!();
        println!(" (quiet window — no findings added or updated)");
    } else {
        println!();
        println!(" added:");
        for f in &added {
            println!(
                " · {} {}",
                f.id.dimmed(),
                truncate(&f.assertion.text, 88)
            );
        }
        if !updated.is_empty() {
            println!();
            println!(" updated:");
            for f in &updated {
                println!(
                    " · {} {}",
                    f.id.dimmed(),
                    truncate(&f.assertion.text, 88)
                );
            }
        }
    }
}
12782
/// Truncate `s` to at most `n` characters (Unicode scalar values, not bytes),
/// appending `…` when anything was cut off.
///
/// The ellipsis counts toward the budget, so the result never exceeds `n`
/// characters. Fix over the previous version: a budget of `n == 0` now yields
/// an empty string instead of a lone `…` (which was 1 char, over budget).
fn truncate(s: &str, n: usize) -> String {
    if s.chars().count() <= n {
        return s.to_string();
    }
    if n == 0 {
        return String::new();
    }
    // Keep n-1 chars and spend the last slot on the ellipsis.
    let mut out: String = s.chars().take(n - 1).collect();
    out.push('…');
    out
}
12792
12793fn iso_week_key_for(d: chrono::NaiveDate) -> String {
12795 use chrono::Datelike;
12796 let iso = d.iso_week();
12797 format!("{:04}-W{:02}", iso.year(), iso.week())
12798}
12799
12800fn iso_week_bounds(
12803 key: &str,
12804) -> Result<(chrono::DateTime<chrono::Utc>, chrono::DateTime<chrono::Utc>), String> {
12805 let (year_str, week_str) = key
12806 .split_once("-W")
12807 .ok_or_else(|| format!("expected YYYY-Www, got '{key}'"))?;
12808 let year: i32 = year_str
12809 .parse()
12810 .map_err(|e| format!("bad year in '{key}': {e}"))?;
12811 let week: u32 = week_str
12812 .parse()
12813 .map_err(|e| format!("bad week in '{key}': {e}"))?;
12814 let monday = chrono::NaiveDate::from_isoywd_opt(year, week, chrono::Weekday::Mon)
12815 .ok_or_else(|| format!("invalid ISO week: {key}"))?;
12816 let next_monday = monday + chrono::Duration::days(7);
12817 let start = monday.and_hms_opt(0, 0, 0).expect("00:00 valid").and_utc();
12818 let end = next_monday
12819 .and_hms_opt(0, 0, 0)
12820 .expect("00:00 valid")
12821 .and_utc();
12822 Ok((start, end))
12823}
12824
/// Handle the `vela registry` subcommands: `depends-on`, `mirror`, `list`,
/// `publish`, and `pull`.
///
/// Local registries default to `$HOME/.vela/registry/entries.json`; remote
/// ones are addressed by HTTP(S) base URL. Errors abort via `fail`/`fail_return`.
fn cmd_registry(action: RegistryAction) {
    use crate::registry;
    // Default on-disk registry location (falls back to "." if HOME is unset).
    let default_registry = || -> PathBuf {
        let home = std::env::var("HOME").unwrap_or_else(|_| ".".to_string());
        PathBuf::from(home)
            .join(".vela")
            .join("registry")
            .join("entries.json")
    };
    match action {
        // Query a hub for the reverse dependencies of `vfr_id`.
        RegistryAction::DependsOn { vfr_id, from, json } => {
            let base = from.trim_end_matches('/');
            let url = format!("{base}/entries/{vfr_id}/depends-on");
            let client = reqwest::blocking::Client::builder()
                .timeout(std::time::Duration::from_secs(30))
                .build()
                .unwrap_or_else(|e| fail_return(&format!("http client init: {e}")));
            let resp = client
                .get(&url)
                .send()
                .unwrap_or_else(|e| fail_return(&format!("GET {url}: {e}")));
            if !resp.status().is_success() {
                fail(&format!("GET {url}: HTTP {}", resp.status()));
            }
            let body: serde_json::Value = resp
                .json()
                .unwrap_or_else(|e| fail_return(&format!("parse response: {e}")));
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&body).expect("serialize")
                );
            } else {
                // Missing/odd "dependents" field degrades to an empty list.
                let dependents = body
                    .get("dependents")
                    .and_then(|v| v.as_array())
                    .cloned()
                    .unwrap_or_default();
                let count = dependents.len();
                println!(
                    "{} {count} {} on {vfr_id}",
                    style::ok("registry"),
                    if count == 1 {
                        "frontier depends"
                    } else {
                        "frontiers depend"
                    },
                );
                for e in &dependents {
                    // Unknown fields render as "?" rather than aborting.
                    let v = e.get("vfr_id").and_then(|v| v.as_str()).unwrap_or("?");
                    let n = e.get("name").and_then(|v| v.as_str()).unwrap_or("?");
                    let o = e
                        .get("owner_actor_id")
                        .and_then(|v| v.as_str())
                        .unwrap_or("?");
                    println!(" {v} {n} ({o})");
                }
            }
        }
        // Copy a registry entry from one hub (`from`) to another (`to`),
        // re-posting the signed entry verbatim.
        RegistryAction::Mirror {
            vfr_id,
            from,
            to,
            json,
        } => {
            let src_base = from.trim_end_matches('/');
            let dst_base = to.trim_end_matches('/');
            let src_url = format!("{src_base}/entries/{vfr_id}");
            let dst_url = format!("{dst_base}/entries");
            let client = reqwest::blocking::Client::builder()
                .timeout(std::time::Duration::from_secs(30))
                .build()
                .unwrap_or_else(|e| fail_return(&format!("http client init: {e}")));

            // Fetch the source entry as raw JSON (signature stays intact).
            let entry: serde_json::Value = client
                .get(&src_url)
                .send()
                .unwrap_or_else(|e| fail_return(&format!("GET {src_url}: {e}")))
                .error_for_status()
                .unwrap_or_else(|e| fail_return(&format!("GET {src_url}: {e}")))
                .json()
                .unwrap_or_else(|e| fail_return(&format!("parse {src_url}: {e}")));

            let resp = client
                .post(&dst_url)
                .header("content-type", "application/json")
                .body(
                    serde_json::to_vec(&entry)
                        .unwrap_or_else(|e| fail_return(&format!("serialize: {e}"))),
                )
                .send()
                .unwrap_or_else(|e| fail_return(&format!("POST {dst_url}: {e}")));
            let status = resp.status();
            if !status.is_success() {
                // Include a bounded slice of the response body for diagnosis.
                let body = resp.text().unwrap_or_default();
                fail(&format!(
                    "POST {dst_url}: HTTP {status}: {}",
                    body.chars().take(300).collect::<String>()
                ));
            }
            let body: serde_json::Value = resp
                .json()
                .unwrap_or_else(|e| fail_return(&format!("parse POST response: {e}")));
            // Destination reports whether this signature was already known.
            let duplicate = body
                .get("duplicate")
                .and_then(serde_json::Value::as_bool)
                .unwrap_or(false);
            let payload = json!({
                "ok": true,
                "command": "registry.mirror",
                "vfr_id": vfr_id,
                "from": src_base,
                "to": dst_base,
                "duplicate_on_destination": duplicate,
                "destination_response": body,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload).expect("serialize")
                );
            } else {
                println!(
                    "{} mirrored {vfr_id} from {src_base} → {dst_base}{}",
                    style::ok("registry"),
                    if duplicate {
                        " (duplicate; signature already known)"
                    } else {
                        " (fresh insert)"
                    }
                );
            }
        }
        // List entries from a remote hub, an explicit local path, or the
        // default local registry.
        RegistryAction::List { from, json } => {
            let (label, registry_data) = match &from {
                Some(loc) if loc.starts_with("http") => (
                    loc.clone(),
                    registry::load_any(loc).unwrap_or_else(|e| fail_return(&e)),
                ),
                Some(loc) => {
                    let p = registry::resolve_local(loc).unwrap_or_else(|e| fail_return(&e));
                    (
                        p.display().to_string(),
                        registry::load_local(&p).unwrap_or_else(|e| fail_return(&e)),
                    )
                }
                None => {
                    let p = default_registry();
                    (
                        p.display().to_string(),
                        registry::load_local(&p).unwrap_or_else(|e| fail_return(&e)),
                    )
                }
            };
            let r = registry_data;
            let path_label = label;
            if json {
                let payload = json!({
                    "ok": true,
                    "command": "registry.list",
                    "registry": path_label,
                    "entry_count": r.entries.len(),
                    "entries": r.entries,
                });
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize registry.list")
                );
            } else {
                println!();
                println!(
                    " {}",
                    format!("VELA · REGISTRY · LIST · {}", path_label)
                        .to_uppercase()
                        .dimmed()
                );
                println!(" {}", style::tick_row(60));
                if r.entries.is_empty() {
                    println!(" (registry is empty)");
                } else {
                    for entry in &r.entries {
                        println!(
                            " {} {} ({}) by {} published {}",
                            entry.vfr_id,
                            entry.name,
                            entry.network_locator,
                            entry.owner_actor_id,
                            entry.signed_publish_at
                        );
                    }
                }
            }
        }
        // Sign and publish the frontier's current snapshot/event-log hashes
        // as a registry entry, locally or to a remote hub.
        RegistryAction::Publish {
            frontier,
            owner,
            key,
            locator,
            to,
            json,
        } => {
            // The signing key is a hex-encoded ed25519 private key on disk.
            let key_hex = std::fs::read_to_string(&key)
                .map(|s| s.trim().to_string())
                .unwrap_or_else(|e| fail_return(&format!("read key {}: {e}", key.display())));
            let signing_key = parse_signing_key(&key_hex);
            let derived = hex::encode(signing_key.verifying_key().to_bytes());

            let mut frontier_data =
                repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));

            // Resolve the owner's registered pubkey; if the actor is unknown,
            // auto-register them with the pubkey derived from the signing key
            // and persist the frontier.
            let pubkey = match frontier_data.actors.iter().find(|actor| actor.id == owner) {
                Some(actor) => actor.public_key.clone(),
                None => {
                    eprintln!(
                        " vela registry publish · auto-registering actor {owner} (derived pubkey {})",
                        &derived[..16]
                    );
                    frontier_data.actors.push(sign::ActorRecord {
                        id: owner.clone(),
                        public_key: derived.clone(),
                        algorithm: "ed25519".to_string(),
                        created_at: chrono::Utc::now().to_rfc3339(),
                        tier: None,
                        orcid: None,
                        access_clearance: None,
                    });
                    repo::save_to_path(&frontier, &frontier_data)
                        .unwrap_or_else(|e| fail_return(&format!("save actor: {e}")));
                    derived.clone()
                }
            };

            let snapshot_hash = events::snapshot_hash(&frontier_data);
            let event_log_hash = events::event_log_hash(&frontier_data.events);
            let vfr_id = frontier_data.frontier_id();
            let name = frontier_data.project.name.clone();

            // Refuse to publish when the provided private key cannot have
            // produced the owner's registered public key.
            if derived != pubkey {
                fail(&format!(
                    "private key does not match registered pubkey for owner '{owner}'"
                ));
            }

            let to_is_remote = matches!(
                to.as_deref(),
                Some(loc) if loc.starts_with("http://") || loc.starts_with("https://")
            );
            // Without an explicit --locator, derive one from the hub URL for
            // remote publishes; local publishes must supply it.
            let resolved_locator = match locator {
                Some(l) => l,
                None => {
                    if to_is_remote {
                        let hub = to.as_deref().unwrap().trim_end_matches('/');
                        let hub_root = hub.trim_end_matches("/entries");
                        format!("{hub_root}/entries/{vfr_id}/snapshot")
                    } else {
                        fail_return(
                            "--locator is required for local publishes; pass e.g. \
                             --locator file:///path/to/frontier.json or an HTTPS URL.",
                        )
                    }
                }
            };

            // Build the entry with an empty signature, then sign it in place.
            let mut entry = registry::RegistryEntry {
                schema: registry::ENTRY_SCHEMA.to_string(),
                vfr_id: vfr_id.clone(),
                name: name.clone(),
                owner_actor_id: owner.clone(),
                owner_pubkey: pubkey,
                latest_snapshot_hash: snapshot_hash,
                latest_event_log_hash: event_log_hash,
                network_locator: resolved_locator,
                signed_publish_at: chrono::Utc::now().to_rfc3339(),
                signature: String::new(),
            };
            entry.signature =
                registry::sign_entry(&entry, &signing_key).unwrap_or_else(|e| fail_return(&e));

            let (registry_label, duplicate) = if to_is_remote {
                let hub_url = to.clone().unwrap();
                let resp = registry::publish_remote(&entry, &hub_url, Some(&frontier_data))
                    .unwrap_or_else(|e| fail_return(&e));
                (hub_url, resp.duplicate)
            } else {
                let registry_path = match &to {
                    Some(loc) => registry::resolve_local(loc).unwrap_or_else(|e| fail_return(&e)),
                    None => default_registry(),
                };
                registry::publish_entry(&registry_path, entry.clone())
                    .unwrap_or_else(|e| fail_return(&e));
                // Local publishes never report duplicates.
                (registry_path.display().to_string(), false)
            };

            let payload = json!({
                "ok": true,
                "command": "registry.publish",
                "registry": registry_label,
                "vfr_id": vfr_id,
                "name": name,
                "owner": owner,
                "snapshot_hash": entry.latest_snapshot_hash,
                "event_log_hash": entry.latest_event_log_hash,
                "signed_publish_at": entry.signed_publish_at,
                "signature": entry.signature,
                "duplicate": duplicate,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize registry.publish")
                );
            } else {
                let dup_suffix = if duplicate { " (duplicate, no-op)" } else { "" };
                println!(
                    "{} published {vfr_id} → {}{}",
                    style::ok("registry"),
                    registry_label,
                    dup_suffix
                );
                println!(" snapshot: {}", entry.latest_snapshot_hash);
                println!(" event_log: {}", entry.latest_event_log_hash);
                println!(" signature: {}…", &entry.signature[..16]);
            }
        }
        // Fetch a frontier by registry entry and verify its hashes, either
        // as a single file (`out`) or transitively into a directory.
        RegistryAction::Pull {
            vfr_id,
            from,
            out,
            transitive,
            depth,
            json,
        } => {
            let (registry_label, registry_data) = match &from {
                Some(loc) if loc.starts_with("http") => (
                    loc.clone(),
                    registry::load_any(loc).unwrap_or_else(|e| fail_return(&e)),
                ),
                Some(loc) => {
                    let p = registry::resolve_local(loc).unwrap_or_else(|e| fail_return(&e));
                    (
                        p.display().to_string(),
                        registry::load_local(&p).unwrap_or_else(|e| fail_return(&e)),
                    )
                }
                None => {
                    let p = default_registry();
                    (
                        p.display().to_string(),
                        registry::load_local(&p).unwrap_or_else(|e| fail_return(&e)),
                    )
                }
            };
            let entry = registry::find_latest(&registry_data, &vfr_id)
                .unwrap_or_else(|| fail_return(&format!("{vfr_id} not found in registry")));

            if transitive {
                // Pull the frontier plus its cross-frontier dependency
                // closure (bounded by `depth`) into the `out` directory.
                let result = registry::pull_transitive(&registry_data, &vfr_id, &out, depth)
                    .unwrap_or_else(|e| fail_return(&format!("transitive pull failed: {e}")));

                // Map vfr_id -> on-disk path for the JSON payload.
                let dep_paths_json: serde_json::Value = serde_json::Value::Object(
                    result
                        .deps
                        .iter()
                        .map(|(k, v)| (k.clone(), serde_json::json!(v.display().to_string())))
                        .collect(),
                );
                let payload = json!({
                    "ok": true,
                    "command": "registry.pull",
                    "registry": registry_label,
                    "vfr_id": vfr_id,
                    "transitive": true,
                    "depth": depth,
                    "out_dir": out.display().to_string(),
                    "primary": result.primary_path.display().to_string(),
                    "verified": result.verified,
                    "deps": dep_paths_json,
                });
                if json {
                    println!(
                        "{}",
                        serde_json::to_string_pretty(&payload)
                            .expect("failed to serialize registry.pull")
                    );
                } else {
                    println!(
                        "{} pulled {vfr_id} (transitive) → {}",
                        style::ok("registry"),
                        out.display()
                    );
                    println!(" verified {} frontier(s):", result.verified.len());
                    for v in &result.verified {
                        println!(" · {v}");
                    }
                    println!(" every cross-frontier dependency's pinned snapshot hash matched");
                }
                return;
            }

            // Single-frontier pull: fetch, then verify; a failed verification
            // removes the partially-written file before aborting.
            registry::fetch_frontier_to_prefer_event_hub(&entry, from.as_deref(), &out)
                .unwrap_or_else(|e| fail_return(&format!("fetch frontier: {e}")));
            registry::verify_pull(&entry, &out).unwrap_or_else(|e| {
                let _ = std::fs::remove_file(&out);
                fail_return(&format!("pull verification failed: {e}"))
            });

            let payload = json!({
                "ok": true,
                "command": "registry.pull",
                "registry": registry_label,
                "vfr_id": vfr_id,
                "out": out.display().to_string(),
                "snapshot_hash": entry.latest_snapshot_hash,
                "event_log_hash": entry.latest_event_log_hash,
                "verified": true,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize registry.pull")
                );
            } else {
                println!(
                    "{} pulled {vfr_id} → {}",
                    style::ok("registry"),
                    out.display()
                );
                println!(" verified snapshot+event_log hashes match registry; signature ok");
            }
        }
    }
}
13298
/// Emit the `stats` command's JSON payload for the frontier file at `path`.
///
/// The payload embeds a sha256 of the on-disk file so consumers can tie the
/// stats to the exact input they were computed from.
fn print_stats_json(path: &Path) {
    let frontier = load_frontier_or_fail(path);
    let source_hash = hash_path_or_fail(path);
    let payload = json!({
        "ok": true,
        "command": "stats",
        "schema_version": project::VELA_SCHEMA_VERSION,
        "frontier": {
            "name": &frontier.project.name,
            "description": &frontier.project.description,
            "source": path.display().to_string(),
            "hash": format!("sha256:{source_hash}"),
            "compiled_at": &frontier.project.compiled_at,
            "compiler": &frontier.project.compiler,
            "papers_processed": frontier.project.papers_processed,
            "errors": frontier.project.errors,
        },
        "stats": frontier.stats,
        "proposals": proposals::summary(&frontier),
        "proof_state": frontier.proof_state,
    });
    println!(
        "{}",
        serde_json::to_string_pretty(&payload).expect("failed to serialize stats")
    );
}
13325
13326fn cmd_search(
13327 source: Option<&Path>,
13328 query: &str,
13329 entity: Option<&str>,
13330 assertion_type: Option<&str>,
13331 all: Option<&Path>,
13332 limit: usize,
13333 json_output: bool,
13334) {
13335 if let Some(dir) = all {
13336 search::run_all(dir, query, entity, assertion_type, limit);
13337 return;
13338 }
13339 let Some(src) = source else {
13340 fail("Provide --source <frontier> or --all <directory>.");
13341 };
13342 if json_output {
13343 let results = search::search(src, query, entity, assertion_type, limit);
13344 let loaded = load_frontier_or_fail(src);
13345 let source_hash = hash_path_or_fail(src);
13346 let payload = json!({
13347 "ok": true,
13348 "command": "search",
13349 "schema_version": project::VELA_SCHEMA_VERSION,
13350 "query": query,
13351 "frontier": {
13352 "name": &loaded.project.name,
13353 "source": src.display().to_string(),
13354 "hash": format!("sha256:{source_hash}"),
13355 },
13356 "filters": {
13357 "entity": entity,
13358 "assertion_type": assertion_type,
13359 "limit": limit,
13360 },
13361 "count": results.len(),
13362 "results": results.iter().map(|result| json!({
13363 "id": &result.id,
13364 "score": result.score,
13365 "assertion": &result.assertion,
13366 "assertion_type": &result.assertion_type,
13367 "confidence": result.confidence,
13368 "entities": &result.entities,
13369 "doi": &result.doi,
13370 })).collect::<Vec<_>>()
13371 });
13372 println!(
13373 "{}",
13374 serde_json::to_string_pretty(&payload).expect("failed to serialize search results")
13375 );
13376 } else {
13377 search::run(src, query, entity, assertion_type, limit);
13378 }
13379}
13380
/// `vela tensions`: analyze contradiction pairs in a frontier.
///
/// `both_high` / `cross_domain` narrow which pairs qualify and `top` caps
/// the list; output is either a JSON payload or the human-readable table.
fn cmd_tensions(source: &Path, both_high: bool, cross_domain: bool, top: usize, json_output: bool) {
    let frontier = load_frontier_or_fail(source);
    let result = tensions::analyze(&frontier, both_high, cross_domain, top);
    if json_output {
        // Hash the source file so consumers can tie results to an exact input.
        let source_hash = hash_path_or_fail(source);
        let payload = json!({
            "ok": true,
            "command": "tensions",
            "schema_version": project::VELA_SCHEMA_VERSION,
            "frontier": {
                "name": &frontier.project.name,
                "source": source.display().to_string(),
                "hash": format!("sha256:{source_hash}"),
            },
            "filters": {
                "both_high": both_high,
                "cross_domain": cross_domain,
                "top": top,
            },
            "count": result.len(),
            "tensions": result.iter().map(|t| json!({
                "score": t.score,
                "resolved": t.resolved,
                "superseding_id": &t.superseding_id,
                "finding_a": {
                    "id": &t.finding_a.id,
                    "assertion": &t.finding_a.assertion,
                    "confidence": t.finding_a.confidence,
                    "assertion_type": &t.finding_a.assertion_type,
                    "citation_count": t.finding_a.citation_count,
                    "contradicts_count": t.finding_a.contradicts_count,
                },
                "finding_b": {
                    "id": &t.finding_b.id,
                    "assertion": &t.finding_b.assertion,
                    "confidence": t.finding_b.confidence,
                    "assertion_type": &t.finding_b.assertion_type,
                    "citation_count": t.finding_b.citation_count,
                    "contradicts_count": t.finding_b.contradicts_count,
                }
            })).collect::<Vec<_>>()
        });
        println!(
            "{}",
            serde_json::to_string_pretty(&payload).expect("failed to serialize tensions")
        );
    } else {
        tensions::print_tensions(&result);
    }
}
13431
13432fn cmd_gaps(action: GapsAction) {
13433 match action {
13434 GapsAction::Rank {
13435 frontier,
13436 top,
13437 domain,
13438 json,
13439 } => cmd_gap_rank(&frontier, top, domain.as_deref(), json),
13440 }
13441}
13442
13443fn cmd_gap_rank(frontier_path: &Path, top: usize, domain: Option<&str>, json_output: bool) {
13444 let frontier = load_frontier_or_fail(frontier_path);
13445 let mut ranked = frontier
13446 .findings
13447 .iter()
13448 .filter(|finding| finding.flags.gap || finding.flags.negative_space)
13449 .filter(|finding| {
13450 domain.is_none_or(|domain| {
13451 finding
13452 .assertion
13453 .text
13454 .to_lowercase()
13455 .contains(&domain.to_lowercase())
13456 || finding
13457 .assertion
13458 .entities
13459 .iter()
13460 .any(|entity| entity.name.to_lowercase().contains(&domain.to_lowercase()))
13461 })
13462 })
13463 .map(|finding| {
13464 let dependency_count = frontier
13465 .findings
13466 .iter()
13467 .flat_map(|candidate| candidate.links.iter())
13468 .filter(|link| link.target == finding.id)
13469 .count();
13470 let score = dependency_count as f64 + finding.confidence.score;
13471 json!({
13472 "id": &finding.id,
13473 "kind": "candidate_gap_review_lead",
13474 "assertion": &finding.assertion.text,
13475 "score": score,
13476 "dependency_count": dependency_count,
13477 "confidence": finding.confidence.score,
13478 "evidence_type": &finding.evidence.evidence_type,
13479 "entities": finding.assertion.entities.iter().map(|e| &e.name).collect::<Vec<_>>(),
13480 "recommended_action": "Review source scope and missing evidence before treating this as an experiment target.",
13481 "caveats": ["Candidate gap rankings are review leads, not guaranteed underexplored areas or experiment targets."],
13482 })
13483 })
13484 .collect::<Vec<_>>();
13485 ranked.sort_by(|a, b| {
13486 b.get("score")
13487 .and_then(Value::as_f64)
13488 .partial_cmp(&a.get("score").and_then(Value::as_f64))
13489 .unwrap_or(std::cmp::Ordering::Equal)
13490 });
13491 ranked.truncate(top);
13492 if json_output {
13493 let source_hash = hash_path_or_fail(frontier_path);
13494 let payload = json!({
13495 "ok": true,
13496 "command": "gaps rank",
13497 "schema_version": project::VELA_SCHEMA_VERSION,
13498 "frontier": {
13499 "name": &frontier.project.name,
13500 "source": frontier_path.display().to_string(),
13501 "hash": format!("sha256:{source_hash}"),
13502 },
13503 "filters": {
13504 "top": top,
13505 "domain": domain,
13506 },
13507 "count": ranked.len(),
13508 "ranking_label": "candidate gap review leads",
13509 "caveats": ["These rankings are navigation signals over flagged findings, not scientific conclusions."],
13510 "review_leads": ranked.clone(),
13511 "gaps": ranked,
13512 });
13513 println!(
13514 "{}",
13515 serde_json::to_string_pretty(&payload).expect("failed to serialize gap ranking")
13516 );
13517 } else {
13518 println!();
13519 println!(" {}", "CANDIDATE GAP REVIEW LEADS".dimmed());
13520 println!(" {}", style::tick_row(60));
13521 println!(" review source scope; these are not guaranteed experiment targets.");
13522 println!();
13523 for (idx, gap) in ranked.iter().enumerate() {
13524 println!(
13525 " {}. [{}] score={} {}",
13526 idx + 1,
13527 gap["id"].as_str().unwrap_or("?"),
13528 gap["score"].as_f64().unwrap_or(0.0),
13529 gap["assertion"].as_str().unwrap_or("")
13530 );
13531 }
13532 }
13533}
13534
/// Detects cross-frontier "bridge" entities shared between two or more
/// frontier files and prints a report via `bridge::format_report`.
///
/// With `check_novelty`, the top `top_n` bridges are additionally run
/// through a rough PubMed prior-art check; per-bridge failures are
/// reported to stderr but do not abort the command.
async fn cmd_bridge(inputs: &[PathBuf], check_novelty: bool, top_n: usize) {
    if inputs.len() < 2 {
        fail("need at least 2 frontier files for bridge detection.");
    }
    println!();
    println!(" {}", "VELA · BRIDGE · V0.36.0".dimmed());
    println!(" {}", style::tick_row(60));
    println!(" loading {} frontiers...", inputs.len());
    let mut named_projects = Vec::<(String, project::Project)>::new();
    let mut total_findings = 0;
    for path in inputs {
        let frontier = load_frontier_or_fail(path);
        // The file stem doubles as the frontier's display name.
        let name = path
            .file_stem()
            .unwrap_or_default()
            .to_string_lossy()
            .to_string();
        println!(" {} · {} findings", name, frontier.stats.findings);
        total_findings += frontier.stats.findings;
        named_projects.push((name, frontier));
    }
    // Borrowed (name, project) pairs in the shape detect_bridges expects.
    let refs = named_projects
        .iter()
        .map(|(name, frontier)| (name.as_str(), frontier))
        .collect::<Vec<_>>();
    let mut bridges = bridge::detect_bridges(&refs);
    if check_novelty && !bridges.is_empty() {
        let client = Client::new();
        let check_count = bridges.len().min(top_n);
        println!(" running rough PubMed prior-art checks for top {check_count} bridges...");
        for bridge_item in bridges.iter_mut().take(check_count) {
            let query = bridge::novelty_query(&bridge_item.entity_name, bridge_item);
            match bridge::check_novelty(&client, &query).await {
                Ok(count) => bridge_item.pubmed_count = Some(count),
                Err(e) => eprintln!(
                    " {} prior-art check failed for {}: {e}",
                    style::err_prefix(),
                    bridge_item.entity_name
                ),
            }
            // Crude rate limiting between PubMed requests.
            tokio::time::sleep(std::time::Duration::from_millis(350)).await;
        }
    }
    print!("{}", bridge::format_report(&bridges, total_findings));
}
13580
/// Parsed arguments for the `bench` subcommand; consumed by `cmd_bench`.
struct BenchArgs {
    /// Frontier under evaluation; `cmd_bench` defaults this to
    /// `frontiers/bbb-alzheimer.json` when absent.
    frontier: Option<PathBuf>,
    /// Gold-standard file for finding-mode benchmarks.
    gold: Option<PathBuf>,
    /// Gold-standard file for entity-mode benchmarks.
    entity_gold: Option<PathBuf>,
    /// Gold-standard file for link-mode benchmarks.
    link_gold: Option<PathBuf>,
    /// Benchmark suite definition; handled before the single-gold modes.
    suite: Option<PathBuf>,
    /// When set, only emit the suite readiness report and exit.
    suite_ready: bool,
    /// Score thresholds; `min_f1` falls back to 0.05 in `cmd_bench`
    /// unless `no_thresholds` disables them all.
    min_f1: Option<f64>,
    min_precision: Option<f64>,
    min_recall: Option<f64>,
    no_thresholds: bool,
    /// Emit machine-readable JSON instead of the pretty report.
    json: bool,
}
13594
13595fn cmd_agent_bench(
13600 gold: &Path,
13601 candidate: &Path,
13602 sources: Option<&Path>,
13603 threshold: Option<f64>,
13604 report_path: Option<&Path>,
13605 json_out: bool,
13606) {
13607 let input = crate::agent_bench::BenchInput {
13608 gold_path: gold.to_path_buf(),
13609 candidate_path: candidate.to_path_buf(),
13610 sources: sources.map(Path::to_path_buf),
13611 threshold: threshold.unwrap_or(0.0),
13612 };
13613 let report = match crate::agent_bench::run(input) {
13614 Ok(r) => r,
13615 Err(e) => {
13616 eprintln!("{} bench failed: {e}", style::err_prefix());
13617 std::process::exit(1);
13618 }
13619 };
13620
13621 let json = serde_json::to_string_pretty(&report).unwrap_or_default();
13622 if let Some(path) = report_path
13623 && let Err(e) = std::fs::write(path, &json)
13624 {
13625 eprintln!(
13626 "{} failed to write report to {}: {e}",
13627 style::err_prefix(),
13628 path.display()
13629 );
13630 }
13631
13632 if json_out {
13633 println!("{json}");
13634 } else {
13635 println!();
13636 println!(" {}", "VELA · BENCH · AGENT STATE-UPDATE".dimmed());
13637 println!(" {}", style::tick_row(60));
13638 print!("{}", crate::agent_bench::render_pretty(&report));
13639 println!();
13640 }
13641
13642 if !report.pass {
13643 std::process::exit(1);
13644 }
13645}
13646
/// Runs the `bench` subcommand.
///
/// Dispatch order: `--suite-ready` (readiness probe), then `--suite`
/// (full suite run), then single-mode benchmarks against `--link-gold`,
/// `--entity-gold`, or `--gold`. Every path exits with status 1 when
/// the resulting report is not `"ok": true`.
fn cmd_bench(args: BenchArgs) {
    if args.suite_ready {
        // Readiness probe only; --suite defaults to the bbb-core suite.
        let suite_path = args
            .suite
            .unwrap_or_else(|| PathBuf::from("benchmarks/suites/bbb-core.json"));
        let payload =
            benchmark::suite_ready_report(&suite_path).unwrap_or_else(|e| fail_return(&e));
        println!(
            "{}",
            serde_json::to_string_pretty(&payload).expect("failed to serialize suite-ready report")
        );
        if payload.get("ok").and_then(Value::as_bool) != Some(true) {
            std::process::exit(1);
        }
        return;
    }
    if let Some(suite_path) = args.suite {
        let payload = benchmark::run_suite(&suite_path).unwrap_or_else(|e| fail_return(&e));
        if args.json {
            println!(
                "{}",
                serde_json::to_string_pretty(&payload)
                    .expect("failed to serialize benchmark suite")
            );
        } else {
            // Pretty summary: overall status plus tasks passed/total.
            let ok = payload.get("ok").and_then(Value::as_bool) == Some(true);
            let metrics = payload.get("metrics").unwrap_or(&Value::Null);
            println!();
            println!(" {}", "VELA · BENCH · SUITE".dimmed());
            println!(" {}", style::tick_row(60));
            println!(" suite: {}", suite_path.display());
            println!(
                " status: {}",
                if ok {
                    style::ok("pass")
                } else {
                    style::lost("fail")
                }
            );
            println!(
                " tasks: {}/{} passed",
                metrics
                    .get("tasks_passed")
                    .and_then(Value::as_u64)
                    .unwrap_or(0),
                metrics
                    .get("tasks_total")
                    .and_then(Value::as_u64)
                    .unwrap_or(0)
            );
        }
        if payload.get("ok").and_then(Value::as_bool) != Some(true) {
            std::process::exit(1);
        }
        return;
    }

    // Single-mode benchmarks share a default frontier and thresholds.
    let frontier = args
        .frontier
        .unwrap_or_else(|| PathBuf::from("frontiers/bbb-alzheimer.json"));
    let thresholds = benchmark::BenchmarkThresholds {
        // min_f1 defaults to 0.05 unless explicitly set or disabled;
        // precision/recall thresholds are only applied when provided.
        min_f1: if args.no_thresholds {
            None
        } else {
            args.min_f1.or(Some(0.05))
        },
        min_precision: if args.no_thresholds {
            None
        } else {
            args.min_precision
        },
        min_recall: if args.no_thresholds {
            None
        } else {
            args.min_recall
        },
        ..Default::default()
    };
    // Mode priority: link gold, then entity gold, then finding gold.
    if let Some(path) = args.link_gold {
        print_benchmark_or_exit(benchmark::task_envelope(
            &frontier,
            None,
            benchmark::BenchmarkMode::Link,
            Some(&path),
            &thresholds,
            None,
        ));
    } else if let Some(path) = args.entity_gold {
        print_benchmark_or_exit(benchmark::task_envelope(
            &frontier,
            None,
            benchmark::BenchmarkMode::Entity,
            Some(&path),
            &thresholds,
            None,
        ));
    } else if let Some(path) = args.gold {
        if args.json {
            print_benchmark_or_exit(benchmark::task_envelope(
                &frontier,
                None,
                benchmark::BenchmarkMode::Finding,
                Some(&path),
                &thresholds,
                None,
            ));
        } else {
            benchmark::run(&frontier, &path, false);
        }
    } else {
        fail("Provide --suite, --gold, --entity-gold, or --link-gold.");
    }
}
13760
13761fn print_benchmark_or_exit(result: Result<Value, String>) {
13762 let payload = result.unwrap_or_else(|e| fail_return(&e));
13763 println!(
13764 "{}",
13765 serde_json::to_string_pretty(&payload).expect("failed to serialize benchmark report")
13766 );
13767 if payload.get("ok").and_then(Value::as_bool) != Some(true) {
13768 std::process::exit(1);
13769 }
13770}
13771
13772fn cmd_packet(action: PacketAction) {
13773 let (result, json_output) = match action {
13774 PacketAction::Inspect { path, json } => (packet::inspect(&path), json),
13775 PacketAction::Validate { path, json } => (packet::validate(&path), json),
13776 };
13777 match result {
13778 Ok(output) if json_output => {
13779 println!(
13780 "{}",
13781 serde_json::to_string_pretty(&json!({
13782 "ok": true,
13783 "command": "packet",
13784 "result": output,
13785 }))
13786 .expect("failed to serialize packet response")
13787 );
13788 }
13789 Ok(output) => println!("{output}"),
13790 Err(e) => fail(&e),
13791 }
13792}
13793
13794fn cmd_verify(path: &Path, json_output: bool) {
13799 let result = packet::validate(path);
13800 match result {
13801 Ok(output) if json_output => {
13802 println!(
13803 "{}",
13804 serde_json::to_string_pretty(&json!({
13805 "ok": true,
13806 "command": "verify",
13807 "result": output,
13808 }))
13809 .expect("failed to serialize verify response")
13810 );
13811 }
13812 Ok(output) => {
13813 println!("{output}");
13814 println!(
13815 "\nverify: ok\n every file in the manifest matched its claimed sha256.\n pull this packet on another machine, run the same command, see the same line."
13816 );
13817 }
13818 Err(e) => fail(&e),
13819 }
13820}
13821
13822fn cmd_init(path: &Path, name: &str, template: &str, initialize_git: bool, json_output: bool) {
13823 if path.join(".vela").exists() {
13824 fail(&format!(
13825 "already initialized: {} exists",
13826 path.join(".vela").display()
13827 ));
13828 }
13829 let payload = frontier_repo::initialize(
13830 path,
13831 frontier_repo::InitOptions {
13832 name,
13833 template,
13834 initialize_git,
13835 },
13836 )
13837 .unwrap_or_else(|e| fail_return(&e));
13838 if json_output {
13839 println!(
13840 "{}",
13841 serde_json::to_string_pretty(&payload).expect("failed to serialize init report")
13842 );
13843 } else {
13844 println!(
13845 "{} initialized frontier repository in {}",
13846 style::ok("ok"),
13847 path.display()
13848 );
13849 }
13850}
13851
/// One-shot repository bootstrap for the `quickstart` subcommand.
///
/// Re-invokes the current executable as a subprocess for each step
/// (init, keypair generation, actor registration, first finding) so the
/// flow exercises the exact CLI surface a user would. Any failing step
/// is fatal.
fn cmd_quickstart(
    path: &Path,
    name: &str,
    reviewer: &str,
    assertion: Option<&str>,
    keys_out: Option<&Path>,
    json_output: bool,
) {
    use std::process::Command;

    if path.join(".vela").exists() {
        fail(&format!(
            "already initialized: {} exists",
            path.join(".vela").display()
        ));
    }

    let exe = std::env::current_exe()
        .unwrap_or_else(|e| fail_return(&format!("cannot locate current executable: {e}")));
    // Keys default to <repo>/keys unless --keys-out overrides.
    let keys_dir = keys_out
        .map(Path::to_path_buf)
        .unwrap_or_else(|| path.join("keys"));
    let assertion_text =
        assertion.unwrap_or("Quickstart placeholder claim. Replace with your real assertion.");

    // Runs one CLI step as a child process; aborts the whole quickstart
    // on a non-zero exit, surfacing the child's stderr.
    let run_step = |label: &str, args: &[&str]| -> std::process::Output {
        let out = Command::new(&exe)
            .args(args)
            .output()
            .unwrap_or_else(|e| fail_return(&format!("{label}: failed to spawn: {e}")));
        if !out.status.success() {
            let stderr = String::from_utf8_lossy(&out.stderr);
            fail(&format!("{label} failed:\n{stderr}"));
        }
        out
    };

    // Step 1: create the repository (git init is deliberately skipped).
    run_step(
        "init",
        &[
            "init",
            path.to_string_lossy().as_ref(),
            "--name",
            name,
            "--no-git",
            "--json",
        ],
    );

    // Step 2: generate a signing keypair and capture the public key
    // from the step's JSON output.
    let keys_out_str = keys_dir.to_string_lossy().into_owned();
    let keypair_out = run_step(
        "sign.generate-keypair",
        &[
            "sign",
            "generate-keypair",
            "--out",
            keys_out_str.as_ref(),
            "--json",
        ],
    );
    let keypair_json: serde_json::Value = serde_json::from_slice(&keypair_out.stdout)
        .unwrap_or_else(|e| fail_return(&format!("sign.generate-keypair: bad json: {e}")));
    let public_key = keypair_json
        .get("public_key")
        .and_then(|v| v.as_str())
        .unwrap_or_else(|| fail_return("sign.generate-keypair: missing public_key in output"))
        .to_string();

    // Step 3: register the reviewer as an actor with that public key.
    run_step(
        "actor.add",
        &[
            "actor",
            "add",
            path.to_string_lossy().as_ref(),
            reviewer,
            "--pubkey",
            public_key.as_str(),
            "--json",
        ],
    );

    // Step 4: add (and apply) a first placeholder finding; its id is
    // optional in the step output.
    let finding_out = run_step(
        "finding.add",
        &[
            "finding",
            "add",
            path.to_string_lossy().as_ref(),
            "--assertion",
            assertion_text,
            "--author",
            reviewer,
            "--apply",
            "--json",
        ],
    );
    let finding_json: serde_json::Value = serde_json::from_slice(&finding_out.stdout)
        .unwrap_or_else(|e| fail_return(&format!("finding.add: bad json: {e}")));
    let finding_id = finding_json
        .get("finding_id")
        .and_then(|v| v.as_str())
        .map(str::to_string);

    if json_output {
        let payload = json!({
            "ok": true,
            "command": "quickstart",
            "frontier": path.display().to_string(),
            "name": name,
            "reviewer": reviewer,
            "public_key": public_key,
            "keys_dir": keys_dir.display().to_string(),
            "finding_id": finding_id,
            "next_steps": [
                format!("vela serve {}", path.display()),
                format!(
                    "vela ingest <paper.pdf|doi:...> --frontier {}",
                    path.display()
                ),
                format!("vela log {}", path.display()),
            ],
        });
        println!(
            "{}",
            serde_json::to_string_pretty(&payload).expect("failed to serialize quickstart")
        );
        return;
    }

    println!();
    println!(
        " {}",
        format!("VELA · QUICKSTART · {}", path.display())
            .to_uppercase()
            .dimmed()
    );
    println!(" {}", style::tick_row(60));
    println!(" frontier: {}", path.display());
    println!(" name: {name}");
    println!(" reviewer: {reviewer}");
    println!(" keys: {}", keys_dir.display());
    // NOTE(review): this slice panics if the public key is shorter than
    // 16 bytes — confirm the keypair output always yields a long key.
    println!(" pubkey: {}…", &public_key[..16]);
    if let Some(id) = finding_id.as_deref() {
        println!(" finding: {id}");
    }
    println!();
    println!(" {}", style::ok("done"));
    println!(" next:");
    println!(" vela serve {}", path.display());
    println!(
        " vela ingest <paper.pdf|doi:10.xxx|pmid:xxx> --frontier {}",
        path.display()
    );
    println!(" vela log {}", path.display());
    println!();
}
14017
14018fn cmd_lock(path: &Path, check: bool, json_output: bool) {
14024 if check {
14025 cmd_lock_check(path, json_output);
14026 return;
14027 }
14028 let payload = crate::frontier_repo::materialize(path).unwrap_or_else(|e| fail_return(&e));
14029 if json_output {
14030 println!(
14031 "{}",
14032 serde_json::to_string_pretty(&json!({
14033 "ok": true,
14034 "command": "lock",
14035 "path": path.display().to_string(),
14036 "snapshot_hash": payload.get("snapshot_hash"),
14037 "event_log_hash": payload.get("event_log_hash"),
14038 "proposal_state_hash": payload.get("proposal_state_hash"),
14039 }))
14040 .expect("failed to serialize lock report")
14041 );
14042 return;
14043 }
14044 println!();
14045 println!(
14046 " {}",
14047 format!("VELA · LOCK · {}", path.display())
14048 .to_uppercase()
14049 .dimmed()
14050 );
14051 println!(" {}", style::tick_row(60));
14052 println!(
14053 " snapshot_hash: {}",
14054 payload
14055 .get("snapshot_hash")
14056 .and_then(|v| v.as_str())
14057 .unwrap_or("?")
14058 );
14059 println!(
14060 " event_log_hash: {}",
14061 payload
14062 .get("event_log_hash")
14063 .and_then(|v| v.as_str())
14064 .unwrap_or("?")
14065 );
14066 println!(
14067 " proposal_state_hash: {}",
14068 payload
14069 .get("proposal_state_hash")
14070 .and_then(|v| v.as_str())
14071 .unwrap_or("?")
14072 );
14073 println!();
14074 println!(" {}", style::ok("locked"));
14075}
14076
/// Implements `lock --check`: recomputes the snapshot and event-log
/// hashes from the on-disk repository, compares them against the
/// recorded `vela.lock` values, and exits with status 1 on any drift.
fn cmd_lock_check(path: &Path, json_output: bool) {
    use crate::frontier_repo::read_lock;
    // read_lock yields Ok(None) when no lock file exists; --check
    // treats that as a hard failure rather than a clean pass.
    let lock = read_lock(path).unwrap_or_else(|e| fail_return(&e));
    let Some(lock) = lock else {
        fail("lock --check: no vela.lock found at path");
    };
    let project = repo::load_from_path(path).unwrap_or_else(|e| fail_return(&e));
    let current_snapshot = format!("sha256:{}", crate::events::snapshot_hash(&project));
    let current_event_log = format!("sha256:{}", crate::events::event_log_hash(&project.events));
    // Human-readable drift descriptions; empty means state matches lock.
    let mut drift: Vec<String> = Vec::new();
    if lock.snapshot_hash != current_snapshot {
        drift.push(format!(
            "snapshot_hash: lock={} current={}",
            lock.snapshot_hash, current_snapshot
        ));
    }
    if lock.event_log_hash != current_event_log {
        drift.push(format!(
            "event_log_hash: lock={} current={}",
            lock.event_log_hash, current_event_log
        ));
    }
    let ok = drift.is_empty();
    if json_output {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": ok,
                "command": "lock.check",
                "path": path.display().to_string(),
                "drift": drift,
                "lock_snapshot_hash": lock.snapshot_hash,
                "current_snapshot_hash": current_snapshot,
                "lock_event_log_hash": lock.event_log_hash,
                "current_event_log_hash": current_event_log,
                "dependency_count": lock.dependencies.len(),
            }))
            .expect("failed to serialize lock check report")
        );
    } else {
        println!();
        println!(
            " {}",
            format!("VELA · LOCK · CHECK · {}", path.display())
                .to_uppercase()
                .dimmed()
        );
        println!(" {}", style::tick_row(60));
        if ok {
            println!(" snapshot_hash: {}", lock.snapshot_hash);
            println!(" event_log_hash: {}", lock.event_log_hash);
            println!(" dependencies pinned: {}", lock.dependencies.len());
            println!();
            println!(" {} on-disk state matches vela.lock", style::ok("ok"));
        } else {
            println!(" {} drift detected:", style::err_prefix());
            for d in &drift {
                println!(" - {d}");
            }
        }
    }
    // Exit code communicates drift in both output modes.
    if !ok {
        std::process::exit(1);
    }
}
14142
14143fn cmd_doc(path: &Path, out: Option<&Path>, json_output: bool) {
14148 let project = repo::load_from_path(path).unwrap_or_else(|e| fail_return(&e));
14149 let out_dir = out
14150 .map(Path::to_path_buf)
14151 .unwrap_or_else(|| path.join("doc"));
14152 let report =
14153 crate::doc_render::write_site(&project, &out_dir).unwrap_or_else(|e| fail_return(&e));
14154 if json_output {
14155 println!(
14156 "{}",
14157 serde_json::to_string_pretty(&report).expect("failed to serialize doc report")
14158 );
14159 return;
14160 }
14161 println!();
14162 println!(
14163 " {}",
14164 format!("VELA · DOC · {}", path.display())
14165 .to_uppercase()
14166 .dimmed()
14167 );
14168 println!(" {}", style::tick_row(60));
14169 println!(" frontier_id: {}", report.frontier_id);
14170 println!(" out: {}", report.out);
14171 println!(" files written: {}", report.files_written);
14172 println!(" findings: {}", report.findings_documented);
14173 println!(" events: {}", report.events_documented);
14174 println!();
14175 println!(
14176 " {} open {}/index.html in a browser",
14177 style::ok("ok"),
14178 report.out
14179 );
14180}
14181
14182fn cmd_import(frontier_path: &Path, into: Option<&Path>) {
14183 let frontier = repo::load_from_path(frontier_path).unwrap_or_else(|e| fail_return(&e));
14184 let target = into
14185 .map(Path::to_path_buf)
14186 .unwrap_or_else(|| PathBuf::from(frontier.project.name.replace(' ', "-").to_lowercase()));
14187 repo::init_repo(&target, &frontier).unwrap_or_else(|e| fail(&e));
14188 println!(
14189 "{} {} findings · {}",
14190 style::ok("imported"),
14191 frontier.findings.len(),
14192 target.display()
14193 );
14194}
14195
14196fn cmd_locator_repair(
14197 path: &Path,
14198 atom_id: &str,
14199 locator_override: Option<&str>,
14200 reviewer: &str,
14201 reason: &str,
14202 apply: bool,
14203 json_output: bool,
14204) {
14205 let report = state::repair_evidence_atom_locator(
14206 path,
14207 atom_id,
14208 locator_override,
14209 reviewer,
14210 reason,
14211 apply,
14212 )
14213 .unwrap_or_else(|e| fail_return(&e));
14214 print_state_report(&report, json_output);
14215}
14216
/// Runs `source-fetch`: resolves an identifier to a metadata record.
///
/// The identifier is normalized first (doi:/pmid:/nct: prefixes). With
/// a cache root, records are cached at
/// `<root>/sources/cache/<sha256-of-identifier>.json` and served from
/// disk unless `refresh` forces a re-fetch. `_json_output` is unused:
/// the emitted record is always JSON.
async fn cmd_source_fetch(
    identifier: &str,
    cache_root: Option<&Path>,
    out_path: Option<&Path>,
    refresh: bool,
    _json_output: bool,
) {
    use sha2::{Digest, Sha256};

    let normalized = normalize_source_identifier(identifier);
    // Cache key is the sha256 hex digest of the normalized identifier.
    let cache_path = cache_root.map(|root| {
        let hash = format!("{:x}", Sha256::digest(normalized.as_bytes()));
        root.join("sources")
            .join("cache")
            .join(format!("{hash}.json"))
    });

    // Fast path: serve the cached record when present and not refreshing.
    if !refresh
        && let Some(p) = cache_path.as_ref()
        && p.is_file()
    {
        let body = std::fs::read_to_string(p)
            .unwrap_or_else(|e| fail_return(&format!("read cache {}: {e}", p.display())));
        emit_source_fetch_result(&body, out_path);
        return;
    }

    let result = fetch_source_metadata(&normalized).await;
    let json = match result {
        Ok(value) => serde_json::to_string_pretty(&value)
            .unwrap_or_else(|e| fail_return(&format!("serialize fetched record: {e}"))),
        Err(e) => fail_return(&format!("source-fetch '{identifier}': {e}")),
    };

    // Best-effort cache write happens before emitting the record.
    if let Some(p) = cache_path.as_ref() {
        if let Some(parent) = p.parent() {
            std::fs::create_dir_all(parent)
                .unwrap_or_else(|e| fail_return(&format!("mkdir {}: {e}", parent.display())));
        }
        std::fs::write(p, &json)
            .unwrap_or_else(|e| fail_return(&format!("write cache {}: {e}", p.display())));
    }
    emit_source_fetch_result(&json, out_path);
}
14265
14266fn emit_source_fetch_result(body: &str, out_path: Option<&Path>) {
14267 if let Some(p) = out_path {
14268 if let Some(parent) = p.parent() {
14269 let _ = std::fs::create_dir_all(parent);
14270 }
14271 std::fs::write(p, body)
14272 .unwrap_or_else(|e| fail_return(&format!("write {}: {e}", p.display())));
14273 } else {
14274 println!("{body}");
14275 }
14276}
14277
/// Normalizes a raw source identifier into the canonical prefixed form
/// used by `source-fetch` (`doi:`, `pmid:`, `nct:`, `pmc:`).
///
/// Rules, in order:
/// - already-prefixed identifiers pass through unchanged (trimmed)
/// - strings starting with `10.` are treated as DOIs
/// - `NCT…`/`nct…` strings become `nct:<id>` (prefix stripped, id kept)
/// - all-ASCII-digit strings are treated as PubMed IDs
/// - anything else is returned trimmed but otherwise untouched
fn normalize_source_identifier(raw: &str) -> String {
    let trimmed = raw.trim();
    if trimmed.starts_with("doi:")
        || trimmed.starts_with("pmid:")
        || trimmed.starts_with("nct:")
        || trimmed.starts_with("pmc:")
    {
        return trimmed.to_string();
    }
    if trimmed.starts_with("10.") {
        return format!("doi:{trimmed}");
    }
    if trimmed.starts_with("NCT") || trimmed.starts_with("nct") {
        // Bug fix: the previous version appended `.split_at(0).0`, which
        // always yields the empty string, so every NCT identifier was
        // normalized to the bare prefix "nct:" with the id discarded.
        return format!("nct:{}", trimmed.to_uppercase().trim_start_matches("NCT"));
    }
    if trimmed.chars().all(|c| c.is_ascii_digit()) {
        return format!("pmid:{trimmed}");
    }
    trimmed.to_string()
}
14306
/// Fetches bibliographic metadata for a normalized identifier.
///
/// DOIs go to Crossref; when the Crossref record has no abstract, the
/// DOI is resolved to a PubMed ID and a non-empty PubMed abstract is
/// spliced into the record (its origin recorded under
/// `abstract_source`). PMIDs go to PubMed, NCT ids to
/// ClinicalTrials.gov; any other prefix is an error.
async fn fetch_source_metadata(normalized: &str) -> Result<Value, String> {
    let client = Client::builder()
        .user_agent("vela/0.66.0 (vela-source-fetch; +https://vela-science.fly.dev)")
        .timeout(std::time::Duration::from_secs(30))
        .build()
        .map_err(|e| format!("client build: {e}"))?;
    if let Some(rest) = normalized.strip_prefix("doi:") {
        let mut record = fetch_via_crossref(&client, rest).await?;
        let crossref_abstract = record
            .get("abstract")
            .and_then(|v| v.as_str())
            .unwrap_or("");
        // Abstract fallback: only when Crossref had none, the DOI maps
        // to exactly one PMID, and the PubMed fetch succeeds.
        if crossref_abstract.is_empty()
            && let Some(pmid) = resolve_doi_to_pmid(&client, rest).await
            && let Ok(pubmed_record) = fetch_via_pubmed(&client, &pmid).await
        {
            let pubmed_abstract = pubmed_record
                .get("abstract")
                .and_then(|v| v.as_str())
                .unwrap_or("")
                .to_string();
            if !pubmed_abstract.is_empty()
                && let Some(obj) = record.as_object_mut()
            {
                obj.insert("abstract".to_string(), Value::String(pubmed_abstract));
                obj.insert(
                    "abstract_source".to_string(),
                    Value::String(format!("pubmed:{pmid}")),
                );
            }
        }
        return Ok(record);
    }
    if let Some(rest) = normalized.strip_prefix("pmid:") {
        return fetch_via_pubmed(&client, rest).await;
    }
    if let Some(rest) = normalized.strip_prefix("nct:") {
        return fetch_via_ctgov(&client, rest).await;
    }
    Err(format!(
        "unsupported source identifier '{normalized}'. Use doi:, pmid:, or nct: prefix."
    ))
}
14356
14357async fn resolve_doi_to_pmid(client: &Client, doi: &str) -> Option<String> {
14361 let url = format!(
14362 "https://eutils.ncbi.nlm.nih.gov/entrez/eutils/esearch.fcgi?db=pubmed&term={}[doi]&retmode=json",
14363 urlencoding::encode(doi)
14364 );
14365 let resp = client.get(&url).send().await.ok()?;
14366 if !resp.status().is_success() {
14367 return None;
14368 }
14369 let body: Value = resp.json().await.ok()?;
14370 let id_list = body.pointer("/esearchresult/idlist")?.as_array()?;
14371 if id_list.len() != 1 {
14372 return None;
14375 }
14376 id_list.first()?.as_str().map(|s| s.to_string())
14377}
14378
/// Fetches work metadata for a DOI from the Crossref REST API and maps
/// it into the uniform `vela.source_fetch.v0.1` record shape.
async fn fetch_via_crossref(client: &Client, doi: &str) -> Result<Value, String> {
    let url = format!("https://api.crossref.org/works/{doi}");
    let resp = client
        .get(&url)
        .send()
        .await
        .map_err(|e| format!("crossref get: {e}"))?;
    if !resp.status().is_success() {
        return Err(format!("crossref returned {}", resp.status()));
    }
    let body: Value = resp
        .json()
        .await
        .map_err(|e| format!("crossref json: {e}"))?;
    // The actual work payload lives under "message".
    let work = body.get("message").cloned().unwrap_or(Value::Null);
    // Crossref titles arrive as arrays; take the first entry.
    let title = work
        .get("title")
        .and_then(|v| v.as_array())
        .and_then(|a| a.first())
        .and_then(|v| v.as_str())
        .unwrap_or("")
        .to_string();
    // Abstracts arrive as JATS XML; strip the tags to plain text.
    let abstract_html = work
        .get("abstract")
        .and_then(|v| v.as_str())
        .unwrap_or("")
        .to_string();
    let abstract_text = strip_jats_tags(&abstract_html);
    // Publication year: issued.date-parts is a nested array, e.g.
    // [[year, month, day]]; take the first element of the first entry.
    let year = work
        .get("issued")
        .and_then(|v| v.get("date-parts"))
        .and_then(|v| v.as_array())
        .and_then(|a| a.first())
        .and_then(|v| v.as_array())
        .and_then(|a| a.first())
        .and_then(|v| v.as_i64());
    let journal = work
        .get("container-title")
        .and_then(|v| v.as_array())
        .and_then(|a| a.first())
        .and_then(|v| v.as_str())
        .unwrap_or("")
        .to_string();
    // Authors: "given family", skipping entries where both are empty.
    let authors = work
        .get("author")
        .and_then(|v| v.as_array())
        .map(|arr| {
            arr.iter()
                .filter_map(|a| {
                    let given = a.get("given").and_then(|v| v.as_str()).unwrap_or("");
                    let family = a.get("family").and_then(|v| v.as_str()).unwrap_or("");
                    let combined = format!("{given} {family}").trim().to_string();
                    if combined.is_empty() {
                        None
                    } else {
                        Some(combined)
                    }
                })
                .collect::<Vec<_>>()
        })
        .unwrap_or_default();
    Ok(json!({
        "schema": "vela.source_fetch.v0.1",
        "identifier": format!("doi:{doi}"),
        "source": "crossref",
        "title": title,
        "abstract": abstract_text,
        "year": year,
        "journal": journal,
        "authors": authors,
        "retrieved_at": chrono::Utc::now().to_rfc3339(),
    }))
}
14452
/// Fetch citation metadata for a PubMed id from the NCBI E-utilities
/// `efetch` endpoint (XML mode) and return it as a `vela.source_fetch.v0.1`
/// JSON record.
///
/// # Errors
/// Returns a string error when the HTTP request fails, the response status
/// is not a success, or the body cannot be read as text.
async fn fetch_via_pubmed(client: &Client, pmid: &str) -> Result<Value, String> {
    let url = format!(
        "https://eutils.ncbi.nlm.nih.gov/entrez/eutils/efetch.fcgi?db=pubmed&id={pmid}&retmode=xml"
    );
    let resp = client
        .get(&url)
        .send()
        .await
        .map_err(|e| format!("pubmed get: {e}"))?;
    if !resp.status().is_success() {
        return Err(format!("pubmed returned {}", resp.status()));
    }
    let xml = resp.text().await.map_err(|e| format!("pubmed text: {e}"))?;
    // First-occurrence tag scraping via `extract_xml_text`, not real XML
    // parsing. NOTE(review): structured abstracts arrive as
    // `<AbstractText Label="...">`, which this exact-tag match will miss —
    // confirm plain abstracts are the only case that matters here.
    let title = extract_xml_text(&xml, "<ArticleTitle>", "</ArticleTitle>");
    let abstract_text = extract_xml_text(&xml, "<AbstractText>", "</AbstractText>");
    // The first <Year> in the record is taken as the publication year;
    // missing or non-numeric text becomes None.
    let year = extract_xml_text(&xml, "<Year>", "</Year>")
        .parse::<i64>()
        .ok();
    // Presumably the first <Title> in PubMed XML is the journal title —
    // TODO confirm against a real efetch response.
    let journal = extract_xml_text(&xml, "<Title>", "</Title>");
    Ok(json!({
        "schema": "vela.source_fetch.v0.1",
        "identifier": format!("pmid:{pmid}"),
        "source": "pubmed",
        "title": title,
        "abstract": abstract_text,
        "year": year,
        "journal": journal,
        // Author parsing is not implemented for PubMed; always empty.
        "authors": Vec::<String>::new(),
        "retrieved_at": chrono::Utc::now().to_rfc3339(),
    }))
}
14484
14485async fn fetch_via_ctgov(client: &Client, nct: &str) -> Result<Value, String> {
14486 let nct_clean = nct.trim();
14487 let nct_id = if nct_clean.starts_with("NCT") || nct_clean.starts_with("nct") {
14488 nct_clean.to_uppercase()
14489 } else {
14490 format!("NCT{nct_clean}")
14491 };
14492 let url = format!("https://clinicaltrials.gov/api/v2/studies/{nct_id}");
14493 let resp = client
14494 .get(&url)
14495 .send()
14496 .await
14497 .map_err(|e| format!("ctgov get: {e}"))?;
14498 if !resp.status().is_success() {
14499 return Err(format!("ctgov returned {}", resp.status()));
14500 }
14501 let body: Value = resp.json().await.map_err(|e| format!("ctgov json: {e}"))?;
14502 let title = body
14503 .pointer("/protocolSection/identificationModule/briefTitle")
14504 .and_then(|v| v.as_str())
14505 .unwrap_or("")
14506 .to_string();
14507 let abstract_text = body
14508 .pointer("/protocolSection/descriptionModule/briefSummary")
14509 .and_then(|v| v.as_str())
14510 .unwrap_or("")
14511 .to_string();
14512 let phase = body
14513 .pointer("/protocolSection/designModule/phases")
14514 .and_then(|v| v.as_array())
14515 .and_then(|a| a.first())
14516 .and_then(|v| v.as_str())
14517 .unwrap_or("")
14518 .to_string();
14519 Ok(json!({
14520 "schema": "vela.source_fetch.v0.1",
14521 "identifier": format!("nct:{nct_id}"),
14522 "source": "clinicaltrials.gov",
14523 "title": title,
14524 "abstract": abstract_text,
14525 "year": Value::Null,
14526 "journal": phase,
14527 "authors": Vec::<String>::new(),
14528 "retrieved_at": chrono::Utc::now().to_rfc3339(),
14529 }))
14530}
14531
/// Return the trimmed text between the first occurrence of `open` and the
/// next occurrence of `close` in `xml`, or an empty string when either tag
/// is absent. This is deliberately naive substring matching, not XML parsing.
fn extract_xml_text(xml: &str, open: &str, close: &str) -> String {
    xml.find(open)
        .map(|at| &xml[at + open.len()..])
        .and_then(|rest| rest.find(close).map(|end| rest[..end].trim().to_string()))
        .unwrap_or_default()
}
14541
/// Remove `<...>` tag spans from JATS/HTML-ish text and collapse all runs of
/// whitespace to single spaces. Entities are not decoded; nothing inside a
/// tag (between `<` and the next `>`) is kept.
fn strip_jats_tags(html: &str) -> String {
    let mut inside_tag = false;
    let untagged: String = html
        .chars()
        .filter(|&c| match c {
            '<' => {
                inside_tag = true;
                false
            }
            '>' => {
                inside_tag = false;
                false
            }
            _ => !inside_tag,
        })
        .collect();
    // Normalize whitespace: split on any whitespace run, rejoin with spaces.
    untagged.split_whitespace().collect::<Vec<_>>().join(" ")
}
14555
14556fn cmd_span_repair(
14557 path: &Path,
14558 finding_id: &str,
14559 section: &str,
14560 text: &str,
14561 reviewer: &str,
14562 reason: &str,
14563 apply: bool,
14564 json_output: bool,
14565) {
14566 let report =
14567 state::repair_finding_span(path, finding_id, section, text, reviewer, reason, apply)
14568 .unwrap_or_else(|e| fail_return(&e));
14569 print_state_report(&report, json_output);
14570}
14571
14572#[allow(clippy::too_many_arguments)]
14573fn cmd_entity_resolve(
14574 path: &Path,
14575 finding_id: &str,
14576 entity_name: &str,
14577 source: &str,
14578 id: &str,
14579 confidence: f64,
14580 matched_name: Option<&str>,
14581 resolution_method: &str,
14582 reviewer: &str,
14583 reason: &str,
14584 apply: bool,
14585 json_output: bool,
14586) {
14587 let report = state::resolve_finding_entity(
14588 path,
14589 finding_id,
14590 entity_name,
14591 source,
14592 id,
14593 confidence,
14594 matched_name,
14595 resolution_method,
14596 reviewer,
14597 reason,
14598 apply,
14599 )
14600 .unwrap_or_else(|e| fail_return(&e));
14601 print_state_report(&report, json_output);
14602}
14603
14604fn cmd_propagate(
14605 path: &Path,
14606 retract: Option<String>,
14607 reduce_confidence: Option<String>,
14608 to: Option<f64>,
14609 output: Option<&Path>,
14610) {
14611 let mut frontier = load_frontier_or_fail(path);
14612 let (finding_id, action, label) = if let Some(id) = retract {
14613 (id, propagate::PropagationAction::Retracted, "retraction")
14614 } else if let Some(id) = reduce_confidence {
14615 let score = to.unwrap_or_else(|| fail_return("--reduce-confidence requires --to <score>"));
14616 if !(0.0..=1.0).contains(&score) {
14617 fail("--to must be between 0.0 and 1.0");
14618 }
14619 (
14620 id,
14621 propagate::PropagationAction::ConfidenceReduced { new_score: score },
14622 "confidence reduction",
14623 )
14624 } else {
14625 fail("specify --retract <id> or --reduce-confidence <id> --to <score>");
14626 };
14627 if !frontier.findings.iter().any(|f| f.id == finding_id) {
14628 fail(&format!("finding not found: {finding_id}"));
14629 }
14630 let result = propagate::propagate_correction(&mut frontier, &finding_id, action);
14631 frontier.review_events.extend(result.events.clone());
14636 project::recompute_stats(&mut frontier);
14637 propagate::print_result(&result, label, &finding_id);
14638 let out = output.unwrap_or(path);
14639 repo::save_to_path(out, &frontier).expect("Failed to save frontier");
14640 println!(" output: {}", out.display());
14641}
14642
/// Print a copy-pasteable MCP server configuration block that launches
/// `vela serve`, pointing at an explicit source path, a frontiers root, or
/// the default `frontier.json` (in that order of precedence).
fn cmd_mcp_setup(source: Option<&Path>, frontiers: Option<&Path>) {
    // Human-readable description of what will be served; mirrors the same
    // source > frontiers > default precedence used for `args` below.
    let source_desc = source
        .map(|p| p.display().to_string())
        .or_else(|| frontiers.map(|p| p.display().to_string()))
        .unwrap_or_else(|| "frontier.json".to_string());
    // The JSON `"args"` array contents, already quoted for direct splicing
    // into the printed config.
    let args = if let Some(path) = source {
        format!(r#""serve", "{}""#, path.display())
    } else if let Some(path) = frontiers {
        format!(r#""serve", "--frontiers", "{}""#, path.display())
    } else {
        r#""serve", "frontier.json""#.to_string()
    };
    println!(
        r#"Add this MCP server configuration to your client:

{{
  "mcpServers": {{
    "vela": {{
      "command": "vela",
      "args": [{args}]
    }}
  }}
}}

Source: {source_desc}"#
    );
}
14670
14671fn parse_entities(input: &str) -> Vec<(String, String)> {
14672 if input.trim().is_empty() {
14673 return Vec::new();
14674 }
14675 input
14676 .split(',')
14677 .filter_map(|pair| {
14678 let parts = pair.trim().splitn(2, ':').collect::<Vec<_>>();
14679 if parts.len() == 2 {
14680 Some((parts[0].trim().to_string(), parts[1].trim().to_string()))
14681 } else {
14682 eprintln!(
14683 "{} skipping malformed entity '{}'",
14684 style::warn("warn"),
14685 pair.trim()
14686 );
14687 None
14688 }
14689 })
14690 .collect()
14691}
14692
14693fn parse_evidence_spans(inputs: &[String]) -> Vec<Value> {
14694 inputs
14695 .iter()
14696 .filter_map(|input| {
14697 let trimmed = input.trim();
14698 if trimmed.is_empty() {
14699 return None;
14700 }
14701 if trimmed.starts_with('{') {
14702 match serde_json::from_str::<Value>(trimmed) {
14703 Ok(value @ Value::Object(_)) => return Some(value),
14704 Ok(_) | Err(_) => {
14705 eprintln!(
14706 "{} evidence span JSON should be an object; storing as text",
14707 style::warn("warn")
14708 );
14709 }
14710 }
14711 }
14712 Some(json!({
14713 "section": "curator_source",
14714 "text": trimmed,
14715 }))
14716 })
14717 .collect()
14718}
14719
14720fn hash_path(path: &Path) -> Result<String, String> {
14721 let mut hasher = Sha256::new();
14722 if path.is_file() {
14723 let bytes = std::fs::read(path)
14724 .map_err(|e| format!("Failed to read {} for hashing: {e}", path.display()))?;
14725 hasher.update(&bytes);
14726 } else if path.is_dir() {
14727 let mut files = Vec::new();
14728 collect_hash_files(path, path, &mut files)?;
14729 files.sort();
14730 for rel in files {
14731 hasher.update(rel.to_string_lossy().as_bytes());
14732 let bytes = std::fs::read(path.join(&rel))
14733 .map_err(|e| format!("Failed to read {} for hashing: {e}", rel.display()))?;
14734 hasher.update(bytes);
14735 }
14736 } else {
14737 return Err(format!("Cannot hash missing path {}", path.display()));
14738 }
14739 Ok(format!("{:x}", hasher.finalize()))
14740}
14741
14742fn load_frontier_or_fail(path: &Path) -> project::Project {
14743 repo::load_from_path(path).unwrap_or_else(|e| {
14744 fail_return(&format!(
14745 "Failed to load frontier '{}': {e}",
14746 path.display()
14747 ))
14748 })
14749}
14750
14751fn hash_path_or_fail(path: &Path) -> String {
14752 hash_path(path).unwrap_or_else(|e| {
14753 fail_return(&format!(
14754 "Failed to hash frontier '{}': {e}",
14755 path.display()
14756 ))
14757 })
14758}
14759
/// Recursively collect every regular file under `dir` into `files` as a path
/// relative to `root`. Entries that are neither files nor directories are
/// ignored. No ordering is guaranteed; callers sort as needed.
///
/// # Errors
/// Returns a string error when a directory cannot be read or a path cannot
/// be made relative to `root`.
fn collect_hash_files(root: &Path, dir: &Path, files: &mut Vec<PathBuf>) -> Result<(), String> {
    let entries =
        std::fs::read_dir(dir).map_err(|e| format!("Failed to read {}: {e}", dir.display()))?;
    for entry in entries {
        let path = entry
            .map_err(|e| format!("Failed to read directory entry: {e}"))?
            .path();
        if path.is_dir() {
            collect_hash_files(root, &path, files)?;
        } else if path.is_file() {
            let rel = path.strip_prefix(root).map_err(|e| e.to_string())?;
            files.push(rel.to_path_buf());
        }
    }
    Ok(())
}
14778
14779fn schema_error_suggestion(error: &str) -> &'static str {
14780 if schema_error_action(error).is_some() {
14781 "Run `vela normalize` to repair deterministic frontier state."
14782 } else {
14783 "Inspect and correct the referenced frontier field."
14784 }
14785}
14786
14787fn schema_error_fix(error: &str) -> bool {
14788 schema_error_action(error).is_some()
14789}
14790
/// Map a schema validation error message to the normalize action that can
/// repair it, or `None` when no deterministic repair exists.
fn schema_error_action(error: &str) -> Option<&'static str> {
    // Substrings indicating stale metadata/stats that normalize can rebuild.
    const METADATA_MARKERS: [&str; 5] = [
        "stats.findings",
        "stats.links",
        "Invalid compiler",
        "Invalid vela_version",
        "Invalid schema",
    ];
    if METADATA_MARKERS.iter().any(|marker| error.contains(marker)) {
        Some("normalize_metadata_and_stats")
    } else if error.contains("does not match content-address") {
        Some("rewrite_ids")
    } else {
        None
    }
}
14805
14806fn build_repair_plan(diagnostics: &[Value]) -> Vec<Value> {
14807 let mut actions = std::collections::BTreeMap::<String, usize>::new();
14808 for diagnostic in diagnostics {
14809 if let Some(action) = diagnostic.get("normalize_action").and_then(Value::as_str) {
14810 *actions.entry(action.to_string()).or_default() += 1;
14811 }
14812 }
14813 actions
14814 .into_iter()
14815 .map(|(action, count)| {
14816 let command = if action == "rewrite_ids" {
14817 "vela normalize <frontier> --write --rewrite-ids --id-map id-map.json"
14818 } else {
14819 "vela normalize <frontier> --write"
14820 };
14821 json!({
14822 "action": action,
14823 "count": count,
14824 "command": command,
14825 })
14826 })
14827 .collect()
14828}
14829
/// CLI handler for `vela integrity`: analyze accepted frontier state and
/// print either the full JSON report or a short human-readable summary.
fn cmd_integrity(frontier: &Path, json: bool) {
    let report = state_integrity::analyze_path(frontier).unwrap_or_else(|e| fail_return(&e));
    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&report).expect("failed to serialize integrity report")
        );
    } else {
        println!("vela integrity");
        println!(" frontier: {}", frontier.display());
        println!(" status: {}", report.status);
        println!(" proof freshness: {}", report.proof_freshness);
        println!(" structural errors: {}", report.structural_errors.len());
        // Cap the detail listing at 8 so large reports stay readable.
        for error in report.structural_errors.iter().take(8) {
            println!(" - {}: {}", error.rule_id, error.message);
        }
    }
}
14848
/// CLI handler for `vela impact`: report the downstream impact of one
/// finding (dependents, proposals, events, proof status), as JSON or a
/// short human-readable summary.
fn cmd_impact(frontier: &Path, finding_id: &str, depth: Option<usize>, json: bool) {
    let report =
        impact::analyze_path(frontier, finding_id, depth).unwrap_or_else(|e| fail_return(&e));
    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&report).expect("failed to serialize impact report")
        );
    } else {
        println!("vela impact");
        println!(" finding: {}", report.target.id);
        println!(" frontier: {}", report.frontier.vfr_id);
        println!(" direct dependents: {}", report.summary.direct_dependents);
        println!(" downstream: {}", report.summary.total_downstream);
        println!(" open proposals: {}", report.summary.open_proposals);
        println!(" accepted events: {}", report.summary.accepted_events);
        println!(" proof: {}", report.summary.proof_status);
    }
}
14868
/// CLI handler for `vela discord`: report which findings carry discord
/// markers, with an optional `--kind` filter, as JSON or a human summary.
fn cmd_discord(frontier: &Path, json: bool, kind_filter: Option<&str>) {
    use crate::discord::DiscordKind;
    use crate::discord_compute::compute_discord_assignment;

    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
    let assignment = compute_discord_assignment(&project);
    // Findings that carry at least one discord kind.
    let support = assignment.frontier_support();

    // One row per supported finding: (finding id, its discord kinds).
    // When a kind filter is given, findings lacking that kind are dropped.
    let mut rows: Vec<(String, Vec<String>)> = Vec::new();
    for context in support.iter() {
        let set = assignment.get(context);
        let kinds: Vec<String> = set.iter().map(|k| k.as_str().to_string()).collect();
        if let Some(filter) = kind_filter
            && !kinds.iter().any(|k| k == filter)
        {
            continue;
        }
        rows.push((context.clone(), kinds));
    }

    // Per-kind counts over the *unfiltered* assignment; zero-count kinds
    // are omitted entirely.
    let mut histogram: std::collections::BTreeMap<&'static str, usize> =
        std::collections::BTreeMap::new();
    for kind in DiscordKind::ALL {
        let count = assignment
            .iter()
            .filter(|(_, set)| set.contains(*kind))
            .count();
        if count > 0 {
            histogram.insert(kind.as_str(), count);
        }
    }

    let total_findings = project.findings.len();
    let frontier_id = project
        .frontier_id
        .clone()
        .unwrap_or_else(|| String::from("<unknown>"));

    if json {
        let row_value = |row: &(String, Vec<String>)| {
            serde_json::json!({
                "finding_id": row.0,
                "discord_kinds": row.1,
            })
        };
        let report = serde_json::json!({
            "frontier_id": frontier_id,
            "total_findings": total_findings,
            "frontier_support_size": support.len(),
            "filtered_row_count": rows.len(),
            "filter_kind": kind_filter,
            "histogram": histogram,
            "rows": rows.iter().map(row_value).collect::<Vec<_>>(),
        });
        println!(
            "{}",
            serde_json::to_string_pretty(&report).expect("serialize discord report")
        );
        return;
    }

    println!("vela discord");
    println!(" frontier: {frontier_id}");
    println!(" total findings: {total_findings}");
    println!(
        " frontier support (any discord): {} of {}",
        support.len(),
        total_findings
    );
    if let Some(k) = kind_filter {
        println!(" filter: kind = {k}");
    }
    println!();
    if histogram.is_empty() {
        println!(" no discord detected.");
    } else {
        println!(" discord histogram:");
        for (k, n) in &histogram {
            println!(" {n:>4} {k}");
        }
    }
    if !rows.is_empty() {
        println!();
        println!(" findings with discord (showing up to 50):");
        // Cap the listing at 50 rows to keep terminal output bounded.
        for (fid, kinds) in rows.iter().take(50) {
            println!(" {fid} · {}", kinds.join(", "));
        }
        if rows.len() > 50 {
            println!(" ... and {} more", rows.len() - 50);
        }
    }
}
14966
14967fn empty_signal_report() -> signals::SignalReport {
14968 signals::SignalReport {
14969 schema: "vela.signals.v0".to_string(),
14970 frontier: "unavailable".to_string(),
14971 signals: Vec::new(),
14972 review_queue: Vec::new(),
14973 proof_readiness: signals::ProofReadiness {
14974 status: "unavailable".to_string(),
14975 blockers: 0,
14976 warnings: 0,
14977 caveats: vec!["Frontier could not be loaded for signal analysis.".to_string()],
14978 },
14979 }
14980}
14981
/// Print the human-readable SIGNALS footer: signal count, proof readiness,
/// review-queue size (when non-empty), and — under `strict` — an explicit
/// failure line when readiness is anything other than "ready".
fn print_signal_summary(report: &signals::SignalReport, strict: bool) {
    println!();
    println!(" {}", "SIGNALS".dimmed());
    println!(" {}", style::tick_row(60));
    println!(" total signals: {}", report.signals.len());
    println!(" proof readiness: {}", report.proof_readiness.status);
    if !report.review_queue.is_empty() {
        println!(" review queue: {} items", report.review_queue.len());
    }
    // Strict mode treats any non-"ready" status as a blocking condition.
    if strict && report.proof_readiness.status != "ready" {
        println!(
            " {} proof readiness has blocking signals.",
            style::lost("strict check failed")
        );
    }
}
14998
/// Write `value` as pretty JSON at `relative_path` inside a proof-packet
/// directory, then update the packet's bookkeeping:
///
/// 1. upsert the new file's sha256/size entry into both `manifest.json`
///    (`included_files` array) and `packet.lock.json` (`files` array);
/// 2. because step 1 rewrote `packet.lock.json`, re-hash the lock file and
///    refresh its own entry inside `manifest.json` so the recorded digest
///    stays consistent.
///
/// # Errors
/// Returns a string error on any serialization or filesystem failure, or
/// when a manifest is missing its expected file array.
fn append_packet_json_file(
    packet_dir: &Path,
    relative_path: &str,
    value: &Value,
) -> Result<(), String> {
    let content = serde_json::to_vec_pretty(value)
        .map_err(|e| format!("Failed to serialize packet JSON file: {e}"))?;
    let path = packet_dir.join(relative_path);
    if let Some(parent) = path.parent() {
        std::fs::create_dir_all(parent)
            .map_err(|e| format!("Failed to create {}: {e}", parent.display()))?;
    }
    std::fs::write(&path, &content)
        .map_err(|e| format!("Failed to write {}: {e}", path.display()))?;
    // Bookkeeping entry (path, digest, size) for the file just written.
    let entry = json!({
        "path": relative_path,
        "sha256": hex::encode(Sha256::digest(&content)),
        "bytes": content.len(),
    });

    // Step 1: upsert the entry into both tracking files. Note the two
    // manifests use different array keys for their file lists.
    for manifest_name in ["manifest.json", "packet.lock.json"] {
        let manifest_path = packet_dir.join(manifest_name);
        let data = std::fs::read_to_string(&manifest_path)
            .map_err(|e| format!("Failed to read {}: {e}", manifest_path.display()))?;
        let mut manifest: Value = serde_json::from_str(&data)
            .map_err(|e| format!("Failed to parse {}: {e}", manifest_path.display()))?;
        let array_key = if manifest_name == "manifest.json" {
            "included_files"
        } else {
            "files"
        };
        let files = manifest
            .get_mut(array_key)
            .and_then(Value::as_array_mut)
            .ok_or_else(|| format!("{} missing {array_key} array", manifest_path.display()))?;
        // Drop any stale entry for this path before appending the fresh one.
        files.retain(|file| {
            file.get("path")
                .and_then(Value::as_str)
                .is_none_or(|path| path != relative_path)
        });
        files.push(entry.clone());
        std::fs::write(
            &manifest_path,
            serde_json::to_vec_pretty(&manifest)
                .map_err(|e| format!("Failed to serialize {}: {e}", manifest_path.display()))?,
        )
        .map_err(|e| format!("Failed to write {}: {e}", manifest_path.display()))?;
    }

    // Step 2: the loop above rewrote packet.lock.json, so its digest recorded
    // in manifest.json is now stale — recompute from the bytes on disk and
    // replace the manifest's entry for it.
    let lock_path = packet_dir.join("packet.lock.json");
    let lock_content = std::fs::read(&lock_path)
        .map_err(|e| format!("Failed to read {}: {e}", lock_path.display()))?;
    let lock_entry = json!({
        "path": "packet.lock.json",
        "sha256": hex::encode(Sha256::digest(&lock_content)),
        "bytes": lock_content.len(),
    });
    let manifest_path = packet_dir.join("manifest.json");
    let data = std::fs::read_to_string(&manifest_path)
        .map_err(|e| format!("Failed to read {}: {e}", manifest_path.display()))?;
    let mut manifest: Value = serde_json::from_str(&data)
        .map_err(|e| format!("Failed to parse {}: {e}", manifest_path.display()))?;
    let files = manifest
        .get_mut("included_files")
        .and_then(Value::as_array_mut)
        .ok_or_else(|| format!("{} missing included_files array", manifest_path.display()))?;
    files.retain(|file| {
        file.get("path")
            .and_then(Value::as_str)
            .is_none_or(|path| path != "packet.lock.json")
    });
    files.push(lock_entry);
    std::fs::write(
        &manifest_path,
        serde_json::to_vec_pretty(&manifest)
            .map_err(|e| format!("Failed to serialize {}: {e}", manifest_path.display()))?,
    )
    .map_err(|e| format!("Failed to write {}: {e}", manifest_path.display()))?;
    Ok(())
}
15079
/// Print the human-readable `check-tools` report: frontier summary,
/// pass/fail counts, the tool list, and a per-check status line.
fn print_tool_check_report(report: &Value) {
    // Missing sections degrade to Null so every lookup below falls back to
    // its default instead of panicking.
    let summary = report.get("summary").unwrap_or(&Value::Null);
    let frontier = report.get("frontier").unwrap_or(&Value::Null);
    println!();
    println!(" {}", "VELA · SERVE · CHECK-TOOLS".dimmed());
    println!(" {}", style::tick_row(60));
    println!(
        "frontier: {}",
        frontier
            .get("name")
            .and_then(Value::as_str)
            .unwrap_or("unknown")
    );
    println!(
        "findings: {}",
        frontier
            .get("findings")
            .and_then(Value::as_u64)
            .unwrap_or_default()
    );
    println!(
        "checks: {} passed, {} failed",
        summary
            .get("passed")
            .and_then(Value::as_u64)
            .unwrap_or_default(),
        summary
            .get("failed")
            .and_then(Value::as_u64)
            .unwrap_or_default()
    );
    if let Some(tools) = report.get("tools").and_then(Value::as_array) {
        let names = tools
            .iter()
            .filter_map(Value::as_str)
            .collect::<Vec<_>>()
            .join(", ");
        println!("tools: {names}");
    }
    if let Some(checks) = report.get("checks").and_then(Value::as_array) {
        for check in checks {
            // A check passes only when `ok` is literally boolean true.
            let status = if check.get("ok").and_then(Value::as_bool) == Some(true) {
                style::ok("ok")
            } else {
                style::lost("lost")
            };
            println!(
                " {} {}",
                status,
                check
                    .get("tool")
                    .and_then(Value::as_str)
                    .unwrap_or("unknown")
            );
        }
    }
}
15137
15138fn print_state_report(report: &state::StateCommandReport, json_output: bool) {
15139 if json_output {
15140 println!(
15141 "{}",
15142 serde_json::to_string_pretty(report).expect("failed to serialize state command report")
15143 );
15144 } else {
15145 println!("{}", report.message);
15146 println!(" frontier: {}", report.frontier);
15147 println!(" finding: {}", report.finding_id);
15148 println!(" proposal: {}", report.proposal_id);
15149 println!(" status: {}", report.proposal_status);
15150 if let Some(event_id) = &report.applied_event_id {
15151 println!(" event: {}", event_id);
15152 }
15153 println!(" wrote: {}", report.wrote_to);
15154 }
15155}
15156
15157fn print_history(payload: &Value) {
15158 let finding = payload.get("finding").unwrap_or(&Value::Null);
15159 println!("vela history");
15160 println!(
15161 " finding: {}",
15162 finding
15163 .get("id")
15164 .and_then(Value::as_str)
15165 .unwrap_or("unknown")
15166 );
15167 println!(
15168 " assertion: {}",
15169 finding
15170 .get("assertion")
15171 .and_then(Value::as_str)
15172 .unwrap_or("")
15173 );
15174 println!(
15175 " confidence: {:.3}",
15176 finding
15177 .get("confidence")
15178 .and_then(Value::as_f64)
15179 .unwrap_or_default()
15180 );
15181 let reviews = payload
15182 .get("review_events")
15183 .and_then(Value::as_array)
15184 .map_or(0, Vec::len);
15185 let updates = payload
15186 .get("confidence_updates")
15187 .and_then(Value::as_array)
15188 .map_or(0, Vec::len);
15189 let annotations = finding
15190 .get("annotations")
15191 .and_then(Value::as_array)
15192 .map_or(0, Vec::len);
15193 let sources = payload
15194 .get("sources")
15195 .and_then(Value::as_array)
15196 .map_or(0, Vec::len);
15197 let atoms = payload
15198 .get("evidence_atoms")
15199 .and_then(Value::as_array)
15200 .map_or(0, Vec::len);
15201 let conditions = payload
15202 .get("condition_records")
15203 .and_then(Value::as_array)
15204 .map_or(0, Vec::len);
15205 let proposals = payload
15206 .get("proposals")
15207 .and_then(Value::as_array)
15208 .map_or(0, Vec::len);
15209 let events = payload
15210 .get("events")
15211 .and_then(Value::as_array)
15212 .map_or(0, Vec::len);
15213 println!(" review events: {reviews}");
15214 println!(" confidence updates: {updates}");
15215 println!(" annotations: {annotations}");
15216 println!(" sources: {sources}");
15217 println!(" evidence atoms: {atoms}");
15218 println!(" condition records: {conditions}");
15219 println!(" proposals: {proposals}");
15220 println!(" canonical events: {events}");
15221 if let Some(status) = payload
15222 .get("proof_state")
15223 .and_then(|value| value.get("latest_packet"))
15224 .and_then(|value| value.get("status"))
15225 .and_then(Value::as_str)
15226 {
15227 println!(" proof state: {status}");
15228 }
15229 if let Some(events) = payload.get("review_events").and_then(Value::as_array) {
15230 for event in events.iter().take(8) {
15231 println!(
15232 " - {} {} {}",
15233 event
15234 .get("reviewed_at")
15235 .and_then(Value::as_str)
15236 .unwrap_or(""),
15237 event.get("id").and_then(Value::as_str).unwrap_or(""),
15238 event.get("reason").and_then(Value::as_str).unwrap_or("")
15239 );
15240 }
15241 }
15242}
15243
/// Serialized record of a proof run: what was checked, from which source,
/// and where the trace itself was written.
///
/// NOTE(review): field meanings below are inferred from names — the
/// producing code is not visible in this chunk; confirm against the writer.
/// Field order is preserved because it determines the serialized layout.
#[derive(Debug, Serialize)]
pub struct ProofTrace {
    /// Version tag of this trace format.
    pub trace_version: String,
    /// The CLI invocation that produced the trace.
    pub command: Vec<String>,
    /// Source path or identifier the proof was built from.
    pub source: String,
    /// Content hash of `source`.
    pub source_hash: String,
    /// Schema version of the traced frontier/packet.
    pub schema_version: String,
    /// Artifacts examined during the run.
    pub checked_artifacts: Vec<String>,
    /// Optional benchmark results, when benchmarks were run.
    pub benchmark: Option<Value>,
    /// Packet manifest reference (path or digest — confirm).
    pub packet_manifest: String,
    /// Outcome of packet validation.
    pub packet_validation: String,
    /// Caveats accumulated during the run.
    pub caveats: Vec<String>,
    /// Overall run status.
    pub status: String,
    /// Where this trace was written.
    pub trace_path: String,
}
15259
/// Subcommand names that belong to the science CLI surface; consulted by
/// `is_science_subcommand` when dispatching. Keep in sync with the
/// `Commands` enum and the help text in `print_strict_help`.
const SCIENCE_SUBCOMMANDS: &[&str] = &[
    "compile-notes",
    "compile-code",
    "compile-data",
    "review-pending",
    "find-tensions",
    "plan-experiments",
    "scout",
    "check",
    "normalize",
    "integrity",
    "impact",
    "discord",
    "quickstart",
    "proof",
    "repo",
    "serve",
    "stats",
    "search",
    "tensions",
    "gaps",
    "bridge",
    "export",
    "packet",
    "bench",
    "conformance",
    "version",
    "sign",
    "actor",
    "frontier",
    "queue",
    "registry",
    "init",
    "import",
    "lock",
    "doc",
    "diff",
    "proposals",
    "finding",
    "link",
    "entity",
    "review",
    "note",
    "caveat",
    "revise",
    "reject",
    "history",
    "import-events",
    "retract",
    "propagate",
    "replicate",
    "replications",
    "dataset-add",
    "datasets",
    "code-add",
    "code-artifacts",
    "artifact-add",
    "artifact-to-state",
    "bridge-kit",
    "source-adapter",
    "runtime-adapter",
    "artifacts",
    "artifact-audit",
    "decision-brief",
    "trial-summary",
    "source-verification",
    "source-ingest-plan",
    "clinical-trial-import",
    "negative-result-add",
    "negative-results",
    "trajectory-create",
    "trajectory-step",
    "trajectories",
    "tier-set",
    "locator-repair",
    "span-repair",
    "entity-resolve",
    "entity-add",
    "proof-add",
    "source-fetch",
    "predict",
    "resolve",
    "predictions",
    "predictions-expire",
    "calibration",
    "consensus",
    "federation",
    "causal",
    "status",
    "log",
    "inbox",
    "ask",
    "bridges",
    "workbench",
    "verify",
    "ingest",
    "propose",
    "accept",
    "attest",
    "lineage",
    "carina",
    "atlas",
    "constellation",
];
15397
15398pub fn is_science_subcommand(name: &str) -> bool {
15399 SCIENCE_SUBCOMMANDS.contains(&name)
15400}
15401
/// Print the extended, hand-curated help screen, substituting the crate
/// version from Cargo metadata at compile time.
///
/// NOTE(review): column alignment inside the literal is reconstructed from
/// a whitespace-mangled listing — diff the rendered output against the
/// previous release before shipping.
fn print_strict_help() {
    println!(
        r#"Vela {}
Version control for scientific state.

Usage:
  vela <COMMAND>

Core flow (v0.74):
  init          Initialize a split frontier repo
  ingest        Ingest a paper, dataset, or Carina packet (dispatches by file type)
  propose       Create a finding.review proposal
  diff          Preview a `vpr_*` proposal, or compare two frontier files
  accept        Apply a proposal under reviewer authority
  attest        Sign findings under your private key
  log           Recent canonical state events
  lineage       State-transition replay for one finding
  serve         Local Workbench (findings, evidence, diff, lineage)

Read-only inspection:
  check         Validate a frontier, repo, or proof packet
  integrity     Check accepted frontier state integrity
  impact        Report downstream finding impact
  normalize     Apply deterministic frontier-state repairs
  proof         Export and validate a proof packet
  repo          Inspect split frontier repository status and shape
  stats         Show frontier statistics
  search        Search findings
  tensions      List candidate contradictions and tensions
  gaps          Inspect and rank candidate gap review leads
  bridge        Find candidate cross-domain connections

Advanced (proposal-creation, agent inboxes, federation):
  scout           Run Literature Scout against a folder of PDFs (writes proposals)
  compile-notes   Run Notes Compiler against a Markdown vault (writes proposals)
  compile-code    Run Code & Notebook Analyst against a research repo (writes proposals)
  compile-data    Run Datasets agent against a folder of CSV/TSV data (writes proposals)
  review-pending  Run Reviewer Agent: score every pending proposal (writes notes)
  find-tensions   Run Contradiction Finder: surface real contradictions among findings
  plan-experiments Run Experiment Planner: propose experiments for open questions / hypotheses
  export          Export frontier artifacts
  packet          Inspect or validate proof packets
  bench           Run deterministic benchmark gates
  conformance     Run protocol conformance vectors
  sign            Optional signing and signature verification
  runtime-adapter
                  Normalize external runtime exports into reviewable proposals
  version         Show version information
  import          Import frontier.json into a .vela repo
  proposals       Inspect, validate, export, import, accept, or reject write proposals
  artifact-to-state
                  Import a Carina artifact packet as reviewable proposals
  bridge-kit
                  Validate Carina artifact packets before importing runtime output
  source-adapter
                  Run reviewed source adapters into artifact-to-state proposals
  finding         Add or manage finding bundles as frontier state
  link            Add typed links between findings (incl. cross-frontier vf_at-vfr targets)
  entity          Resolve unresolved entities against a bundled common-entity table (v0.19)
  frontier        Scaffold (`new`), materialize, and manage frontier metadata + deps
  actor           Register Ed25519 publisher identities in a frontier
  registry        Publish, list, or pull frontiers (open hub at https://vela-hub.fly.dev)
  review          Create a review proposal or review interactively
  note            Add a lightweight note to a finding
  caveat          Create an explicit caveat proposal
  revise          Create a confidence revision proposal
  reject          Create a rejection proposal
  history         Show state-transition history for one finding (v0.74 alias: `lineage`)
  import-events   Import review/state events from a packet or JSON file
  retract         Create a retraction proposal
  propagate       Simulate impact over declared dependency links
  artifact-add    Register a content-addressed artifact
  artifacts       List content-addressed artifacts
  artifact-audit  Audit artifact locators, hashes, references, and profiles
  decision-brief  Show the validated decision brief projection
  trial-summary   Show the validated trial outcome projection
  source-verification Show the validated source verification projection
  source-ingest-plan  Show the validated source ingest plan
  clinical-trial-import Import a ClinicalTrials.gov record as an artifact
  locator-repair  Mechanically repair an evidence atom's missing source locator
  span-repair     Mechanically repair a finding's missing evidence span
  entity-resolve  Resolve a finding entity to a canonical id
  source-fetch    Fetch metadata + abstract for a doi:/pmid:/nct: source
  atlas           Compose multiple frontiers into a domain-level Atlas (vat_*) (v0.78+)
  constellation   Compose multiple Atlases into a cross-domain Constellation (vco_*) (v0.82+)

Quick start (the demo):
  vela init demo --name "Your bounded question"
  vela ingest paper.pdf --frontier demo
  vela propose demo <vf_id> --status accepted --reason "..." --reviewer reviewer:you --apply
  vela diff <vpr_id> --frontier demo
  vela accept demo <vpr_id> --reviewer reviewer:you --reason "applied"
  vela serve --path demo

Substrate health:
  vela frontier materialize my-frontier --json
  vela repo status my-frontier --json
  vela proof verify my-frontier --json
  vela check my-frontier --strict --json

Monolithic frontier file:
  vela frontier new frontier.json --name "Your bounded question"
  vela finding add frontier.json --assertion "..." --author "reviewer:demo" --apply
  vela check frontier.json --json
  FINDING_ID=$(jq -r '.findings[0].id' frontier.json)
  vela review frontier.json "$FINDING_ID" --status contested --reason "Mouse-only evidence" --reviewer reviewer:demo --apply

Publish your own frontier (see docs/PUBLISHING.md):
  vela frontier new ./frontier.json --name "Your bounded question"
  vela finding add ./frontier.json --assertion "..." --author "reviewer:you" --apply
  vela sign generate-keypair --out keys
  vela actor add ./frontier.json reviewer:you --pubkey "$(cat keys/public.key)"
  vela registry publish ./frontier.json --owner reviewer:you --key keys/private.key \
    --to https://vela-hub.fly.dev
"#,
        env!("CARGO_PKG_VERSION")
    );
}
15520
/// Function-pointer signature for the async `scout` command implementation.
/// A plain `fn` pointer returning a boxed `Send` future, so it can be stored
/// in a `'static` `OnceLock` slot.
pub type ScoutHandler = fn(
    folder: PathBuf,
    frontier: PathBuf,
    backend: Option<String>,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

/// Write-once global slot holding the registered `scout` handler.
static SCOUT_HANDLER: OnceLock<ScoutHandler> = OnceLock::new();

/// Register the `scout` handler. Repeat calls are no-ops: `OnceLock::set`
/// returns `Err` once a value is stored, and the result is deliberately
/// discarded.
pub fn register_scout_handler(handler: ScoutHandler) {
    let _ = SCOUT_HANDLER.set(handler);
}
15545
/// Function-pointer signature for the async `atlas init` implementation
/// (boxed `Send` future so it can live in a `'static` slot).
pub type AtlasInitHandler = fn(
    atlases_root: PathBuf,
    name: String,
    domain: String,
    scope_note: Option<String>,
    frontiers: Vec<PathBuf>,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

/// Write-once global slot for the `atlas init` handler.
static ATLAS_INIT_HANDLER: OnceLock<AtlasInitHandler> = OnceLock::new();

/// Register the `atlas init` handler; only the first call takes effect.
pub fn register_atlas_init_handler(handler: AtlasInitHandler) {
    let _ = ATLAS_INIT_HANDLER.set(handler);
}
15563
/// Function-pointer signature for the async `atlas materialize` implementation.
pub type AtlasMaterializeHandler =
    fn(atlases_root: PathBuf, name: String, json: bool) -> Pin<Box<dyn Future<Output = ()> + Send>>;

/// Write-once global slot for the `atlas materialize` handler.
static ATLAS_MATERIALIZE_HANDLER: OnceLock<AtlasMaterializeHandler> = OnceLock::new();

/// Register the `atlas materialize` handler; only the first call takes effect.
pub fn register_atlas_materialize_handler(handler: AtlasMaterializeHandler) {
    let _ = ATLAS_MATERIALIZE_HANDLER.set(handler);
}
15573
/// Function-pointer signature for the async `atlas serve` implementation.
pub type AtlasServeHandler = fn(
    atlases_root: PathBuf,
    name: String,
    port: u16,
    open_browser: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

/// Write-once global slot for the `atlas serve` handler.
static ATLAS_SERVE_HANDLER: OnceLock<AtlasServeHandler> = OnceLock::new();

/// Register the `atlas serve` handler; only the first call takes effect.
pub fn register_atlas_serve_handler(handler: AtlasServeHandler) {
    let _ = ATLAS_SERVE_HANDLER.set(handler);
}
15590
/// Function-pointer signature for the async `atlas update` implementation.
pub type AtlasUpdateHandler = fn(
    atlases_root: PathBuf,
    name: String,
    add_frontier: Vec<PathBuf>,
    remove_vfr_id: Vec<String>,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

/// Write-once global slot for the `atlas update` handler.
static ATLAS_UPDATE_HANDLER: OnceLock<AtlasUpdateHandler> = OnceLock::new();

/// Register the `atlas update` handler; only the first call takes effect.
pub fn register_atlas_update_handler(handler: AtlasUpdateHandler) {
    let _ = ATLAS_UPDATE_HANDLER.set(handler);
}
15608
/// Function-pointer signature for the async `constellation init` implementation.
pub type ConstellationInitHandler = fn(
    constellations_root: PathBuf,
    name: String,
    scope_note: Option<String>,
    atlases: Vec<PathBuf>,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

/// Write-once global slot for the `constellation init` handler.
static CONSTELLATION_INIT_HANDLER: OnceLock<ConstellationInitHandler> = OnceLock::new();

/// Register the `constellation init` handler; only the first call takes effect.
pub fn register_constellation_init_handler(handler: ConstellationInitHandler) {
    let _ = CONSTELLATION_INIT_HANDLER.set(handler);
}
15625
/// Function-pointer signature for the async `constellation materialize`
/// implementation.
pub type ConstellationMaterializeHandler = fn(
    constellations_root: PathBuf,
    name: String,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

/// Write-once global slot for the `constellation materialize` handler.
static CONSTELLATION_MATERIALIZE_HANDLER: OnceLock<ConstellationMaterializeHandler> =
    OnceLock::new();

/// Register the `constellation materialize` handler; only the first call
/// takes effect.
pub fn register_constellation_materialize_handler(handler: ConstellationMaterializeHandler) {
    let _ = CONSTELLATION_MATERIALIZE_HANDLER.set(handler);
}
15638
/// Function-pointer signature for the async `constellation serve` implementation.
pub type ConstellationServeHandler = fn(
    constellations_root: PathBuf,
    name: String,
    port: u16,
    open_browser: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

/// Write-once global slot for the `constellation serve` handler.
static CONSTELLATION_SERVE_HANDLER: OnceLock<ConstellationServeHandler> = OnceLock::new();

/// Register the `constellation serve` handler; only the first call takes effect.
pub fn register_constellation_serve_handler(handler: ConstellationServeHandler) {
    let _ = CONSTELLATION_SERVE_HANDLER.set(handler);
}
15651
/// Function-pointer signature for the async `compile-notes` implementation.
pub type NotesHandler = fn(
    vault: PathBuf,
    frontier: PathBuf,
    backend: Option<String>,
    max_files: Option<usize>,
    max_items_per_category: Option<usize>,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

/// Write-once global slot for the `compile-notes` handler.
static NOTES_HANDLER: OnceLock<NotesHandler> = OnceLock::new();

/// Register the `compile-notes` handler; only the first call takes effect.
pub fn register_notes_handler(handler: NotesHandler) {
    let _ = NOTES_HANDLER.set(handler);
}
15671
/// Function-pointer signature for the async `compile-code` implementation.
pub type CodeHandler = fn(
    root: PathBuf,
    frontier: PathBuf,
    backend: Option<String>,
    max_files: Option<usize>,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

/// Write-once global slot for the `compile-code` handler.
static CODE_HANDLER: OnceLock<CodeHandler> = OnceLock::new();

/// Register the `compile-code` handler; only the first call takes effect.
pub fn register_code_handler(handler: CodeHandler) {
    let _ = CODE_HANDLER.set(handler);
}
15688
/// Function-pointer signature for the async datasets-compile implementation.
pub type DatasetsHandler = fn(
    root: PathBuf,
    frontier: PathBuf,
    backend: Option<String>,
    sample_rows: Option<usize>,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

/// Write-once global slot for the datasets handler.
static DATASETS_HANDLER: OnceLock<DatasetsHandler> = OnceLock::new();

/// Register the datasets handler; only the first call takes effect.
pub fn register_datasets_handler(handler: DatasetsHandler) {
    let _ = DATASETS_HANDLER.set(handler);
}
15705
/// Function-pointer signature for the async `review-pending` implementation.
pub type ReviewerHandler = fn(
    frontier: PathBuf,
    backend: Option<String>,
    max_proposals: Option<usize>,
    batch_size: usize,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

/// Write-once global slot for the `review-pending` handler.
static REVIEWER_HANDLER: OnceLock<ReviewerHandler> = OnceLock::new();

/// Register the `review-pending` handler; only the first call takes effect.
pub fn register_reviewer_handler(handler: ReviewerHandler) {
    let _ = REVIEWER_HANDLER.set(handler);
}
15722
/// Function-pointer signature for the async `find-tensions` implementation.
pub type TensionsHandler = fn(
    frontier: PathBuf,
    backend: Option<String>,
    max_findings: Option<usize>,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

/// Write-once global slot for the `find-tensions` handler.
static TENSIONS_HANDLER: OnceLock<TensionsHandler> = OnceLock::new();

/// Register the `find-tensions` handler; only the first call takes effect.
pub fn register_tensions_handler(handler: TensionsHandler) {
    let _ = TENSIONS_HANDLER.set(handler);
}
15738
/// Function-pointer signature for the async experiments-suggestion
/// implementation (same shape as [`TensionsHandler`]).
pub type ExperimentsHandler = fn(
    frontier: PathBuf,
    backend: Option<String>,
    max_findings: Option<usize>,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

/// Write-once global slot for the experiments handler.
static EXPERIMENTS_HANDLER: OnceLock<ExperimentsHandler> = OnceLock::new();

/// Register the experiments handler; only the first call takes effect.
pub fn register_experiments_handler(handler: ExperimentsHandler) {
    let _ = EXPERIMENTS_HANDLER.set(handler);
}
15754
/// Locate the nearest enclosing Vela repo.
///
/// Checks the current working directory and then each ancestor for a
/// `.vela/` directory; returns the first match, or `None` when no ancestor
/// (up to the filesystem root) contains one, or when the cwd is unreadable.
fn find_vela_repo() -> Option<PathBuf> {
    let cwd = std::env::current_dir().ok()?;
    // `ancestors()` yields the path itself first, then each parent up to the
    // root — the same visit order as a check-then-pop loop.
    cwd.ancestors()
        .find(|dir| dir.join(".vela").is_dir())
        .map(Path::to_path_buf)
}
15781
/// Print the compact, human-oriented help screen shown for bare `vela help`
/// (the full strict subcommand list lives behind `vela help advanced`).
/// Output text is user-facing and must stay byte-stable.
fn print_session_help() {
    println!();
    println!(
        " Vela {} · Version control for scientific state.",
        env!("CARGO_PKG_VERSION")
    );
    println!();
    println!(" USAGE");
    println!(" vela Open a session against the nearest .vela/ repo");
    println!(" vela <command> Run a specific subcommand");
    println!(" vela help advanced Full subcommand list (30+ commands)");
    println!();
    println!(" CORE FLOW (v0.74)");
    println!(" init Initialize a split frontier repo");
    println!(" ingest <path> Ingest a paper, dataset, or Carina packet");
    println!(" propose Create a finding.review proposal");
    println!(" diff <vpr_id> Preview a pending proposal vs current frontier");
    println!(" accept <vpr_id> Apply a proposal under reviewer authority");
    println!(" attest Sign findings under your private key");
    println!(" log Recent canonical state events");
    println!(" lineage <vf_id> State-transition replay for one finding");
    println!(" serve Local Workbench (find, evidence, diff, lineage)");
    println!();
    println!(" DAILY ALSO-RANS");
    println!(" status One-screen frontier health");
    println!(" inbox Pending review proposals");
    println!(" review Review a proposal interactively");
    println!(" ask <question> Plain-text query against the frontier");
    println!();
    println!(" REASONING (Pearl 1 → 2 → 3)");
    println!(" causal audit Per-finding identifiability");
    println!(" causal effect <src> --on <tgt> Pairwise back-door / front-door");
    println!(" causal counterfactual <src> --target <tgt> --set-to <0..1>");
    println!();
    println!(" COMPOSITION");
    println!(" bridge <a> <b> Cross-frontier hypotheses");
    println!(" consensus <vf> Field consensus over similar claims");
    println!();
    println!(" PUBLISH");
    println!(" registry publish Push a signed manifest to the hub");
    println!(" federation peer-add Federate with another hub");
    println!();
    println!(" In session, type a single letter for a quick verb, or any");
    println!(" question in plain text. `q` or `exit` quits.");
    println!();
}
15828
/// Print the one-screen session dashboard for a loaded project.
///
/// Shows frontier identity, finding/event/pending-proposal counts, a causal
/// audit summary, bridge counts read from `.vela/bridges/*.json`, replication
/// outcomes, and the quick-verb legend for the interactive session.
fn print_session_dashboard(project: &crate::project::Project, repo_path: &Path) {
    use crate::causal_reasoning::{audit_frontier, summarize_audit};

    let label = frontier_label(project);
    let vfr = project.frontier_id();
    // Only the first 16 chars of the frontier id are shown (followed by "…").
    let vfr_short = vfr.chars().take(16).collect::<String>();

    // Tally pending proposals, grouped by proposal kind for the inbox line.
    let mut pending = 0usize;
    let mut by_kind: std::collections::BTreeMap<String, usize> = std::collections::BTreeMap::new();
    for p in &project.proposals {
        if p.status == "pending_review" {
            pending += 1;
            *by_kind.entry(p.kind.clone()).or_insert(0) += 1;
        }
    }

    let audit = audit_frontier(project);
    let audit_summary = summarize_audit(&audit);

    // Bridge stats: every *.json file under .vela/bridges counts toward the
    // total; files that parse as a Bridge are additionally classified by
    // status (unparseable files still count in `bridge_total`).
    let bridges_dir = repo_path.join(".vela/bridges");
    let mut bridge_total = 0usize;
    let mut bridge_confirmed = 0usize;
    let mut bridge_derived = 0usize;
    if bridges_dir.is_dir()
        && let Ok(entries) = std::fs::read_dir(&bridges_dir)
    {
        for entry in entries.flatten() {
            let path = entry.path();
            if path.extension().and_then(|s| s.to_str()) != Some("json") {
                continue;
            }
            bridge_total += 1;
            if let Ok(data) = std::fs::read_to_string(&path)
                && let Ok(b) = serde_json::from_str::<crate::bridge::Bridge>(&data)
            {
                match b.status {
                    crate::bridge::BridgeStatus::Confirmed => bridge_confirmed += 1,
                    crate::bridge::BridgeStatus::Derived => bridge_derived += 1,
                    _ => {}
                }
            }
        }
    }

    // Replication stats: distinct target findings with at least one
    // "replicated" outcome, plus the raw count of "failed" records.
    let mut targets_with_success = std::collections::HashSet::new();
    let mut failed_replications = 0usize;
    for r in &project.replications {
        if r.outcome == "replicated" {
            targets_with_success.insert(r.target_finding.clone());
        } else if r.outcome == "failed" {
            failed_replications += 1;
        }
    }

    println!();
    // Compiler version shown without its "vela/" prefix when present.
    let version = crate::project::VELA_COMPILER_VERSION
        .strip_prefix("vela/")
        .unwrap_or(crate::project::VELA_COMPILER_VERSION);
    println!(
        " {}",
        format!("VELA · {version} · {label}")
            .to_uppercase()
            .dimmed()
    );
    println!(" {}", style::tick_row(60));
    println!(
        " vfr_id {}… repo {}",
        vfr_short,
        repo_path.display()
    );
    println!(
        " findings {:>4} events {} proposals pending {}",
        project.findings.len(),
        project.events.len(),
        pending
    );

    // Conditional status lines: each is printed only when it has something
    // to report, keeping the dashboard to one screen.
    if pending > 0 {
        let parts: Vec<String> = by_kind.iter().map(|(k, n)| format!("{n} {k}")).collect();
        println!(" {} · {}", style::warn("inbox"), parts.join(" "));
    }
    if audit_summary.underidentified > 0 || audit_summary.conditional > 0 {
        println!(
            " {} · {} underidentified · {} conditional",
            // Underidentified findings escalate the label from warn to lost.
            if audit_summary.underidentified > 0 {
                style::lost("audit")
            } else {
                style::warn("audit")
            },
            audit_summary.underidentified,
            audit_summary.conditional,
        );
    }
    if bridge_total > 0 {
        println!(
            " {} · {} total · {} confirmed · {} awaiting review",
            style::ok("bridges"),
            bridge_total,
            bridge_confirmed,
            bridge_derived
        );
    }
    if !project.replications.is_empty() {
        println!(
            " {} · {} records · {} findings replicated · {} failed",
            style::ok("replications"),
            project.replications.len(),
            targets_with_success.len(),
            failed_replications,
        );
    }

    // Quick-verb legend; these map to the arms of `run_session_verb`.
    println!();
    println!(" type a verb or ask anything:");
    println!(" a audit problems i inbox (pending) b bridges");
    println!(" g causal graph l log (recent) c counterfactuals");
    println!(" s refresh status h help (more verbs) q quit");
    println!();
}
15948
/// Dispatch one session verb typed at the interactive prompt.
///
/// Returns `true` when the input matched a known verb (even if the verb's
/// action printed an error), and `false` when the input is not a verb, so
/// the caller can fall through to free-text question answering.
fn run_session_verb(verb: &str, repo_path: &Path) -> bool {
    match verb {
        "a" | "audit" => {
            // Causal audit limited to problem findings, human-readable.
            let action = CausalAction::Audit {
                frontier: repo_path.to_path_buf(),
                problems_only: true,
                json: false,
            };
            cmd_causal(action);
            true
        }
        "i" | "inbox" => {
            // Pending review proposals only.
            let action = ProposalAction::List {
                frontier: repo_path.to_path_buf(),
                status: Some("pending_review".into()),
                json: false,
            };
            cmd_proposals(action);
            true
        }
        "b" | "bridges" => {
            let action = BridgesAction::List {
                frontier: repo_path.to_path_buf(),
                status: None,
                json: false,
            };
            cmd_bridges(action);
            true
        }
        "g" | "graph" => {
            let action = CausalAction::Graph {
                frontier: repo_path.to_path_buf(),
                node: None,
                json: false,
            };
            cmd_causal(action);
            true
        }
        "l" | "log" => {
            // Last 10 events, no filter, human-readable.
            cmd_log(repo_path, 10, None, false);
            true
        }
        "c" | "counterfactual" | "counterfactuals" => {
            // List mechanism-annotated depends/supports edges as "live"
            // counterfactual pairs: all pairs are counted, but only the
            // first 10 are printed.
            let project = match repo::load_from_path(repo_path) {
                Ok(p) => p,
                Err(e) => {
                    eprintln!("{} {e}", style::err_prefix());
                    return true;
                }
            };
            println!();
            println!(" {}", "VELA · COUNTERFACTUAL · LIVE PAIRS".dimmed());
            println!(" {}", style::tick_row(60));
            let mut pairs = 0usize;
            for child in &project.findings {
                for link in &child.links {
                    if !matches!(link.link_type.as_str(), "depends" | "supports") {
                        continue;
                    }
                    if link.mechanism.is_none() {
                        continue;
                    }
                    // Link targets may carry a "<prefix>:" qualifier; strip
                    // everything up to the first ':' when present.
                    let parent = link
                        .target
                        .split_once(':')
                        .map_or(link.target.as_str(), |(_, r)| r);
                    pairs += 1;
                    if pairs <= 10 {
                        println!(" · do({parent}) → {}", child.id);
                    }
                }
            }
            if pairs == 0 {
                println!(" no mechanism-annotated edges found.");
                println!(" add a mechanism via the link's `mechanism` field; see /counterfactual");
            } else {
                println!();
                println!(" {pairs} live pair(s). Run with:");
                println!(" vela causal counterfactual <repo> <src> --target <tgt> --set-to 0.5");
            }
            println!();
            true
        }
        "s" | "status" | "refresh" => {
            // Reload from disk so the dashboard reflects any new state.
            match repo::load_from_path(repo_path) {
                Ok(p) => print_session_dashboard(&p, repo_path),
                Err(e) => eprintln!("{} {e}", style::err_prefix()),
            }
            true
        }
        "h" | "help" | "?" => {
            print_session_help();
            true
        }
        _ => false,
    }
}
16053
16054fn run_session() {
16055 let repo_path = match find_vela_repo() {
16056 Some(p) => p,
16057 None => {
16058 println!();
16059 println!(
16060 " {}",
16061 "VELA · NO FRONTIER FOUND IN CWD OR ANY PARENT".dimmed()
16062 );
16063 println!(" {}", style::tick_row(60));
16064 println!(" Run `vela init` here to create a frontier, or cd into one.");
16065 println!(" Or run `vela help` for the command list.");
16066 println!();
16067 return;
16068 }
16069 };
16070
16071 let project = match repo::load_from_path(&repo_path) {
16072 Ok(p) => p,
16073 Err(e) => {
16074 eprintln!("{} failed to load .vela/ repo: {e}", style::err_prefix());
16075 std::process::exit(1);
16076 }
16077 };
16078
16079 print_session_dashboard(&project, &repo_path);
16080
16081 use std::io::{BufRead, Write};
16082 let stdin = std::io::stdin();
16083 let mut stdout = std::io::stdout();
16084 loop {
16085 print!(" > ");
16086 stdout.flush().ok();
16087 let mut line = String::new();
16088 if stdin.lock().read_line(&mut line).is_err() {
16089 break;
16090 }
16091 let input = line.trim();
16092 if input.is_empty() {
16093 continue;
16094 }
16095 if matches!(input, "q" | "quit" | "exit") {
16096 break;
16097 }
16098 if run_session_verb(input, &repo_path) {
16099 continue;
16100 }
16101 let project = match repo::load_from_path(&repo_path) {
16103 Ok(p) => p,
16104 Err(e) => {
16105 eprintln!("{} {e}", style::err_prefix());
16106 continue;
16107 }
16108 };
16109 answer(&project, input, false);
16110 }
16111}
16112
16113pub fn run_from_args() {
16114 style::init();
16115 let args = std::env::args().collect::<Vec<_>>();
16116 match args.get(1).map(String::as_str) {
16117 None => {
16121 run_session();
16122 return;
16123 }
16124 Some("-h" | "--help" | "help") => {
16125 if args.get(2).map(String::as_str) == Some("advanced") {
16128 print_strict_help();
16129 } else {
16130 print_session_help();
16131 }
16132 return;
16133 }
16134 Some("-V" | "--version" | "version") => {
16135 println!("vela {}", env!("CARGO_PKG_VERSION"));
16136 return;
16137 }
16138 Some("proof") if args.get(2).map(String::as_str) == Some("verify") => {
16139 let json = args.iter().any(|arg| arg == "--json");
16140 let frontier = args
16141 .iter()
16142 .skip(3)
16143 .find(|arg| !arg.starts_with('-'))
16144 .map(PathBuf::from)
16145 .unwrap_or_else(|| {
16146 eprintln!(
16147 "{} proof verify requires a frontier repo",
16148 style::err_prefix()
16149 );
16150 std::process::exit(2);
16151 });
16152 cmd_proof_verify(&frontier, json);
16153 return;
16154 }
16155 Some("proof") if args.get(2).map(String::as_str) == Some("explain") => {
16156 let frontier = args
16157 .iter()
16158 .skip(3)
16159 .find(|arg| !arg.starts_with('-'))
16160 .map(PathBuf::from)
16161 .unwrap_or_else(|| {
16162 eprintln!(
16163 "{} proof explain requires a frontier repo",
16164 style::err_prefix()
16165 );
16166 std::process::exit(2);
16167 });
16168 cmd_proof_explain(&frontier);
16169 return;
16170 }
16171 Some(cmd) if !is_science_subcommand(cmd) => {
16172 eprintln!(
16173 "{} unknown or non-release command: {cmd}",
16174 style::err_prefix()
16175 );
16176 eprintln!("run `vela --help` for the strict v0 command surface.");
16177 std::process::exit(2);
16178 }
16179 Some(_) => {}
16180 }
16181 let runtime = tokio::runtime::Runtime::new().expect("failed to create tokio runtime");
16182 runtime.block_on(run_command());
16183}
16184
/// Print `message` to stderr with the standard error prefix and abort the
/// process with exit code 1. The `!` return type lets callers use this in
/// expression position.
fn fail(message: &str) -> ! {
    eprintln!("{} {message}", style::err_prefix());
    std::process::exit(1);
}
16189
16190fn validate_enum_arg(flag: &str, value: &str, valid: &[&str]) {
16195 if !valid.contains(&value) {
16196 fail(&format!(
16197 "invalid {flag} '{value}'. Valid: {}",
16198 valid.join(", ")
16199 ));
16200 }
16201}
16202
/// Generic adapter over [`fail`] for positions that need a value of type `T`
/// (e.g. inside `unwrap_or_else`); it diverges, so no `T` is ever produced.
fn fail_return<T>(message: &str) -> T {
    fail(message)
}