1use crate::{
2 benchmark, bridge, bundle, carina_validate, conformance, decision, diff, events, export,
3 frontier_repo, impact, lint, normalize, packet, project, propagate, proposals, repo, review,
4 search, serve, sign, signals, sources, state, state_integrity, tensions, validate,
5};
6
7use std::collections::BTreeMap;
8use std::future::Future;
9use std::path::{Path, PathBuf};
10use std::pin::Pin;
11use std::sync::OnceLock;
12
13use clap::{Parser, Subcommand};
14use colored::Colorize;
15
16use crate::cli_style as style;
17use reqwest::Client;
18use serde::Serialize;
19use serde_json::{Value, json};
20use sha2::{Digest, Sha256};
21
// Root CLI parser for the `vela` binary. All functionality is dispatched
// through the single `Commands` subcommand tree.
#[derive(Parser)]
#[command(name = "vela", version)]
#[command(about = "Portable frontier state for science")]
struct Cli {
    #[command(subcommand)]
    command: Commands,
}
29
// Top-level command set for `vela`.
//
// NOTE(review): comments here are plain `//` on purpose — with clap's derive
// macro, `///` doc comments become user-visible help/about text, and this
// view of the source defines no help strings for these variants. Field order
// within each variant is also significant for positional arguments, so
// nothing below is reordered.
#[derive(Subcommand)]
enum Commands {
    // --- LLM-backed compilation / scouting commands (share `backend`,
    //     `dry_run`, `json` flags) ---
    Scout {
        folder: PathBuf,
        #[arg(long)]
        frontier: PathBuf,
        #[arg(short, long)]
        backend: Option<String>,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    CompileNotes {
        vault: PathBuf,
        #[arg(long)]
        frontier: PathBuf,
        #[arg(short, long)]
        backend: Option<String>,
        #[arg(long)]
        max_files: Option<usize>,
        #[arg(long)]
        max_items_per_category: Option<usize>,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    CompileCode {
        root: PathBuf,
        #[arg(long)]
        frontier: PathBuf,
        #[arg(short, long)]
        backend: Option<String>,
        #[arg(long)]
        max_files: Option<usize>,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    ReviewPending {
        #[arg(long)]
        frontier: PathBuf,
        #[arg(short, long)]
        backend: Option<String>,
        #[arg(long)]
        max_proposals: Option<usize>,
        #[arg(long, default_value = "1")]
        batch_size: usize,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    FindTensions {
        #[arg(long)]
        frontier: PathBuf,
        #[arg(short, long)]
        backend: Option<String>,
        #[arg(long)]
        max_findings: Option<usize>,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    PlanExperiments {
        #[arg(long)]
        frontier: PathBuf,
        #[arg(short, long)]
        backend: Option<String>,
        #[arg(long)]
        max_findings: Option<usize>,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    CompileData {
        root: PathBuf,
        #[arg(long)]
        frontier: PathBuf,
        #[arg(short, long)]
        backend: Option<String>,
        #[arg(long)]
        sample_rows: Option<usize>,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    // --- Validation / inspection commands ---
    Check {
        source: Option<PathBuf>,
        #[arg(long)]
        schema: bool,
        #[arg(long)]
        stats: bool,
        #[arg(long)]
        conformance: bool,
        #[arg(long, default_value = "tests/conformance")]
        conformance_dir: PathBuf,
        #[arg(long)]
        all: bool,
        #[arg(long)]
        schema_only: bool,
        #[arg(long)]
        strict: bool,
        #[arg(long)]
        fix: bool,
        #[arg(long)]
        json: bool,
    },
    Integrity {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    Impact {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        depth: Option<usize>,
        #[arg(long)]
        json: bool,
    },
    Discord {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
        #[arg(long)]
        kind: Option<String>,
    },
    Normalize {
        source: PathBuf,
        #[arg(short, long)]
        out: Option<PathBuf>,
        #[arg(long)]
        write: bool,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        rewrite_ids: bool,
        #[arg(long)]
        id_map: Option<PathBuf>,
        #[arg(long)]
        resync_provenance: bool,
        #[arg(long)]
        json: bool,
    },
    Proof {
        frontier: PathBuf,
        #[arg(long, short = 'o', default_value = "proof-packet")]
        out: PathBuf,
        #[arg(long, default_value = "bbb-alzheimer")]
        template: String,
        #[arg(long)]
        gold: Option<PathBuf>,
        #[arg(long)]
        record_proof_state: bool,
        #[arg(long)]
        json: bool,
    },
    Repo {
        #[command(subcommand)]
        action: RepoAction,
    },
    Serve {
        // Positional frontier may be omitted when --frontiers or --setup
        // is given instead.
        #[arg(required_unless_present_any = ["frontiers", "setup"])]
        frontier: Option<PathBuf>,
        #[arg(long)]
        frontiers: Option<PathBuf>,
        #[arg(short, long)]
        backend: Option<String>,
        #[arg(long)]
        http: Option<u16>,
        #[arg(long)]
        setup: bool,
        #[arg(long)]
        check_tools: bool,
        #[arg(long)]
        json: bool,
        #[arg(long)]
        workbench: bool,
    },
    Status {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    Log {
        frontier: PathBuf,
        #[arg(long, default_value = "20")]
        limit: usize,
        #[arg(long)]
        kind: Option<String>,
        #[arg(long)]
        json: bool,
    },
    Inbox {
        frontier: PathBuf,
        #[arg(long)]
        kind: Option<String>,
        #[arg(long, default_value = "30")]
        limit: usize,
        #[arg(long)]
        json: bool,
    },
    Ask {
        frontier: PathBuf,
        // Free-form question: everything after the frontier path is collected.
        #[arg(trailing_var_arg = true)]
        question: Vec<String>,
        #[arg(long)]
        json: bool,
    },
    Stats {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    Search {
        query: String,
        #[arg(long)]
        source: Option<PathBuf>,
        #[arg(long)]
        entity: Option<String>,
        #[arg(long)]
        r#type: Option<String>,
        #[arg(long)]
        all: Option<PathBuf>,
        #[arg(long, default_value = "20")]
        limit: usize,
        #[arg(long)]
        json: bool,
    },
    Tensions {
        source: PathBuf,
        #[arg(long)]
        both_high: bool,
        #[arg(long)]
        cross_domain: bool,
        #[arg(long, default_value = "20")]
        top: usize,
        #[arg(long)]
        json: bool,
    },
    Gaps {
        #[command(subcommand)]
        action: GapsAction,
    },
    Bridge {
        #[arg(required = true)]
        inputs: Vec<PathBuf>,
        // Explicit-value bool: `--novelty true|false`, defaulting to true.
        #[arg(long, default_value = "true", action = clap::ArgAction::Set)]
        novelty: bool,
        #[arg(long, default_value = "30")]
        top: usize,
    },
    Export {
        frontier: PathBuf,
        #[arg(short, long, default_value = "csv")]
        format: String,
        #[arg(short, long)]
        output: Option<PathBuf>,
    },
    Packet {
        #[command(subcommand)]
        action: PacketAction,
    },
    Verify {
        path: PathBuf,
        #[arg(long)]
        json: bool,
    },
    Bench {
        frontier: Option<PathBuf>,
        #[arg(long)]
        gold: Option<PathBuf>,
        #[arg(long)]
        candidate: Option<PathBuf>,
        #[arg(long)]
        sources: Option<PathBuf>,
        #[arg(long)]
        threshold: Option<f64>,
        #[arg(long)]
        report: Option<PathBuf>,
        #[arg(long)]
        entity_gold: Option<PathBuf>,
        #[arg(long)]
        link_gold: Option<PathBuf>,
        #[arg(long)]
        suite: Option<PathBuf>,
        #[arg(long)]
        suite_ready: bool,
        #[arg(long)]
        min_f1: Option<f64>,
        #[arg(long)]
        min_precision: Option<f64>,
        #[arg(long)]
        min_recall: Option<f64>,
        #[arg(long)]
        no_thresholds: bool,
        #[arg(long)]
        json: bool,
    },
    Conformance {
        #[arg(default_value = "tests/conformance")]
        dir: PathBuf,
    },
    Version,
    Sign {
        #[command(subcommand)]
        action: SignAction,
    },
    Actor {
        #[command(subcommand)]
        action: ActorAction,
    },
    Federation {
        #[command(subcommand)]
        action: FederationAction,
    },
    Causal {
        #[command(subcommand)]
        action: CausalAction,
    },
    Frontier {
        #[command(subcommand)]
        action: FrontierAction,
    },
    Queue {
        #[command(subcommand)]
        action: QueueAction,
    },
    Registry {
        #[command(subcommand)]
        action: RegistryAction,
    },
    Init {
        #[arg(default_value = ".")]
        path: PathBuf,
        #[arg(long, default_value = "unnamed")]
        name: String,
        #[arg(long, default_value = "default")]
        template: String,
        #[arg(long)]
        no_git: bool,
        #[arg(long)]
        json: bool,
    },
    Quickstart {
        #[arg(default_value = "demo")]
        path: PathBuf,
        #[arg(long, default_value = "Quickstart frontier")]
        name: String,
        #[arg(long, default_value = "reviewer:you")]
        reviewer: String,
        #[arg(long)]
        assertion: Option<String>,
        #[arg(long)]
        keys_out: Option<PathBuf>,
        #[arg(long)]
        json: bool,
    },
    Import {
        frontier: PathBuf,
        #[arg(long)]
        into: Option<PathBuf>,
    },
    Diff {
        target: String,
        frontier_b: Option<PathBuf>,
        #[arg(long)]
        frontier: Option<PathBuf>,
        #[arg(long, default_value = "reviewer:preview")]
        reviewer: String,
        #[arg(long)]
        json: bool,
        #[arg(long)]
        quiet: bool,
    },
    Proposals {
        #[command(subcommand)]
        action: ProposalAction,
    },
    ArtifactToState {
        frontier: PathBuf,
        packet: PathBuf,
        #[arg(long)]
        actor: String,
        #[arg(long)]
        apply_artifacts: bool,
        #[arg(long)]
        json: bool,
    },
    BridgeKit {
        #[command(subcommand)]
        action: BridgeKitAction,
    },
    SourceAdapter {
        #[command(subcommand)]
        action: SourceAdapterAction,
    },
    RuntimeAdapter {
        #[command(subcommand)]
        action: RuntimeAdapterAction,
    },
    Finding {
        #[command(subcommand)]
        command: FindingCommands,
    },
    Link {
        #[command(subcommand)]
        action: LinkAction,
    },
    Workbench {
        #[arg(default_value = ".")]
        path: PathBuf,
        #[arg(long, default_value_t = 3850)]
        port: u16,
        #[arg(long)]
        no_open: bool,
    },
    Bridges {
        #[command(subcommand)]
        action: BridgesAction,
    },
    Entity {
        #[command(subcommand)]
        action: EntityAction,
    },
    // --- Review / curation commands (most take `--apply` to commit the
    //     event instead of previewing it) ---
    Review {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        status: Option<String>,
        #[arg(long)]
        reason: Option<String>,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    Note {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        text: String,
        #[arg(long)]
        author: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    Caveat {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        text: String,
        #[arg(long)]
        author: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    Revise {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        confidence: f64,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    Reject {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    History {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        json: bool,
        #[arg(long, value_name = "RFC3339_TIMESTAMP")]
        as_of: Option<String>,
    },
    ImportEvents {
        source: PathBuf,
        #[arg(long)]
        into: PathBuf,
        #[arg(long)]
        json: bool,
    },
    Retract {
        source: PathBuf,
        finding_id: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    EntityAdd {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        entity: String,
        #[arg(long)]
        entity_type: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    EntityResolve {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        entity: String,
        #[arg(long)]
        source: String,
        #[arg(long)]
        id: String,
        #[arg(long)]
        confidence: f64,
        #[arg(long)]
        matched_name: Option<String>,
        #[arg(long, default_value = "manual")]
        resolution_method: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    SourceFetch {
        identifier: String,
        #[arg(long)]
        cache: Option<PathBuf>,
        #[arg(long)]
        out: Option<PathBuf>,
        #[arg(long)]
        refresh: bool,
        #[arg(long)]
        json: bool,
    },
    SpanRepair {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        section: String,
        #[arg(long)]
        text: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    LocatorRepair {
        frontier: PathBuf,
        atom_id: String,
        #[arg(long)]
        locator: Option<String>,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    Propagate {
        frontier: PathBuf,
        #[arg(long)]
        retract: Option<String>,
        #[arg(long)]
        reduce_confidence: Option<String>,
        #[arg(long)]
        to: Option<f64>,
        #[arg(short, long)]
        output: Option<PathBuf>,
    },
    Replicate {
        frontier: PathBuf,
        target: String,
        #[arg(long)]
        outcome: String,
        #[arg(long)]
        by: String,
        #[arg(long)]
        conditions: String,
        #[arg(long)]
        source_title: String,
        #[arg(long)]
        doi: Option<String>,
        #[arg(long)]
        pmid: Option<String>,
        #[arg(long)]
        sample_size: Option<String>,
        #[arg(long, default_value = "")]
        note: String,
        #[arg(long)]
        previous_attempt: Option<String>,
        #[arg(long, default_value_t = false)]
        no_cascade: bool,
        #[arg(long)]
        json: bool,
    },
    Replications {
        frontier: PathBuf,
        #[arg(long)]
        target: Option<String>,
        #[arg(long)]
        json: bool,
    },
    // --- Artifact / dataset deposition commands ---
    DatasetAdd {
        frontier: PathBuf,
        #[arg(long)]
        name: String,
        #[arg(long)]
        version: Option<String>,
        #[arg(long)]
        content_hash: String,
        #[arg(long)]
        url: Option<String>,
        #[arg(long)]
        license: Option<String>,
        #[arg(long)]
        source_title: String,
        #[arg(long)]
        doi: Option<String>,
        #[arg(long)]
        row_count: Option<u64>,
        #[arg(long)]
        json: bool,
    },
    Datasets {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    CodeAdd {
        frontier: PathBuf,
        #[arg(long)]
        language: String,
        #[arg(long)]
        repo_url: Option<String>,
        #[arg(long)]
        commit: Option<String>,
        #[arg(long)]
        path: String,
        #[arg(long)]
        content_hash: String,
        #[arg(long)]
        line_start: Option<u32>,
        #[arg(long)]
        line_end: Option<u32>,
        #[arg(long)]
        entry_point: Option<String>,
        #[arg(long)]
        json: bool,
    },
    CodeArtifacts {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    ArtifactAdd {
        frontier: PathBuf,
        #[arg(long)]
        kind: String,
        #[arg(long)]
        name: String,
        #[arg(long)]
        file: Option<PathBuf>,
        #[arg(long)]
        url: Option<String>,
        #[arg(long)]
        content_hash: Option<String>,
        #[arg(long)]
        media_type: Option<String>,
        #[arg(long)]
        license: Option<String>,
        #[arg(long)]
        source_title: Option<String>,
        #[arg(long)]
        source_url: Option<String>,
        #[arg(long)]
        doi: Option<String>,
        #[arg(long)]
        target: Vec<String>,
        #[arg(long)]
        metadata: Vec<String>,
        #[arg(long, default_value = "public")]
        access_tier: String,
        #[arg(long, default_value = "reviewer:manual")]
        deposited_by: String,
        #[arg(long, default_value = "artifact deposit")]
        reason: String,
        #[arg(long)]
        json: bool,
    },
    Artifacts {
        frontier: PathBuf,
        #[arg(long)]
        target: Option<String>,
        #[arg(long)]
        json: bool,
    },
    ArtifactAudit {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    DecisionBrief {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    TrialSummary {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    SourceVerification {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    SourceIngestPlan {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    ClinicalTrialImport {
        frontier: PathBuf,
        nct_id: String,
        #[arg(long)]
        input_json: Option<PathBuf>,
        #[arg(long)]
        target: Vec<String>,
        #[arg(long, default_value = "reviewer:manual")]
        deposited_by: String,
        #[arg(long, default_value = "clinical trial record import")]
        reason: String,
        #[arg(long, default_value = "ClinicalTrials.gov public record")]
        license: String,
        #[arg(long)]
        json: bool,
    },
    NegativeResultAdd {
        frontier: PathBuf,
        #[arg(long)]
        kind: String,
        #[arg(long)]
        deposited_by: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        conditions_text: String,
        #[arg(long, default_value = "")]
        notes: String,
        #[arg(long)]
        target: Vec<String>,
        #[arg(long)]
        endpoint: Option<String>,
        #[arg(long)]
        intervention: Option<String>,
        #[arg(long)]
        comparator: Option<String>,
        #[arg(long)]
        population: Option<String>,
        #[arg(long)]
        n_enrolled: Option<u32>,
        #[arg(long)]
        power: Option<f64>,
        #[arg(long)]
        ci_lower: Option<f64>,
        #[arg(long)]
        ci_upper: Option<f64>,
        #[arg(long)]
        effect_size_threshold: Option<f64>,
        #[arg(long)]
        registry_id: Option<String>,
        #[arg(long)]
        reagent: Option<String>,
        #[arg(long)]
        observation: Option<String>,
        #[arg(long)]
        attempts: Option<u32>,
        #[arg(long)]
        source_title: String,
        #[arg(long)]
        doi: Option<String>,
        #[arg(long)]
        url: Option<String>,
        #[arg(long)]
        year: Option<i32>,
        #[arg(long)]
        json: bool,
    },
    NegativeResults {
        frontier: PathBuf,
        #[arg(long)]
        target: Option<String>,
        #[arg(long)]
        json: bool,
    },
    TrajectoryCreate {
        frontier: PathBuf,
        #[arg(long)]
        deposited_by: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        target: Vec<String>,
        #[arg(long, default_value = "")]
        notes: String,
        #[arg(long)]
        json: bool,
    },
    TrajectoryStep {
        frontier: PathBuf,
        trajectory_id: String,
        #[arg(long)]
        kind: String,
        #[arg(long)]
        description: String,
        #[arg(long)]
        actor: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        reference: Vec<String>,
        #[arg(long)]
        json: bool,
    },
    Trajectories {
        frontier: PathBuf,
        #[arg(long)]
        target: Option<String>,
        #[arg(long)]
        json: bool,
    },
    TierSet {
        frontier: PathBuf,
        #[arg(long)]
        object_type: String,
        #[arg(long)]
        object_id: String,
        #[arg(long)]
        tier: String,
        #[arg(long)]
        actor: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        json: bool,
    },
    // --- Prediction / calibration commands ---
    Predict {
        frontier: PathBuf,
        #[arg(long)]
        by: String,
        #[arg(long)]
        claim: String,
        #[arg(long)]
        criterion: String,
        #[arg(long)]
        resolves_by: Option<String>,
        #[arg(long)]
        confidence: f64,
        #[arg(long, default_value = "")]
        target: String,
        #[arg(long, default_value = "affirmed")]
        outcome: String,
        #[arg(long, default_value = "")]
        conditions: String,
        #[arg(long)]
        json: bool,
    },
    Resolve {
        frontier: PathBuf,
        prediction: String,
        #[arg(long)]
        outcome: String,
        #[arg(long)]
        matched: bool,
        #[arg(long)]
        by: String,
        #[arg(long, default_value = "1.0")]
        confidence: f64,
        #[arg(long, default_value = "")]
        source_title: String,
        #[arg(long)]
        doi: Option<String>,
        #[arg(long)]
        json: bool,
    },
    Predictions {
        frontier: PathBuf,
        #[arg(long)]
        by: Option<String>,
        #[arg(long)]
        open: bool,
        #[arg(long)]
        json: bool,
    },
    Calibration {
        frontier: PathBuf,
        #[arg(long)]
        actor: Option<String>,
        #[arg(long)]
        json: bool,
    },
    PredictionsExpire {
        frontier: PathBuf,
        #[arg(long)]
        now: Option<String>,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    Consensus {
        frontier: PathBuf,
        target: String,
        #[arg(long, default_value = "composite")]
        weighting: String,
        #[arg(long)]
        causal_claim: Option<String>,
        #[arg(long)]
        causal_grade_min: Option<String>,
        #[arg(long)]
        json: bool,
    },

    Ingest {
        path: String,
        #[arg(long)]
        frontier: PathBuf,
        #[arg(short, long)]
        backend: Option<String>,
        #[arg(long)]
        actor: Option<String>,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },

    Propose {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        status: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },

    Accept {
        frontier: PathBuf,
        proposal_id: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        json: bool,
    },

    Attest {
        frontier: PathBuf,
        #[arg(long)]
        event: Option<String>,
        #[arg(long)]
        attester: Option<String>,
        #[arg(long)]
        scope_note: Option<String>,
        #[arg(long)]
        proof_id: Option<String>,
        #[arg(long)]
        signature: Option<String>,
        #[arg(long)]
        key: Option<PathBuf>,
        #[arg(long)]
        json: bool,
    },

    Lineage {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long, value_name = "RFC3339_TIMESTAMP")]
        as_of: Option<String>,
        #[arg(long)]
        json: bool,
    },

    Carina {
        #[command(subcommand)]
        action: CarinaAction,
    },

    Atlas {
        #[command(subcommand)]
        action: AtlasAction,
    },

    Constellation {
        #[command(subcommand)]
        action: ConstellationAction,
    },
}
1757
// Subcommands for `vela atlas`: creating, materializing, serving, and
// updating an atlas (a named collection of frontiers under `atlases_root`).
#[derive(Subcommand)]
enum AtlasAction {
    Init {
        name: String,
        // Comma-separated (or repeated) list; at least one value per use.
        #[arg(long, value_delimiter = ',', num_args = 1..)]
        frontiers: Vec<PathBuf>,
        #[arg(long, default_value = "general")]
        domain: String,
        #[arg(long)]
        scope_note: Option<String>,
        #[arg(long, default_value = "atlases")]
        atlases_root: PathBuf,
        #[arg(long)]
        json: bool,
    },
    Materialize {
        name: String,
        #[arg(long, default_value = "atlases")]
        atlases_root: PathBuf,
        #[arg(long)]
        json: bool,
    },
    Serve {
        name: String,
        #[arg(long, default_value = "atlases")]
        atlases_root: PathBuf,
        #[arg(long, default_value_t = 3848)]
        port: u16,
        #[arg(long)]
        no_open: bool,
    },
    Update {
        name: String,
        #[arg(long, value_delimiter = ',')]
        add_frontier: Vec<PathBuf>,
        #[arg(long, value_delimiter = ',')]
        remove_vfr_id: Vec<String>,
        #[arg(long, default_value = "atlases")]
        atlases_root: PathBuf,
        #[arg(long)]
        json: bool,
    },
}
1834
// Subcommands for `vela constellation`: one level above atlases — a named
// grouping of atlas paths under `constellations_root`.
#[derive(Subcommand)]
enum ConstellationAction {
    Init {
        name: String,
        #[arg(long, value_delimiter = ',', num_args = 1..)]
        atlases: Vec<PathBuf>,
        #[arg(long)]
        scope_note: Option<String>,
        #[arg(long, default_value = "constellations")]
        constellations_root: PathBuf,
        #[arg(long)]
        json: bool,
    },
    Materialize {
        name: String,
        #[arg(long, default_value = "constellations")]
        constellations_root: PathBuf,
        #[arg(long)]
        json: bool,
    },
    Serve {
        name: String,
        #[arg(long, default_value = "constellations")]
        constellations_root: PathBuf,
        #[arg(long, default_value_t = 3849)]
        port: u16,
        #[arg(long)]
        no_open: bool,
    },
}
1879
// Subcommands for `vela carina` (see `carina_validate` module): validate a
// file against a primitive schema, list primitives, or dump one schema.
#[derive(Subcommand)]
enum CarinaAction {
    Validate {
        path: PathBuf,
        #[arg(long)]
        primitive: Option<String>,
        #[arg(long)]
        json: bool,
    },
    List {
        #[arg(long)]
        json: bool,
    },
    Schema { primitive: String },
}
1909
// Subcommands for `vela packet`: inspect or validate a proof packet on disk.
#[derive(Subcommand)]
enum PacketAction {
    Inspect {
        path: PathBuf,
        #[arg(long)]
        json: bool,
    },
    Validate {
        path: PathBuf,
        #[arg(long)]
        json: bool,
    },
}
1925
// Subcommands for `vela sign`: key generation, signing a frontier,
// verifying signatures, and setting per-finding signature thresholds.
#[derive(Subcommand)]
enum SignAction {
    GenerateKeypair {
        #[arg(long, default_value = ".vela/keys")]
        out: PathBuf,
        #[arg(long)]
        json: bool,
    },
    Apply {
        frontier: PathBuf,
        #[arg(long)]
        private_key: PathBuf,
        #[arg(long)]
        json: bool,
    },
    Verify {
        frontier: PathBuf,
        #[arg(long)]
        public_key: Option<PathBuf>,
        #[arg(long)]
        json: bool,
    },
    ThresholdSet {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        to: u32,
        #[arg(long)]
        json: bool,
    },
}
1966
// Subcommands for `vela actor`: register actors (with public key and
// optional tier/ORCID/clearance) on a frontier, or list them.
#[derive(Subcommand)]
enum ActorAction {
    Add {
        frontier: PathBuf,
        id: String,
        #[arg(long)]
        pubkey: String,
        #[arg(long)]
        tier: Option<String>,
        #[arg(long)]
        orcid: Option<String>,
        #[arg(long)]
        clearance: Option<String>,
        #[arg(long)]
        json: bool,
    },
    List {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
}
2003
// Subcommands for `vela causal`: audit causal claims, query effects,
// print the causal graph, and run counterfactual interventions.
#[derive(Subcommand)]
enum CausalAction {
    Audit {
        frontier: PathBuf,
        #[arg(long)]
        problems_only: bool,
        #[arg(long)]
        json: bool,
    },
    Effect {
        frontier: PathBuf,
        source: String,
        #[arg(long)]
        on: String,
        #[arg(long)]
        json: bool,
    },
    Graph {
        frontier: PathBuf,
        #[arg(long)]
        node: Option<String>,
        #[arg(long)]
        json: bool,
    },
    Counterfactual {
        frontier: PathBuf,
        intervene_on: String,
        #[arg(long)]
        set_to: f64,
        #[arg(long)]
        target: String,
        #[arg(long)]
        json: bool,
    },
}
2070
// Subcommands for `vela bridges`: derive cross-frontier bridges between two
// frontiers, then list/show/confirm/refute individual bridges.
#[derive(Subcommand)]
enum BridgesAction {
    Derive {
        frontier_a: PathBuf,
        #[arg(long, default_value = "a")]
        label_a: String,
        frontier_b: PathBuf,
        #[arg(long, default_value = "b")]
        label_b: String,
        #[arg(long)]
        json: bool,
    },
    List {
        frontier: PathBuf,
        #[arg(long)]
        status: Option<String>,
        #[arg(long)]
        json: bool,
    },
    Show {
        frontier: PathBuf,
        bridge_id: String,
        #[arg(long)]
        json: bool,
    },
    Confirm {
        frontier: PathBuf,
        bridge_id: String,
        #[arg(long)]
        reviewer: Option<String>,
        #[arg(long)]
        note: Option<String>,
        #[arg(long)]
        json: bool,
    },
    Refute {
        frontier: PathBuf,
        bridge_id: String,
        #[arg(long)]
        reviewer: Option<String>,
        #[arg(long)]
        note: Option<String>,
        #[arg(long)]
        json: bool,
    },
}
2138
// Subcommands for `vela federation`: manage peers and synchronize events
// between federated frontiers (directly or via a hub).
#[derive(Subcommand)]
enum FederationAction {
    PeerAdd {
        frontier: PathBuf,
        id: String,
        #[arg(long)]
        url: String,
        #[arg(long)]
        pubkey: String,
        #[arg(long, default_value = "")]
        note: String,
        #[arg(long)]
        json: bool,
    },
    PeerList {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    PeerRemove {
        frontier: PathBuf,
        id: String,
        #[arg(long)]
        json: bool,
    },
    Sync {
        frontier: PathBuf,
        peer_id: String,
        #[arg(long)]
        url: Option<String>,
        #[arg(long)]
        via_hub: bool,
        #[arg(long)]
        vfr_id: Option<String>,
        #[arg(long)]
        allow_cross_vfr: bool,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    PushResolution {
        frontier: PathBuf,
        conflict_event_id: String,
        // Explicit long name: the field would otherwise collide with the
        // reserved-sounding `--to` spelling used elsewhere.
        #[arg(long = "to")]
        to: String,
        #[arg(long)]
        key: Option<PathBuf>,
        #[arg(long)]
        vfr_id: Option<String>,
        #[arg(long)]
        json: bool,
    },
}
2255
// Subcommands for `vela frontier`: create frontiers, materialize state,
// manage VFR dependencies, and diff a frontier over time.
#[derive(Subcommand)]
enum FrontierAction {
    New {
        path: PathBuf,
        #[arg(long)]
        name: String,
        #[arg(long, default_value = "")]
        description: String,
        #[arg(long)]
        force: bool,
        #[arg(long)]
        json: bool,
    },
    Materialize {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    AddDep {
        frontier: PathBuf,
        vfr_id: String,
        #[arg(long)]
        locator: String,
        #[arg(long)]
        snapshot: String,
        #[arg(long)]
        name: Option<String>,
        #[arg(long)]
        json: bool,
    },
    ListDeps {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    RemoveDep {
        frontier: PathBuf,
        vfr_id: String,
        #[arg(long)]
        json: bool,
    },
    RefreshDeps {
        frontier: PathBuf,
        // Default registry hub used to resolve dependency snapshots.
        #[arg(long, default_value = "https://vela-hub.fly.dev")]
        from: String,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    Diff {
        frontier: PathBuf,
        #[arg(long)]
        since: Option<String>,
        #[arg(long)]
        week: Option<String>,
        #[arg(long)]
        json: bool,
    },
}
2366
// Subcommands for `vela repo`: report repository status or run diagnostics.
#[derive(Subcommand)]
enum RepoAction {
    Status {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    Doctor {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
}
2386
// Subcommands for `vela queue`: manage the pending-signature queue
// (list entries, sign them with a key, or clear the queue file).
#[derive(Subcommand)]
enum QueueAction {
    List {
        #[arg(long)]
        queue_file: Option<PathBuf>,
        #[arg(long)]
        json: bool,
    },
    Sign {
        #[arg(long)]
        actor: String,
        #[arg(long)]
        key: PathBuf,
        #[arg(long)]
        queue_file: Option<PathBuf>,
        // `--all` is kept as an alias for backward compatibility.
        #[arg(long, alias = "all")]
        yes_to_all: bool,
        #[arg(long)]
        json: bool,
    },
    Clear {
        #[arg(long)]
        queue_file: Option<PathBuf>,
        #[arg(long)]
        json: bool,
    },
}
2426
// Subcommands for `vela registry`: interact with a remote frontier registry
// (list, publish, query dependents, mirror, and pull VFRs).
#[derive(Subcommand)]
enum RegistryAction {
    List {
        #[arg(long)]
        from: Option<String>,
        #[arg(long)]
        json: bool,
    },
    Publish {
        frontier: PathBuf,
        #[arg(long)]
        owner: String,
        #[arg(long)]
        key: PathBuf,
        #[arg(long)]
        locator: Option<String>,
        #[arg(long)]
        to: Option<String>,
        #[arg(long)]
        json: bool,
    },
    DependsOn {
        vfr_id: String,
        #[arg(long, default_value = "https://vela-hub.fly.dev")]
        from: String,
        #[arg(long)]
        json: bool,
    },
    Mirror {
        vfr_id: String,
        #[arg(long)]
        from: String,
        #[arg(long)]
        to: String,
        #[arg(long)]
        json: bool,
    },
    Pull {
        vfr_id: String,
        #[arg(long)]
        from: Option<String>,
        #[arg(long)]
        out: PathBuf,
        #[arg(long)]
        transitive: bool,
        // Maximum transitive-dependency recursion depth.
        #[arg(long, default_value = "4")]
        depth: usize,
        #[arg(long)]
        json: bool,
    },
}
2519
// Subcommands for `vela gaps`: rank knowledge gaps in a frontier.
#[derive(Subcommand)]
enum GapsAction {
    Rank {
        frontier: PathBuf,
        #[arg(long, default_value = "10")]
        top: usize,
        #[arg(long)]
        domain: Option<String>,
        #[arg(long)]
        json: bool,
    },
}
2533
// Subcommands for `vela link`: add a typed link between two findings.
#[derive(Subcommand)]
enum LinkAction {
    Add {
        frontier: PathBuf,
        #[arg(long)]
        from: String,
        #[arg(long)]
        to: String,
        // `type` is a Rust keyword, hence the raw identifier.
        #[arg(long, default_value = "supports")]
        r#type: String,
        #[arg(long, default_value = "")]
        note: String,
        #[arg(long, default_value = "reviewer")]
        inferred_by: String,
        #[arg(long)]
        no_check_target: bool,
        #[arg(long)]
        json: bool,
    },
}
2572
// Subcommands for `vela entity`: run entity resolution over a frontier,
// or list known entities.
#[derive(Subcommand)]
enum EntityAction {
    Resolve {
        frontier: PathBuf,
        #[arg(long)]
        force: bool,
        #[arg(long)]
        json: bool,
    },
    List {
        #[arg(long)]
        json: bool,
    },
}
2595
// Subcommands for `vela finding`; handled inline in `run_command`
// (`Commands::Finding { command }`). Field order matters: clap maps field
// order to positional-argument order, and `run_command` destructures every
// field by name, so fields must not be added, removed, or reordered without
// updating that match arm. `//` comments (not `///`) are deliberate so the
// derived --help output is unchanged.
#[derive(Subcommand)]
enum FindingCommands {
    // Add a new finding. The handler validates `--type`, `--evidence-type`,
    // and `--source-type` against `bundle::VALID_ASSERTION_TYPES`,
    // `bundle::VALID_EVIDENCE_TYPES`, and
    // `bundle::VALID_PROVENANCE_SOURCE_TYPES`, and each parsed entity type
    // against `bundle::VALID_ENTITY_TYPES`, before calling
    // `state::add_finding`.
    Add {
        // Positional: path to the frontier to add the finding to.
        frontier: PathBuf,
        // The finding's assertion text.
        #[arg(long)]
        assertion: String,
        // Assertion type; validated against bundle::VALID_ASSERTION_TYPES.
        #[arg(long, default_value = "mechanism")]
        r#type: String,
        // Human-readable source description.
        #[arg(long, default_value = "manual finding")]
        source: String,
        // Provenance source type; validated against
        // bundle::VALID_PROVENANCE_SOURCE_TYPES.
        #[arg(long, default_value = "expert_assertion")]
        source_type: String,
        // Actor recorded as the finding's author (required).
        #[arg(long)]
        author: String,
        // Initial confidence score.
        #[arg(long, default_value = "0.3")]
        confidence: f64,
        // Evidence type; validated against bundle::VALID_EVIDENCE_TYPES.
        #[arg(long, default_value = "theoretical")]
        evidence_type: String,
        // Entities string parsed by `parse_entities` into (name, type) pairs
        // — delimiter format not visible here; see parse_entities.
        #[arg(long, default_value = "")]
        entities: String,
        // Mark the entity list as human-reviewed.
        #[arg(long)]
        entities_reviewed: bool,
        // Repeatable evidence-span specs, parsed by `parse_evidence_spans`.
        #[arg(long)]
        evidence_span: Vec<String>,
        // Flag the finding as a knowledge gap.
        #[arg(long)]
        gap: bool,
        // Flag the finding as negative space.
        #[arg(long)]
        negative_space: bool,
        // Optional bibliographic metadata for the source.
        #[arg(long)]
        doi: Option<String>,
        #[arg(long)]
        pmid: Option<String>,
        #[arg(long)]
        year: Option<i32>,
        #[arg(long)]
        journal: Option<String>,
        #[arg(long)]
        url: Option<String>,
        // Semicolon-separated author list; the handler splits on ';' and
        // trims each entry.
        #[arg(long)]
        source_authors: Option<String>,
        // Free-text experimental conditions.
        #[arg(long)]
        conditions_text: Option<String>,
        // Semicolon-separated species list; split on ';' by the handler.
        #[arg(long)]
        species: Option<String>,
        // Evidence-context flags forwarded to state::FindingDraftOptions.
        #[arg(long)]
        in_vivo: bool,
        #[arg(long)]
        in_vitro: bool,
        #[arg(long)]
        human_data: bool,
        #[arg(long)]
        clinical_trial: bool,
        // Emit machine-readable JSON instead of human-oriented output.
        #[arg(long)]
        json: bool,
        // Apply the change rather than only previewing it — TODO confirm
        // exact dry-run semantics in state::add_finding.
        #[arg(long)]
        apply: bool,
    },
    // Replace an existing finding with a new one. Same enum validations as
    // `Add`; the handler calls `state::supersede_finding` and hardcodes
    // entities_reviewed=false, empty evidence_spans, gap=false and
    // negative_space=false for the replacement draft.
    Supersede {
        // Positional: path to the frontier to modify.
        frontier: PathBuf,
        // Positional: id of the finding being superseded.
        old_id: String,
        // Replacement assertion text.
        #[arg(long)]
        assertion: String,
        // Assertion type; validated against bundle::VALID_ASSERTION_TYPES.
        #[arg(long, default_value = "mechanism")]
        r#type: String,
        // Human-readable source description.
        #[arg(long, default_value = "manual finding")]
        source: String,
        // Provenance source type; validated against
        // bundle::VALID_PROVENANCE_SOURCE_TYPES.
        #[arg(long, default_value = "expert_assertion")]
        source_type: String,
        // Actor recorded as the new finding's author (required).
        #[arg(long)]
        author: String,
        // Why the old finding is being superseded (required).
        #[arg(long)]
        reason: String,
        // Confidence of the replacement; note the higher default than `Add`.
        #[arg(long, default_value = "0.5")]
        confidence: f64,
        // Evidence type; validated against bundle::VALID_EVIDENCE_TYPES.
        #[arg(long, default_value = "experimental")]
        evidence_type: String,
        // Entities string parsed by `parse_entities`; see that helper for format.
        #[arg(long, default_value = "")]
        entities: String,
        // Optional bibliographic metadata for the source.
        #[arg(long)]
        doi: Option<String>,
        #[arg(long)]
        pmid: Option<String>,
        #[arg(long)]
        year: Option<i32>,
        #[arg(long)]
        journal: Option<String>,
        #[arg(long)]
        url: Option<String>,
        // Semicolon-separated author list; split on ';' by the handler.
        #[arg(long)]
        source_authors: Option<String>,
        // Free-text experimental conditions.
        #[arg(long)]
        conditions_text: Option<String>,
        // Semicolon-separated species list; split on ';' by the handler.
        #[arg(long)]
        species: Option<String>,
        // Evidence-context flags forwarded to state::FindingDraftOptions.
        #[arg(long)]
        in_vivo: bool,
        #[arg(long)]
        in_vitro: bool,
        #[arg(long)]
        human_data: bool,
        #[arg(long)]
        clinical_trial: bool,
        // Emit machine-readable JSON instead of human-oriented output.
        #[arg(long)]
        json: bool,
        // Apply the change rather than only previewing it.
        #[arg(long)]
        apply: bool,
    },
    // Set the causal claim (and optional evidence grade) on a finding.
    // The handler validates `--claim` against bundle::VALID_CAUSAL_CLAIMS
    // and `--grade` against bundle::VALID_CAUSAL_EVIDENCE_GRADES, then
    // calls `state::set_causal`.
    CausalSet {
        // Positional: path to the frontier to modify.
        frontier: PathBuf,
        // Positional: id of the finding whose causal claim is set.
        finding_id: String,
        // Causal claim value; validated against bundle::VALID_CAUSAL_CLAIMS.
        #[arg(long)]
        claim: String,
        // Optional evidence grade; validated against
        // bundle::VALID_CAUSAL_EVIDENCE_GRADES when present.
        #[arg(long)]
        grade: Option<String>,
        // Actor performing the change (required).
        #[arg(long)]
        actor: String,
        // Justification recorded with the change (required).
        #[arg(long)]
        reason: String,
        // Emit machine-readable JSON instead of human-oriented output.
        #[arg(long)]
        json: bool,
    },
}
2786
// Subcommands for `vela proposals`; dispatched by `cmd_proposals` in
// `run_command`. `//` comments (not `///`) are deliberate so clap's
// derived --help output is unchanged.
#[derive(Subcommand)]
enum ProposalAction {
    // List proposals in a frontier, optionally filtered by status.
    List {
        // Positional: path to the frontier to read.
        frontier: PathBuf,
        // Optional status filter (e.g. pending/accepted — TODO confirm values).
        #[arg(long)]
        status: Option<String>,
        // Emit machine-readable JSON instead of human-oriented output.
        #[arg(long)]
        json: bool,
    },
    // Show a single proposal by id.
    Show {
        frontier: PathBuf,
        // Positional: proposal id to display.
        proposal_id: String,
        #[arg(long)]
        json: bool,
    },
    // Preview the effect of applying a proposal without committing it.
    Preview {
        frontier: PathBuf,
        // Positional: proposal id to preview.
        proposal_id: String,
        // Reviewer identity recorded for the preview.
        #[arg(long, default_value = "reviewer:preview")]
        reviewer: String,
        #[arg(long)]
        json: bool,
    },
    // Import proposals from an external file into a frontier.
    Import {
        frontier: PathBuf,
        // Positional: file to import proposals from.
        source: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Validate a proposals file without importing it.
    Validate {
        // Positional: file to validate.
        source: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Export proposals from a frontier to a file, optionally by status.
    Export {
        frontier: PathBuf,
        // Positional: output file path.
        output: PathBuf,
        // Optional status filter for which proposals to export.
        #[arg(long)]
        status: Option<String>,
        #[arg(long)]
        json: bool,
    },
    // Accept a proposal; reviewer identity and reason are required.
    Accept {
        frontier: PathBuf,
        proposal_id: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        json: bool,
    },
    // Reject a proposal; reviewer identity and reason are required.
    Reject {
        frontier: PathBuf,
        proposal_id: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        json: bool,
    },
}
2858
// Subcommands for `vela source-adapter`; dispatched by the async
// `cmd_source_adapter` in `run_command`. `//` comments (not `///`) keep the
// derived --help text unchanged.
#[derive(Subcommand)]
enum SourceAdapterAction {
    // Run a named source adapter against a frontier.
    Run {
        // Positional: path to the frontier the adapter operates on.
        frontier: PathBuf,
        // Positional: name of the adapter to run.
        adapter: String,
        // Actor recorded for the run (required).
        #[arg(long)]
        actor: String,
        // Repeatable `--entry <…>` values selecting specific entries;
        // the flag is renamed to singular `--entry` on the CLI.
        #[arg(long = "entry")]
        entries: Vec<String>,
        // Optional priority filter — TODO confirm accepted values.
        #[arg(long)]
        priority: Option<String>,
        // Also process entries that are otherwise excluded.
        #[arg(long)]
        include_excluded: bool,
        // Tolerate partial results instead of failing the whole run.
        #[arg(long)]
        allow_partial: bool,
        // Report what would happen without mutating the frontier.
        #[arg(long)]
        dry_run: bool,
        // Optional directory of pre-fetched inputs for the adapter.
        #[arg(long)]
        input_dir: Option<PathBuf>,
        // Apply artifacts produced by the adapter to the frontier.
        #[arg(long)]
        apply_artifacts: bool,
        // Emit machine-readable JSON instead of human-oriented output.
        #[arg(long)]
        json: bool,
    },
}
2896
// Subcommands for `vela runtime-adapter`; dispatched by the synchronous
// `cmd_runtime_adapter` in `run_command`. `//` comments (not `///`) keep the
// derived --help text unchanged.
#[derive(Subcommand)]
enum RuntimeAdapterAction {
    // Run a named runtime adapter over a single input file.
    Run {
        // Positional: path to the frontier the adapter operates on.
        frontier: PathBuf,
        // Positional: name of the adapter to run.
        adapter: String,
        // Input file fed to the adapter (required).
        #[arg(long)]
        input: PathBuf,
        // Actor recorded for the run (required).
        #[arg(long)]
        actor: String,
        // Report what would happen without mutating the frontier.
        #[arg(long)]
        dry_run: bool,
        // Apply artifacts produced by the adapter to the frontier.
        #[arg(long)]
        apply_artifacts: bool,
        // Emit machine-readable JSON instead of human-oriented output.
        #[arg(long)]
        json: bool,
    },
}
2922
// Subcommands for `vela bridge-kit`; dispatched by `cmd_bridge_kit` in
// `run_command`. `//` comments (not `///`) keep the derived --help text
// unchanged.
#[derive(Subcommand)]
enum BridgeKitAction {
    // Validate a bridge-kit file without applying it.
    Validate {
        // Positional: file to validate.
        source: PathBuf,
        // Emit machine-readable JSON instead of human-oriented output.
        #[arg(long)]
        json: bool,
    },
}
2934
2935pub async fn run_command() {
2936 dotenvy::dotenv().ok();
2937
2938 match Cli::parse().command {
2939 Commands::Scout {
2940 folder,
2941 frontier,
2942 backend,
2943 dry_run,
2944 json,
2945 } => {
2946 cmd_scout(&folder, &frontier, backend.as_deref(), dry_run, json).await;
2947 }
2948 Commands::CompileNotes {
2949 vault,
2950 frontier,
2951 backend,
2952 max_files,
2953 max_items_per_category,
2954 dry_run,
2955 json,
2956 } => {
2957 cmd_compile_notes(
2958 &vault,
2959 &frontier,
2960 backend.as_deref(),
2961 max_files,
2962 max_items_per_category,
2963 dry_run,
2964 json,
2965 )
2966 .await;
2967 }
2968 Commands::CompileCode {
2969 root,
2970 frontier,
2971 backend,
2972 max_files,
2973 dry_run,
2974 json,
2975 } => {
2976 cmd_compile_code(
2977 &root,
2978 &frontier,
2979 backend.as_deref(),
2980 max_files,
2981 dry_run,
2982 json,
2983 )
2984 .await;
2985 }
2986 Commands::CompileData {
2987 root,
2988 frontier,
2989 backend,
2990 sample_rows,
2991 dry_run,
2992 json,
2993 } => {
2994 cmd_compile_data(
2995 &root,
2996 &frontier,
2997 backend.as_deref(),
2998 sample_rows,
2999 dry_run,
3000 json,
3001 )
3002 .await;
3003 }
3004 Commands::ReviewPending {
3005 frontier,
3006 backend,
3007 max_proposals,
3008 batch_size,
3009 dry_run,
3010 json,
3011 } => {
3012 cmd_review_pending(
3013 &frontier,
3014 backend.as_deref(),
3015 max_proposals,
3016 batch_size,
3017 dry_run,
3018 json,
3019 )
3020 .await;
3021 }
3022 Commands::FindTensions {
3023 frontier,
3024 backend,
3025 max_findings,
3026 dry_run,
3027 json,
3028 } => {
3029 cmd_find_tensions(&frontier, backend.as_deref(), max_findings, dry_run, json).await;
3030 }
3031 Commands::PlanExperiments {
3032 frontier,
3033 backend,
3034 max_findings,
3035 dry_run,
3036 json,
3037 } => {
3038 cmd_plan_experiments(&frontier, backend.as_deref(), max_findings, dry_run, json).await;
3039 }
3040 Commands::Check {
3041 source,
3042 schema,
3043 stats,
3044 conformance,
3045 conformance_dir,
3046 all,
3047 schema_only,
3048 strict,
3049 fix,
3050 json,
3051 } => cmd_check(
3052 source.as_deref(),
3053 schema,
3054 stats,
3055 conformance,
3056 &conformance_dir,
3057 all,
3058 schema_only,
3059 strict,
3060 fix,
3061 json,
3062 ),
3063 Commands::Integrity { frontier, json } => cmd_integrity(&frontier, json),
3064 Commands::Impact {
3065 frontier,
3066 finding_id,
3067 depth,
3068 json,
3069 } => cmd_impact(&frontier, &finding_id, depth, json),
3070 Commands::Discord {
3071 frontier,
3072 json,
3073 kind,
3074 } => cmd_discord(&frontier, json, kind.as_deref()),
3075 Commands::Normalize {
3076 source,
3077 out,
3078 write,
3079 dry_run,
3080 rewrite_ids,
3081 id_map,
3082 resync_provenance,
3083 json,
3084 } => cmd_normalize(
3085 &source,
3086 out.as_deref(),
3087 write,
3088 dry_run,
3089 rewrite_ids,
3090 id_map.as_deref(),
3091 resync_provenance,
3092 json,
3093 ),
3094 Commands::Proof {
3095 frontier,
3096 out,
3097 template,
3098 gold,
3099 record_proof_state,
3100 json,
3101 } => cmd_proof(
3102 &frontier,
3103 &out,
3104 &template,
3105 gold.as_deref(),
3106 record_proof_state,
3107 json,
3108 ),
3109 Commands::Repo { action } => cmd_repo(action),
3110 Commands::Serve {
3111 frontier,
3112 frontiers,
3113 backend,
3114 http,
3115 setup,
3116 check_tools,
3117 json,
3118 workbench,
3119 } => {
3120 if setup {
3121 cmd_mcp_setup(frontier.as_deref(), frontiers.as_deref());
3122 } else if check_tools {
3123 let source =
3124 serve::ProjectSource::from_args(frontier.as_deref(), frontiers.as_deref());
3125 match serve::check_tools(source) {
3126 Ok(report) => {
3127 if json {
3128 println!(
3129 "{}",
3130 serde_json::to_string_pretty(&report)
3131 .expect("failed to serialize tool check report")
3132 );
3133 } else {
3134 print_tool_check_report(&report);
3135 }
3136 }
3137 Err(e) => fail(&format!("Tool check failed: {e}")),
3138 }
3139 } else {
3140 let source =
3141 serve::ProjectSource::from_args(frontier.as_deref(), frontiers.as_deref());
3142 let resolved_port = if workbench {
3144 Some(http.unwrap_or(3848))
3145 } else {
3146 http
3147 };
3148 if let Some(port) = resolved_port {
3149 serve::run_http(source, backend.as_deref(), port, workbench).await;
3150 } else {
3151 serve::run(source, backend.as_deref()).await;
3152 }
3153 }
3154 }
3155 Commands::Status { frontier, json } => cmd_status(&frontier, json),
3156 Commands::Log {
3157 frontier,
3158 limit,
3159 kind,
3160 json,
3161 } => cmd_log(&frontier, limit, kind.as_deref(), json),
3162 Commands::Inbox {
3163 frontier,
3164 kind,
3165 limit,
3166 json,
3167 } => cmd_inbox(&frontier, kind.as_deref(), limit, json),
3168 Commands::Ask {
3169 frontier,
3170 question,
3171 json,
3172 } => cmd_ask(&frontier, &question.join(" "), json),
3173 Commands::Stats { frontier, json } => {
3174 if json {
3175 print_stats_json(&frontier);
3176 } else {
3177 cmd_stats(&frontier);
3178 }
3179 }
3180 Commands::Search {
3181 source,
3182 query,
3183 entity,
3184 r#type,
3185 all,
3186 limit,
3187 json,
3188 } => cmd_search(
3189 source.as_deref(),
3190 &query,
3191 entity.as_deref(),
3192 r#type.as_deref(),
3193 all.as_deref(),
3194 limit,
3195 json,
3196 ),
3197 Commands::Tensions {
3198 source,
3199 both_high,
3200 cross_domain,
3201 top,
3202 json,
3203 } => cmd_tensions(&source, both_high, cross_domain, top, json),
3204 Commands::Gaps { action } => cmd_gaps(action),
3205 Commands::Bridge {
3206 inputs,
3207 novelty,
3208 top,
3209 } => cmd_bridge(&inputs, novelty, top).await,
3210 Commands::Export {
3211 frontier,
3212 format,
3213 output,
3214 } => export::run(&frontier, &format, output.as_deref()),
3215 Commands::Packet { action } => cmd_packet(action),
3216 Commands::Verify { path, json } => cmd_verify(&path, json),
3217 Commands::Bench {
3218 frontier,
3219 gold,
3220 candidate,
3221 sources,
3222 threshold,
3223 report,
3224 entity_gold,
3225 link_gold,
3226 suite,
3227 suite_ready,
3228 min_f1,
3229 min_precision,
3230 min_recall,
3231 no_thresholds,
3232 json,
3233 } => {
3234 if let Some(cand) = candidate.clone() {
3239 let Some(g) = gold.clone() else {
3240 eprintln!(
3241 "{} `vela bench --candidate <…>` requires `--gold <…>`",
3242 style::err_prefix()
3243 );
3244 std::process::exit(2);
3245 };
3246 cmd_agent_bench(
3247 &g,
3248 &cand,
3249 sources.as_deref(),
3250 threshold,
3251 report.as_deref(),
3252 json,
3253 );
3254 } else {
3255 cmd_bench(BenchArgs {
3256 frontier,
3257 gold,
3258 entity_gold,
3259 link_gold,
3260 suite,
3261 suite_ready,
3262 min_f1,
3263 min_precision,
3264 min_recall,
3265 no_thresholds,
3266 json,
3267 });
3268 }
3269 }
3270 Commands::Conformance { dir } => {
3271 let _ = conformance::run(&dir);
3272 }
3273 Commands::Version => println!("vela 0.36.0"),
3274 Commands::Sign { action } => cmd_sign(action),
3275 Commands::Actor { action } => cmd_actor(action),
3276 Commands::Federation { action } => cmd_federation(action),
3277 Commands::Causal { action } => cmd_causal(action),
3278 Commands::Frontier { action } => cmd_frontier(action),
3279 Commands::Queue { action } => cmd_queue(action),
3280 Commands::Registry { action } => cmd_registry(action),
3281 Commands::Init {
3282 path,
3283 name,
3284 template,
3285 no_git,
3286 json,
3287 } => cmd_init(&path, &name, &template, !no_git, json),
3288 Commands::Quickstart {
3289 path,
3290 name,
3291 reviewer,
3292 assertion,
3293 keys_out,
3294 json,
3295 } => cmd_quickstart(
3296 &path,
3297 &name,
3298 &reviewer,
3299 assertion.as_deref(),
3300 keys_out.as_deref(),
3301 json,
3302 ),
3303 Commands::Import { frontier, into } => cmd_import(&frontier, into.as_deref()),
3304 Commands::Diff {
3305 target,
3306 frontier_b,
3307 frontier,
3308 reviewer,
3309 json,
3310 quiet,
3311 } => {
3312 if target.starts_with("vpr_") {
3317 let frontier_root = frontier
3318 .clone()
3319 .or_else(|| frontier_b.clone())
3320 .unwrap_or_else(|| std::path::PathBuf::from("."));
3321 let preview = proposals::preview_at_path(&frontier_root, &target, &reviewer)
3322 .unwrap_or_else(|e| fail_return(&e));
3323 let payload = json!({
3324 "ok": true,
3325 "command": "diff.proposal",
3326 "frontier": frontier_root.display().to_string(),
3327 "proposal_id": target,
3328 "preview": preview,
3329 });
3330 if json {
3331 println!(
3332 "{}",
3333 serde_json::to_string_pretty(&payload)
3334 .expect("failed to serialize diff preview")
3335 );
3336 } else {
3337 println!("vela diff · proposal preview");
3338 println!(" proposal: {}", target);
3339 println!(" kind: {}", preview.kind);
3340 println!(
3341 " findings: {} -> {}",
3342 preview.findings_before, preview.findings_after
3343 );
3344 println!(
3345 " artifacts: {} -> {}",
3346 preview.artifacts_before, preview.artifacts_after
3347 );
3348 println!(
3349 " events: {} -> {}",
3350 preview.events_before, preview.events_after
3351 );
3352 if !preview.changed_findings.is_empty() {
3353 println!(
3354 " findings changed: {}",
3355 preview.changed_findings.join(", ")
3356 );
3357 }
3358 }
3359 } else {
3360 let frontier_a = std::path::PathBuf::from(&target);
3361 let b = frontier_b.unwrap_or_else(|| {
3362 fail_return(
3363 "diff: two-frontier mode needs a second positional path; for proposal preview pass a `vpr_*` id",
3364 )
3365 });
3366 diff::run(&frontier_a, &b, json, quiet);
3367 }
3368 }
3369 Commands::Proposals { action } => cmd_proposals(action),
3370 Commands::ArtifactToState {
3371 frontier,
3372 packet,
3373 actor,
3374 apply_artifacts,
3375 json,
3376 } => cmd_artifact_to_state(&frontier, &packet, &actor, apply_artifacts, json),
3377 Commands::BridgeKit { action } => cmd_bridge_kit(action),
3378 Commands::SourceAdapter { action } => cmd_source_adapter(action).await,
3379 Commands::RuntimeAdapter { action } => cmd_runtime_adapter(action),
3380 Commands::Link { action } => cmd_link(action),
3381 Commands::Workbench {
3382 path,
3383 port,
3384 no_open,
3385 } => {
3386 if let Err(e) = crate::workbench::run(path, port, !no_open).await {
3387 fail(&e);
3388 }
3389 }
3390 Commands::Bridges { action } => cmd_bridges(action),
3391 Commands::Entity { action } => cmd_entity(action),
3392 Commands::Finding { command } => match command {
3393 FindingCommands::Add {
3394 frontier,
3395 assertion,
3396 r#type,
3397 source,
3398 source_type,
3399 author,
3400 confidence,
3401 evidence_type,
3402 entities,
3403 entities_reviewed,
3404 evidence_span,
3405 gap,
3406 negative_space,
3407 doi,
3408 pmid,
3409 year,
3410 journal,
3411 url,
3412 source_authors,
3413 conditions_text,
3414 species,
3415 in_vivo,
3416 in_vitro,
3417 human_data,
3418 clinical_trial,
3419 json,
3420 apply,
3421 } => {
3422 validate_enum_arg("--type", &r#type, bundle::VALID_ASSERTION_TYPES);
3423 validate_enum_arg(
3424 "--evidence-type",
3425 &evidence_type,
3426 bundle::VALID_EVIDENCE_TYPES,
3427 );
3428 validate_enum_arg(
3429 "--source-type",
3430 &source_type,
3431 bundle::VALID_PROVENANCE_SOURCE_TYPES,
3432 );
3433 let parsed_entities = parse_entities(&entities);
3434 let parsed_evidence_spans = parse_evidence_spans(&evidence_span);
3435 for (name, etype) in &parsed_entities {
3436 if !bundle::VALID_ENTITY_TYPES.contains(&etype.as_str()) {
3437 fail(&format!(
3438 "invalid entity type '{}' for '{}'. Valid: {}",
3439 etype,
3440 name,
3441 bundle::VALID_ENTITY_TYPES.join(", "),
3442 ));
3443 }
3444 }
3445 let parsed_source_authors = source_authors
3446 .map(|s| {
3447 s.split(';')
3448 .map(|a| a.trim().to_string())
3449 .filter(|a| !a.is_empty())
3450 .collect()
3451 })
3452 .unwrap_or_default();
3453 let parsed_species = species
3454 .map(|s| {
3455 s.split(';')
3456 .map(|a| a.trim().to_string())
3457 .filter(|a| !a.is_empty())
3458 .collect()
3459 })
3460 .unwrap_or_default();
3461 let report = state::add_finding(
3462 &frontier,
3463 state::FindingDraftOptions {
3464 text: assertion,
3465 assertion_type: r#type,
3466 source,
3467 source_type,
3468 author,
3469 confidence,
3470 evidence_type,
3471 entities: parsed_entities,
3472 doi,
3473 pmid,
3474 year,
3475 journal,
3476 url,
3477 source_authors: parsed_source_authors,
3478 conditions_text,
3479 species: parsed_species,
3480 in_vivo,
3481 in_vitro,
3482 human_data,
3483 clinical_trial,
3484 entities_reviewed,
3485 evidence_spans: parsed_evidence_spans,
3486 gap,
3487 negative_space,
3488 },
3489 apply,
3490 )
3491 .unwrap_or_else(|e| fail_return(&e));
3492 print_state_report(&report, json);
3493 }
3494 FindingCommands::Supersede {
3495 frontier,
3496 old_id,
3497 assertion,
3498 r#type,
3499 source,
3500 source_type,
3501 author,
3502 reason,
3503 confidence,
3504 evidence_type,
3505 entities,
3506 doi,
3507 pmid,
3508 year,
3509 journal,
3510 url,
3511 source_authors,
3512 conditions_text,
3513 species,
3514 in_vivo,
3515 in_vitro,
3516 human_data,
3517 clinical_trial,
3518 json,
3519 apply,
3520 } => {
3521 validate_enum_arg("--type", &r#type, bundle::VALID_ASSERTION_TYPES);
3522 validate_enum_arg(
3523 "--evidence-type",
3524 &evidence_type,
3525 bundle::VALID_EVIDENCE_TYPES,
3526 );
3527 validate_enum_arg(
3528 "--source-type",
3529 &source_type,
3530 bundle::VALID_PROVENANCE_SOURCE_TYPES,
3531 );
3532 let parsed_entities = parse_entities(&entities);
3533 for (name, etype) in &parsed_entities {
3534 if !bundle::VALID_ENTITY_TYPES.contains(&etype.as_str()) {
3535 fail(&format!(
3536 "invalid entity type '{}' for '{}'. Valid: {}",
3537 etype,
3538 name,
3539 bundle::VALID_ENTITY_TYPES.join(", "),
3540 ));
3541 }
3542 }
3543 let parsed_source_authors = source_authors
3544 .map(|s| {
3545 s.split(';')
3546 .map(|a| a.trim().to_string())
3547 .filter(|a| !a.is_empty())
3548 .collect()
3549 })
3550 .unwrap_or_default();
3551 let parsed_species = species
3552 .map(|s| {
3553 s.split(';')
3554 .map(|a| a.trim().to_string())
3555 .filter(|a| !a.is_empty())
3556 .collect()
3557 })
3558 .unwrap_or_default();
3559 let report = state::supersede_finding(
3560 &frontier,
3561 &old_id,
3562 &reason,
3563 state::FindingDraftOptions {
3564 text: assertion,
3565 assertion_type: r#type,
3566 source,
3567 source_type,
3568 author,
3569 confidence,
3570 evidence_type,
3571 entities: parsed_entities,
3572 doi,
3573 pmid,
3574 year,
3575 journal,
3576 url,
3577 source_authors: parsed_source_authors,
3578 conditions_text,
3579 species: parsed_species,
3580 in_vivo,
3581 in_vitro,
3582 human_data,
3583 clinical_trial,
3584 entities_reviewed: false,
3585 evidence_spans: Vec::new(),
3586 gap: false,
3587 negative_space: false,
3588 },
3589 apply,
3590 )
3591 .unwrap_or_else(|e| fail_return(&e));
3592 print_state_report(&report, json);
3593 }
3594 FindingCommands::CausalSet {
3595 frontier,
3596 finding_id,
3597 claim,
3598 grade,
3599 actor,
3600 reason,
3601 json,
3602 } => {
3603 if !bundle::VALID_CAUSAL_CLAIMS.contains(&claim.as_str()) {
3604 fail(&format!(
3605 "invalid --claim '{claim}'; valid: {:?}",
3606 bundle::VALID_CAUSAL_CLAIMS
3607 ));
3608 }
3609 if let Some(g) = grade.as_deref()
3610 && !bundle::VALID_CAUSAL_EVIDENCE_GRADES.contains(&g)
3611 {
3612 fail(&format!(
3613 "invalid --grade '{g}'; valid: {:?}",
3614 bundle::VALID_CAUSAL_EVIDENCE_GRADES
3615 ));
3616 }
3617 let report = state::set_causal(
3618 &frontier,
3619 &finding_id,
3620 &claim,
3621 grade.as_deref(),
3622 &actor,
3623 &reason,
3624 )
3625 .unwrap_or_else(|e| fail_return(&e));
3626 print_state_report(&report, json);
3627 }
3628 },
3629 Commands::Review {
3630 frontier,
3631 finding_id,
3632 status,
3633 reason,
3634 reviewer,
3635 apply,
3636 json,
3637 } => {
3638 let status = status.unwrap_or_else(|| fail_return("--status is required for review"));
3639 let reason = reason.unwrap_or_else(|| fail_return("--reason is required for review"));
3640 let report = state::review_finding(
3641 &frontier,
3642 &finding_id,
3643 state::ReviewOptions {
3644 status,
3645 reason,
3646 reviewer,
3647 },
3648 apply,
3649 )
3650 .unwrap_or_else(|e| fail_return(&e));
3651 print_state_report(&report, json);
3652 }
3653 Commands::Note {
3654 frontier,
3655 finding_id,
3656 text,
3657 author,
3658 apply,
3659 json,
3660 } => {
3661 let report = state::add_note(&frontier, &finding_id, &text, &author, apply)
3662 .unwrap_or_else(|e| fail_return(&e));
3663 print_state_report(&report, json);
3664 }
3665 Commands::Caveat {
3666 frontier,
3667 finding_id,
3668 text,
3669 author,
3670 apply,
3671 json,
3672 } => {
3673 let report = state::caveat_finding(&frontier, &finding_id, &text, &author, apply)
3674 .unwrap_or_else(|e| fail_return(&e));
3675 print_state_report(&report, json);
3676 }
3677 Commands::Revise {
3678 frontier,
3679 finding_id,
3680 confidence,
3681 reason,
3682 reviewer,
3683 apply,
3684 json,
3685 } => {
3686 let report = state::revise_confidence(
3687 &frontier,
3688 &finding_id,
3689 state::ReviseOptions {
3690 confidence,
3691 reason,
3692 reviewer,
3693 },
3694 apply,
3695 )
3696 .unwrap_or_else(|e| fail_return(&e));
3697 print_state_report(&report, json);
3698 }
3699 Commands::Reject {
3700 frontier,
3701 finding_id,
3702 reason,
3703 reviewer,
3704 apply,
3705 json,
3706 } => {
3707 let report = state::reject_finding(&frontier, &finding_id, &reviewer, &reason, apply)
3708 .unwrap_or_else(|e| fail_return(&e));
3709 print_state_report(&report, json);
3710 }
3711 Commands::History {
3712 frontier,
3713 finding_id,
3714 json,
3715 as_of,
3716 } => {
3717 let payload = state::history_as_of(&frontier, &finding_id, as_of.as_deref())
3718 .unwrap_or_else(|e| fail_return(&e));
3719 if json {
3720 println!(
3721 "{}",
3722 serde_json::to_string_pretty(&payload)
3723 .expect("failed to serialize history response")
3724 );
3725 } else {
3726 print_history(&payload);
3727 }
3728 }
3729 Commands::ImportEvents { source, into, json } => {
3730 let report =
3731 review::import_review_events(&source, &into).unwrap_or_else(|e| fail_return(&e));
3732 if json {
3733 println!(
3734 "{}",
3735 serde_json::to_string_pretty(&json!({
3736 "ok": true,
3737 "command": "import-events",
3738 "source": report.source,
3739 "target": into.display().to_string(),
3740 "summary": {
3741 "imported": report.imported,
3742 "new": report.new,
3743 "duplicate": report.duplicate,
3744 "canonical_events_imported": report.events_imported,
3745 "canonical_events_new": report.events_new,
3746 "canonical_events_duplicate": report.events_duplicate,
3747 }
3748 }))
3749 .expect("failed to serialize import-events response")
3750 );
3751 } else {
3752 println!("{report}");
3753 }
3754 }
3755 Commands::Retract {
3756 source,
3757 finding_id,
3758 reason,
3759 reviewer,
3760 apply,
3761 json,
3762 } => {
3763 let report = state::retract_finding(&source, &finding_id, &reviewer, &reason, apply)
3764 .unwrap_or_else(|e| fail_return(&e));
3765 print_state_report(&report, json);
3766 }
3767 Commands::LocatorRepair {
3768 frontier,
3769 atom_id,
3770 locator,
3771 reviewer,
3772 reason,
3773 apply,
3774 json,
3775 } => {
3776 cmd_locator_repair(
3777 &frontier,
3778 &atom_id,
3779 locator.as_deref(),
3780 &reviewer,
3781 &reason,
3782 apply,
3783 json,
3784 );
3785 }
3786 Commands::SourceFetch {
3787 identifier,
3788 cache,
3789 out,
3790 refresh,
3791 json,
3792 } => {
3793 cmd_source_fetch(&identifier, cache.as_deref(), out.as_deref(), refresh, json).await;
3794 }
3795 Commands::SpanRepair {
3796 frontier,
3797 finding_id,
3798 section,
3799 text,
3800 reviewer,
3801 reason,
3802 apply,
3803 json,
3804 } => {
3805 cmd_span_repair(
3806 &frontier,
3807 &finding_id,
3808 §ion,
3809 &text,
3810 &reviewer,
3811 &reason,
3812 apply,
3813 json,
3814 );
3815 }
3816 Commands::EntityAdd {
3817 frontier,
3818 finding_id,
3819 entity,
3820 entity_type,
3821 reviewer,
3822 reason,
3823 apply,
3824 json,
3825 } => {
3826 let report = state::add_finding_entity(
3827 &frontier,
3828 &finding_id,
3829 &entity,
3830 &entity_type,
3831 &reviewer,
3832 &reason,
3833 apply,
3834 )
3835 .unwrap_or_else(|e| fail_return(&e));
3836 print_state_report(&report, json);
3837 }
3838 Commands::EntityResolve {
3839 frontier,
3840 finding_id,
3841 entity,
3842 source,
3843 id,
3844 confidence,
3845 matched_name,
3846 resolution_method,
3847 reviewer,
3848 reason,
3849 apply,
3850 json,
3851 } => {
3852 cmd_entity_resolve(
3853 &frontier,
3854 &finding_id,
3855 &entity,
3856 &source,
3857 &id,
3858 confidence,
3859 matched_name.as_deref(),
3860 &resolution_method,
3861 &reviewer,
3862 &reason,
3863 apply,
3864 json,
3865 );
3866 }
3867 Commands::Propagate {
3868 frontier,
3869 retract,
3870 reduce_confidence,
3871 to,
3872 output,
3873 } => cmd_propagate(&frontier, retract, reduce_confidence, to, output.as_deref()),
3874 Commands::Replicate {
3875 frontier,
3876 target,
3877 outcome,
3878 by,
3879 conditions,
3880 source_title,
3881 doi,
3882 pmid,
3883 sample_size,
3884 note,
3885 previous_attempt,
3886 no_cascade,
3887 json,
3888 } => cmd_replicate(
3889 &frontier,
3890 &target,
3891 &outcome,
3892 &by,
3893 &conditions,
3894 &source_title,
3895 doi.as_deref(),
3896 pmid.as_deref(),
3897 sample_size.as_deref(),
3898 ¬e,
3899 previous_attempt.as_deref(),
3900 no_cascade,
3901 json,
3902 ),
3903 Commands::Replications {
3904 frontier,
3905 target,
3906 json,
3907 } => cmd_replications(&frontier, target.as_deref(), json),
3908 Commands::DatasetAdd {
3909 frontier,
3910 name,
3911 version,
3912 content_hash,
3913 url,
3914 license,
3915 source_title,
3916 doi,
3917 row_count,
3918 json,
3919 } => cmd_dataset_add(
3920 &frontier,
3921 &name,
3922 version.as_deref(),
3923 &content_hash,
3924 url.as_deref(),
3925 license.as_deref(),
3926 &source_title,
3927 doi.as_deref(),
3928 row_count,
3929 json,
3930 ),
3931 Commands::Datasets { frontier, json } => cmd_datasets(&frontier, json),
3932 Commands::CodeAdd {
3933 frontier,
3934 language,
3935 repo_url,
3936 commit,
3937 path,
3938 content_hash,
3939 line_start,
3940 line_end,
3941 entry_point,
3942 json,
3943 } => cmd_code_add(
3944 &frontier,
3945 &language,
3946 repo_url.as_deref(),
3947 commit.as_deref(),
3948 &path,
3949 &content_hash,
3950 line_start,
3951 line_end,
3952 entry_point.as_deref(),
3953 json,
3954 ),
3955 Commands::CodeArtifacts { frontier, json } => cmd_code_artifacts(&frontier, json),
3956 Commands::ArtifactAdd {
3957 frontier,
3958 kind,
3959 name,
3960 file,
3961 url,
3962 content_hash,
3963 media_type,
3964 license,
3965 source_title,
3966 source_url,
3967 doi,
3968 target,
3969 metadata,
3970 access_tier,
3971 deposited_by,
3972 reason,
3973 json,
3974 } => cmd_artifact_add(
3975 &frontier,
3976 &kind,
3977 &name,
3978 file.as_deref(),
3979 url.as_deref(),
3980 content_hash.as_deref(),
3981 media_type.as_deref(),
3982 license.as_deref(),
3983 source_title.as_deref(),
3984 source_url.as_deref(),
3985 doi.as_deref(),
3986 target,
3987 metadata,
3988 &access_tier,
3989 &deposited_by,
3990 &reason,
3991 json,
3992 ),
3993 Commands::Artifacts {
3994 frontier,
3995 target,
3996 json,
3997 } => cmd_artifacts(&frontier, target.as_deref(), json),
3998 Commands::ArtifactAudit { frontier, json } => cmd_artifact_audit(&frontier, json),
3999 Commands::DecisionBrief { frontier, json } => cmd_decision_brief(&frontier, json),
4000 Commands::TrialSummary { frontier, json } => cmd_trial_summary(&frontier, json),
4001 Commands::SourceVerification { frontier, json } => cmd_source_verification(&frontier, json),
4002 Commands::SourceIngestPlan { frontier, json } => cmd_source_ingest_plan(&frontier, json),
4003 Commands::ClinicalTrialImport {
4004 frontier,
4005 nct_id,
4006 input_json,
4007 target,
4008 deposited_by,
4009 reason,
4010 license,
4011 json,
4012 } => {
4013 cmd_clinical_trial_import(
4014 &frontier,
4015 &nct_id,
4016 input_json.as_deref(),
4017 target,
4018 &deposited_by,
4019 &reason,
4020 &license,
4021 json,
4022 )
4023 .await
4024 }
4025 Commands::NegativeResultAdd {
4026 frontier,
4027 kind,
4028 deposited_by,
4029 reason,
4030 conditions_text,
4031 notes,
4032 target,
4033 endpoint,
4034 intervention,
4035 comparator,
4036 population,
4037 n_enrolled,
4038 power,
4039 ci_lower,
4040 ci_upper,
4041 effect_size_threshold,
4042 registry_id,
4043 reagent,
4044 observation,
4045 attempts,
4046 source_title,
4047 doi,
4048 url,
4049 year,
4050 json,
4051 } => cmd_negative_result_add(
4052 &frontier,
4053 &kind,
4054 &deposited_by,
4055 &reason,
4056 &conditions_text,
4057 ¬es,
4058 target,
4059 endpoint.as_deref(),
4060 intervention.as_deref(),
4061 comparator.as_deref(),
4062 population.as_deref(),
4063 n_enrolled,
4064 power,
4065 ci_lower,
4066 ci_upper,
4067 effect_size_threshold,
4068 registry_id.as_deref(),
4069 reagent.as_deref(),
4070 observation.as_deref(),
4071 attempts,
4072 &source_title,
4073 doi.as_deref(),
4074 url.as_deref(),
4075 year,
4076 json,
4077 ),
4078 Commands::NegativeResults {
4079 frontier,
4080 target,
4081 json,
4082 } => cmd_negative_results(&frontier, target.as_deref(), json),
4083 Commands::TrajectoryCreate {
4084 frontier,
4085 deposited_by,
4086 reason,
4087 target,
4088 notes,
4089 json,
4090 } => cmd_trajectory_create(&frontier, &deposited_by, &reason, target, ¬es, json),
4091 Commands::TrajectoryStep {
4092 frontier,
4093 trajectory_id,
4094 kind,
4095 description,
4096 actor,
4097 reason,
4098 reference,
4099 json,
4100 } => cmd_trajectory_step(
4101 &frontier,
4102 &trajectory_id,
4103 &kind,
4104 &description,
4105 &actor,
4106 &reason,
4107 reference,
4108 json,
4109 ),
4110 Commands::Trajectories {
4111 frontier,
4112 target,
4113 json,
4114 } => cmd_trajectories(&frontier, target.as_deref(), json),
4115 Commands::TierSet {
4116 frontier,
4117 object_type,
4118 object_id,
4119 tier,
4120 actor,
4121 reason,
4122 json,
4123 } => cmd_tier_set(
4124 &frontier,
4125 &object_type,
4126 &object_id,
4127 &tier,
4128 &actor,
4129 &reason,
4130 json,
4131 ),
4132 Commands::Predict {
4133 frontier,
4134 by,
4135 claim,
4136 criterion,
4137 resolves_by,
4138 confidence,
4139 target,
4140 outcome,
4141 conditions,
4142 json,
4143 } => cmd_predict(
4144 &frontier,
4145 &by,
4146 &claim,
4147 &criterion,
4148 resolves_by.as_deref(),
4149 confidence,
4150 &target,
4151 &outcome,
4152 &conditions,
4153 json,
4154 ),
4155 Commands::Resolve {
4156 frontier,
4157 prediction,
4158 outcome,
4159 matched,
4160 by,
4161 confidence,
4162 source_title,
4163 doi,
4164 json,
4165 } => cmd_resolve(
4166 &frontier,
4167 &prediction,
4168 &outcome,
4169 matched,
4170 &by,
4171 confidence,
4172 &source_title,
4173 doi.as_deref(),
4174 json,
4175 ),
4176 Commands::Predictions {
4177 frontier,
4178 by,
4179 open,
4180 json,
4181 } => cmd_predictions(&frontier, by.as_deref(), open, json),
4182 Commands::Calibration {
4183 frontier,
4184 actor,
4185 json,
4186 } => cmd_calibration(&frontier, actor.as_deref(), json),
4187 Commands::PredictionsExpire {
4188 frontier,
4189 now,
4190 dry_run,
4191 json,
4192 } => cmd_predictions_expire(&frontier, now.as_deref(), dry_run, json),
4193 Commands::Consensus {
4194 frontier,
4195 target,
4196 weighting,
4197 causal_claim,
4198 causal_grade_min,
4199 json,
4200 } => cmd_consensus(
4201 &frontier,
4202 &target,
4203 &weighting,
4204 causal_claim.as_deref(),
4205 causal_grade_min.as_deref(),
4206 json,
4207 ),
4208
4209 Commands::Ingest {
4212 path,
4213 frontier,
4214 backend,
4215 actor,
4216 dry_run,
4217 json,
4218 } => {
4219 cmd_ingest(
4220 &path,
4221 &frontier,
4222 backend.as_deref(),
4223 actor.as_deref(),
4224 dry_run,
4225 json,
4226 )
4227 .await
4228 }
4229
4230 Commands::Propose {
4231 frontier,
4232 finding_id,
4233 status,
4234 reason,
4235 reviewer,
4236 apply,
4237 json,
4238 } => {
4239 let options = state::ReviewOptions {
4242 status: status.clone(),
4243 reason: reason.clone(),
4244 reviewer: reviewer.clone(),
4245 };
4246 let report = state::review_finding(&frontier, &finding_id, options, apply)
4247 .unwrap_or_else(|e| fail_return(&e));
4248 print_state_report(&report, json);
4249 }
4250
4251 Commands::Accept {
4252 frontier,
4253 proposal_id,
4254 reviewer,
4255 reason,
4256 json,
4257 } => {
4258 let event_id = proposals::accept_at_path(&frontier, &proposal_id, &reviewer, &reason)
4260 .unwrap_or_else(|e| fail_return(&e));
4261 let payload = json!({
4262 "ok": true,
4263 "command": "accept",
4264 "frontier": frontier.display().to_string(),
4265 "proposal_id": proposal_id,
4266 "reviewer": reviewer,
4267 "applied_event_id": event_id,
4268 });
4269 if json {
4270 println!(
4271 "{}",
4272 serde_json::to_string_pretty(&payload)
4273 .expect("failed to serialize accept response")
4274 );
4275 } else {
4276 println!(
4277 "{} accepted and applied proposal {}",
4278 style::ok("ok"),
4279 proposal_id
4280 );
4281 println!(" event: {}", event_id);
4282 }
4283 }
4284
4285 Commands::Attest {
4286 frontier,
4287 event,
4288 attester,
4289 scope_note,
4290 proof_id,
4291 signature,
4292 key,
4293 json,
4294 } => {
4295 if let Some(target_event_id) = event {
4299 let attester_id = attester.unwrap_or_else(|| {
4300 fail_return("attest: --attester is required in per-event mode")
4301 });
4302 let scope = scope_note.unwrap_or_else(|| {
4303 fail_return("attest: --scope-note is required in per-event mode")
4304 });
4305 let attestation_event_id = state::record_attestation(
4306 &frontier,
4307 &target_event_id,
4308 &attester_id,
4309 &scope,
4310 proof_id.as_deref(),
4311 signature.as_deref(),
4312 )
4313 .unwrap_or_else(|e| fail_return(&e));
4314 if json {
4315 let payload = json!({
4316 "ok": true,
4317 "command": "attest.event",
4318 "frontier": frontier.display().to_string(),
4319 "target_event_id": target_event_id,
4320 "attestation_event_id": attestation_event_id,
4321 "attester_id": attester_id,
4322 });
4323 println!(
4324 "{}",
4325 serde_json::to_string_pretty(&payload)
4326 .expect("failed to serialize attest.event response")
4327 );
4328 } else {
4329 println!(
4330 "{} attested {} by {} ({})",
4331 style::ok("ok"),
4332 target_event_id,
4333 attester_id,
4334 attestation_event_id
4335 );
4336 }
4337 return;
4338 }
4339 let key_path = key.unwrap_or_else(|| {
4341 fail_return(
4342 "attest: --key is required in frontier-wide mode (or pass --event for per-event mode)",
4343 )
4344 });
4345 let count =
4346 sign::sign_frontier(&frontier, &key_path).unwrap_or_else(|e| fail_return(&e));
4347 let payload = json!({
4348 "ok": true,
4349 "command": "attest",
4350 "frontier": frontier.display().to_string(),
4351 "private_key": key_path.display().to_string(),
4352 "signed": count,
4353 });
4354 if json {
4355 println!(
4356 "{}",
4357 serde_json::to_string_pretty(&payload)
4358 .expect("failed to serialize attest response")
4359 );
4360 } else {
4361 println!(
4362 "{} {count} findings in {}",
4363 style::ok("attested"),
4364 frontier.display()
4365 );
4366 }
4367 }
4368
4369 Commands::Lineage {
4370 frontier,
4371 finding_id,
4372 as_of,
4373 json,
4374 } => {
4375 let payload = state::history_as_of(&frontier, &finding_id, as_of.as_deref())
4377 .unwrap_or_else(|e| fail_return(&e));
4378 if json {
4379 println!(
4380 "{}",
4381 serde_json::to_string_pretty(&payload)
4382 .expect("failed to serialize lineage response")
4383 );
4384 } else {
4385 print_history(&payload);
4386 }
4387 }
4388
4389 Commands::Carina { action } => cmd_carina(action),
4390
4391 Commands::Atlas { action } => cmd_atlas(action).await,
4392
4393 Commands::Constellation { action } => cmd_constellation(action).await,
4394 }
4395}
4396
4397async fn cmd_atlas(action: AtlasAction) {
4402 match action {
4403 AtlasAction::Init {
4404 name,
4405 frontiers,
4406 domain,
4407 scope_note,
4408 atlases_root,
4409 json,
4410 } => match ATLAS_INIT_HANDLER.get() {
4411 Some(handler) => {
4412 handler(atlases_root, name, domain, scope_note, frontiers, json).await;
4413 }
4414 None => fail("vela atlas init: handler not registered (built without vela-atlas)"),
4415 },
4416 AtlasAction::Materialize {
4417 name,
4418 atlases_root,
4419 json,
4420 } => match ATLAS_MATERIALIZE_HANDLER.get() {
4421 Some(handler) => handler(atlases_root, name, json).await,
4422 None => fail("vela atlas materialize: handler not registered"),
4423 },
4424 AtlasAction::Serve {
4425 name,
4426 atlases_root,
4427 port,
4428 no_open,
4429 } => {
4430 match ATLAS_SERVE_HANDLER.get() {
4434 Some(handler) => handler(atlases_root, name, port, !no_open).await,
4435 None => fail("vela atlas serve: handler not registered"),
4436 }
4437 }
4438 AtlasAction::Update {
4439 name,
4440 add_frontier,
4441 remove_vfr_id,
4442 atlases_root,
4443 json,
4444 } => match ATLAS_UPDATE_HANDLER.get() {
4445 Some(handler) => {
4446 handler(atlases_root, name, add_frontier, remove_vfr_id, json).await;
4447 }
4448 None => fail("vela atlas update: handler not registered"),
4449 },
4450 }
4451}
4452
4453async fn cmd_constellation(action: ConstellationAction) {
4457 match action {
4458 ConstellationAction::Init {
4459 name,
4460 atlases,
4461 scope_note,
4462 constellations_root,
4463 json,
4464 } => match CONSTELLATION_INIT_HANDLER.get() {
4465 Some(handler) => {
4466 handler(constellations_root, name, scope_note, atlases, json).await;
4467 }
4468 None => fail(
4469 "vela constellation init: handler not registered (built without vela-constellation)",
4470 ),
4471 },
4472 ConstellationAction::Materialize {
4473 name,
4474 constellations_root,
4475 json,
4476 } => match CONSTELLATION_MATERIALIZE_HANDLER.get() {
4477 Some(handler) => handler(constellations_root, name, json).await,
4478 None => fail("vela constellation materialize: handler not registered"),
4479 },
4480 ConstellationAction::Serve {
4481 name,
4482 constellations_root,
4483 port,
4484 no_open,
4485 } => match CONSTELLATION_SERVE_HANDLER.get() {
4486 Some(handler) => handler(constellations_root, name, port, !no_open).await,
4487 None => fail("vela constellation serve: handler not registered"),
4488 },
4489 }
4490}
4491
4492fn cmd_carina(action: CarinaAction) {
4495 match action {
4496 CarinaAction::List { json } => {
4497 if json {
4498 println!(
4499 "{}",
4500 serde_json::to_string_pretty(&json!({
4501 "ok": true,
4502 "command": "carina.list",
4503 "primitives": carina_validate::PRIMITIVE_NAMES,
4504 }))
4505 .expect("failed to serialize carina.list")
4506 );
4507 } else {
4508 println!("Carina primitives bundled with this build:");
4509 for name in carina_validate::PRIMITIVE_NAMES {
4510 println!(" · {name}");
4511 }
4512 }
4513 }
4514 CarinaAction::Schema { primitive } => match carina_validate::schema_text(&primitive) {
4515 Some(text) => print!("{text}"),
4516 None => fail(&format!("carina: unknown primitive '{primitive}'")),
4517 },
4518 CarinaAction::Validate {
4519 path,
4520 primitive,
4521 json,
4522 } => {
4523 let text = std::fs::read_to_string(&path)
4524 .unwrap_or_else(|e| fail_return(&format!("read {}: {e}", path.display())));
4525 let value: Value = serde_json::from_str(&text)
4526 .unwrap_or_else(|e| fail_return(&format!("parse {}: {e}", path.display())));
4527 type CarinaValidateOutcome = Result<Option<&'static str>, Vec<String>>;
4533 let mut report: Vec<(String, CarinaValidateOutcome)> = Vec::new();
4534 if value.get("primitives").and_then(Value::as_object).is_some() && primitive.is_none() {
4535 let primitives = value.get("primitives").and_then(Value::as_object).unwrap();
4536 for (key, child) in primitives {
4537 let outcome = carina_validate::validate(key, child)
4538 .map(|()| carina_validate::detect_primitive(child));
4539 report.push((key.clone(), outcome));
4540 }
4541 } else {
4542 let outcome = match primitive.as_deref() {
4543 Some(name) => carina_validate::validate(name, &value).map(|()| {
4544 carina_validate::PRIMITIVE_NAMES
4545 .iter()
4546 .copied()
4547 .find(|p| *p == name)
4548 }),
4549 None => carina_validate::validate_auto(&value).map(Some),
4550 };
4551 let label = primitive.clone().unwrap_or_else(|| "<auto>".to_string());
4552 report.push((label, outcome));
4553 }
4554
4555 let total = report.len();
4556 let pass = report.iter().filter(|(_, r)| r.is_ok()).count();
4557 let fail = total - pass;
4558
4559 if json {
4560 let entries: Vec<Value> = report
4561 .iter()
4562 .map(|(label, r)| match r {
4563 Ok(name) => json!({
4564 "key": label,
4565 "primitive": name,
4566 "ok": true,
4567 }),
4568 Err(errs) => json!({
4569 "key": label,
4570 "ok": false,
4571 "errors": errs,
4572 }),
4573 })
4574 .collect();
4575 println!(
4576 "{}",
4577 serde_json::to_string_pretty(&json!({
4578 "ok": fail == 0,
4579 "command": "carina.validate",
4580 "file": path.display().to_string(),
4581 "total": total,
4582 "passed": pass,
4583 "failed": fail,
4584 "entries": entries,
4585 }))
4586 .expect("failed to serialize carina.validate")
4587 );
4588 } else {
4589 for (label, r) in &report {
4590 match r {
4591 Ok(Some(name)) => println!(" {} {label} (as {name})", style::ok("ok")),
4592 Ok(None) => println!(" {} {label}", style::ok("ok")),
4593 Err(errs) => {
4594 println!(" {} {label}", style::lost("fail"));
4595 for e in errs {
4596 println!(" {e}");
4597 }
4598 }
4599 }
4600 }
4601 println!();
4602 if fail == 0 {
4603 println!("{} {pass}/{total} valid", style::ok("carina.validate"));
4604 } else {
4605 println!(
4606 "{} {pass}/{total} valid · {fail} failed",
4607 style::lost("carina.validate")
4608 );
4609 }
4610 }
4611
4612 if fail > 0 {
4613 std::process::exit(1);
4614 }
4615 }
4616 }
4617}
4618
4619fn cmd_consensus(
4622 frontier: &Path,
4623 target: &str,
4624 weighting_str: &str,
4625 causal_claim: Option<&str>,
4626 causal_grade_min: Option<&str>,
4627 json: bool,
4628) {
4629 use crate::bundle::{CausalClaim, CausalEvidenceGrade};
4630
4631 if !target.starts_with("vf_") {
4632 fail(&format!("target `{target}` is not a vf_ finding id"));
4633 }
4634 let scheme =
4635 crate::aggregate::WeightingScheme::parse(weighting_str).unwrap_or_else(|e| fail_return(&e));
4636
4637 let parsed_claim = match causal_claim {
4638 None => None,
4639 Some("correlation") => Some(CausalClaim::Correlation),
4640 Some("mediation") => Some(CausalClaim::Mediation),
4641 Some("intervention") => Some(CausalClaim::Intervention),
4642 Some(other) => fail_return(&format!(
4643 "invalid --causal-claim '{other}'; valid: correlation | mediation | intervention"
4644 )),
4645 };
4646 let parsed_grade = match causal_grade_min {
4647 None => None,
4648 Some("theoretical") => Some(CausalEvidenceGrade::Theoretical),
4649 Some("observational") => Some(CausalEvidenceGrade::Observational),
4650 Some("quasi_experimental") => Some(CausalEvidenceGrade::QuasiExperimental),
4651 Some("rct") => Some(CausalEvidenceGrade::Rct),
4652 Some(other) => fail_return(&format!(
4653 "invalid --causal-grade-min '{other}'; valid: theoretical | observational | quasi_experimental | rct"
4654 )),
4655 };
4656 let filter = crate::aggregate::AggregateFilter {
4657 causal_claim: parsed_claim,
4658 causal_grade_min: parsed_grade,
4659 };
4660 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
4661
4662 let result = crate::aggregate::consensus_for_with_filter(&project, target, scheme, &filter)
4663 .unwrap_or_else(|| fail_return(&format!("target `{target}` not in frontier")));
4664
4665 if json {
4666 println!(
4667 "{}",
4668 serde_json::to_string_pretty(&result).expect("serialize consensus")
4669 );
4670 return;
4671 }
4672
4673 println!();
4674 println!(
4675 " {}",
4676 format!(
4677 "VELA · CONSENSUS · {} ({})",
4678 result.target, result.weighting
4679 )
4680 .to_uppercase()
4681 .dimmed()
4682 );
4683 println!(" {}", style::tick_row(60));
4684 println!(
4685 " target: {}",
4686 truncate(&result.target_assertion, 80)
4687 );
4688 println!(" similar findings: {}", result.n_findings);
4689 println!(
4690 " consensus: {:.3} ({:.3} – {:.3} 95% credible)",
4691 result.consensus_confidence, result.credible_interval_lo, result.credible_interval_hi
4692 );
4693 println!();
4694 println!(" constituents (sorted by weight):");
4695 let mut sorted = result.constituents.clone();
4696 sorted.sort_by(|a, b| {
4697 b.weight
4698 .partial_cmp(&a.weight)
4699 .unwrap_or(std::cmp::Ordering::Equal)
4700 });
4701 for c in sorted.iter().take(10) {
4702 let repls = if c.n_replications > 0 {
4703 format!(" ({}r {}f)", c.n_replicated, c.n_failed_replications)
4704 } else {
4705 String::new()
4706 };
4707 println!(
4708 " · w={:.2} raw={:.2} adj={:.2}{}",
4709 c.weight, c.raw_score, c.adjusted_score, repls
4710 );
4711 println!(" {}", truncate(&c.assertion_text, 88));
4712 }
4713 if result.constituents.len() > 10 {
4714 println!(" ... ({} more)", result.constituents.len() - 10);
4715 }
4716}
4717
4718fn parse_expected_outcome(s: &str) -> Result<crate::bundle::ExpectedOutcome, String> {
4724 let trimmed = s.trim();
4725 if trimmed.eq_ignore_ascii_case("affirmed") {
4726 return Ok(crate::bundle::ExpectedOutcome::Affirmed);
4727 }
4728 if trimmed.eq_ignore_ascii_case("falsified") {
4729 return Ok(crate::bundle::ExpectedOutcome::Falsified);
4730 }
4731 if let Some(rest) = trimmed.strip_prefix("cat:") {
4732 return Ok(crate::bundle::ExpectedOutcome::Categorical {
4733 value: rest.to_string(),
4734 });
4735 }
4736 if let Some(rest) = trimmed.strip_prefix("quant:") {
4737 let (vt, units) = rest.split_once(' ').unwrap_or((rest, ""));
4738 let (val_s, tol_s) = vt
4739 .split_once('±')
4740 .or_else(|| vt.split_once("+/-"))
4741 .ok_or_else(|| format!("expected `quant:VALUE±TOL UNITS`, got `quant:{rest}`"))?;
4742 let value: f64 = val_s
4743 .parse()
4744 .map_err(|e| format!("bad quant value `{val_s}`: {e}"))?;
4745 let tolerance: f64 = tol_s
4746 .parse()
4747 .map_err(|e| format!("bad quant tolerance `{tol_s}`: {e}"))?;
4748 return Ok(crate::bundle::ExpectedOutcome::Quantitative {
4749 value,
4750 tolerance,
4751 units: units.to_string(),
4752 });
4753 }
4754 Err(format!(
4755 "unknown outcome `{s}`; expected one of: affirmed | falsified | quant:V±T units | cat:label"
4756 ))
4757}
4758
/// Record a structured prediction (`vela predict`) in a frontier.
///
/// Validates the confidence range and every `vf_` target id, derives coarse
/// experimental-context flags from the free-text conditions, and appends a
/// new `Prediction` unless one with the same id already exists.
#[allow(clippy::too_many_arguments)]
fn cmd_predict(
    frontier: &Path,
    by: &str,
    claim: &str,
    criterion: &str,
    resolves_by: Option<&str>,
    confidence: f64,
    target_csv: &str,
    outcome: &str,
    conditions_text: &str,
    json: bool,
) {
    if !(0.0..=1.0).contains(&confidence) {
        fail(&format!("confidence must be in [0, 1]; got {confidence}"));
    }
    // Parse the expected-outcome spec (affirmed | falsified | quant:… | cat:…).
    let expected = parse_expected_outcome(outcome).unwrap_or_else(|e| fail_return(&e));

    let mut project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));

    // Targets arrive as a comma-separated list; empty entries are dropped.
    let targets: Vec<String> = target_csv
        .split(',')
        .map(|s| s.trim().to_string())
        .filter(|s| !s.is_empty())
        .collect();
    // Every target must be a well-formed finding id that exists in the frontier.
    for t in &targets {
        if !t.starts_with("vf_") {
            fail(&format!("target `{t}` is not a vf_ id"));
        }
        if !project.findings.iter().any(|f| f.id == *t) {
            fail(&format!("target `{t}` not present in frontier"));
        }
    }

    // Cheap keyword heuristics over the conditions text; species lists and the
    // quantitative fields stay unset for manual CLI entry.
    let lower = conditions_text.to_lowercase();
    let conditions = crate::bundle::Conditions {
        text: conditions_text.to_string(),
        species_verified: Vec::new(),
        species_unverified: Vec::new(),
        in_vitro: lower.contains("in vitro"),
        in_vivo: lower.contains("in vivo"),
        human_data: lower.contains("human") || lower.contains("clinical"),
        clinical_trial: lower.contains("clinical trial") || lower.contains("phase "),
        concentration_range: None,
        duration: None,
        age_group: None,
        cell_type: None,
    };

    let prediction = crate::bundle::Prediction::new(
        claim.to_string(),
        targets,
        None,
        resolves_by.map(|s| s.to_string()),
        criterion.to_string(),
        expected,
        by.to_string(),
        confidence,
        conditions,
    );

    // Skip instead of duplicating when the same id is already present
    // (presumably ids are content-derived by Prediction::new — TODO confirm).
    if project.predictions.iter().any(|p| p.id == prediction.id) {
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "ok": false,
                    "command": "predict",
                    "reason": "prediction_already_exists",
                    "id": prediction.id,
                }))
                .expect("serialize")
            );
        } else {
            println!(
                "{} prediction {} already exists in {}; skipping.",
                style::warn("predict"),
                prediction.id,
                frontier.display()
            );
        }
        return;
    }

    // Persist, then report the new id.
    let new_id = prediction.id.clone();
    project.predictions.push(prediction);
    repo::save_to_path(frontier, &project).unwrap_or_else(|e| fail_return(&e));

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "predict",
                "id": new_id,
                "made_by": by,
                "confidence": confidence,
                "frontier": frontier.display().to_string(),
            }))
            .expect("serialize predict result")
        );
    } else {
        println!();
        println!(
            "  {}",
            format!("VELA · PREDICT · {}", new_id)
                .to_uppercase()
                .dimmed()
        );
        println!("  {}", style::tick_row(60));
        println!("  by:          {by}");
        println!("  confidence:  {confidence:.3}");
        if let Some(d) = resolves_by {
            println!("  resolves by: {d}");
        }
        println!("  outcome:     {outcome}");
        println!("  claim:       {}", truncate(claim, 88));
        println!();
        println!(
            "  {} prediction recorded in {}",
            style::ok("ok"),
            frontier.display()
        );
    }
}
4885
/// Record the resolution of an existing prediction (`vela resolve`).
///
/// Validates the prediction id and confidence, wraps the source title in a
/// minimal `Evidence` record, and appends a `Resolution` unless one with the
/// same id already exists.
#[allow(clippy::too_many_arguments)]
fn cmd_resolve(
    frontier: &Path,
    prediction_id: &str,
    actual_outcome: &str,
    matched: bool,
    by: &str,
    confidence: f64,
    source_title: &str,
    doi: Option<&str>,
    json: bool,
) {
    if !prediction_id.starts_with("vpred_") {
        fail(&format!("prediction `{prediction_id}` is not a vpred_ id"));
    }
    if !(0.0..=1.0).contains(&confidence) {
        fail(&format!("confidence must be in [0, 1]; got {confidence}"));
    }
    let mut project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
    if !project.predictions.iter().any(|p| p.id == prediction_id) {
        fail(&format!(
            "prediction `{prediction_id}` not present in frontier"
        ));
    }

    // Minimal evidence stub: the source title (when given) becomes the sole
    // evidence span; the remaining fields stay empty/None for CLI entry.
    let evidence = crate::bundle::Evidence {
        evidence_type: "experimental".to_string(),
        model_system: String::new(),
        species: None,
        method: "prediction_resolution".to_string(),
        sample_size: None,
        effect_size: None,
        p_value: None,
        replicated: false,
        replication_count: None,
        evidence_spans: if source_title.is_empty() {
            Vec::new()
        } else {
            vec![serde_json::json!({"text": source_title})]
        },
    };

    // `--doi` is accepted but discarded here — Resolution::new has no slot
    // for it. NOTE(review): confirm whether it should be recorded somewhere
    // (cf. cmd_dataset_add, which stores doi in provenance).
    let _ = doi;
    let resolution = crate::bundle::Resolution::new(
        prediction_id.to_string(),
        actual_outcome.to_string(),
        matched,
        by.to_string(),
        evidence,
        confidence,
    );

    // Skip instead of duplicating when the same resolution id already exists.
    if project.resolutions.iter().any(|r| r.id == resolution.id) {
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "ok": false,
                    "command": "resolve",
                    "reason": "resolution_already_exists",
                    "id": resolution.id,
                }))
                .expect("serialize")
            );
        } else {
            println!(
                "{} resolution {} already exists in {}; skipping.",
                style::warn("resolve"),
                resolution.id,
                frontier.display()
            );
        }
        return;
    }

    // Persist, then report the new id.
    let new_id = resolution.id.clone();
    project.resolutions.push(resolution);
    repo::save_to_path(frontier, &project).unwrap_or_else(|e| fail_return(&e));

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "resolve",
                "id": new_id,
                "prediction": prediction_id,
                "matched": matched,
                "frontier": frontier.display().to_string(),
            }))
            .expect("serialize resolve result")
        );
    } else {
        println!();
        println!(
            "  {}",
            format!("VELA · RESOLVE · {}", new_id)
                .to_uppercase()
                .dimmed()
        );
        println!("  {}", style::tick_row(60));
        println!("  prediction: {prediction_id}");
        println!(
            "  matched:    {}",
            if matched {
                style::ok("yes")
            } else {
                style::lost("no")
            }
        );
        println!("  by:         {by}");
        println!("  outcome:    {}", truncate(actual_outcome, 80));
        println!();
        println!(
            "  {} resolution recorded in {}",
            style::ok("ok"),
            frontier.display()
        );
    }
}
5012
5013fn cmd_predictions(frontier: &Path, by: Option<&str>, open: bool, json: bool) {
5015 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
5016
5017 let resolved_ids: std::collections::HashSet<&str> = project
5018 .resolutions
5019 .iter()
5020 .map(|r| r.prediction_id.as_str())
5021 .collect();
5022
5023 let mut filtered: Vec<&crate::bundle::Prediction> = project
5024 .predictions
5025 .iter()
5026 .filter(|p| by.is_none_or(|b| p.made_by == b))
5027 .filter(|p| !open || !resolved_ids.contains(p.id.as_str()))
5028 .collect();
5029 filtered.sort_by(|a, b| {
5030 a.resolves_by
5031 .as_deref()
5032 .unwrap_or("9999")
5033 .cmp(b.resolves_by.as_deref().unwrap_or("9999"))
5034 });
5035
5036 if json {
5037 let payload: Vec<serde_json::Value> = filtered
5038 .iter()
5039 .map(|p| {
5040 json!({
5041 "id": p.id,
5042 "claim_text": p.claim_text,
5043 "made_by": p.made_by,
5044 "confidence": p.confidence,
5045 "predicted_at": p.predicted_at,
5046 "resolves_by": p.resolves_by,
5047 "expected_outcome": p.expected_outcome,
5048 "resolved": resolved_ids.contains(p.id.as_str()),
5049 })
5050 })
5051 .collect();
5052 println!(
5053 "{}",
5054 serde_json::to_string_pretty(&json!({
5055 "ok": true,
5056 "command": "predictions",
5057 "frontier": frontier.display().to_string(),
5058 "count": payload.len(),
5059 "predictions": payload,
5060 }))
5061 .expect("serialize predictions")
5062 );
5063 return;
5064 }
5065
5066 println!();
5067 println!(
5068 " {}",
5069 format!("VELA · PREDICTIONS · {}", frontier.display())
5070 .to_uppercase()
5071 .dimmed()
5072 );
5073 println!(" {}", style::tick_row(60));
5074 if filtered.is_empty() {
5075 println!(" (no predictions matching filters)");
5076 return;
5077 }
5078 for p in &filtered {
5079 let resolved = resolved_ids.contains(p.id.as_str());
5080 let chip = if resolved {
5081 style::ok("resolved")
5082 } else {
5083 style::warn("open")
5084 };
5085 let deadline = p.resolves_by.as_deref().unwrap_or("(no deadline)");
5086 println!(
5087 " · {} {} by {} → {}",
5088 p.id.dimmed(),
5089 chip,
5090 p.made_by,
5091 deadline,
5092 );
5093 println!(" claim: {}", truncate(&p.claim_text, 90));
5094 println!(" confidence: {:.2}", p.confidence);
5095 }
5096}
5097
5098fn cmd_predictions_expire(frontier: &Path, now_override: Option<&str>, dry_run: bool, json: bool) {
5103 use chrono::DateTime;
5104
5105 let now_dt = match now_override {
5106 Some(s) => DateTime::parse_from_rfc3339(s)
5107 .map(|dt| dt.with_timezone(&chrono::Utc))
5108 .unwrap_or_else(|e| fail_return(&format!("invalid --now '{s}': {e}"))),
5109 None => chrono::Utc::now(),
5110 };
5111
5112 let mut project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
5113 if dry_run {
5114 let mut probe = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
5116 let report = crate::calibration::expire_overdue_predictions(&mut probe, now_dt);
5117 if json {
5118 println!(
5119 "{}",
5120 serde_json::to_string_pretty(&json!({
5121 "ok": true,
5122 "command": "predictions.expire",
5123 "dry_run": true,
5124 "report": report,
5125 }))
5126 .expect("serialize predictions.expire (dry-run)")
5127 );
5128 } else {
5129 println!(
5130 "{} dry-run @ {}: {} would expire, {} already expired, {} resolved, {} still open",
5131 style::ok("ok"),
5132 report.now,
5133 report.newly_expired.len(),
5134 report.already_expired.len(),
5135 report.already_resolved.len(),
5136 report.still_open.len(),
5137 );
5138 for id in &report.newly_expired {
5139 println!(" · {id}");
5140 }
5141 }
5142 return;
5143 }
5144
5145 let report = crate::calibration::expire_overdue_predictions(&mut project, now_dt);
5146 repo::save_to_path(frontier, &project).unwrap_or_else(|e| fail_return(&e));
5147
5148 if json {
5149 println!(
5150 "{}",
5151 serde_json::to_string_pretty(&json!({
5152 "ok": true,
5153 "command": "predictions.expire",
5154 "report": report,
5155 }))
5156 .expect("serialize predictions.expire")
5157 );
5158 } else {
5159 println!(
5160 "{} @ {}: {} newly expired, {} already expired, {} resolved, {} still open",
5161 style::ok("expired"),
5162 report.now,
5163 report.newly_expired.len(),
5164 report.already_expired.len(),
5165 report.already_resolved.len(),
5166 report.still_open.len(),
5167 );
5168 for id in &report.newly_expired {
5169 println!(" · {id}");
5170 }
5171 }
5172}
5173
5174fn cmd_calibration(frontier: &Path, actor: Option<&str>, json: bool) {
5175 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
5176 let records = match actor {
5177 Some(a) => {
5178 crate::calibration::calibration_for_actor(a, &project.predictions, &project.resolutions)
5179 .map(|r| vec![r])
5180 .unwrap_or_default()
5181 }
5182 None => crate::calibration::calibration_records(&project.predictions, &project.resolutions),
5183 };
5184
5185 if json {
5186 println!(
5187 "{}",
5188 serde_json::to_string_pretty(&json!({
5189 "ok": true,
5190 "command": "calibration",
5191 "frontier": frontier.display().to_string(),
5192 "filter_actor": actor,
5193 "records": records,
5194 }))
5195 .expect("serialize calibration")
5196 );
5197 return;
5198 }
5199
5200 println!();
5201 println!(
5202 " {}",
5203 format!("VELA · CALIBRATION · {}", frontier.display())
5204 .to_uppercase()
5205 .dimmed()
5206 );
5207 println!(" {}", style::tick_row(60));
5208 if records.is_empty() {
5209 println!(" (no calibration records)");
5210 return;
5211 }
5212 for r in &records {
5213 println!(" · {}", r.actor);
5214 println!(
5215 " predictions: {} resolved: {} hits: {}",
5216 r.n_predictions, r.n_resolved, r.n_hit
5217 );
5218 match r.hit_rate {
5219 Some(h) => println!(" hit rate: {:.1}%", h * 100.0),
5220 None => println!(" hit rate: n/a"),
5221 }
5222 match r.brier_score {
5223 Some(b) => println!(
5224 " brier: {:.4} (lower is better; 0.25 = chance)",
5225 b
5226 ),
5227 None => println!(" brier: n/a"),
5228 }
5229 match r.log_score {
5230 Some(l) => println!(
5231 " log score: {:.4} (higher is better; 0 = perfect)",
5232 l
5233 ),
5234 None => println!(" log score: n/a"),
5235 }
5236 }
5237}
5238
/// Register an external dataset (`vela dataset add`) in a frontier.
///
/// Builds a `Provenance` record for the data release (manual curation,
/// stamped with this binary's version), wraps it in a `Dataset`, and appends
/// it unless a dataset with the same id already exists.
#[allow(clippy::too_many_arguments)]
fn cmd_dataset_add(
    frontier: &Path,
    name: &str,
    version: Option<&str>,
    content_hash: &str,
    url: Option<&str>,
    license: Option<&str>,
    source_title: &str,
    doi: Option<&str>,
    row_count: Option<u64>,
    json: bool,
) {
    let mut project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));

    // Provenance for a data release; bibliographic fields the CLI does not
    // collect stay None/empty.
    let provenance = crate::bundle::Provenance {
        source_type: "data_release".to_string(),
        doi: doi.map(|s| s.to_string()),
        pmid: None,
        pmc: None,
        openalex_id: None,
        url: url.map(|s| s.to_string()),
        title: source_title.to_string(),
        authors: Vec::new(),
        year: None,
        journal: None,
        license: license.map(|s| s.to_string()),
        publisher: None,
        funders: Vec::new(),
        extraction: crate::bundle::Extraction {
            method: "manual_curation".to_string(),
            model: None,
            model_version: None,
            extracted_at: chrono::Utc::now().to_rfc3339(),
            // This CLI binary's version, recorded as the extractor.
            extractor_version: env!("CARGO_PKG_VERSION").to_string(),
        },
        review: None,
        citation_count: None,
    };

    let mut dataset = crate::bundle::Dataset::new(
        name.to_string(),
        version.map(|s| s.to_string()),
        content_hash.to_string(),
        url.map(|s| s.to_string()),
        license.map(|s| s.to_string()),
        provenance,
    );
    // Row count is optional metadata set after construction.
    dataset.row_count = row_count;

    // Skip instead of duplicating when the same dataset id is already present
    // (presumably ids are derived by Dataset::new — TODO confirm).
    if project.datasets.iter().any(|d| d.id == dataset.id) {
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "ok": false,
                    "command": "dataset.add",
                    "reason": "dataset_already_exists",
                    "id": dataset.id,
                }))
                .expect("serialize")
            );
        } else {
            println!(
                "{} dataset {} already exists in {}; skipping.",
                style::warn("dataset"),
                dataset.id,
                frontier.display()
            );
        }
        return;
    }

    // Persist, then report the new id.
    let new_id = dataset.id.clone();
    project.datasets.push(dataset);
    repo::save_to_path(frontier, &project).unwrap_or_else(|e| fail_return(&e));

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "dataset.add",
                "id": new_id,
                "name": name,
                "version": version,
                "frontier": frontier.display().to_string(),
            }))
            .expect("failed to serialize dataset.add result")
        );
    } else {
        println!();
        println!(
            "  {}",
            format!("VELA · DATASET · {}", new_id)
                .to_uppercase()
                .dimmed()
        );
        println!("  {}", style::tick_row(60));
        println!("  name:         {name}");
        if let Some(v) = version {
            println!("  version:      {v}");
        }
        println!("  content_hash: {content_hash}");
        if let Some(u) = url {
            println!("  url:          {u}");
        }
        println!("  source:       {source_title}");
        println!();
        println!(
            "  {} dataset recorded in {}",
            style::ok("ok"),
            frontier.display()
        );
    }
}
5356
/// Handler for `negative-result add`: build a kind-specific payload, attach
/// minimal conditions/provenance, and append the deposit to the frontier's
/// event log.
///
/// `kind` must be "registered_trial" (requires --endpoint, --intervention,
/// --comparator, --population, --n-enrolled, --power, --ci-lower, --ci-upper)
/// or "exploratory" (requires --reagent, --observation, --attempts); any
/// other value aborts. `fail_return` diverges, so every
/// `unwrap_or_else(|| fail_return(..))` below either yields a value or exits.
#[allow(clippy::too_many_arguments)]
fn cmd_negative_result_add(
    frontier: &Path,
    kind: &str,
    deposited_by: &str,
    reason: &str,
    conditions_text: &str,
    notes: &str,
    targets: Vec<String>,
    endpoint: Option<&str>,
    intervention: Option<&str>,
    comparator: Option<&str>,
    population: Option<&str>,
    n_enrolled: Option<u32>,
    power: Option<f64>,
    ci_lower: Option<f64>,
    ci_upper: Option<f64>,
    effect_size_threshold: Option<f64>,
    registry_id: Option<&str>,
    reagent: Option<&str>,
    observation: Option<&str>,
    attempts: Option<u32>,
    source_title: &str,
    doi: Option<&str>,
    url: Option<&str>,
    year: Option<i32>,
    json: bool,
) {
    // Validate the kind-specific required flags and build the typed payload.
    let nr_kind = match kind {
        "registered_trial" => {
            let endpoint =
                endpoint.unwrap_or_else(|| fail_return("--endpoint required for registered_trial"));
            let intervention = intervention
                .unwrap_or_else(|| fail_return("--intervention required for registered_trial"));
            let comparator = comparator
                .unwrap_or_else(|| fail_return("--comparator required for registered_trial"));
            let population = population
                .unwrap_or_else(|| fail_return("--population required for registered_trial"));
            let n_enrolled = n_enrolled
                .unwrap_or_else(|| fail_return("--n-enrolled required for registered_trial"));
            let power =
                power.unwrap_or_else(|| fail_return("--power required for registered_trial"));
            let ci_lower =
                ci_lower.unwrap_or_else(|| fail_return("--ci-lower required for registered_trial"));
            let ci_upper =
                ci_upper.unwrap_or_else(|| fail_return("--ci-upper required for registered_trial"));
            crate::bundle::NegativeResultKind::RegisteredTrial {
                endpoint: endpoint.to_string(),
                intervention: intervention.to_string(),
                comparator: comparator.to_string(),
                population: population.to_string(),
                n_enrolled,
                power,
                effect_size_ci: (ci_lower, ci_upper),
                effect_size_threshold,
                registry_id: registry_id.map(|s| s.to_string()),
            }
        }
        "exploratory" => {
            let reagent =
                reagent.unwrap_or_else(|| fail_return("--reagent required for exploratory"));
            let observation = observation
                .unwrap_or_else(|| fail_return("--observation required for exploratory"));
            let attempts =
                attempts.unwrap_or_else(|| fail_return("--attempts required for exploratory"));
            crate::bundle::NegativeResultKind::Exploratory {
                reagent: reagent.to_string(),
                observation: observation.to_string(),
                attempts,
            }
        }
        other => fail_return(&format!(
            "--kind must be 'registered_trial' or 'exploratory', got '{other}'"
        )),
    };

    // Only the free-text description is captured here; structured condition
    // flags default off, except clinical_trial which follows the CLI kind.
    let conditions = crate::bundle::Conditions {
        text: conditions_text.to_string(),
        species_verified: Vec::new(),
        species_unverified: Vec::new(),
        in_vitro: false,
        in_vivo: false,
        human_data: false,
        clinical_trial: matches!(kind, "registered_trial"),
        concentration_range: None,
        duration: None,
        age_group: None,
        cell_type: None,
    };

    // Minimal manual-curation provenance; source_type mirrors the kind and
    // extraction is stamped with the current time and crate version.
    let provenance = crate::bundle::Provenance {
        source_type: if matches!(kind, "registered_trial") {
            "clinical_trial".to_string()
        } else {
            "lab_notebook".to_string()
        },
        doi: doi.map(|s| s.to_string()),
        pmid: None,
        pmc: None,
        openalex_id: None,
        url: url.map(|s| s.to_string()),
        title: source_title.to_string(),
        authors: Vec::new(),
        year,
        journal: None,
        license: None,
        publisher: None,
        funders: Vec::new(),
        extraction: crate::bundle::Extraction {
            method: "manual_curation".to_string(),
            model: None,
            model_version: None,
            extracted_at: chrono::Utc::now().to_rfc3339(),
            extractor_version: env!("CARGO_PKG_VERSION").to_string(),
        },
        review: None,
        citation_count: None,
    };

    // Persist through the event log; fail_return aborts on any state error.
    let report = state::add_negative_result(
        frontier,
        nr_kind,
        targets,
        deposited_by,
        conditions,
        provenance,
        notes,
        reason,
    )
    .unwrap_or_else(|e| fail_return(&e));

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&report).expect("serialize report")
        );
    } else {
        println!();
        println!(
            " {}",
            format!("VELA · NEGATIVE-RESULT · {}", report.finding_id)
                .to_uppercase()
                .dimmed()
        );
        println!(" {}", style::tick_row(60));
        println!(" kind: {kind}");
        println!(" deposited_by: {deposited_by}");
        if let Some(ev) = &report.applied_event_id {
            println!(" event: {ev}");
        }
        println!(
            " {} negative_result deposited in {}",
            style::ok("ok"),
            frontier.display()
        );
    }
}
5519
/// Handler for `negative-results`: list deposited negative results,
/// optionally restricted to those whose `target_findings` contains the
/// finding id in `target`. With `json`, emits the filtered set as one object.
fn cmd_negative_results(frontier: &Path, target: Option<&str>, json: bool) {
    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
    let filtered: Vec<&crate::bundle::NegativeResult> = project
        .negative_results
        .iter()
        .filter(|nr| {
            // No --target keeps everything; otherwise require an exact hit.
            target
                .map(|t| nr.target_findings.iter().any(|f| f == t))
                .unwrap_or(true)
        })
        .collect();

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "negative_results",
                "frontier": frontier.display().to_string(),
                "count": filtered.len(),
                "negative_results": filtered,
            }))
            .expect("serialize negative_results")
        );
        return;
    }

    if filtered.is_empty() {
        println!(" no negative_results in {}", frontier.display());
        return;
    }

    println!();
    println!(
        " {} ({})",
        "VELA · NEGATIVE RESULTS".dimmed(),
        filtered.len()
    );
    println!(" {}", style::tick_row(60));
    for nr in &filtered {
        // One-line summary per kind: trials show endpoint + power,
        // exploratory results show reagent + attempt count.
        let kind_label = match &nr.kind {
            crate::bundle::NegativeResultKind::RegisteredTrial {
                endpoint, power, ..
            } => format!("trial · {endpoint} · power {power:.2}"),
            crate::bundle::NegativeResultKind::Exploratory {
                reagent, attempts, ..
            } => format!("exploratory · {reagent} · {attempts} attempts"),
        };
        let retracted = if nr.retracted { " [retracted]" } else { "" };
        let review = nr
            .review_state
            .as_ref()
            .map(|s| format!(" [{s:?}]"))
            .unwrap_or_default();
        println!(" {}{}{}", nr.id, retracted, review);
        println!(" {kind_label}");
        if !nr.target_findings.is_empty() {
            println!(" targets: {}", nr.target_findings.join(", "));
        }
    }
    println!();
}
5584
/// Handler for `tier set`: parse `tier` into an AccessTier, record the change
/// for (`object_type`, `object_id`) through the event log, and report the
/// applied event id. Invalid tier strings abort via `fail_return`.
#[allow(clippy::too_many_arguments)]
fn cmd_tier_set(
    frontier: &Path,
    object_type: &str,
    object_id: &str,
    tier: &str,
    actor: &str,
    reason: &str,
    json: bool,
) {
    let parsed_tier =
        crate::access_tier::AccessTier::parse(tier).unwrap_or_else(|e| fail_return(&e));
    let report = state::set_tier(frontier, object_type, object_id, parsed_tier, actor, reason)
        .unwrap_or_else(|e| fail_return(&e));

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&report).expect("serialize tier-set report")
        );
    } else {
        println!();
        println!(
            " {}",
            format!("VELA · TIER · {}", object_id)
                .to_uppercase()
                .dimmed()
        );
        println!(" {}", style::tick_row(60));
        println!(" object_type: {object_type}");
        // Echo the canonical spelling rather than the raw CLI input.
        println!(" new_tier: {}", parsed_tier.canonical());
        println!(" actor: {actor}");
        if let Some(ev) = &report.applied_event_id {
            println!(" event: {ev}");
        }
        println!(" {} tier set in {}", style::ok("ok"), frontier.display());
    }
}
5624
/// Handler for `trajectory create`: open a new trajectory targeting
/// `targets` and report the new finding id plus the applied event.
// NOTE(review): only six parameters here — the too_many_arguments allow
// looks stale; confirm before removing.
#[allow(clippy::too_many_arguments)]
fn cmd_trajectory_create(
    frontier: &Path,
    deposited_by: &str,
    reason: &str,
    targets: Vec<String>,
    notes: &str,
    json: bool,
) {
    let report = state::create_trajectory(frontier, targets, deposited_by, notes, reason)
        .unwrap_or_else(|e| fail_return(&e));

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&report).expect("serialize trajectory report")
        );
    } else {
        println!();
        println!(
            " {}",
            format!("VELA · TRAJECTORY · {}", report.finding_id)
                .to_uppercase()
                .dimmed()
        );
        println!(" {}", style::tick_row(60));
        println!(" deposited_by: {deposited_by}");
        if let Some(ev) = &report.applied_event_id {
            println!(" event: {ev}");
        }
        println!(
            " {} trajectory opened in {}",
            style::ok("ok"),
            frontier.display()
        );
    }
}
5663
/// Handler for `trajectory step`: parse the step kind and append a step to
/// an existing trajectory through the event log.
///
/// `kind` must be one of hypothesis|tried|ruled_out|observed|refined; any
/// other value aborts via `fail_return`.
#[allow(clippy::too_many_arguments)]
fn cmd_trajectory_step(
    frontier: &Path,
    trajectory_id: &str,
    kind: &str,
    description: &str,
    actor: &str,
    reason: &str,
    references: Vec<String>,
    json: bool,
) {
    // Map the CLI kind string onto the typed step kind.
    let parsed_kind = match kind {
        "hypothesis" => crate::bundle::TrajectoryStepKind::Hypothesis,
        "tried" => crate::bundle::TrajectoryStepKind::Tried,
        "ruled_out" => crate::bundle::TrajectoryStepKind::RuledOut,
        "observed" => crate::bundle::TrajectoryStepKind::Observed,
        "refined" => crate::bundle::TrajectoryStepKind::Refined,
        other => fail_return(&format!(
            "--kind must be one of hypothesis|tried|ruled_out|observed|refined, got '{other}'"
        )),
    };
    let report = state::append_trajectory_step(
        frontier,
        trajectory_id,
        parsed_kind,
        description,
        actor,
        references,
        reason,
    )
    .unwrap_or_else(|e| fail_return(&e));

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&report).expect("serialize step report")
        );
    } else {
        println!();
        println!(
            " {}",
            format!("VELA · STEP · {}", report.finding_id)
                .to_uppercase()
                .dimmed()
        );
        println!(" {}", style::tick_row(60));
        println!(" trajectory: {trajectory_id}");
        println!(" kind: {kind}");
        println!(" actor: {actor}");
        println!(
            " {} step appended in {}",
            style::ok("ok"),
            frontier.display()
        );
    }
}
5721
/// Handler for `trajectories`: list trajectories (optionally filtered to
/// those targeting the finding id in `target`), printing each step with an
/// 80-character description preview.
fn cmd_trajectories(frontier: &Path, target: Option<&str>, json: bool) {
    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
    let filtered: Vec<&crate::bundle::Trajectory> = project
        .trajectories
        .iter()
        .filter(|t| {
            // No --target keeps everything; otherwise require an exact hit.
            target
                .map(|tg| t.target_findings.iter().any(|f| f == tg))
                .unwrap_or(true)
        })
        .collect();

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "trajectories",
                "frontier": frontier.display().to_string(),
                "count": filtered.len(),
                "trajectories": filtered,
            }))
            .expect("serialize trajectories")
        );
        return;
    }

    if filtered.is_empty() {
        println!(" no trajectories in {}", frontier.display());
        return;
    }

    println!();
    println!(" {} ({})", "VELA · TRAJECTORIES".dimmed(), filtered.len());
    println!(" {}", style::tick_row(60));
    for t in &filtered {
        let retracted = if t.retracted { " [retracted]" } else { "" };
        let review = t
            .review_state
            .as_ref()
            .map(|s| format!(" [{s:?}]"))
            .unwrap_or_default();
        println!(" {}{}{}", t.id, retracted, review);
        println!(
            " {} step(s){}",
            t.steps.len(),
            if t.target_findings.is_empty() {
                String::new()
            } else {
                format!(" · targets: {}", t.target_findings.join(", "))
            }
        );
        for step in &t.steps {
            // Human-readable label per typed step kind.
            let label = match step.kind {
                crate::bundle::TrajectoryStepKind::Hypothesis => "hypothesis",
                crate::bundle::TrajectoryStepKind::Tried => "tried",
                crate::bundle::TrajectoryStepKind::RuledOut => "ruled_out",
                crate::bundle::TrajectoryStepKind::Observed => "observed",
                crate::bundle::TrajectoryStepKind::Refined => "refined",
            };
            // Truncate by chars (not bytes) so multibyte text never panics.
            let preview: String = step.description.chars().take(80).collect();
            println!(" [{label}] {preview}");
        }
    }
    println!();
}
5789
/// Handler for `datasets`: list registered datasets with optional version
/// suffix, URL, and content hash (both truncated for display).
fn cmd_datasets(frontier: &Path, json: bool) {
    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "datasets",
                "frontier": frontier.display().to_string(),
                "count": project.datasets.len(),
                "datasets": project.datasets,
            }))
            .expect("serialize datasets")
        );
        return;
    }
    println!();
    println!(
        " {}",
        format!("VELA · DATASETS · {}", frontier.display())
            .to_uppercase()
            .dimmed()
    );
    println!(" {}", style::tick_row(60));
    if project.datasets.is_empty() {
        println!(" (no datasets registered)");
        return;
    }
    for ds in &project.datasets {
        // "@version" suffix only when a version is recorded.
        let v = ds
            .version
            .as_deref()
            .map(|s| format!("@{s}"))
            .unwrap_or_default();
        println!(" · {} {}{}", ds.id.dimmed(), ds.name, v);
        if let Some(u) = &ds.url {
            println!(" url: {}", truncate(u, 80));
        }
        println!(" hash: {}", truncate(&ds.content_hash, 80));
    }
}
5832
/// Handler for `code add`: register a code artifact in the frontier,
/// de-duplicating on the artifact id derived by `CodeArtifact::new`.
/// A duplicate id is reported (ok=false in JSON mode) and skipped without
/// touching the repo.
#[allow(clippy::too_many_arguments)]
fn cmd_code_add(
    frontier: &Path,
    language: &str,
    repo_url: Option<&str>,
    commit: Option<&str>,
    path: &str,
    content_hash: &str,
    line_start: Option<u32>,
    line_end: Option<u32>,
    entry_point: Option<&str>,
    json: bool,
) {
    let mut project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));

    // Start-only collapses to a single-line range.
    // NOTE(review): --line-end without --line-start is silently dropped here;
    // confirm whether that should be a hard error instead.
    let line_range = match (line_start, line_end) {
        (Some(a), Some(b)) => Some((a, b)),
        (Some(a), None) => Some((a, a)),
        _ => None,
    };

    let artifact = crate::bundle::CodeArtifact::new(
        language.to_string(),
        repo_url.map(|s| s.to_string()),
        commit.map(|s| s.to_string()),
        path.to_string(),
        line_range,
        content_hash.to_string(),
        entry_point.map(|s| s.to_string()),
    );

    // Duplicate ids are skipped rather than overwritten.
    if project.code_artifacts.iter().any(|c| c.id == artifact.id) {
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "ok": false,
                    "command": "code.add",
                    "reason": "artifact_already_exists",
                    "id": artifact.id,
                }))
                .expect("serialize")
            );
        } else {
            println!(
                "{} code artifact {} already exists in {}; skipping.",
                style::warn("code"),
                artifact.id,
                frontier.display()
            );
        }
        return;
    }

    let new_id = artifact.id.clone();
    project.code_artifacts.push(artifact);
    repo::save_to_path(frontier, &project).unwrap_or_else(|e| fail_return(&e));

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "code.add",
                "id": new_id,
                "language": language,
                "path": path,
                "frontier": frontier.display().to_string(),
            }))
            .expect("failed to serialize code.add result")
        );
    } else {
        println!();
        println!(
            " {}",
            format!("VELA · CODE · {}", new_id).to_uppercase().dimmed()
        );
        println!(" {}", style::tick_row(60));
        println!(" language: {language}");
        if let Some(r) = repo_url {
            println!(" repo: {r}");
        }
        if let Some(c) = commit {
            println!(" commit: {c}");
        }
        println!(" path: {path}");
        if let Some((a, b)) = line_range {
            println!(" lines: {a}-{b}");
        }
        println!(" content_hash: {content_hash}");
        println!();
        println!(
            " {} code artifact recorded in {}",
            style::ok("ok"),
            frontier.display()
        );
    }
}
5932
/// Handler for `code-artifacts`: list registered code artifacts with
/// language, path, optional line range, repo URL, and git commit.
fn cmd_code_artifacts(frontier: &Path, json: bool) {
    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "code-artifacts",
                "frontier": frontier.display().to_string(),
                "count": project.code_artifacts.len(),
                "code_artifacts": project.code_artifacts,
            }))
            .expect("serialize code-artifacts")
        );
        return;
    }
    println!();
    println!(
        " {}",
        format!("VELA · CODE · {}", frontier.display())
            .to_uppercase()
            .dimmed()
    );
    println!(" {}", style::tick_row(60));
    if project.code_artifacts.is_empty() {
        println!(" (no code artifacts registered)");
        return;
    }
    for c in &project.code_artifacts {
        // ":start-end" suffix only when a line range is recorded.
        let lr = c
            .line_range
            .map(|(a, b)| format!(":{a}-{b}"))
            .unwrap_or_default();
        println!(" · {} {} {}{}", c.id.dimmed(), c.language, c.path, lr);
        if let Some(r) = &c.repo_url {
            println!(" repo: {}", truncate(r, 80));
        }
        if let Some(g) = &c.git_commit {
            println!(" commit: {g}");
        }
    }
}
5976
5977fn sha256_for_bytes(bytes: &[u8]) -> String {
5978 format!("sha256:{}", hex::encode(Sha256::digest(bytes)))
5979}
5980
/// Return the hex portion of a "sha256:<hex>" content hash; inputs without
/// the prefix are returned unchanged.
fn sha256_hex_part(content_hash: &str) -> &str {
    match content_hash.strip_prefix("sha256:") {
        Some(hex) => hex,
        None => content_hash,
    }
}
5984
/// Store `bytes` in the repo's content-addressed blob store and return the
/// repo-relative locator (".vela/artifact-blobs/sha256/<hex>").
///
/// Returns None when `frontier` is not a Vela repo, letting the caller fall
/// back to a pointer-style locator. An already-present blob is left as-is:
/// the path is content-addressed, so an equal hash implies equal bytes.
/// Directory-creation or write failures abort via `fail`.
fn artifact_blob_locator(frontier: &Path, content_hash: &str, bytes: &[u8]) -> Option<String> {
    let Ok(repo::VelaSource::VelaRepo(root)) = repo::detect(frontier) else {
        return None;
    };
    let hex = sha256_hex_part(content_hash);
    let rel = format!(".vela/artifact-blobs/sha256/{hex}");
    let path = root.join(&rel);
    if let Some(parent) = path.parent() {
        std::fs::create_dir_all(parent).unwrap_or_else(|e| {
            fail(&format!(
                "Failed to create artifact blob directory {}: {e}",
                parent.display()
            ))
        });
    }
    if !path.is_file() {
        std::fs::write(&path, bytes)
            .unwrap_or_else(|e| fail(&format!("Failed to write artifact blob: {e}")));
    }
    Some(rel)
}
6006
6007fn parse_metadata_pairs(pairs: Vec<String>) -> BTreeMap<String, Value> {
6008 let mut out = BTreeMap::new();
6009 for pair in pairs {
6010 let Some((key, value)) = pair.split_once('=') else {
6011 fail(&format!("--metadata must be key=value, got {pair:?}"));
6012 };
6013 let key = key.trim();
6014 if key.is_empty() {
6015 fail("--metadata key must be non-empty");
6016 }
6017 out.insert(key.to_string(), Value::String(value.trim().to_string()));
6018 }
6019 out
6020}
6021
/// Map an artifact `kind` to the provenance `source_type` string recorded
/// for it; unrecognized kinds fall back to "database_record".
fn artifact_source_type(kind: &str) -> &'static str {
    // Tiny lookup table; a linear scan over six rows is plenty fast.
    const KIND_TO_SOURCE: [(&str, &str); 6] = [
        ("clinical_trial_record", "clinical_trial"),
        ("protocol", "clinical_trial"),
        ("dataset", "data_release"),
        ("model_output", "model_output"),
        ("registry_record", "database_record"),
        ("lab_file", "lab_notebook"),
    ];
    KIND_TO_SOURCE
        .iter()
        .find(|(k, _)| *k == kind)
        .map_or("database_record", |(_, v)| *v)
}
6032
/// Build the minimal Provenance record for a deposited artifact:
/// source_type is derived from `kind` via `artifact_source_type`, the
/// extraction block is stamped "artifact_deposit" with the current UTC time
/// and crate version, and every other optional field is left empty.
fn artifact_provenance(
    kind: &str,
    title: &str,
    url: Option<&str>,
    doi: Option<&str>,
    license: Option<&str>,
) -> crate::bundle::Provenance {
    crate::bundle::Provenance {
        source_type: artifact_source_type(kind).to_string(),
        doi: doi.map(str::to_string),
        pmid: None,
        pmc: None,
        openalex_id: None,
        url: url.map(str::to_string),
        title: title.to_string(),
        authors: Vec::new(),
        year: None,
        journal: None,
        license: license.map(str::to_string),
        publisher: None,
        funders: Vec::new(),
        extraction: crate::bundle::Extraction {
            method: "artifact_deposit".to_string(),
            model: None,
            model_version: None,
            extracted_at: chrono::Utc::now().to_rfc3339(),
            extractor_version: env!("CARGO_PKG_VERSION").to_string(),
        },
        review: None,
        citation_count: None,
    }
}
6065
/// Handler for `artifact add`: deposit an artifact either from a local file
/// or as a remote pointer, then record it through the event log.
///
/// Storage mode resolution:
/// - `--file` present: bytes are hashed; if the frontier is a Vela repo the
///   bytes are copied into the blob store ("local_blob"), otherwise the
///   original path is kept ("local_file"). A supplied `--content-hash` must
///   match the file bytes or the command aborts.
/// - no `--file` but `--url`: "remote" with the URL as locator.
/// - neither: "pointer"; `--content-hash` is then mandatory.
#[allow(clippy::too_many_arguments)]
fn cmd_artifact_add(
    frontier: &Path,
    kind: &str,
    name: &str,
    file: Option<&Path>,
    url: Option<&str>,
    content_hash: Option<&str>,
    media_type: Option<&str>,
    license: Option<&str>,
    source_title: Option<&str>,
    source_url: Option<&str>,
    doi: Option<&str>,
    target: Vec<String>,
    metadata: Vec<String>,
    access_tier: &str,
    deposited_by: &str,
    reason: &str,
    json_out: bool,
) {
    let tier =
        crate::access_tier::AccessTier::parse(access_tier).unwrap_or_else(|e| fail_return(&e));
    let mut size_bytes = None;
    let mut storage_mode = "pointer".to_string();
    let mut locator = url.map(str::to_string);
    let mut computed_hash = content_hash.map(str::to_string);

    if let Some(path) = file {
        let bytes = std::fs::read(path)
            .unwrap_or_else(|e| fail(&format!("Failed to read artifact file: {e}")));
        let actual_hash = sha256_for_bytes(&bytes);
        // Guard against a stale or mistyped --content-hash: compare hex
        // parts case-insensitively, ignoring the "sha256:" prefix.
        if let Some(expected) = content_hash {
            let expected_hex = sha256_hex_part(expected);
            let actual_hex = sha256_hex_part(&actual_hash);
            if !expected_hex.eq_ignore_ascii_case(actual_hex) {
                fail(&format!(
                    "--content-hash does not match file bytes: expected {expected}, got {actual_hash}"
                ));
            }
        }
        size_bytes = Some(bytes.len() as u64);
        computed_hash = Some(actual_hash.clone());
        // Prefer the repo-internal blob store; fall back to the given path.
        if let Some(rel) = artifact_blob_locator(frontier, &actual_hash, &bytes) {
            storage_mode = "local_blob".to_string();
            locator = Some(rel);
        } else {
            storage_mode = "local_file".to_string();
            locator = Some(path.display().to_string());
        }
    }

    let Some(content_hash) = computed_hash else {
        fail("Provide --content-hash unless --file is present.");
    };
    let content_hash_for_print = content_hash.clone();
    if file.is_none() && url.is_some() {
        storage_mode = "remote".to_string();
    }

    // Provenance source URL prefers an explicit --source-url over --url;
    // the source title defaults to the artifact name.
    let source_url_effective = source_url.or(url);
    let source_title = source_title.unwrap_or(name);
    let provenance = artifact_provenance(kind, source_title, source_url_effective, doi, license);
    let metadata = parse_metadata_pairs(metadata);
    let artifact = crate::bundle::Artifact::new(
        kind.to_string(),
        name.to_string(),
        content_hash,
        size_bytes,
        media_type.map(str::to_string),
        storage_mode,
        locator,
        source_url_effective.map(str::to_string),
        license.map(str::to_string),
        target,
        provenance,
        metadata,
        tier,
    )
    .unwrap_or_else(|e| fail_return(&e));

    let artifact_id = artifact.id.clone();
    let report = state::add_artifact(frontier, artifact, deposited_by, reason)
        .unwrap_or_else(|e| fail_return(&e));

    if json_out {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "artifact.add",
                "id": artifact_id,
                "frontier": frontier.display().to_string(),
                "event": report.applied_event_id,
            }))
            .expect("serialize artifact.add")
        );
    } else {
        println!();
        println!(
            " {}",
            format!("VELA · ARTIFACT · {}", artifact_id)
                .to_uppercase()
                .dimmed()
        );
        println!(" {}", style::tick_row(60));
        println!(" kind: {kind}");
        println!(" name: {name}");
        println!(" hash: {content_hash_for_print}");
        println!(
            " {} artifact recorded in {}",
            style::ok("ok"),
            frontier.display()
        );
    }
}
6181
/// Handler for `artifacts`: list deposited artifacts, optionally restricted
/// to those whose `target_findings` contains the finding id in `target`.
fn cmd_artifacts(frontier: &Path, target: Option<&str>, json_out: bool) {
    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
    let filtered: Vec<&crate::bundle::Artifact> = project
        .artifacts
        .iter()
        .filter(|artifact| {
            // No --target keeps everything; otherwise require an exact hit.
            target
                .map(|t| artifact.target_findings.iter().any(|f| f == t))
                .unwrap_or(true)
        })
        .collect();

    if json_out {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "artifacts",
                "frontier": frontier.display().to_string(),
                "count": filtered.len(),
                "artifacts": filtered,
            }))
            .expect("serialize artifacts")
        );
        return;
    }

    println!();
    println!(
        " {}",
        format!("VELA · ARTIFACTS · {}", frontier.display())
            .to_uppercase()
            .dimmed()
    );
    println!(" {}", style::tick_row(60));
    if filtered.is_empty() {
        println!(" (no artifacts registered)");
        return;
    }
    for artifact in filtered {
        println!(
            " · {} {} · {}",
            artifact.id.dimmed(),
            artifact.kind,
            artifact.name
        );
        if let Some(locator) = &artifact.locator {
            println!(" locator: {}", truncate(locator, 88));
        }
        if !artifact.target_findings.is_empty() {
            println!(" targets: {}", artifact.target_findings.join(", "));
        }
    }
}
6236
/// Handler for `artifact audit`: run the artifact integrity audit and print
/// its summary. Exits with status 1 when the audit reports problems, in both
/// JSON and text modes.
fn cmd_artifact_audit(frontier: &Path, json_out: bool) {
    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
    let audit = crate::artifact_audit::audit_artifacts(frontier, &project);
    if json_out {
        println!(
            "{}",
            serde_json::to_string_pretty(&audit).expect("serialize artifact audit")
        );
        if !audit.ok {
            std::process::exit(1);
        }
        return;
    }

    println!();
    println!(
        " {}",
        format!("VELA · ARTIFACT AUDIT · {}", frontier.display())
            .to_uppercase()
            .dimmed()
    );
    println!(" {}", style::tick_row(60));
    println!(" artifacts: {}", audit.artifact_count);
    println!(" checked local blobs: {}", audit.checked_local_blobs);
    println!(" local blob bytes: {}", audit.local_blob_bytes);
    if !audit.by_kind.is_empty() {
        // Compact "kind:count" summary, comma-separated.
        let kinds = audit
            .by_kind
            .iter()
            .map(|(kind, count)| format!("{kind}:{count}"))
            .collect::<Vec<_>>()
            .join(", ");
        println!(" kinds: {kinds}");
    }
    if audit.ok {
        println!(" {} artifact audit passed.", style::ok("ok"));
        return;
    }
    // Failure path: list every issue, then exit non-zero.
    for issue in &audit.issues {
        println!(
            " {} {} {}: {}",
            style::lost("invalid"),
            issue.id,
            issue.field,
            issue.message
        );
    }
    std::process::exit(1);
}
6286
/// Handler for `decision brief`: load the decision-brief projection and
/// print each question with its answer, caveat, supporting/tension/gap
/// findings, and artifacts. Exits with status 1 when the projection fails
/// to load or validate.
fn cmd_decision_brief(frontier: &Path, json_out: bool) {
    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
    let report = decision::load_decision_brief(frontier, &project);
    if json_out {
        println!(
            "{}",
            serde_json::to_string_pretty(&report).expect("serialize decision brief report")
        );
        if !report.ok {
            std::process::exit(1);
        }
        return;
    }
    println!();
    println!(
        " {}",
        format!("VELA · DECISION BRIEF · {}", project.project.name)
            .to_uppercase()
            .dimmed()
    );
    println!(" {}", style::tick_row(60));
    if !report.ok {
        print_projection_issues(&report.issues, report.error.as_deref());
        std::process::exit(1);
    }
    // Invariant: report.ok implies a projection is present.
    let brief = report
        .projection
        .as_ref()
        .expect("ok decision report carries projection");
    for question in &brief.questions {
        println!(" · {} · {}", question.id.dimmed(), question.title);
        println!(" answer: {}", wrap_line(&question.short_answer, 82));
        println!(" caveat: {}", wrap_line(&question.caveat, 82));
        println!(" support: {}", question.supporting_findings.join(", "));
        if !question.tension_findings.is_empty() {
            println!(" tensions: {}", question.tension_findings.join(", "));
        }
        if !question.gap_findings.is_empty() {
            println!(" gaps: {}", question.gap_findings.join(", "));
        }
        if !question.artifact_ids.is_empty() {
            println!(" artifacts: {}", question.artifact_ids.join(", "));
        }
        println!(
            " would change: {}",
            wrap_line(&question.what_would_change_this_answer, 82)
        );
    }
}
6336
/// Handler for `trial summary`: load the trial-outcomes projection and print
/// one row per trial (population, endpoint, results, status). Exits with
/// status 1 when the projection fails to load or validate.
fn cmd_trial_summary(frontier: &Path, json_out: bool) {
    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
    let report = decision::load_trial_outcomes(frontier, &project);
    if json_out {
        println!(
            "{}",
            serde_json::to_string_pretty(&report).expect("serialize trial summary report")
        );
        if !report.ok {
            std::process::exit(1);
        }
        return;
    }
    println!();
    println!(
        " {}",
        format!("VELA · TRIAL SUMMARY · {}", project.project.name)
            .to_uppercase()
            .dimmed()
    );
    println!(" {}", style::tick_row(60));
    if !report.ok {
        print_projection_issues(&report.issues, report.error.as_deref());
        std::process::exit(1);
    }
    // Invariant: report.ok implies a projection is present.
    let outcomes = report
        .projection
        .as_ref()
        .expect("ok trial report carries projection");
    for row in &outcomes.rows {
        println!(" · {} · {} ({})", row.id.dimmed(), row.program, row.drug);
        println!(" population: {}", wrap_line(&row.population, 82));
        println!(" endpoint: {}", wrap_line(&row.primary_endpoint, 82));
        println!(" cognition: {}", wrap_line(&row.cognitive_result, 82));
        println!(" biomarker: {}", wrap_line(&row.biomarker_result, 82));
        println!(" risk: {}", wrap_line(&row.aria_or_safety_result, 82));
        println!(" status: {}", wrap_line(&row.regulatory_status, 82));
        if !row.finding_ids.is_empty() {
            println!(" findings: {}", row.finding_ids.join(", "));
        }
        if !row.artifact_ids.is_empty() {
            println!(" artifacts: {}", row.artifact_ids.join(", "));
        }
    }
}
6382
/// Handler for `source verification`: load the source-verification
/// projection and print each verified source (agency, URL, status). Exits
/// with status 1 when the projection fails to load or validate.
fn cmd_source_verification(frontier: &Path, json_out: bool) {
    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
    let report = decision::load_source_verification(frontier, &project);
    if json_out {
        println!(
            "{}",
            serde_json::to_string_pretty(&report).expect("serialize source verification report")
        );
        if !report.ok {
            std::process::exit(1);
        }
        return;
    }
    println!();
    println!(
        " {}",
        format!("VELA · SOURCE VERIFICATION · {}", project.project.name)
            .to_uppercase()
            .dimmed()
    );
    println!(" {}", style::tick_row(60));
    if !report.ok {
        print_projection_issues(&report.issues, report.error.as_deref());
        std::process::exit(1);
    }
    // Invariant: report.ok implies a projection is present.
    let verification = report
        .projection
        .as_ref()
        .expect("ok source verification report carries projection");
    println!(" verified_at: {}", verification.verified_at);
    for source in &verification.sources {
        println!(" · {} · {}", source.id.dimmed(), source.title);
        println!(" agency: {}", source.agency);
        println!(" url: {}", truncate(&source.url, 88));
        println!(" status: {}", wrap_line(&source.current_status, 82));
    }
}
6420
/// Handler for `source ingest-plan`: load the source-ingest-plan projection
/// and print each entry (category, priority, ingest status, locator, and
/// linked artifact/findings). Exits with status 1 when the projection fails
/// to load or validate.
fn cmd_source_ingest_plan(frontier: &Path, json_out: bool) {
    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
    let report = decision::load_source_ingest_plan(frontier, &project);
    if json_out {
        println!(
            "{}",
            serde_json::to_string_pretty(&report).expect("serialize source ingest plan report")
        );
        if !report.ok {
            std::process::exit(1);
        }
        return;
    }
    println!();
    println!(
        " {}",
        format!("VELA · SOURCE INGEST PLAN · {}", project.project.name)
            .to_uppercase()
            .dimmed()
    );
    println!(" {}", style::tick_row(60));
    if !report.ok {
        print_projection_issues(&report.issues, report.error.as_deref());
        std::process::exit(1);
    }
    // Invariant: report.ok implies a projection is present.
    let plan = report
        .projection
        .as_ref()
        .expect("ok source ingest plan report carries projection");
    println!(" verified_at: {}", plan.verified_at);
    println!(" entries: {}", plan.entries.len());
    for entry in &plan.entries {
        println!(
            " · {} · {} · {} · {}",
            entry.id.dimmed(),
            entry.category,
            entry.priority,
            entry.ingest_status
        );
        println!(" name: {}", wrap_line(&entry.name, 82));
        println!(" locator: {}", truncate(&entry.locator, 88));
        println!(" use: {}", wrap_line(&entry.target_use, 82));
        if let Some(id) = &entry.current_frontier_artifact_id {
            println!(" artifact: {id}");
        }
        if !entry.target_findings.is_empty() {
            println!(" findings: {}", entry.target_findings.join(", "));
        }
    }
}
6471
6472fn print_projection_issues(issues: &[decision::ProjectionIssue], error: Option<&str>) {
6473 if let Some(error) = error {
6474 println!(" {} {error}", style::lost("unavailable"));
6475 }
6476 for issue in issues {
6477 println!(
6478 " {} {}: {}",
6479 style::lost("invalid"),
6480 issue.path,
6481 issue.message
6482 );
6483 }
6484}
6485
/// Soft-wrap `text` at roughly `max_chars` columns for CLI display.
///
/// Text that already fits is returned untouched (internal whitespace
/// preserved); longer text is re-flowed word by word, with continuation
/// lines prefixed by a single-space indent (the indent is not counted
/// against the column budget). A word longer than `max_chars` is emitted
/// on its own line rather than split.
fn wrap_line(text: &str, max_chars: usize) -> String {
    if text.chars().count() <= max_chars {
        return text.to_string();
    }
    let mut wrapped = String::new();
    let mut current = 0usize; // chars on the line currently being built
    for word in text.split_whitespace() {
        let word_len = word.chars().count();
        // A word fits if the line is empty or joining it (plus a space)
        // stays within the budget.
        let fits = current == 0 || current + 1 + word_len <= max_chars;
        if fits {
            if current > 0 {
                wrapped.push(' ');
                current += 1;
            }
            wrapped.push_str(word);
            current += word_len;
        } else {
            wrapped.push('\n');
            wrapped.push_str(" ");
            wrapped.push_str(word);
            current = word_len;
        }
    }
    wrapped
}
6510
6511fn clinical_str<'a>(study: &'a Value, pointer: &str) -> Option<&'a str> {
6512 study.pointer(pointer).and_then(Value::as_str)
6513}
6514
6515fn clinical_string_array(study: &Value, pointer: &str) -> Vec<String> {
6516 study
6517 .pointer(pointer)
6518 .and_then(Value::as_array)
6519 .map(|items| {
6520 items
6521 .iter()
6522 .filter_map(Value::as_str)
6523 .map(str::to_string)
6524 .collect()
6525 })
6526 .unwrap_or_default()
6527}
6528
6529fn clinical_named_array(study: &Value, pointer: &str, field: &str) -> Vec<String> {
6530 study
6531 .pointer(pointer)
6532 .and_then(Value::as_array)
6533 .map(|items| {
6534 items
6535 .iter()
6536 .filter_map(|item| item.get(field).and_then(Value::as_str))
6537 .map(str::to_string)
6538 .collect()
6539 })
6540 .unwrap_or_default()
6541}
6542
6543fn insert_string_vec_metadata(
6544 metadata: &mut BTreeMap<String, Value>,
6545 key: &str,
6546 values: Vec<String>,
6547) {
6548 if values.is_empty() {
6549 return;
6550 }
6551 metadata.insert(
6552 key.to_string(),
6553 Value::Array(values.into_iter().map(Value::String).collect()),
6554 );
6555}
6556
/// Import a ClinicalTrials.gov study into the frontier as a
/// `clinical_trial_record` artifact.
///
/// The study JSON comes either from `input_json` (a pre-downloaded file) or
/// from a live fetch of the ClinicalTrials.gov v2 API. The payload is
/// canonicalized and content-hashed so the artifact is reproducible, and
/// selected protocol fields (status, dates, phases, conditions,
/// interventions, primary outcomes) are projected into artifact metadata.
/// Any fetch/parse/store error terminates the process via `fail`.
async fn cmd_clinical_trial_import(
    frontier: &Path,
    nct_id: &str,
    input_json: Option<&Path>,
    target: Vec<String>,
    deposited_by: &str,
    reason: &str,
    license: &str,
    json_out: bool,
) {
    let api_url = format!("https://clinicaltrials.gov/api/v2/studies/{nct_id}");
    // Offline path reads the record from disk; online path GETs the v2 API
    // and treats any non-success HTTP status as fatal.
    let raw = if let Some(path) = input_json {
        std::fs::read_to_string(path)
            .unwrap_or_else(|e| fail(&format!("Failed to read ClinicalTrials.gov JSON: {e}")))
    } else {
        let response = reqwest::get(&api_url).await.unwrap_or_else(|e| {
            fail(&format!(
                "Failed to fetch ClinicalTrials.gov record {nct_id}: {e}"
            ))
        });
        let response = response.error_for_status().unwrap_or_else(|e| {
            fail(&format!(
                "Failed to fetch ClinicalTrials.gov record {nct_id}: {e}"
            ))
        });
        response.text().await.unwrap_or_else(|e| {
            fail(&format!(
                "Failed to read ClinicalTrials.gov record {nct_id}: {e}"
            ))
        })
    };
    let study: Value = serde_json::from_str(&raw)
        .unwrap_or_else(|e| fail(&format!("Failed to parse ClinicalTrials.gov JSON: {e}")));
    // Canonical bytes give a stable hash regardless of upstream key ordering.
    let canonical_bytes = crate::canonical::to_canonical_bytes(&study)
        .unwrap_or_else(|e| fail(&format!("Failed to canonicalize trial JSON: {e}")));
    let content_hash = sha256_for_bytes(&canonical_bytes);
    // Prefer a locally stored blob; fall back to referencing the API URL.
    let locator = artifact_blob_locator(frontier, &content_hash, &canonical_bytes)
        .unwrap_or_else(|| api_url.clone());
    let storage_mode = if locator.starts_with(".vela/") {
        "local_blob"
    } else {
        "remote"
    };

    // The API may normalize the NCT id; prefer its value over user input.
    let parsed_nct = clinical_str(&study, "/protocolSection/identificationModule/nctId")
        .unwrap_or(nct_id)
        .to_string();
    // Brief title preferred, official title as fallback, NCT id last resort.
    let title = clinical_str(&study, "/protocolSection/identificationModule/briefTitle")
        .or_else(|| {
            clinical_str(
                &study,
                "/protocolSection/identificationModule/officialTitle",
            )
        })
        .unwrap_or(nct_id);
    let public_url = format!("https://clinicaltrials.gov/study/{parsed_nct}");
    let mut metadata = BTreeMap::new();
    metadata.insert("nct_id".to_string(), Value::String(parsed_nct.clone()));
    metadata.insert(
        "source_api".to_string(),
        Value::String("clinicaltrials.gov-v2".to_string()),
    );
    metadata.insert(
        "retrieved_at".to_string(),
        Value::String(chrono::Utc::now().to_rfc3339()),
    );
    // Optional scalar status fields, copied only when present in the record.
    for (key, pointer) in [
        (
            "overall_status",
            "/protocolSection/statusModule/overallStatus",
        ),
        (
            "start_date",
            "/protocolSection/statusModule/startDateStruct/date",
        ),
        (
            "completion_date",
            "/protocolSection/statusModule/completionDateStruct/date",
        ),
    ] {
        if let Some(value) = clinical_str(&study, pointer) {
            metadata.insert(key.to_string(), Value::String(value.to_string()));
        }
    }
    // Array-valued protocol fields; empty lists are skipped by the helper.
    insert_string_vec_metadata(
        &mut metadata,
        "phases",
        clinical_string_array(&study, "/protocolSection/designModule/phases"),
    );
    insert_string_vec_metadata(
        &mut metadata,
        "conditions",
        clinical_string_array(&study, "/protocolSection/conditionsModule/conditions"),
    );
    insert_string_vec_metadata(
        &mut metadata,
        "interventions",
        clinical_named_array(
            &study,
            "/protocolSection/armsInterventionsModule/interventions",
            "name",
        ),
    );
    insert_string_vec_metadata(
        &mut metadata,
        "primary_outcomes",
        clinical_named_array(
            &study,
            "/protocolSection/outcomesModule/primaryOutcomes",
            "measure",
        ),
    );
    if let Some(has_results) = study.get("hasResults").and_then(Value::as_bool) {
        metadata.insert("has_results".to_string(), Value::Bool(has_results));
    }

    let provenance = artifact_provenance(
        "clinical_trial_record",
        title,
        Some(&public_url),
        None,
        Some(license),
    );
    let artifact = crate::bundle::Artifact::new(
        "clinical_trial_record",
        title.to_string(),
        content_hash,
        Some(canonical_bytes.len() as u64),
        Some("application/json".to_string()),
        storage_mode.to_string(),
        Some(locator),
        Some(public_url.clone()),
        Some(license.to_string()),
        target,
        provenance,
        metadata,
        crate::access_tier::AccessTier::Public,
    )
    .unwrap_or_else(|e| fail_return(&e));
    let artifact_id = artifact.id.clone();
    // Persist the artifact as a frontier state event.
    let report = state::add_artifact(frontier, artifact, deposited_by, reason)
        .unwrap_or_else(|e| fail_return(&e));

    if json_out {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "clinical-trial-import",
                "nct_id": parsed_nct,
                "id": artifact_id,
                "frontier": frontier.display().to_string(),
                "event": report.applied_event_id,
                "source_url": public_url,
            }))
            .expect("serialize clinical-trial-import")
        );
    } else {
        println!();
        println!(
            "  {}",
            format!("VELA · CLINICAL TRIAL · {}", artifact_id)
                .to_uppercase()
                .dimmed()
        );
        println!("  {}", style::tick_row(60));
        println!("  nct_id: {parsed_nct}");
        println!("  title: {}", truncate(title, 96));
        println!("  source: {public_url}");
        println!(
            "  {} trial record imported into {}",
            style::ok("ok"),
            frontier.display()
        );
    }
}
6733
/// Record a manual replication attempt against an existing finding and, by
/// default, cascade the outcome to dependent state.
///
/// Validates `outcome` against `VALID_REPLICATION_OUTCOMES` and requires
/// `target` to be a `vf_`-prefixed finding id present in the frontier. The
/// attempt is deduplicated by its deterministic replication id. Unless
/// `no_cascade` is set, `propagate::propagate_correction` flags dependents
/// and appends review events before stats are recomputed and the frontier
/// is saved.
#[allow(clippy::too_many_arguments)]
fn cmd_replicate(
    frontier: &Path,
    target: &str,
    outcome: &str,
    attempted_by: &str,
    conditions_text: &str,
    source_title: &str,
    doi: Option<&str>,
    pmid: Option<&str>,
    sample_size: Option<&str>,
    note: &str,
    previous_attempt: Option<&str>,
    no_cascade: bool,
    json: bool,
) {
    if !crate::bundle::VALID_REPLICATION_OUTCOMES.contains(&outcome) {
        fail(&format!(
            "invalid outcome '{outcome}'; valid: {:?}",
            crate::bundle::VALID_REPLICATION_OUTCOMES
        ));
    }
    if !target.starts_with("vf_") {
        fail(&format!("target '{target}' is not a vf_ finding id"));
    }

    let mut project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));

    if !project.findings.iter().any(|f| f.id == target) {
        fail(&format!(
            "target finding '{target}' not present in frontier '{}'",
            frontier.display()
        ));
    }

    // Heuristic condition flags derived from keywords in the free-text
    // conditions description (e.g. "in vitro", "mouse", "clinical trial").
    let lower = conditions_text.to_lowercase();
    let conditions = crate::bundle::Conditions {
        text: conditions_text.to_string(),
        species_verified: Vec::new(),
        species_unverified: Vec::new(),
        in_vitro: lower.contains("in vitro") || lower.contains("ipsc"),
        in_vivo: lower.contains("in vivo") || lower.contains("mouse") || lower.contains("rat"),
        human_data: lower.contains("human")
            || lower.contains("clinical")
            || lower.contains("patient"),
        clinical_trial: lower.contains("clinical trial") || lower.contains("phase "),
        concentration_range: None,
        duration: None,
        age_group: None,
        cell_type: None,
    };

    // The attempt is modeled as experimental evidence; `replicated` simply
    // mirrors the declared outcome string.
    let evidence = crate::bundle::Evidence {
        evidence_type: "experimental".to_string(),
        model_system: String::new(),
        species: None,
        method: "replication_attempt".to_string(),
        sample_size: sample_size.map(|s| s.to_string()),
        effect_size: None,
        p_value: None,
        replicated: outcome == "replicated",
        replication_count: None,
        evidence_spans: Vec::new(),
    };

    // Provenance points at the publication the attempt was reported in;
    // extraction is recorded as manual curation by this CLI version.
    let provenance = crate::bundle::Provenance {
        source_type: "published_paper".to_string(),
        doi: doi.map(|s| s.to_string()),
        pmid: pmid.map(|s| s.to_string()),
        pmc: None,
        openalex_id: None,
        url: None,
        title: source_title.to_string(),
        authors: Vec::new(),
        year: None,
        journal: None,
        license: None,
        publisher: None,
        funders: Vec::new(),
        extraction: crate::bundle::Extraction {
            method: "manual_curation".to_string(),
            model: None,
            model_version: None,
            extracted_at: chrono::Utc::now().to_rfc3339(),
            extractor_version: env!("CARGO_PKG_VERSION").to_string(),
        },
        review: None,
        citation_count: None,
    };

    let mut rep = crate::bundle::Replication::new(
        target.to_string(),
        attempted_by.to_string(),
        outcome.to_string(),
        evidence,
        conditions,
        provenance,
        note.to_string(),
    );
    rep.previous_attempt = previous_attempt.map(|s| s.to_string());

    // Idempotency: the replication id appears to be deterministic for the
    // same inputs, so a re-run is reported and skipped, not duplicated.
    if project.replications.iter().any(|r| r.id == rep.id) {
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "ok": false,
                    "command": "replicate",
                    "reason": "replication_already_exists",
                    "id": rep.id,
                }))
                .expect("serialize")
            );
        } else {
            println!(
                "{} replication {} already exists in {}; skipping.",
                style::warn("replicate"),
                rep.id,
                frontier.display()
            );
        }
        return;
    }

    let new_id = rep.id.clone();
    project.replications.push(rep);

    // Cascade: flag dependents of the target finding and record review
    // events, then refresh derived stats. `--no-cascade` skips all of it.
    let cascade_result = if no_cascade {
        None
    } else {
        let result = propagate::propagate_correction(
            &mut project,
            target,
            propagate::PropagationAction::ReplicationOutcome {
                outcome: outcome.to_string(),
                vrep_id: new_id.clone(),
            },
        );
        project.review_events.extend(result.events.clone());
        project::recompute_stats(&mut project);
        Some(result)
    };

    repo::save_to_path(frontier, &project).unwrap_or_else(|e| fail_return(&e));

    if json {
        let cascade_json = cascade_result.as_ref().map(|r| {
            json!({
                "affected": r.affected,
                "events": r.events.len(),
            })
        });
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "replicate",
                "id": new_id,
                "target": target,
                "outcome": outcome,
                "attempted_by": attempted_by,
                "cascade": cascade_json,
                "frontier": frontier.display().to_string(),
            }))
            .expect("failed to serialize replicate result")
        );
    } else {
        println!();
        println!(
            "  {}",
            format!("VELA · REPLICATE · {}", new_id)
                .to_uppercase()
                .dimmed()
        );
        println!("  {}", style::tick_row(60));
        println!("  target: {target}");
        println!("  outcome: {outcome}");
        println!("  attempted by: {attempted_by}");
        println!("  conditions: {conditions_text}");
        println!("  source: {source_title}");
        if let Some(d) = doi {
            println!("  doi: {d}");
        }
        println!();
        println!(
            "  {} replication recorded in {}",
            style::ok("ok"),
            frontier.display()
        );
        if let Some(result) = cascade_result {
            println!(
                "  {} cascade: {} dependent(s) flagged, {} review event(s) recorded",
                style::ok("ok"),
                result.affected,
                result.events.len()
            );
        } else {
            println!("  {} cascade skipped (--no-cascade)", style::warn("info"));
        }
    }
}
6955
6956fn cmd_replications(frontier: &Path, target: Option<&str>, json: bool) {
6958 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
6959 let filtered: Vec<&crate::bundle::Replication> = project
6960 .replications
6961 .iter()
6962 .filter(|r| target.is_none_or(|t| r.target_finding == t))
6963 .collect();
6964
6965 if json {
6966 let payload = json!({
6967 "ok": true,
6968 "command": "replications",
6969 "frontier": frontier.display().to_string(),
6970 "filter_target": target,
6971 "count": filtered.len(),
6972 "replications": filtered,
6973 });
6974 println!(
6975 "{}",
6976 serde_json::to_string_pretty(&payload).expect("failed to serialize replications list")
6977 );
6978 return;
6979 }
6980
6981 println!();
6982 let header = match target {
6983 Some(t) => format!("VELA · REPLICATIONS · {t}"),
6984 None => format!("VELA · REPLICATIONS · {}", frontier.display()),
6985 };
6986 println!(" {}", header.to_uppercase().dimmed());
6987 println!(" {}", style::tick_row(60));
6988 if filtered.is_empty() {
6989 println!(" (no replications recorded)");
6990 return;
6991 }
6992 for rep in &filtered {
6993 let outcome_chip = match rep.outcome.as_str() {
6994 "replicated" => style::ok(&rep.outcome),
6995 "failed" => style::lost(&rep.outcome),
6996 "partial" => style::warn(&rep.outcome),
6997 _ => rep.outcome.clone().normal().to_string(),
6998 };
6999 println!(
7000 " · {} {} by {}",
7001 rep.id.dimmed(),
7002 outcome_chip,
7003 rep.attempted_by
7004 );
7005 println!(" target: {}", rep.target_finding);
7006 if !rep.conditions.text.is_empty() {
7007 println!(" conditions: {}", truncate(&rep.conditions.text, 80));
7008 }
7009 if !rep.provenance.title.is_empty() {
7010 println!(" source: {}", truncate(&rep.provenance.title, 80));
7011 }
7012 }
7013}
7014
/// One-stop ingest dispatcher: route `path` to the appropriate pipeline
/// based on what it is.
///
/// - `doi:` / `pmid:` / `nct:` URIs fetch source metadata only (no frontier
///   state is written).
/// - Files dispatch by extension: pdf → scout, md/markdown → compile-notes,
///   csv/tsv → compile-data, json → artifact-to-state.
/// - Directories dispatch every handlable type found at the top level, or
///   fall back to compile-code when none is present.
///
/// Exits via `fail` for nonexistent paths and unsupported file types.
async fn cmd_ingest(
    path: &str,
    frontier: &Path,
    backend: Option<&str>,
    actor: Option<&str>,
    dry_run: bool,
    json: bool,
) {
    let lowered = path.trim().to_lowercase();
    if lowered.starts_with("doi:") || lowered.starts_with("pmid:") || lowered.starts_with("nct:") {
        cmd_source_fetch(path.trim(), None, None, false, json).await;
        // Metadata-only path: warn the human (on stderr, so JSON consumers
        // stay clean) that no state changed, and suggest the follow-up.
        if !json {
            eprintln!();
            eprintln!(
                "  vela ingest · note: doi:/pmid:/nct: URIs only fetch metadata; no frontier state was written."
            );
            eprintln!(
                "  next: turn this paper into a proposal with `vela finding add {} --assertion '...' --author 'reviewer:you' --apply`",
                frontier.display()
            );
        }
        return;
    }

    let p = std::path::PathBuf::from(path);
    if !p.exists() {
        fail(&format!(
            "ingest: path '{path}' does not exist (and is not a doi:/pmid:/nct: URI)"
        ));
    }

    let ext = p
        .extension()
        .and_then(|s| s.to_str())
        .map(|s| s.to_ascii_lowercase());

    // Single file: dispatch purely on the lowercased extension.
    if p.is_file() {
        match ext.as_deref() {
            Some("pdf") => {
                cmd_scout(&p, frontier, backend, dry_run, json).await;
            }
            Some("md") | Some("markdown") => {
                cmd_compile_notes(&p, frontier, backend, None, None, dry_run, json).await;
            }
            Some("csv") | Some("tsv") => {
                cmd_compile_data(&p, frontier, backend, None, dry_run, json).await;
            }
            Some("json") => {
                // JSON artifacts need an actor id for the state event.
                let actor_id = actor.unwrap_or("agent:vela-ingest-bot");
                cmd_artifact_to_state(frontier, &p, actor_id, false, json);
            }
            other => {
                fail(&format!(
                    "ingest: unsupported file type '{}' (expected .pdf, .md, .csv, .tsv, .json, or a doi:/pmid:/nct: URI)",
                    other.unwrap_or("(none)")
                ));
            }
        }
        return;
    }

    if p.is_dir() {
        // First pass: count top-level files per handlable extension
        // (non-recursive); dot-files never count as "unhandled".
        let mut pdf_count = 0usize;
        let mut md_count = 0usize;
        let mut data_count = 0usize;
        let mut json_count = 0usize;
        let mut unhandled_exts: std::collections::BTreeSet<String> =
            std::collections::BTreeSet::new();
        if let Ok(entries) = std::fs::read_dir(&p) {
            for entry in entries.flatten() {
                let path = entry.path();
                if !path.is_file() {
                    continue;
                }
                if let Some(name) = entry.file_name().to_str()
                    && let Some(dot) = name.rfind('.')
                {
                    let ext = name[dot + 1..].to_ascii_lowercase();
                    match ext.as_str() {
                        "pdf" => pdf_count += 1,
                        "md" | "markdown" => md_count += 1,
                        "csv" | "tsv" => data_count += 1,
                        "json" => json_count += 1,
                        other => {
                            if !name.starts_with('.') {
                                unhandled_exts.insert(other.to_string());
                            }
                        }
                    }
                }
            }
        }

        // Number of distinct pipelines that will run for this folder.
        let dispatched_types = (pdf_count > 0) as usize
            + (md_count > 0) as usize
            + (data_count > 0) as usize
            + (json_count > 0) as usize;

        // Nothing handlable at the top level: treat it as a code repo.
        if dispatched_types == 0 {
            cmd_compile_code(&p, frontier, backend, None, dry_run, json).await;
            return;
        }

        if dispatched_types > 1 {
            eprintln!(
                "  vela ingest · folder has multiple handlable types; running each in sequence"
            );
            eprintln!(
                "    pdf:{pdf_count} md:{md_count} csv/tsv:{data_count} json:{json_count}"
            );
        }

        // Second pass: run each applicable pipeline over the folder.
        if pdf_count > 0 {
            cmd_scout(&p, frontier, backend, dry_run, json).await;
        }
        if md_count > 0 {
            cmd_compile_notes(&p, frontier, backend, None, None, dry_run, json).await;
        }
        if data_count > 0 {
            cmd_compile_data(&p, frontier, backend, None, dry_run, json).await;
        }
        if json_count > 0 {
            let actor_id = actor.unwrap_or("agent:vela-ingest-bot");
            // JSON files are imported individually, one state event each.
            if let Ok(entries) = std::fs::read_dir(&p) {
                for entry in entries.flatten() {
                    let path = entry.path();
                    if path.is_file()
                        && path
                            .extension()
                            .and_then(|s| s.to_str())
                            .map(|s| s.eq_ignore_ascii_case("json"))
                            .unwrap_or(false)
                    {
                        cmd_artifact_to_state(frontier, &path, actor_id, false, json);
                    }
                }
            }
        }

        if !unhandled_exts.is_empty() {
            let kinds: Vec<String> = unhandled_exts.into_iter().collect();
            eprintln!(
                "  vela ingest · skipped {} file extension(s) with no handler: {}",
                kinds.len(),
                kinds.join(", ")
            );
        }
        return;
    }

    fail(&format!(
        "ingest: path '{path}' is neither a file nor a directory"
    ));
}
7214
7215#[allow(clippy::too_many_arguments)]
7216async fn cmd_compile_data(
7218 root: &Path,
7219 frontier: &Path,
7220 backend: Option<&str>,
7221 sample_rows: Option<usize>,
7222 dry_run: bool,
7223 json_out: bool,
7224) {
7225 match DATASETS_HANDLER.get() {
7226 Some(handler) => {
7227 handler(
7228 root.to_path_buf(),
7229 frontier.to_path_buf(),
7230 backend.map(String::from),
7231 sample_rows,
7232 dry_run,
7233 json_out,
7234 )
7235 .await;
7236 }
7237 None => {
7238 eprintln!(
7239 "{} `vela compile-data` requires the vela CLI binary; the library is unwired without a registered datasets handler.",
7240 style::err_prefix()
7241 );
7242 std::process::exit(1);
7243 }
7244 }
7245}
7246
7247async fn cmd_review_pending(
7250 frontier: &Path,
7251 backend: Option<&str>,
7252 max_proposals: Option<usize>,
7253 batch_size: usize,
7254 dry_run: bool,
7255 json_out: bool,
7256) {
7257 match REVIEWER_HANDLER.get() {
7258 Some(handler) => {
7259 handler(
7260 frontier.to_path_buf(),
7261 backend.map(String::from),
7262 max_proposals,
7263 batch_size,
7264 dry_run,
7265 json_out,
7266 )
7267 .await;
7268 }
7269 None => {
7270 eprintln!(
7271 "{} `vela review-pending` requires the vela CLI binary; the library is unwired without a registered reviewer handler.",
7272 style::err_prefix()
7273 );
7274 std::process::exit(1);
7275 }
7276 }
7277}
7278
7279async fn cmd_find_tensions(
7282 frontier: &Path,
7283 backend: Option<&str>,
7284 max_findings: Option<usize>,
7285 dry_run: bool,
7286 json_out: bool,
7287) {
7288 match TENSIONS_HANDLER.get() {
7289 Some(handler) => {
7290 handler(
7291 frontier.to_path_buf(),
7292 backend.map(String::from),
7293 max_findings,
7294 dry_run,
7295 json_out,
7296 )
7297 .await;
7298 }
7299 None => {
7300 eprintln!(
7301 "{} `vela find-tensions` requires the vela CLI binary; the library is unwired without a registered tensions handler.",
7302 style::err_prefix()
7303 );
7304 std::process::exit(1);
7305 }
7306 }
7307}
7308
7309async fn cmd_plan_experiments(
7312 frontier: &Path,
7313 backend: Option<&str>,
7314 max_findings: Option<usize>,
7315 dry_run: bool,
7316 json_out: bool,
7317) {
7318 match EXPERIMENTS_HANDLER.get() {
7319 Some(handler) => {
7320 handler(
7321 frontier.to_path_buf(),
7322 backend.map(String::from),
7323 max_findings,
7324 dry_run,
7325 json_out,
7326 )
7327 .await;
7328 }
7329 None => {
7330 eprintln!(
7331 "{} `vela plan-experiments` requires the vela CLI binary; the library is unwired without a registered experiments handler.",
7332 style::err_prefix()
7333 );
7334 std::process::exit(1);
7335 }
7336 }
7337}
7338
7339async fn cmd_compile_code(
7342 root: &Path,
7343 frontier: &Path,
7344 backend: Option<&str>,
7345 max_files: Option<usize>,
7346 dry_run: bool,
7347 json_out: bool,
7348) {
7349 match CODE_HANDLER.get() {
7350 Some(handler) => {
7351 handler(
7352 root.to_path_buf(),
7353 frontier.to_path_buf(),
7354 backend.map(String::from),
7355 max_files,
7356 dry_run,
7357 json_out,
7358 )
7359 .await;
7360 }
7361 None => {
7362 eprintln!(
7363 "{} `vela compile-code` requires the vela CLI binary; the library is unwired without a registered code handler.",
7364 style::err_prefix()
7365 );
7366 std::process::exit(1);
7367 }
7368 }
7369}
7370
7371async fn cmd_compile_notes(
7376 vault: &Path,
7377 frontier: &Path,
7378 backend: Option<&str>,
7379 max_files: Option<usize>,
7380 max_items_per_category: Option<usize>,
7381 dry_run: bool,
7382 json_out: bool,
7383) {
7384 match NOTES_HANDLER.get() {
7385 Some(handler) => {
7386 handler(
7387 vault.to_path_buf(),
7388 frontier.to_path_buf(),
7389 backend.map(String::from),
7390 max_files,
7391 max_items_per_category,
7392 dry_run,
7393 json_out,
7394 )
7395 .await;
7396 }
7397 None => {
7398 eprintln!(
7399 "{} `vela compile-notes` requires the vela CLI binary; the library is unwired without a registered notes handler.",
7400 style::err_prefix()
7401 );
7402 std::process::exit(1);
7403 }
7404 }
7405}
7406
7407async fn cmd_scout(
7414 folder: &Path,
7415 frontier: &Path,
7416 backend: Option<&str>,
7417 dry_run: bool,
7418 json_out: bool,
7419) {
7420 match SCOUT_HANDLER.get() {
7421 Some(handler) => {
7422 handler(
7423 folder.to_path_buf(),
7424 frontier.to_path_buf(),
7425 backend.map(String::from),
7426 dry_run,
7427 json_out,
7428 )
7429 .await;
7430 }
7431 None => {
7432 eprintln!(
7433 "{} `vela scout` requires the vela CLI binary; the library is unwired without a registered scout handler.",
7434 style::err_prefix()
7435 );
7436 std::process::exit(1);
7437 }
7438 }
7439}
7440
/// Entry point for `vela check`.
///
/// Flag semantics: with `--json`, emit a single machine-readable report and
/// exit non-zero when the report is not ok. Otherwise run the selected
/// sub-checks; when no specific check flag is given, run them all.
/// `schema_only` restricts to schema validation. `strict` escalates a
/// non-empty review queue or a not-ready proof state to a failing exit.
/// `fix` is currently accepted but unused here.
#[allow(clippy::too_many_arguments)]
fn cmd_check(
    source: Option<&Path>,
    schema: bool,
    stats: bool,
    conformance_flag: bool,
    conformance_dir: &Path,
    all: bool,
    schema_only: bool,
    strict: bool,
    fix: bool,
    json_output: bool,
) {
    // JSON mode short-circuits: one payload, exit code derived from "ok".
    if json_output {
        let Some(src) = source else {
            fail("--json requires a frontier source");
        };
        let payload = check_json_payload(src, schema_only, strict);
        println!(
            "{}",
            serde_json::to_string_pretty(&payload).expect("failed to serialize check report")
        );
        if payload.get("ok").and_then(Value::as_bool) != Some(true) {
            std::process::exit(1);
        }
        return;
    }

    // No explicit check flag selects everything.
    let run_all = all || (!schema && !stats && !conformance_flag && !schema_only);
    if run_all || schema || schema_only {
        let Some(src) = source else {
            fail("check requires a frontier source");
        };
        validate::run(src);
    }
    if !schema_only && (run_all || stats) {
        let Some(src) = source else {
            fail("--stats requires a frontier source");
        };
        let frontier = load_frontier_or_fail(src);
        let report = lint::lint(&frontier, None, None);
        lint::print_report(&report);
        // Replaying the event log checks derived state against its events.
        let replay_report = events::replay_report(&frontier);
        println!("event replay: {}", replay_report.status);
        if !replay_report.conflicts.is_empty() {
            for conflict in &replay_report.conflicts {
                println!("  - {conflict}");
            }
        }
        // Only mention signatures when at least one item is actually signed.
        if let Ok(signature_report) = sign::verify_frontier_data(&frontier, None)
            && signature_report.signed > 0
        {
            println!(
                "Signatures: {} valid / {} invalid / {} unsigned",
                signature_report.valid, signature_report.invalid, signature_report.unsigned
            );
        }
        let signal_report = signals::analyze(&frontier, &[]);
        print_signal_summary(&signal_report, strict);
        // Replay failure always fails the command; strict mode also fails on
        // a pending review queue or a not-ready proof state.
        if !replay_report.ok
            || (strict
                && (!signal_report.review_queue.is_empty()
                    || signal_report.proof_readiness.status != "ready"))
        {
            std::process::exit(1);
        }
    }
    if run_all || conformance_flag {
        conformance::run(conformance_dir);
    }
    // `fix` is intentionally unused for now; kept for CLI compatibility.
    let _ = fix;
}
7513
7514fn check_json_payload(src: &Path, schema_only: bool, strict: bool) -> Value {
7515 let report = validate::validate(src);
7516 let loaded = repo::load_from_path(src).ok();
7517 let (method_report, graph_report) = if schema_only {
7518 (None, None)
7519 } else if let Some(frontier) = loaded.as_ref() {
7520 (
7521 Some(lint::lint(frontier, None, None)),
7522 Some(lint::lint_frontier(frontier)),
7523 )
7524 } else {
7525 (None, None)
7526 };
7527 let source_hash = hash_path(src).unwrap_or_else(|_| "unavailable".to_string());
7528 let mut diagnostics = Vec::new();
7529 diagnostics.extend(report.errors.iter().map(|e| {
7530 json!({
7531 "severity": "error",
7532 "rule_id": "schema",
7533 "finding_id": null,
7534 "file": &e.file,
7535 "field_path": null,
7536 "message": &e.error,
7537 "suggestion": schema_error_suggestion(&e.error),
7538 "fixable": schema_error_fix(&e.error),
7539 "normalize_action": schema_error_action(&e.error),
7540 })
7541 }));
7542 for (check_id, lint_report) in [
7543 ("methodology", method_report.as_ref()),
7544 ("frontier_graph", graph_report.as_ref()),
7545 ] {
7546 if let Some(lint_report) = lint_report {
7547 diagnostics.extend(lint_report.diagnostics.iter().map(|d| {
7548 json!({
7549 "severity": d.severity.to_string(),
7550 "rule_id": &d.rule_id,
7551 "check": check_id,
7552 "finding_id": &d.finding_id,
7553 "field_path": null,
7554 "message": &d.message,
7555 "suggestion": &d.suggestion,
7556 "fixable": false,
7557 "normalize_action": null,
7558 })
7559 }));
7560 }
7561 }
7562 let method_errors = method_report.as_ref().map_or(0, |r| r.errors);
7563 let method_warnings = method_report.as_ref().map_or(0, |r| r.warnings);
7564 let method_infos = method_report.as_ref().map_or(0, |r| r.infos);
7565 let graph_errors = graph_report.as_ref().map_or(0, |r| r.errors);
7566 let graph_warnings = graph_report.as_ref().map_or(0, |r| r.warnings);
7567 let graph_infos = graph_report.as_ref().map_or(0, |r| r.infos);
7568 let replay_report = loaded.as_ref().map(events::replay_report);
7569 let state_integrity_report = if schema_only {
7570 loaded.as_ref().map(state_integrity::analyze)
7571 } else {
7572 state_integrity::analyze_path(src).ok()
7573 };
7574 if let Some(replay) = replay_report.as_ref()
7575 && !replay.ok
7576 {
7577 diagnostics.extend(replay.conflicts.iter().map(|conflict| {
7578 json!({
7579 "severity": "error",
7580 "rule_id": "event_replay",
7581 "check": "events",
7582 "finding_id": null,
7583 "field_path": null,
7584 "message": conflict,
7585 "suggestion": "Inspect canonical state events and repair the frontier event log before proof export.",
7586 "fixable": false,
7587 "normalize_action": null,
7588 })
7589 }));
7590 }
7591 let event_errors = replay_report
7592 .as_ref()
7593 .map_or(0, |replay| usize::from(!replay.ok));
7594 let state_integrity_errors = state_integrity_report
7595 .as_ref()
7596 .map_or(0, |report| report.structural_errors.len());
7597 let (source_registry, evidence_atoms, conditions, proposal_summary, proof_state) = loaded
7598 .as_ref()
7599 .map(|frontier| {
7600 (
7601 sources::source_summary(frontier),
7602 sources::evidence_summary(frontier),
7603 sources::condition_summary(frontier),
7604 proposals::summary(frontier),
7605 proposals::proof_state_json(&frontier.proof_state),
7606 )
7607 })
7608 .unwrap_or_else(|| {
7609 (
7610 sources::SourceRegistrySummary::default(),
7611 sources::EvidenceAtomSummary::default(),
7612 sources::ConditionSummary::default(),
7613 proposals::ProposalSummary::default(),
7614 Value::Null,
7615 )
7616 });
7617 let signature_report = loaded
7618 .as_ref()
7619 .and_then(|frontier| sign::verify_frontier_data(frontier, None).ok());
7620 if let Some(frontier) = loaded.as_ref()
7621 && !schema_only
7622 {
7623 let projection = sources::derive_projection(frontier);
7624 let existing_sources = frontier
7625 .sources
7626 .iter()
7627 .map(|source| source.id.as_str())
7628 .collect::<std::collections::BTreeSet<_>>();
7629 let existing_atoms = frontier
7630 .evidence_atoms
7631 .iter()
7632 .map(|atom| atom.id.as_str())
7633 .collect::<std::collections::BTreeSet<_>>();
7634 let existing_conditions = frontier
7635 .condition_records
7636 .iter()
7637 .map(|record| record.id.as_str())
7638 .collect::<std::collections::BTreeSet<_>>();
7639 for source in projection
7640 .sources
7641 .iter()
7642 .filter(|source| !existing_sources.contains(source.id.as_str()))
7643 {
7644 diagnostics.push(json!({
7645 "severity": "warning",
7646 "rule_id": "missing_source_record",
7647 "check": "source_registry",
7648 "finding_id": source.finding_ids.first(),
7649 "field_path": "sources",
7650 "message": format!("Source record {} is derivable but not materialized in frontier state.", source.id),
7651 "suggestion": "Run `vela normalize` to materialize source records before proof export.",
7652 "fixable": true,
7653 "normalize_action": "materialize_source_record",
7654 }));
7655 }
7656 for atom in projection
7657 .evidence_atoms
7658 .iter()
7659 .filter(|atom| !existing_atoms.contains(atom.id.as_str()))
7660 {
7661 diagnostics.push(json!({
7662 "severity": "warning",
7663 "rule_id": "missing_evidence_atom",
7664 "check": "evidence_atoms",
7665 "finding_id": atom.finding_id,
7666 "field_path": "evidence_atoms",
7667 "message": format!("Evidence atom {} is derivable but not materialized in frontier state.", atom.id),
7668 "suggestion": "Run `vela normalize` to materialize evidence atoms before proof export.",
7669 "fixable": true,
7670 "normalize_action": "materialize_evidence_atom",
7671 }));
7672 }
7673 for atom in projection
7674 .evidence_atoms
7675 .iter()
7676 .filter(|atom| atom.locator.is_none())
7677 {
7678 diagnostics.push(json!({
7679 "severity": "warning",
7680 "rule_id": "missing_evidence_locator",
7681 "check": "evidence_atoms",
7682 "finding_id": atom.finding_id,
7683 "field_path": "evidence_atoms[].locator",
7684 "message": format!("Evidence atom {} has no source locator.", atom.id),
7685 "suggestion": "Add or verify evidence spans, table rows, pages, sections, or run locators.",
7686 "fixable": false,
7687 "normalize_action": null,
7688 }));
7689 }
7690 for condition in projection
7691 .condition_records
7692 .iter()
7693 .filter(|condition| !existing_conditions.contains(condition.id.as_str()))
7694 {
7695 diagnostics.push(json!({
7696 "severity": "warning",
7697 "rule_id": "condition_record_missing",
7698 "check": "conditions",
7699 "finding_id": condition.finding_id,
7700 "field_path": "condition_records",
7701 "message": format!("Condition record {} is derivable but not materialized in frontier state.", condition.id),
7702 "suggestion": "Run `vela normalize` to materialize condition boundaries before proof export.",
7703 "fixable": true,
7704 "normalize_action": "materialize_condition_record",
7705 }));
7706 }
7707 for proposal in frontier.proposals.iter().filter(|proposal| {
7708 matches!(proposal.status.as_str(), "accepted" | "applied")
7709 && proposal
7710 .reviewed_by
7711 .as_deref()
7712 .is_none_or(proposals::is_placeholder_reviewer)
7713 }) {
7714 diagnostics.push(json!({
7715 "severity": "error",
7716 "rule_id": "reviewer_identity_missing",
7717 "check": "proposals",
7718 "finding_id": proposal.target.id,
7719 "field_path": "proposals[].reviewed_by",
7720 "message": format!("Accepted or applied proposal {} uses a missing or placeholder reviewer identity.", proposal.id),
7721 "suggestion": "Accept the proposal with a stable named reviewer id before strict proof use.",
7722 "fixable": false,
7723 "normalize_action": null,
7724 }));
7725 }
7726 }
7727 let signal_report = loaded
7728 .as_ref()
7729 .map(|frontier| signals::analyze(frontier, &diagnostics))
7730 .unwrap_or_else(empty_signal_report);
7731 let errors =
7732 report.errors.len() + method_errors + graph_errors + event_errors + state_integrity_errors;
7733 let warnings = method_warnings + graph_warnings + signal_report.proof_readiness.warnings;
7734 let infos = method_infos + graph_infos;
7735 let strict_blockers = signal_report
7736 .signals
7737 .iter()
7738 .filter(|signal| signal.blocks.iter().any(|block| block == "strict_check"))
7739 .count();
7740 let fixable = diagnostics
7741 .iter()
7742 .filter(|d| d.get("fixable").and_then(Value::as_bool).unwrap_or(false))
7743 .count();
7744 let ok = errors == 0 && (!strict || (warnings == 0 && strict_blockers == 0));
7745
7746 json!({
7747 "ok": ok,
7748 "command": "check",
7749 "schema_version": project::VELA_SCHEMA_VERSION,
7750 "source": {
7751 "path": src.display().to_string(),
7752 "hash": format!("sha256:{source_hash}"),
7753 },
7754 "summary": {
7755 "status": if ok { "pass" } else { "fail" },
7756 "checked_findings": report.total_files,
7757 "valid_findings": report.valid,
7758 "invalid_findings": report.invalid,
7759 "errors": errors,
7760 "warnings": warnings,
7761 "info": infos,
7762 "fixable": fixable,
7763 "strict": strict,
7764 "schema_only": schema_only,
7765 },
7766 "checks": [
7767 {
7768 "id": "schema",
7769 "status": if report.invalid == 0 { "pass" } else { "fail" },
7770 "checked": report.total_files,
7771 "failed": report.invalid,
7772 "errors": report.errors.iter().map(|e| json!({
7773 "file": e.file,
7774 "message": e.error,
7775 })).collect::<Vec<_>>(),
7776 },
7777 {
7778 "id": "methodology",
7779 "status": if method_errors == 0 { "pass" } else { "fail" },
7780 "checked": method_report.as_ref().map_or(0, |r| r.findings_checked),
7781 "failed": method_errors,
7782 "warnings": method_warnings,
7783 "info": method_infos,
7784 "skipped": schema_only,
7785 },
7786 {
7787 "id": "frontier_graph",
7788 "status": if graph_errors == 0 { "pass" } else { "fail" },
7789 "checked": graph_report.as_ref().map_or(0, |r| r.findings_checked),
7790 "failed": graph_errors,
7791 "warnings": graph_warnings,
7792 "info": graph_infos,
7793 "skipped": schema_only,
7794 },
7795 {
7796 "id": "signals",
7797 "status": if strict_blockers == 0 { "pass" } else { "fail" },
7798 "checked": signal_report.signals.len(),
7799 "failed": strict_blockers,
7800 "warnings": signal_report.proof_readiness.warnings,
7801 "skipped": loaded.is_none(),
7802 "blockers": signal_report.signals.iter()
7803 .filter(|s| s.blocks.iter().any(|b| b == "strict_check"))
7804 .map(|s| json!({
7805 "id": s.id,
7806 "kind": s.kind,
7807 "severity": s.severity,
7808 "reason": s.reason,
7809 }))
7810 .collect::<Vec<_>>(),
7811 },
7812 {
7813 "id": "events",
7814 "status": if replay_report.as_ref().is_none_or(|replay| replay.ok) { "pass" } else { "fail" },
7815 "checked": replay_report.as_ref().map_or(0, |replay| replay.event_log.count),
7816 "failed": event_errors,
7817 "skipped": schema_only || loaded.is_none(),
7818 },
7819 {
7820 "id": "state_integrity",
7821 "status": if state_integrity_report.as_ref().is_none_or(|report| report.status != "fail") { "pass" } else { "fail" },
7822 "checked": state_integrity_report.as_ref().map_or(0, |report| report.summary.get("events").copied().unwrap_or_default()),
7823 "failed": state_integrity_errors,
7824 "skipped": schema_only || loaded.is_none(),
7825 }
7826 ],
7827 "event_log": replay_report.as_ref().map(|replay| &replay.event_log),
7828 "replay": replay_report,
7829 "state_integrity": state_integrity_report,
7830 "source_registry": source_registry,
7831 "evidence_atoms": evidence_atoms,
7832 "conditions": conditions,
7833 "proposals": proposal_summary,
7834 "proof_state": proof_state,
7835 "signatures": signature_report,
7836 "diagnostics": diagnostics,
7837 "signals": signal_report.signals,
7838 "review_queue": signal_report.review_queue,
7839 "proof_readiness": signal_report.proof_readiness,
7840 "repair_plan": build_repair_plan(&diagnostics),
7841 })
7842}
7843
/// Normalize a frontier in place, into a new file, or as a dry run.
///
/// Applies mechanical cleanups: entity type/name fixes, confidence
/// recomputation, optional content-address id rewrites (with link patching
/// and an optional old→new id-map file), optional provenance resync, and
/// materialization of source/evidence/condition records. With neither
/// `--write` nor `--out` nothing is persisted and only planned changes are
/// reported.
#[allow(clippy::too_many_arguments)]
fn cmd_normalize(
    source: &Path,
    out: Option<&Path>,
    write: bool,
    dry_run: bool,
    rewrite_ids: bool,
    id_map: Option<&Path>,
    resync_provenance: bool,
    json_output: bool,
) {
    // Flag validation: --write/--out are mutually exclusive, --dry-run
    // excludes both, and --id-map only makes sense with --rewrite-ids.
    if write && out.is_some() {
        fail("Use either --write or --out, not both.");
    }
    if dry_run && (write || out.is_some()) {
        fail("--dry-run cannot be combined with --write or --out.");
    }
    if id_map.is_some() && !rewrite_ids {
        fail("--id-map requires --rewrite-ids.");
    }

    let detected = repo::detect(source).unwrap_or_else(|e| {
        eprintln!("{e}");
        std::process::exit(1);
    });
    // Proof packet directories are immutable exports; refuse to rewrite one.
    if matches!(detected, repo::VelaSource::PacketDir(_)) {
        fail(
            "Cannot normalize a proof packet directory. Export a new packet from frontier state instead.",
        );
    }
    let mut frontier = repo::load(&detected).unwrap_or_else(|e| fail_return(&e));
    // Writing a normalized frontier would bypass the reviewed event log, so
    // writes are only allowed when no events beyond the initial
    // "frontier.created" exist.
    let has_substantive_events = frontier
        .events
        .iter()
        .any(|event| event.kind != "frontier.created");
    if has_substantive_events && (write || out.is_some()) {
        fail(
            "Refusing to normalize a frontier with canonical events. Normalize before proposal-backed writes, or create a new reviewed transition for the intended change.",
        );
    }
    let source_hash = hash_path(source).unwrap_or_else(|_| "unavailable".to_string());
    // Snapshot stats before mutation so "stats_changed" can be reported.
    let before_stats = serde_json::to_value(&frontier.stats).unwrap_or(Value::Null);
    let (entity_type_fixes, entity_name_fixes) =
        normalize::normalize_findings(&mut frontier.findings);
    let confidence_updates =
        bundle::recompute_all_confidence(&mut frontier.findings, &frontier.replications);
    let provenance_resync_count = if resync_provenance {
        sources::resync_provenance_from_sources(&mut frontier)
    } else {
        0
    };
    // Snapshot counts so materialization deltas can be computed below.
    let before_source_count = frontier.sources.len();
    let before_evidence_atom_count = frontier.evidence_atoms.len();
    let before_condition_record_count = frontier.condition_records.len();

    // Optionally rewrite finding ids to their content addresses, recording
    // an old→new map and patching links so they still point at the same
    // findings afterwards.
    let mut id_rewrites = Vec::new();
    if rewrite_ids {
        let mut id_map_values = std::collections::BTreeMap::<String, String>::new();
        for finding in &frontier.findings {
            let expected =
                bundle::FindingBundle::content_address(&finding.assertion, &finding.provenance);
            if expected != finding.id {
                id_map_values.insert(finding.id.clone(), expected);
            }
        }
        // Two findings collapsing onto one content address would silently
        // merge them; bail out instead.
        let new_ids = id_map_values
            .values()
            .map(String::as_str)
            .collect::<std::collections::HashSet<_>>();
        if new_ids.len() != id_map_values.len() {
            fail("Refusing to rewrite IDs because two findings map to the same content address.");
        }
        for finding in &mut frontier.findings {
            if let Some(new_id) = id_map_values.get(&finding.id) {
                id_rewrites.push(json!({"old": finding.id, "new": new_id}));
                // Keep the old id reachable as the previous version.
                finding.previous_version = Some(finding.id.clone());
                finding.id = new_id.clone();
            }
        }
        // Second pass: retarget links that referenced a rewritten id.
        for finding in &mut frontier.findings {
            for link in &mut finding.links {
                if let Some(new_target) = id_map_values.get(&link.target) {
                    link.target = new_target.clone();
                }
            }
        }
        // Optionally persist the old→new map for external tooling.
        if let Some(path) = id_map {
            std::fs::write(
                path,
                serde_json::to_string_pretty(&id_map_values)
                    .expect("failed to serialize normalize id map"),
            )
            .unwrap_or_else(|e| fail(&format!("Failed to write {}: {e}", path.display())));
        }
    }

    sources::materialize_project(&mut frontier);
    let source_records_materialized = frontier.sources.len().saturating_sub(before_source_count);
    let evidence_atoms_materialized = frontier
        .evidence_atoms
        .len()
        .saturating_sub(before_evidence_atom_count);
    let condition_records_materialized = frontier
        .condition_records
        .len()
        .saturating_sub(before_condition_record_count);
    let after_stats = serde_json::to_value(&frontier.stats).unwrap_or(Value::Null);
    let id_rewrite_count = id_rewrites.len();
    // Persist: --write saves back to the detected source, --out saves to a
    // separate path, otherwise this stays a dry run.
    let wrote_to = if write {
        repo::save(&detected, &frontier).unwrap_or_else(|e| fail(&e));
        Some(source.display().to_string())
    } else if let Some(out_path) = out {
        repo::save_to_path(out_path, &frontier).unwrap_or_else(|e| fail(&e));
        Some(out_path.display().to_string())
    } else {
        None
    };
    let wrote = wrote_to.is_some();
    // All changes here are considered safe; "applied" is zero on a dry run.
    let planned_changes = entity_type_fixes
        + entity_name_fixes
        + confidence_updates
        + id_rewrite_count
        + source_records_materialized
        + evidence_atoms_materialized
        + condition_records_materialized
        + provenance_resync_count;
    let payload = json!({
        "ok": true,
        "command": "normalize",
        "schema_version": project::VELA_SCHEMA_VERSION,
        "source": {
            "path": source.display().to_string(),
            "hash": format!("sha256:{source_hash}"),
        },
        "dry_run": wrote_to.is_none(),
        "wrote_to": wrote_to,
        "summary": {
            "planned": planned_changes,
            "safe": planned_changes,
            "unsafe": 0,
            "applied": if wrote { planned_changes } else { 0 },
        },
        "changes": {
            "entity_type_fixes": entity_type_fixes,
            "entity_name_fixes": entity_name_fixes,
            "confidence_updates": confidence_updates,
            "id_rewrites": id_rewrite_count,
            "source_records_materialized": source_records_materialized,
            "evidence_atoms_materialized": evidence_atoms_materialized,
            "condition_records_materialized": condition_records_materialized,
            "provenance_resyncs": provenance_resync_count,
            "stats_changed": before_stats != after_stats,
        },
        "id_rewrites": id_rewrites,
        "repair_plan": if wrote { Vec::<Value>::new() } else {
            vec![json!({
                "action": "apply_normalization",
                "command": "vela normalize <frontier> --out frontier.normalized.json"
            })]
        },
    });
    if json_output {
        println!(
            "{}",
            serde_json::to_string_pretty(&payload).expect("failed to serialize normalize report")
        );
    } else if let Some(path) = payload.get("wrote_to").and_then(Value::as_str) {
        println!("{} normalized frontier written to {path}", style::ok("ok"));
        println!(
            " entity type fixes: {}, entity name fixes: {}, confidence updates: {}, id rewrites: {}",
            entity_type_fixes, entity_name_fixes, confidence_updates, id_rewrite_count
        );
    } else {
        println!("normalize dry run for {}", source.display());
        println!(
            " would apply entity type fixes: {}, entity name fixes: {}, confidence updates: {}, id rewrites: {}",
            entity_type_fixes, entity_name_fixes, confidence_updates, id_rewrite_count
        );
    }
}
8031
/// Export a proof packet from a frontier, optionally benchmark it against a
/// gold file, validate the packet, and record the export in proof state.
///
/// Order matters here: the packet is exported first, the benchmark summary
/// (if any) is appended into the packet, then the whole packet directory is
/// validated, and only then is the export recorded in the in-memory
/// frontier (persisted back to disk only when `record_proof_state` is set).
fn cmd_proof(
    frontier: &Path,
    out: &Path,
    template: &str,
    gold: Option<&Path>,
    record_proof_state: bool,
    json_output: bool,
) {
    // Only one proof template is currently supported.
    if template != "bbb-alzheimer" {
        fail(&format!(
            "Unsupported proof template '{template}'. Supported: bbb-alzheimer"
        ));
    }
    let mut loaded = load_frontier_or_fail(frontier);
    let source_hash = hash_path_or_fail(frontier);
    let export_record = export::export_packet_with_source(&loaded, Some(frontier), out)
        .unwrap_or_else(|e| fail(&e));
    // With a gold file: run the benchmark suite, write its summary into the
    // packet, and treat a summary without `"ok": true` as fatal.
    let benchmark_summary = gold.map(|gold_path| {
        let summary = benchmark::run_suite(gold_path).unwrap_or_else(|e| {
            fail(&format!(
                "Failed to run proof benchmark '{}': {e}",
                gold_path.display()
            ))
        });
        append_packet_json_file(out, "benchmark-summary.json", &summary).unwrap_or_else(|e| {
            fail(&format!("Failed to write benchmark summary: {e}"));
        });
        if summary.get("ok").and_then(Value::as_bool) != Some(true) {
            fail(&format!(
                "Proof benchmark failed for {}",
                gold_path.display()
            ));
        }
        summary
    });
    let validation_summary = packet::validate(out).unwrap_or_else(|e| {
        fail(&format!("Proof packet validation failed: {e}"));
    });
    // Record the export hashes on the in-memory frontier and refresh its
    // derived stats.
    proposals::record_proof_export(
        &mut loaded,
        proposals::ProofPacketRecord {
            generated_at: export_record.generated_at.clone(),
            snapshot_hash: export_record.snapshot_hash.clone(),
            event_log_hash: export_record.event_log_hash.clone(),
            packet_manifest_hash: export_record.packet_manifest_hash.clone(),
        },
    );
    project::recompute_stats(&mut loaded);
    if record_proof_state {
        repo::save_to_path(frontier, &loaded).unwrap_or_else(|e| fail(&e));
    }
    let signal_report = signals::analyze(&loaded, &[]);
    if json_output {
        let payload = json!({
            "ok": true,
            "command": "proof",
            "schema_version": project::VELA_SCHEMA_VERSION,
            "recorded_proof_state": record_proof_state,
            "frontier": {
                "name": &loaded.project.name,
                "source": frontier.display().to_string(),
                "hash": format!("sha256:{source_hash}"),
            },
            "template": template,
            "gold": gold.map(|p| p.display().to_string()),
            "benchmark": benchmark_summary,
            "output": out.display().to_string(),
            "packet": {
                "manifest_path": out.join("manifest.json").display().to_string(),
            },
            "validation": {
                "status": "ok",
                "summary": validation_summary,
            },
            "proposals": proposals::summary(&loaded),
            "proof_state": loaded.proof_state,
            "signals": signal_report.signals,
            "review_queue": signal_report.review_queue,
            "proof_readiness": signal_report.proof_readiness,
            "trace_path": out.join("proof-trace.json").display().to_string(),
        });
        println!(
            "{}",
            serde_json::to_string_pretty(&payload).expect("failed to serialize proof response")
        );
    } else {
        println!("vela proof");
        println!(" source: {}", frontier.display());
        println!(" template: {template}");
        println!(" output: {}", out.display());
        println!(" trace: {}", out.join("proof-trace.json").display());
        println!(
            " proof state: {}",
            if record_proof_state {
                "recorded"
            } else {
                "not recorded"
            }
        );
        println!();
        println!("{validation_summary}");
    }
}
8135
/// Print (or emit as JSON) a dashboard summary of a frontier: basic counts,
/// the pending-proposal inbox, the causal identifiability audit,
/// replication outcomes, and federation sync/conflict state.
fn cmd_status(path: &Path, json: bool) {
    let project = repo::load_from_path(path).unwrap_or_else(|e| fail_return(&e));

    // Tally proposals awaiting review, broken down by proposal kind.
    let mut pending_total = 0usize;
    let mut pending_by_kind: std::collections::BTreeMap<String, usize> =
        std::collections::BTreeMap::new();
    for p in &project.proposals {
        if p.status == "pending_review" {
            pending_total += 1;
            *pending_by_kind.entry(p.kind.clone()).or_insert(0) += 1;
        }
    }

    // Causal identifiability audit over the frontier's findings.
    let audit = crate::causal_reasoning::audit_frontier(&project);
    let audit_summary = crate::causal_reasoning::summarize_audit(&audit);

    // Scan the event log for the most recent peer-sync and conflict events
    // (by timestamp comparison) and count conflicts overall.
    let mut last_sync: Option<&crate::events::StateEvent> = None;
    let mut last_conflict: Option<&crate::events::StateEvent> = None;
    let mut total_conflicts = 0usize;
    for e in &project.events {
        match e.kind.as_str() {
            "frontier.synced_with_peer" => {
                if last_sync
                    .map(|prev| e.timestamp > prev.timestamp)
                    .unwrap_or(true)
                {
                    last_sync = Some(e);
                }
            }
            "frontier.conflict_detected" => {
                total_conflicts += 1;
                if last_conflict
                    .map(|prev| e.timestamp > prev.timestamp)
                    .unwrap_or(true)
                {
                    last_conflict = Some(e);
                }
            }
            _ => {}
        }
    }

    // Replication outcomes: distinct findings with at least one successful
    // replication, plus the count of failed replication records.
    let mut targets_with_success = std::collections::HashSet::new();
    let mut failed_replications = 0usize;
    for r in &project.replications {
        if r.outcome == "replicated" {
            targets_with_success.insert(r.target_finding.clone());
        } else if r.outcome == "failed" {
            failed_replications += 1;
        }
    }

    // Machine-readable output: one JSON object, then return.
    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "status",
                "frontier": frontier_label(&project),
                "vfr_id": project.frontier_id(),
                "findings": project.findings.len(),
                "events": project.events.len(),
                "actors": project.actors.len(),
                "peers": project.peers.len(),
                "inbox": {
                    "pending_total": pending_total,
                    "pending_by_kind": pending_by_kind,
                },
                "causal_audit": {
                    "identified": audit_summary.identified,
                    "conditional": audit_summary.conditional,
                    "underidentified": audit_summary.underidentified,
                    "underdetermined": audit_summary.underdetermined,
                },
                "replications": {
                    "total": project.replications.len(),
                    "findings_with_success": targets_with_success.len(),
                    "failed": failed_replications,
                },
                "federation": {
                    "peers": project.peers.len(),
                    "last_sync": last_sync.map(|e| e.timestamp.clone()),
                    "last_conflict": last_conflict.map(|e| e.timestamp.clone()),
                    "total_conflicts": total_conflicts,
                },
            }))
            .expect("serialize status")
        );
        return;
    }

    // Human-readable dashboard.
    println!();
    println!(
        " {}",
        format!("VELA · STATUS · {}", path.display())
            .to_uppercase()
            .dimmed()
    );
    println!(" {}", style::tick_row(60));
    println!();
    println!(" frontier: {}", frontier_label(&project));
    println!(" vfr_id: {}", project.frontier_id());
    println!(
        " findings: {} events: {} peers: {} actors: {}",
        project.findings.len(),
        project.events.len(),
        project.peers.len(),
        project.actors.len(),
    );
    println!();
    if pending_total > 0 {
        println!(
            " {} {pending_total} pending proposals",
            style::warn("inbox")
        );
        for (k, n) in &pending_by_kind {
            println!(" · {n:>3} {k}");
        }
    } else {
        println!(" {} inbox clean", style::ok("ok"));
    }
    println!();
    // Audit chip escalates: red for underidentified, yellow for conditional.
    if audit_summary.underidentified > 0 || audit_summary.conditional > 0 {
        let chip = if audit_summary.underidentified > 0 {
            style::lost("audit")
        } else {
            style::warn("audit")
        };
        println!(
            " {} identified {} · conditional {} · underidentified {} · underdetermined {}",
            chip,
            audit_summary.identified,
            audit_summary.conditional,
            audit_summary.underidentified,
            audit_summary.underdetermined,
        );
        if audit_summary.underidentified > 0 {
            println!(
                " next: vela causal audit {} --problems-only",
                path.display()
            );
        }
    } else if audit_summary.underdetermined == 0 {
        println!(
            " {} causal audit: all {} identified",
            style::ok("ok"),
            audit_summary.identified
        );
    } else {
        println!(
            " {} causal audit: {} identified, {} ungraded",
            style::warn("audit"),
            audit_summary.identified,
            audit_summary.underdetermined,
        );
    }
    println!();
    if !project.replications.is_empty() {
        println!(
            " {} {} records · {} findings replicated · {} failed",
            style::ok("replications"),
            project.replications.len(),
            targets_with_success.len(),
            failed_replications,
        );
    }
    if project.peers.is_empty() {
        println!(
            " {} no federation peers registered",
            style::warn("federation")
        );
    } else {
        let last = last_sync
            .map(|e| fmt_timestamp(&e.timestamp))
            .unwrap_or_else(|| "never".to_string());
        let chip = if total_conflicts > 0 {
            style::warn("federation")
        } else {
            style::ok("federation")
        };
        println!(
            " {} {} peer(s) · last sync {} · {} conflict events",
            chip,
            project.peers.len(),
            last,
            total_conflicts,
        );
    }
    println!();
}
8333
8334fn cmd_log(path: &Path, limit: usize, kind_filter: Option<&str>, json: bool) {
8336 let project = repo::load_from_path(path).unwrap_or_else(|e| fail_return(&e));
8337 let mut events: Vec<&crate::events::StateEvent> = project
8338 .events
8339 .iter()
8340 .filter(|e| match kind_filter {
8341 Some(k) => e.kind.contains(k),
8342 None => true,
8343 })
8344 .collect();
8345 events.sort_by(|a, b| b.timestamp.cmp(&a.timestamp));
8346 events.truncate(limit);
8347
8348 if json {
8349 let payload: Vec<_> = events
8350 .iter()
8351 .map(|e| {
8352 json!({
8353 "id": e.id,
8354 "kind": e.kind,
8355 "actor": e.actor.id,
8356 "target": &e.target.id,
8357 "target_type": &e.target.r#type,
8358 "timestamp": e.timestamp,
8359 "reason": e.reason,
8360 })
8361 })
8362 .collect();
8363 println!(
8364 "{}",
8365 serde_json::to_string_pretty(&json!({
8366 "ok": true,
8367 "command": "log",
8368 "events": payload,
8369 }))
8370 .expect("serialize log")
8371 );
8372 return;
8373 }
8374
8375 println!();
8376 println!(
8377 " {}",
8378 format!("VELA · LOG · {} (latest {})", path.display(), events.len())
8379 .to_uppercase()
8380 .dimmed()
8381 );
8382 println!(" {}", style::tick_row(60));
8383 if events.is_empty() {
8384 println!(" (no events)");
8385 return;
8386 }
8387 for e in &events {
8388 let when = fmt_timestamp(&e.timestamp);
8389 let target_short = if e.target.id.len() > 22 {
8390 format!("{}…", &e.target.id[..21])
8391 } else {
8392 e.target.id.clone()
8393 };
8394 let reason: String = e.reason.chars().take(70).collect();
8395 println!(
8396 " {:<19} {:<32} {:<24} {}",
8397 when, e.kind, target_short, reason
8398 );
8399 }
8400 println!();
8401}
8402
8403fn cmd_inbox(path: &Path, kind_filter: Option<&str>, limit: usize, json: bool) {
8405 let project = repo::load_from_path(path).unwrap_or_else(|e| fail_return(&e));
8406
8407 let mut score_map: std::collections::HashMap<String, (f64, f64, f64, f64)> =
8410 std::collections::HashMap::new();
8411 for p in &project.proposals {
8412 if p.kind != "finding.note" {
8413 continue;
8414 }
8415 if p.actor.id != "agent:reviewer-agent" {
8416 continue;
8417 }
8418 let reason = &p.reason;
8419 let Some(target) = reason.split_whitespace().find(|s| s.starts_with("vpr_")) else {
8420 continue;
8421 };
8422 let text = p.payload.get("text").and_then(|v| v.as_str()).unwrap_or("");
8423 let extract = |k: &str| -> f64 {
8424 let pat = format!("{k} ");
8425 text.find(&pat)
8426 .and_then(|idx| text[idx + pat.len()..].split_whitespace().next())
8427 .and_then(|t| t.parse::<f64>().ok())
8428 .unwrap_or(0.0)
8429 };
8430 score_map.insert(
8431 target.to_string(),
8432 (
8433 extract("plausibility"),
8434 extract("evidence"),
8435 extract("scope"),
8436 extract("duplicate-risk"),
8437 ),
8438 );
8439 }
8440
8441 let mut pending: Vec<&crate::proposals::StateProposal> = project
8442 .proposals
8443 .iter()
8444 .filter(|p| {
8445 p.status == "pending_review"
8446 && match kind_filter {
8447 Some(k) => p.kind.contains(k),
8448 None => true,
8449 }
8450 })
8451 .collect();
8452 pending.sort_by(|a, b| {
8454 let sa = score_map
8455 .get(&a.id)
8456 .map(|(p, e, s, d)| 0.4 * p + 0.3 * e + 0.2 * s - 0.3 * d);
8457 let sb = score_map
8458 .get(&b.id)
8459 .map(|(p, e, s, d)| 0.4 * p + 0.3 * e + 0.2 * s - 0.3 * d);
8460 sb.partial_cmp(&sa).unwrap_or(std::cmp::Ordering::Equal)
8461 });
8462 pending.truncate(limit);
8463
8464 if json {
8465 let payload: Vec<_> = pending
8466 .iter()
8467 .map(|p| {
8468 let assertion_text = p
8469 .payload
8470 .get("finding")
8471 .and_then(|f| f.get("assertion"))
8472 .and_then(|a| a.get("text"))
8473 .and_then(|t| t.as_str());
8474 let assertion_type = p
8475 .payload
8476 .get("finding")
8477 .and_then(|f| f.get("assertion"))
8478 .and_then(|a| a.get("type"))
8479 .and_then(|t| t.as_str());
8480 let composite = score_map
8481 .get(&p.id)
8482 .map(|(pl, e, s, d)| 0.4 * pl + 0.3 * e + 0.2 * s - 0.3 * d);
8483 json!({
8484 "proposal_id": p.id,
8485 "kind": p.kind,
8486 "actor": p.actor,
8487 "reason": p.reason,
8488 "assertion_text": assertion_text,
8489 "assertion_type": assertion_type,
8490 "reviewer_composite": composite,
8491 })
8492 })
8493 .collect();
8494 println!(
8495 "{}",
8496 serde_json::to_string_pretty(&json!({
8497 "ok": true,
8498 "command": "inbox",
8499 "shown": pending.len(),
8500 "proposals": payload,
8501 }))
8502 .expect("serialize inbox")
8503 );
8504 return;
8505 }
8506
8507 println!();
8508 println!(
8509 " {}",
8510 format!(
8511 "VELA · INBOX · {} ({} pending shown)",
8512 path.display(),
8513 pending.len()
8514 )
8515 .to_uppercase()
8516 .dimmed()
8517 );
8518 println!(" {}", style::tick_row(60));
8519 if pending.is_empty() {
8520 println!(" (inbox clean)");
8521 return;
8522 }
8523 for p in &pending {
8524 let assertion_text = p
8525 .payload
8526 .get("finding")
8527 .and_then(|f| f.get("assertion"))
8528 .and_then(|a| a.get("text"))
8529 .and_then(|t| t.as_str())
8530 .unwrap_or("");
8531 let assertion_type = p
8532 .payload
8533 .get("finding")
8534 .and_then(|f| f.get("assertion"))
8535 .and_then(|a| a.get("type"))
8536 .and_then(|t| t.as_str())
8537 .unwrap_or("");
8538 let composite = score_map
8539 .get(&p.id)
8540 .map(|(pl, e, s, d)| 0.4 * pl + 0.3 * e + 0.2 * s - 0.3 * d);
8541 let score_str = composite
8542 .map(|c| format!("[{:.2}]", c))
8543 .unwrap_or_else(|| "[—] ".to_string());
8544 let kind_short = if p.kind.len() > 12 {
8545 format!("{}…", &p.kind[..11])
8546 } else {
8547 p.kind.clone()
8548 };
8549 let summary: String = if !assertion_text.is_empty() {
8550 assertion_text.chars().take(80).collect()
8551 } else {
8552 p.reason.chars().take(80).collect()
8553 };
8554 println!(
8555 " {} {} {:<13} {:<18} {}",
8556 score_str, p.id, kind_short, assertion_type, summary
8557 );
8558 }
8559 println!();
8560}
8561
8562fn cmd_ask(path: &Path, question: &str, json: bool) {
8567 let project = repo::load_from_path(path).unwrap_or_else(|e| fail_return(&e));
8568
8569 if question.trim().is_empty() {
8570 use std::io::{BufRead, Write};
8572 println!();
8573 println!(
8574 " {}",
8575 format!("VELA · ASK · {}", path.display())
8576 .to_uppercase()
8577 .dimmed()
8578 );
8579 println!(" {}", style::tick_row(60));
8580 println!(" Ask a question. Type `exit` to quit.");
8581 println!(" Examples:");
8582 println!(" · what's pending?");
8583 println!(" · what's underidentified?");
8584 println!(" · how many findings?");
8585 println!(" · what changed recently?");
8586 println!(" · who has what calibration?");
8587 println!();
8588 let stdin = std::io::stdin();
8589 let mut stdout = std::io::stdout();
8590 loop {
8591 print!(" ask> ");
8592 stdout.flush().ok();
8593 let mut line = String::new();
8594 if stdin.lock().read_line(&mut line).is_err() {
8595 break;
8596 }
8597 let q = line.trim();
8598 if q.is_empty() {
8599 continue;
8600 }
8601 if matches!(q, "exit" | "quit" | "q") {
8602 break;
8603 }
8604 answer(&project, q, false);
8605 }
8606 return;
8607 }
8608
8609 answer(&project, question, json);
8610}
8611
/// Route a free-text question to one of several canned summaries, keyed by
/// keyword matching on the lowercased question. Each matched branch prints
/// (or emits as JSON) its answer and returns; the final branch handles
/// unrecognized questions with a hint.
fn answer(project: &crate::project::Project, q: &str, json: bool) {
    let lower = q.to_lowercase();

    // "What's pending?" — pending-review proposals, grouped by kind.
    if lower.contains("pending")
        || lower.contains("inbox")
        || lower.contains("queue")
        || lower.contains("to review")
    {
        let pending: Vec<&crate::proposals::StateProposal> = project
            .proposals
            .iter()
            .filter(|p| p.status == "pending_review")
            .collect();
        let mut by_kind: std::collections::BTreeMap<String, usize> = Default::default();
        for p in &pending {
            *by_kind.entry(p.kind.clone()).or_insert(0) += 1;
        }
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "answer": "pending",
                    "total": pending.len(),
                    "by_kind": by_kind,
                }))
                .unwrap()
            );
        } else {
            println!(" {} pending proposals.", pending.len());
            for (k, n) in &by_kind {
                println!(" · {n:>3} {k}");
            }
            if pending.is_empty() {
                println!(" Inbox is clean.");
            } else {
                println!(" Run `vela inbox <frontier>` to triage.");
            }
        }
        return;
    }

    // "What's underidentified?" — causal identifiability audit; lists up to
    // 8 underidentified findings as concrete review items.
    if lower.contains("underident")
        || lower.contains("audit")
        || lower.contains("identif")
        || lower.contains("causal")
    {
        let entries = crate::causal_reasoning::audit_frontier(project);
        let summary = crate::causal_reasoning::summarize_audit(&entries);
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "answer": "audit",
                    "summary": {
                        "identified": summary.identified,
                        "conditional": summary.conditional,
                        "underidentified": summary.underidentified,
                        "underdetermined": summary.underdetermined,
                    },
                }))
                .unwrap()
            );
        } else {
            println!(
                " Causal audit: {} identified · {} conditional · {} underidentified · {} underdetermined.",
                summary.identified,
                summary.conditional,
                summary.underidentified,
                summary.underdetermined,
            );
            if summary.underidentified > 0 {
                println!(
                    " The {} underidentified findings are concrete review items:",
                    summary.underidentified
                );
                for e in entries
                    .iter()
                    .filter(|e| {
                        matches!(
                            e.verdict,
                            crate::causal_reasoning::Identifiability::Underidentified
                        )
                    })
                    .take(8)
                {
                    let txt: String = e.assertion_text.chars().take(70).collect();
                    println!(" · {} {}", e.finding_id, txt);
                }
            }
        }
        return;
    }

    // "What changed recently?" — the 8 newest events by timestamp.
    if lower.contains("recent")
        || lower.contains("changed")
        || lower.contains("latest")
        || lower.contains("happen")
    {
        let mut events: Vec<&crate::events::StateEvent> = project.events.iter().collect();
        events.sort_by(|a, b| b.timestamp.cmp(&a.timestamp));
        events.truncate(8);
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "answer": "recent_events",
                    "events": events.iter().map(|e| json!({
                        "id": e.id, "kind": e.kind, "timestamp": e.timestamp,
                        "actor": e.actor.id, "target": e.target.id,
                    })).collect::<Vec<_>>(),
                }))
                .unwrap()
            );
        } else {
            println!(" Most recent {} events:", events.len());
            for e in &events {
                let when = fmt_timestamp(&e.timestamp);
                println!(" · {when} {:<28} {}", e.kind, e.target.id);
            }
        }
        return;
    }

    // "How many findings?" — basic entity counts.
    if lower.starts_with("how many") || lower.contains("count") || lower.contains("total") {
        let n = project.findings.len();
        let evs = project.events.len();
        let peers = project.peers.len();
        let actors = project.actors.len();
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "answer": "counts",
                    "findings": n,
                    "events": evs,
                    "peers": peers,
                    "actors": actors,
                    "replications": project.replications.len(),
                    "predictions": project.predictions.len(),
                }))
                .unwrap()
            );
        } else {
            println!(" {n} findings · {evs} events · {actors} actors · {peers} peers.");
            println!(
                " {} replications · {} predictions · {} datasets · {} code artifacts.",
                project.replications.len(),
                project.predictions.len(),
                project.datasets.len(),
                project.code_artifacts.len(),
            );
        }
        return;
    }

    // "Who has what calibration?" — per-actor prediction/Brier ledger.
    if lower.contains("calibration") || lower.contains("brier") || lower.contains("predict") {
        let records =
            crate::calibration::calibration_records(&project.predictions, &project.resolutions);
        if json {
            println!("{}", serde_json::to_string_pretty(&records).unwrap());
        } else if records.is_empty() {
            println!(" No predictions yet. The calibration ledger is empty.");
        } else {
            println!(" Calibration over {} actor(s):", records.len());
            for r in &records {
                let brier = r
                    .brier_score
                    .map(|b| format!("{:.3}", b))
                    .unwrap_or_else(|| "—".into());
                println!(
                    " · {:<28} predictions {} · resolved {} · expired {} · Brier {}",
                    r.actor, r.n_predictions, r.n_resolved, r.n_expired, brier
                );
            }
        }
        return;
    }

    // "Any conflicts?" — federation peers and conflict-event count.
    if lower.contains("peer")
        || lower.contains("federat")
        || lower.contains("sync")
        || lower.contains("conflict")
    {
        let mut total_conflicts = 0usize;
        for e in &project.events {
            if e.kind == "frontier.conflict_detected" {
                total_conflicts += 1;
            }
        }
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "answer": "federation",
                    "peers": project.peers.iter().map(|p| &p.id).collect::<Vec<_>>(),
                    "total_conflicts": total_conflicts,
                }))
                .unwrap()
            );
        } else {
            println!(" {} peer(s) registered:", project.peers.len());
            for p in &project.peers {
                println!(" · {:<24} {}", p.id, p.url);
            }
            println!(" {total_conflicts} conflict events on the canonical log.");
        }
        return;
    }

    // Fallback: the question matched no route.
    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "answer": "unknown_question",
                "question": q,
                "hint": "Try: pending, audit, recent, how many, calibration, peers."
            }))
            .unwrap()
        );
    } else {
        println!(" Don't know how to route that question yet.");
        println!(" Try: pending · audit · recent · how many · calibration · peers");
    }
}
8843
8844fn frontier_label(p: &crate::project::Project) -> String {
8845 if p.project.name.trim().is_empty() {
8846 "(unnamed)".to_string()
8847 } else {
8848 p.project.name.clone()
8849 }
8850}
8851
8852fn fmt_timestamp(ts: &str) -> String {
8853 chrono::DateTime::parse_from_rfc3339(ts)
8856 .map(|dt| dt.format("%m-%d %H:%M").to_string())
8857 .unwrap_or_else(|_| ts.chars().take(16).collect())
8858}
8859
8860fn cmd_stats(path: &Path) {
8861 let frontier = load_frontier_or_fail(path);
8862 let s = &frontier.stats;
8863 println!();
8864 println!(" {}", "FRONTIER · V0.36.0".dimmed());
8865 println!(" {}", frontier.project.name.bold());
8866 println!(" {}", style::tick_row(60));
8867 println!(" id: {}", frontier.frontier_id());
8868 println!(" compiled: {}", frontier.project.compiled_at);
8869 println!(" papers: {}", frontier.project.papers_processed);
8870 println!(" findings: {}", s.findings);
8871 println!(" links: {}", s.links);
8872 println!(" replicated: {}", s.replicated);
8873 println!(" avg confidence: {}", s.avg_confidence);
8874 println!(" gaps: {}", s.gaps);
8875 println!(" contested: {}", s.contested);
8876 println!(" reviewed: {}", s.human_reviewed);
8877 println!(" proposals: {}", s.proposal_count);
8878 println!(
8879 " recorded proof: {}",
8880 frontier.proof_state.latest_packet.status
8881 );
8882 if frontier.proof_state.latest_packet.status != "never_exported" {
8883 println!(
8884 " proof note: recorded frontier metadata; packet files are checked by `vela packet validate`"
8885 );
8886 }
8887 if !s.categories.is_empty() {
8888 println!();
8889 println!(" {}", "categories".dimmed());
8890 let mut categories = s.categories.iter().collect::<Vec<_>>();
8891 categories.sort_by(|a, b| b.1.cmp(a.1));
8892 for (category, count) in categories {
8893 println!(" {category}: {}", count);
8894 }
8895 }
8896 println!();
8897 println!(" {}", style::tick_row(60));
8898 println!();
8899}
8900
/// Dispatch handler for `vela proposals <action>`.
///
/// Each arm loads or mutates frontier state through the `proposals`
/// module, then prints either a pretty-printed JSON payload (`--json`)
/// or a short human-readable summary. Errors are routed through
/// `fail_return`, which reports and terminates the process.
fn cmd_proposals(action: ProposalAction) {
    match action {
        // List proposals, optionally filtered by status.
        ProposalAction::List {
            frontier,
            status,
            json,
        } => {
            let frontier_state =
                repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let proposals_list = proposals::list(&frontier_state, status.as_deref());
            let payload = json!({
                "ok": true,
                "command": "proposals.list",
                "frontier": frontier_state.project.name,
                "status_filter": status,
                "summary": proposals::summary(&frontier_state),
                "proposals": proposals_list,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposals list")
                );
            } else {
                println!("vela proposals list");
                println!(" frontier: {}", frontier_state.project.name);
                println!(
                    " proposals: {}",
                    // Count via the payload: `proposals_list` was moved
                    // into `payload` above.
                    payload["proposals"].as_array().map_or(0, Vec::len)
                );
            }
        }
        // Show one proposal's kind and status by id.
        ProposalAction::Show {
            frontier,
            proposal_id,
            json,
        } => {
            let frontier_state =
                repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let proposal =
                proposals::show(&frontier_state, &proposal_id).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "proposals.show",
                "frontier": frontier_state.project.name,
                "proposal": proposal,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal show")
                );
            } else {
                println!("vela proposals show");
                println!(" frontier: {}", frontier_state.project.name);
                println!(" proposal: {}", proposal_id);
                println!(" kind: {}", proposal.kind);
                println!(" status: {}", proposal.status);
            }
        }
        // Dry-run a proposal and report the before/after state deltas
        // without persisting anything.
        ProposalAction::Preview {
            frontier,
            proposal_id,
            reviewer,
            json,
        } => {
            let preview = proposals::preview_at_path(&frontier, &proposal_id, &reviewer)
                .unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "proposals.preview",
                "frontier": frontier.display().to_string(),
                "preview": preview,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal preview")
                );
            } else {
                println!("vela proposals preview");
                println!(" proposal: {}", proposal_id);
                println!(" kind: {}", preview.kind);
                println!(
                    " findings: {} -> {}",
                    preview.findings_before, preview.findings_after
                );
                println!(
                    " artifacts: {} -> {}",
                    preview.artifacts_before, preview.artifacts_after
                );
                println!(
                    " events: {} -> {}",
                    preview.events_before, preview.events_after
                );
                // Detail lines are printed only when something changed.
                if !preview.changed_findings.is_empty() {
                    println!(
                        " findings changed: {}",
                        preview.changed_findings.join(", ")
                    );
                }
                if !preview.changed_artifacts.is_empty() {
                    println!(
                        " artifacts changed: {}",
                        preview.changed_artifacts.join(", ")
                    );
                }
                if !preview.event_kinds.is_empty() {
                    println!(" event kinds: {}", preview.event_kinds.join(", "));
                }
                println!(" event: {}", preview.applied_event_id);
            }
        }
        // Import proposals from an external source file/dir into the frontier.
        ProposalAction::Import {
            frontier,
            source,
            json,
        } => {
            let report =
                proposals::import_from_path(&frontier, &source).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "proposals.import",
                "frontier": frontier.display().to_string(),
                "source": source.display().to_string(),
                "summary": {
                    "imported": report.imported,
                    "applied": report.applied,
                    "rejected": report.rejected,
                    "duplicates": report.duplicates,
                },
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal import")
                );
            } else {
                println!(
                    "Imported {} proposals into {}",
                    report.imported, report.wrote_to
                );
            }
        }
        // Validate a proposal source without importing. Exits non-zero
        // when any proposal is invalid, so it can gate CI.
        ProposalAction::Validate { source, json } => {
            let report = proposals::validate_source(&source).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": report.ok,
                "command": "proposals.validate",
                "source": source.display().to_string(),
                "summary": {
                    "checked": report.checked,
                    "valid": report.valid,
                    "invalid": report.invalid,
                },
                "proposal_ids": report.proposal_ids,
                "errors": report.errors,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal validation")
                );
            } else if report.ok {
                println!("{} validated {} proposals", style::ok("ok"), report.valid);
            } else {
                println!(
                    "{} validated {} proposals, {} invalid",
                    style::lost("lost"),
                    report.valid,
                    report.invalid
                );
                for error in &report.errors {
                    println!(" · {error}");
                }
                std::process::exit(1);
            }
        }
        // Export proposals (optionally filtered by status) to a file.
        ProposalAction::Export {
            frontier,
            output,
            status,
            json,
        } => {
            let count = proposals::export_to_path(&frontier, &output, status.as_deref())
                .unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "proposals.export",
                "frontier": frontier.display().to_string(),
                "output": output.display().to_string(),
                "status": status,
                "exported": count,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal export")
                );
            } else {
                println!("sealed · {count} proposals · {}", output.display());
            }
        }
        // Accept a proposal: applies it to state and records the event id.
        ProposalAction::Accept {
            frontier,
            proposal_id,
            reviewer,
            reason,
            json,
        } => {
            let event_id = proposals::accept_at_path(&frontier, &proposal_id, &reviewer, &reason)
                .unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "proposals.accept",
                "frontier": frontier.display().to_string(),
                "proposal_id": proposal_id,
                "reviewer": reviewer,
                "applied_event_id": event_id,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal accept")
                );
            } else {
                println!(
                    "{} accepted and applied proposal {}",
                    style::ok("ok"),
                    proposal_id
                );
                println!(" event: {}", event_id);
            }
        }
        // Reject a proposal with a reviewer-supplied reason.
        ProposalAction::Reject {
            frontier,
            proposal_id,
            reviewer,
            reason,
            json,
        } => {
            proposals::reject_at_path(&frontier, &proposal_id, &reviewer, &reason)
                .unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "proposals.reject",
                "frontier": frontier.display().to_string(),
                "proposal_id": proposal_id,
                "reviewer": reviewer,
                "status": "rejected",
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal reject")
                );
            } else {
                println!(
                    "{} rejected proposal {}",
                    style::warn("rejected"),
                    proposal_id
                );
            }
        }
    }
}
9175
9176fn cmd_artifact_to_state(
9177 frontier: &Path,
9178 packet: &Path,
9179 actor: &str,
9180 apply_artifacts: bool,
9181 json: bool,
9182) {
9183 let report =
9184 crate::artifact_to_state::import_packet_at_path(frontier, packet, actor, apply_artifacts)
9185 .unwrap_or_else(|e| fail_return(&e));
9186 if json {
9187 println!(
9188 "{}",
9189 serde_json::to_string_pretty(&report)
9190 .expect("failed to serialize artifact-to-state report")
9191 );
9192 } else {
9193 println!("vela artifact-to-state");
9194 println!(" packet: {}", report.packet_id);
9195 println!(" frontier: {}", report.frontier);
9196 println!(" artifact proposals: {}", report.artifact_proposals);
9197 println!(" finding proposals: {}", report.finding_proposals);
9198 println!(" gap proposals: {}", report.gap_proposals);
9199 println!(
9200 " applied artifact events: {}",
9201 report.applied_artifact_events
9202 );
9203 println!(
9204 " pending truth proposals: {}",
9205 report.pending_truth_proposals
9206 );
9207 }
9208}
9209
9210fn cmd_bridge_kit(action: BridgeKitAction) {
9211 match action {
9212 BridgeKitAction::Validate { source, json } => {
9213 let report = crate::artifact_to_state::validate_bridge_kit_path(&source);
9214 if json {
9215 println!(
9216 "{}",
9217 serde_json::to_string_pretty(&report)
9218 .expect("failed to serialize bridge-kit validation report")
9219 );
9220 } else {
9221 println!("vela bridge-kit validate");
9222 println!(" source: {}", report.source);
9223 println!(" packets: {}", report.packet_count);
9224 println!(" valid: {}", report.valid_packet_count);
9225 println!(" invalid: {}", report.invalid_packet_count);
9226 for packet in &report.packets {
9227 if packet.ok {
9228 println!(
9229 " ok: {} · {} artifacts · {} claims · {} needs",
9230 packet
9231 .packet_id
9232 .as_deref()
9233 .unwrap_or("packet id unavailable"),
9234 packet.artifact_count,
9235 packet.candidate_claim_count,
9236 packet.open_need_count
9237 );
9238 } else {
9239 println!(" invalid: {} · {}", packet.path, packet.errors.join("; "));
9240 }
9241 }
9242 for error in &report.errors {
9243 println!(" error: {error}");
9244 }
9245 }
9246 if !report.ok {
9247 std::process::exit(1);
9248 }
9249 }
9250 }
9251}
9252
9253async fn cmd_source_adapter(action: SourceAdapterAction) {
9254 match action {
9255 SourceAdapterAction::Run {
9256 frontier,
9257 adapter,
9258 actor,
9259 entries,
9260 priority,
9261 include_excluded,
9262 allow_partial,
9263 dry_run,
9264 input_dir,
9265 apply_artifacts,
9266 json,
9267 } => {
9268 let report = crate::source_adapters::run(
9269 &frontier,
9270 crate::source_adapters::SourceAdapterRunOptions {
9271 adapter,
9272 actor,
9273 entries,
9274 priority,
9275 include_excluded,
9276 allow_partial,
9277 dry_run,
9278 input_dir,
9279 apply_artifacts,
9280 },
9281 )
9282 .await
9283 .unwrap_or_else(|e| fail_return(&e));
9284 if json {
9285 println!(
9286 "{}",
9287 serde_json::to_string_pretty(&report)
9288 .expect("failed to serialize source adapter report")
9289 );
9290 } else {
9291 println!("vela source-adapter run");
9292 println!(" adapter: {}", report.adapter);
9293 println!(" run: {}", report.run_id);
9294 println!(" frontier: {}", report.frontier);
9295 println!(" selected entries: {}", report.selected_entries);
9296 println!(" fetched records: {}", report.fetched_records);
9297 println!(" changed records: {}", report.changed_records);
9298 println!(" unchanged records: {}", report.unchanged_records);
9299 println!(" failed records: {}", report.failed_records.len());
9300 if let Some(packet_id) = report.packet_id {
9301 println!(" packet: {packet_id}");
9302 }
9303 println!(" artifact proposals: {}", report.artifact_proposals);
9304 println!(" review note proposals: {}", report.review_note_proposals);
9305 println!(" applied events: {}", report.applied_event_ids.len());
9306 }
9307 }
9308 }
9309}
9310
9311fn cmd_runtime_adapter(action: RuntimeAdapterAction) {
9312 match action {
9313 RuntimeAdapterAction::Run {
9314 frontier,
9315 adapter,
9316 input,
9317 actor,
9318 dry_run,
9319 apply_artifacts,
9320 json,
9321 } => {
9322 let report = crate::runtime_adapters::run(
9323 &frontier,
9324 crate::runtime_adapters::RuntimeAdapterRunOptions {
9325 adapter,
9326 input,
9327 actor,
9328 dry_run,
9329 apply_artifacts,
9330 },
9331 )
9332 .unwrap_or_else(|e| fail_return(&e));
9333 if json {
9334 println!(
9335 "{}",
9336 serde_json::to_string_pretty(&report)
9337 .expect("failed to serialize runtime adapter report")
9338 );
9339 } else {
9340 println!("vela runtime-adapter run");
9341 println!(" adapter: {}", report.adapter);
9342 println!(" run: {}", report.run_id);
9343 println!(" frontier: {}", report.frontier);
9344 if let Some(packet_id) = report.packet_id {
9345 println!(" packet: {packet_id}");
9346 }
9347 println!(" artifact proposals: {}", report.artifact_proposals);
9348 println!(" finding proposals: {}", report.finding_proposals);
9349 println!(" gap proposals: {}", report.gap_proposals);
9350 println!(" review note proposals: {}", report.review_note_proposals);
9351 println!(
9352 " applied artifact events: {}",
9353 report.applied_artifact_events
9354 );
9355 println!(
9356 " pending truth proposals: {}",
9357 report.pending_truth_proposals
9358 );
9359 }
9360 }
9361 }
9362}
9363
/// Handler for `vela sign <action>`: keypair generation, signing a
/// frontier's findings, verifying signatures, and setting the
/// multi-signature acceptance threshold on a finding.
fn cmd_sign(action: SignAction) {
    match action {
        // Create a keypair on disk and echo the public half.
        SignAction::GenerateKeypair { out, json } => {
            let public_key = sign::generate_keypair(&out).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "sign.generate-keypair",
                "output_dir": out.display().to_string(),
                "public_key": public_key,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize sign.generate-keypair")
                );
            } else {
                println!("{} keypair · {}", style::ok("generated"), out.display());
                println!(" public key: {public_key}");
            }
        }
        // Sign the frontier's findings with the given private key and
        // report how many were signed.
        SignAction::Apply {
            frontier,
            private_key,
            json,
        } => {
            let count =
                sign::sign_frontier(&frontier, &private_key).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "sign.apply",
                "frontier": frontier.display().to_string(),
                "private_key": private_key.display().to_string(),
                "signed": count,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload).expect("failed to serialize sign.apply")
                );
            } else {
                println!(
                    "{} {count} findings in {}",
                    style::ok("signed"),
                    frontier.display()
                );
            }
        }
        // Verify signatures across the frontier, optionally against a
        // specific public key.
        SignAction::Verify {
            frontier,
            public_key,
            json,
        } => {
            let report = sign::verify_frontier(&frontier, public_key.as_deref())
                .unwrap_or_else(|e| fail_return(&e));
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&report).expect("failed to serialize sign.verify")
                );
            } else {
                println!();
                println!(
                    " {}",
                    format!("VELA · SIGN · VERIFY · {}", frontier.display())
                        .to_uppercase()
                        .dimmed()
                );
                println!(" {}", style::tick_row(60));
                println!(" total findings: {}", report.total_findings);
                println!(" signed: {}", report.signed);
                println!(" unsigned: {}", report.unsigned);
                println!(" valid: {}", report.valid);
                println!(" invalid: {}", report.invalid);
                // Threshold lines only matter when at least one finding
                // declares a signature threshold.
                if report.findings_with_threshold > 0 {
                    println!(" with threshold: {}", report.findings_with_threshold);
                    println!(" jointly accepted: {}", report.jointly_accepted);
                }
            }
        }
        // Set the number of signatures a finding needs before it counts
        // as jointly accepted, recompute acceptance, and persist.
        SignAction::ThresholdSet {
            frontier,
            finding_id,
            to,
            json,
        } => {
            // A zero threshold would be trivially satisfied; reject it.
            if to == 0 {
                fail("--to must be >= 1");
            }
            let mut project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let Some(idx) = project.findings.iter().position(|f| f.id == finding_id) else {
                fail(&format!("finding '{finding_id}' not present in frontier"));
            };
            project.findings[idx].flags.signature_threshold = Some(to);
            // Order matters: recompute jointly_accepted flags *after*
            // setting the new threshold and *before* reading `met`, then
            // save so the persisted state matches what we report.
            sign::refresh_jointly_accepted(&mut project);
            let met = project.findings[idx].flags.jointly_accepted;
            repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));

            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&json!({
                        "ok": true,
                        "command": "sign.threshold-set",
                        "finding_id": finding_id,
                        "threshold": to,
                        "jointly_accepted": met,
                        "frontier": frontier.display().to_string(),
                    }))
                    .expect("failed to serialize sign.threshold-set")
                );
            } else {
                println!(
                    "{} signature_threshold={to} on {finding_id} ({})",
                    style::ok("set"),
                    if met {
                        "jointly accepted"
                    } else {
                        "awaiting signatures"
                    }
                );
            }
        }
    }
}
9492
9493fn cmd_actor(action: ActorAction) {
9494 match action {
9495 ActorAction::Add {
9496 frontier,
9497 id,
9498 pubkey,
9499 tier,
9500 orcid,
9501 clearance,
9502 json,
9503 } => {
9504 let trimmed = pubkey.trim();
9506 if trimmed.len() != 64 || hex::decode(trimmed).is_err() {
9507 fail("Public key must be 64 hex characters (32-byte Ed25519 pubkey).");
9508 }
9509 let orcid_normalized = orcid
9511 .as_deref()
9512 .map(|s| sign::validate_orcid(s).unwrap_or_else(|e| fail_return(&e)));
9513 let clearance: Option<crate::access_tier::AccessTier> = clearance.as_deref().map(|s| {
9516 crate::access_tier::AccessTier::parse(s).unwrap_or_else(|e| fail_return(&e))
9517 });
9518
9519 let mut project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
9520 if project.actors.iter().any(|actor| actor.id == id) {
9521 fail(&format!(
9522 "Actor '{id}' already registered in this frontier."
9523 ));
9524 }
9525 project.actors.push(sign::ActorRecord {
9526 id: id.clone(),
9527 public_key: trimmed.to_string(),
9528 algorithm: "ed25519".to_string(),
9529 created_at: chrono::Utc::now().to_rfc3339(),
9530 tier: tier.clone(),
9531 orcid: orcid_normalized.clone(),
9532 access_clearance: clearance,
9533 });
9534 repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));
9535 let payload = json!({
9536 "ok": true,
9537 "command": "actor.add",
9538 "frontier": frontier.display().to_string(),
9539 "actor_id": id,
9540 "public_key": trimmed,
9541 "tier": tier,
9542 "orcid": orcid_normalized,
9543 "registered_count": project.actors.len(),
9544 });
9545 if json {
9546 println!(
9547 "{}",
9548 serde_json::to_string_pretty(&payload).expect("failed to serialize actor.add")
9549 );
9550 } else {
9551 let tier_suffix = tier
9552 .as_deref()
9553 .map_or_else(String::new, |t| format!(" tier={t}"));
9554 println!(
9555 "{} actor {} (pubkey {}{tier_suffix})",
9556 style::ok("registered"),
9557 id,
9558 &trimmed[..16]
9559 );
9560 }
9561 }
9562 ActorAction::List { frontier, json } => {
9563 let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
9564 if json {
9565 let payload = json!({
9566 "ok": true,
9567 "command": "actor.list",
9568 "frontier": frontier.display().to_string(),
9569 "actors": project.actors,
9570 });
9571 println!(
9572 "{}",
9573 serde_json::to_string_pretty(&payload).expect("failed to serialize actor.list")
9574 );
9575 } else {
9576 println!();
9577 println!(
9578 " {}",
9579 format!("VELA · ACTOR · LIST · {}", frontier.display())
9580 .to_uppercase()
9581 .dimmed()
9582 );
9583 println!(" {}", style::tick_row(60));
9584 if project.actors.is_empty() {
9585 println!(" (no actors registered)");
9586 } else {
9587 for actor in &project.actors {
9588 println!(
9589 " {:<28} {}… registered {}",
9590 actor.id,
9591 &actor.public_key[..16],
9592 actor.created_at
9593 );
9594 }
9595 }
9596 }
9597 }
9598 }
9599}
9600
/// Handler for `vela causal <action>`: audit findings for causal
/// identifiability, identify a specific effect, dump the causal graph,
/// and answer counterfactual queries.
fn cmd_causal(action: CausalAction) {
    use crate::causal_reasoning;

    match action {
        // Audit every finding's causal identifiability and summarize.
        CausalAction::Audit {
            frontier,
            problems_only,
            json,
        } => {
            let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let mut entries = causal_reasoning::audit_frontier(&project);
            // --problems-only keeps just the entries a reviewer must act on.
            if problems_only {
                entries.retain(|e| e.verdict.needs_reviewer_attention());
            }
            // Summarize after filtering so counts match what is shown.
            let summary = causal_reasoning::summarize_audit(&entries);

            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&json!({
                        "ok": true,
                        "command": "causal.audit",
                        "frontier": frontier.display().to_string(),
                        "summary": summary,
                        "entries": entries,
                    }))
                    .expect("serialize causal.audit")
                );
                return;
            }

            println!();
            println!(
                " {}",
                format!("VELA · CAUSAL · AUDIT · {}", frontier.display())
                    .to_uppercase()
                    .dimmed()
            );
            println!(" {}", style::tick_row(60));
            println!(
                " total: {} identified: {} conditional: {} underidentified: {} underdetermined: {}",
                summary.total,
                summary.identified,
                summary.conditional,
                summary.underidentified,
                summary.underdetermined,
            );
            if entries.is_empty() {
                println!(" (no entries to report)");
                return;
            }
            for e in &entries {
                // Color-coded chip per identifiability verdict.
                let chip = match e.verdict {
                    crate::causal_reasoning::Identifiability::Identified => style::ok("identified"),
                    crate::causal_reasoning::Identifiability::Conditional => {
                        style::warn("conditional")
                    }
                    crate::causal_reasoning::Identifiability::Underidentified => {
                        style::lost("underidentified")
                    }
                    crate::causal_reasoning::Identifiability::Underdetermined => {
                        style::warn("underdetermined")
                    }
                };
                // Render optional claim/grade enums as lowercase debug text,
                // or "none" when absent.
                let claim = e
                    .causal_claim
                    .map_or("none".to_string(), |c| format!("{c:?}").to_lowercase());
                let grade = e
                    .causal_evidence_grade
                    .map_or("none".to_string(), |g| format!("{g:?}").to_lowercase());
                println!();
                println!(" {chip} {} ({}/{})", e.finding_id, claim, grade);
                // Truncate the assertion text to keep the card compact.
                let assertion_short: String = e.assertion_text.chars().take(78).collect();
                println!(" {assertion_short}");
                println!(" {} {}", style::ok("why:"), e.rationale);
                // Show remediation for anything needing attention, plus
                // underdetermined verdicts explicitly.
                if e.verdict.needs_reviewer_attention()
                    || matches!(
                        e.verdict,
                        crate::causal_reasoning::Identifiability::Underdetermined
                    )
                {
                    println!(" {} {}", style::ok("fix:"), e.remediation);
                }
            }
        }
        // Decide whether the causal effect of `source` on `target` is
        // identified (back-door / front-door) from the frontier's graph.
        CausalAction::Effect {
            frontier,
            source,
            on: target,
            json,
        } => {
            use crate::causal_graph::{CausalEffectVerdict, identify_effect};

            let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let verdict = identify_effect(&project, &source, &target);

            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&json!({
                        "ok": true,
                        "command": "causal.effect",
                        "frontier": frontier.display().to_string(),
                        "source": source,
                        "target": target,
                        "verdict": verdict,
                    }))
                    .expect("serialize causal.effect")
                );
                return;
            }

            println!();
            println!(
                " {}",
                format!("VELA · CAUSAL · EFFECT · {} → {}", source, target)
                    .to_uppercase()
                    .dimmed()
            );
            println!(" {}", style::tick_row(60));
            match verdict {
                // Back-door adjustment found (possibly empty set).
                CausalEffectVerdict::Identified {
                    adjustment_set,
                    back_door_paths_considered,
                } => {
                    if adjustment_set.is_empty() {
                        println!(
                            " {} no back-door adjustment needed",
                            style::ok("identified")
                        );
                    } else {
                        println!(" {} identified by adjusting on:", style::ok("identified"));
                        for z in &adjustment_set {
                            println!(" · {z}");
                        }
                    }
                    println!(
                        " back-door paths considered: {}",
                        back_door_paths_considered
                    );
                }
                // Front-door identification via a mediator chain.
                CausalEffectVerdict::IdentifiedByFrontDoor { mediator_set } => {
                    println!(
                        " {} identified via front-door criterion (Pearl 1995 §3.3)",
                        style::ok("identified")
                    );
                    println!(" mediators that intercept all directed paths:");
                    for m in &mediator_set {
                        println!(" · {m}");
                    }
                    println!(
                        " applies when source-target confounders are unobserved but the mediator chain is."
                    );
                }
                CausalEffectVerdict::NoCausalPath { reason } => {
                    println!(" {} no causal path: {reason}", style::warn("no_path"));
                }
                // No adjustment set closes all back-door paths: show the
                // open paths (capped at 5) and how to remediate.
                CausalEffectVerdict::Underidentified {
                    unblocked_back_door_paths,
                    candidates_tried,
                } => {
                    println!(
                        " {} no observational adjustment set found ({} candidates tried)",
                        style::lost("underidentified"),
                        candidates_tried
                    );
                    println!(" open back-door paths:");
                    for path in unblocked_back_door_paths.iter().take(5) {
                        println!(" · {}", path.join(" — "));
                    }
                    println!(
                        " remediation: either intervene experimentally on {source}, or extend the link graph to make a confounder observable."
                    );
                }
                CausalEffectVerdict::UnknownNode { which } => {
                    fail(&which);
                }
            }
            println!();
        }
        // Print the causal graph, either one node's neighborhood or the
        // parents/children of every finding.
        CausalAction::Graph {
            frontier,
            node,
            json,
        } => {
            use crate::causal_graph::CausalGraph;
            let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let graph = CausalGraph::from_project(&project);

            // A named node must exist; otherwise show all findings.
            let nodes: Vec<&str> = if let Some(n) = node.as_deref() {
                if !graph.contains(n) {
                    fail(&format!("node not in frontier: {n}"));
                }
                vec![n]
            } else {
                project.findings.iter().map(|f| f.id.as_str()).collect()
            };

            if json {
                let payload: Vec<_> = nodes
                    .iter()
                    .map(|n| {
                        let parents: Vec<&str> = graph.parents_of(n).collect();
                        let children: Vec<&str> = graph.children_of(n).collect();
                        json!({
                            "node": n,
                            "parents": parents,
                            "children": children,
                        })
                    })
                    .collect();
                println!(
                    "{}",
                    serde_json::to_string_pretty(&json!({
                        "ok": true,
                        "command": "causal.graph",
                        "node_count": graph.node_count(),
                        "edge_count": graph.edge_count(),
                        "nodes": payload,
                    }))
                    .expect("serialize causal.graph")
                );
                return;
            }

            println!();
            println!(
                " {}",
                format!("VELA · CAUSAL · GRAPH · {}", frontier.display())
                    .to_uppercase()
                    .dimmed()
            );
            println!(" {}", style::tick_row(60));
            println!(
                " {} nodes · {} edges",
                graph.node_count(),
                graph.edge_count()
            );
            println!();
            for n in &nodes {
                let parents: Vec<&str> = graph.parents_of(n).collect();
                let children: Vec<&str> = graph.children_of(n).collect();
                // When listing everything, skip isolated nodes; an
                // explicitly requested single node is always shown.
                if parents.is_empty() && children.is_empty() && nodes.len() > 1 {
                    continue;
                }
                println!(" {n}");
                if !parents.is_empty() {
                    println!(" parents: {}", parents.join(", "));
                }
                if !children.is_empty() {
                    println!(" children: {}", children.join(", "));
                }
            }
        }
        // Answer "what if X had been v?" for a target finding via
        // twin-network propagation over the causal graph.
        CausalAction::Counterfactual {
            frontier,
            intervene_on,
            set_to,
            target,
            json,
        } => {
            use crate::counterfactual::{
                CounterfactualQuery, CounterfactualVerdict, answer_counterfactual,
            };

            let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let query = CounterfactualQuery {
                intervene_on: intervene_on.clone(),
                set_to,
                target: target.clone(),
            };
            let verdict = answer_counterfactual(&project, &query);

            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&json!({
                        "ok": true,
                        "command": "causal.counterfactual",
                        "frontier": frontier.display().to_string(),
                        "query": query,
                        "verdict": verdict,
                    }))
                    .expect("serialize causal.counterfactual")
                );
                return;
            }

            println!();
            println!(
                " {}",
                format!(
                    "VELA · CAUSAL · COUNTERFACTUAL · do({intervene_on} := {set_to:.3}) → {target}"
                )
                .to_uppercase()
                .dimmed()
            );
            println!(" {}", style::tick_row(72));
            match verdict {
                // Full answer: factual vs counterfactual values and the
                // causal paths (capped at 5) used for propagation.
                CounterfactualVerdict::Resolved {
                    factual,
                    counterfactual,
                    delta,
                    paths_used,
                } => {
                    println!(
                        " {} factual: {factual:.3} counterfactual: {counterfactual:.3} delta: {delta:+.3}",
                        style::ok("resolved")
                    );
                    println!(
                        " twin-network propagation through {} causal path(s):",
                        paths_used.len()
                    );
                    for p in paths_used.iter().take(5) {
                        println!(" · {}", p.join(" → "));
                    }
                    println!(
                        " reading: \"if {intervene_on}'s confidence had been {set_to:.3} \
                         instead of factual, {target}'s confidence would shift by {delta:+.3}.\""
                    );
                }
                // Path exists, but some edge lacks a mechanism annotation
                // so propagation cannot proceed (show up to 8 edges).
                CounterfactualVerdict::MechanismUnspecified { unspecified_edges } => {
                    println!(
                        " {} causal path exists but {} edge(s) lack a mechanism annotation",
                        style::warn("mechanism_unspecified"),
                        unspecified_edges.len()
                    );
                    for (parent, child) in unspecified_edges.iter().take(8) {
                        println!(" · {parent} → {child}");
                    }
                    println!(
                        " remediation: annotate one of the link mechanisms (linear / monotonic / threshold / saturating)."
                    );
                }
                // No directed path: the intervention cannot move the target.
                CounterfactualVerdict::NoCausalPath { factual } => {
                    println!(
                        " {} no directed path from {intervene_on} to {target}; counterfactual = factual = {factual:.3}",
                        style::warn("no_path")
                    );
                }
                CounterfactualVerdict::UnknownNode { which } => {
                    fail(&format!("node not in frontier: {which}"));
                }
                CounterfactualVerdict::InvalidIntervention { reason } => {
                    fail(&reason);
                }
            }
            println!();
        }
    }
}
9955
/// Dispatch for `vela bridges …`: derive bridges from two frontiers, list,
/// show, and record reviewer confirm/refute judgments.
///
/// Bridges are stored as one JSON file each under `<frontier>/.vela/bridges/`;
/// confirm/refute additionally append a reviewed event under `.vela/events/`.
fn cmd_bridges(action: BridgesAction) {
    use crate::bridge::{Bridge, BridgeStatus, derive_bridges};
    use std::collections::HashMap;

    // Storage directory for bridge JSON files inside a frontier.
    fn bridges_dir(frontier: &Path) -> PathBuf {
        frontier.join(".vela/bridges")
    }

    // Read and parse `<frontier>/.vela/bridges/<id>.json`.
    fn load_bridge(frontier: &Path, id: &str) -> Result<Bridge, String> {
        let path = bridges_dir(frontier).join(format!("{id}.json"));
        if !path.is_file() {
            return Err(format!("bridge not found: {id}"));
        }
        let data = std::fs::read_to_string(&path).map_err(|e| format!("read {id}: {e}"))?;
        serde_json::from_str(&data).map_err(|e| format!("parse {id}: {e}"))
    }

    // Persist a bridge as pretty-printed JSON, creating the directory on
    // first use; the trailing newline keeps the files diff-friendly.
    fn save_bridge(frontier: &Path, b: &Bridge) -> Result<(), String> {
        let dir = bridges_dir(frontier);
        std::fs::create_dir_all(&dir).map_err(|e| format!("mkdir bridges/: {e}"))?;
        let path = dir.join(format!("{}.json", b.id));
        let data = serde_json::to_string_pretty(b).map_err(|e| format!("serialize bridge: {e}"))?;
        std::fs::write(&path, format!("{data}\n")).map_err(|e| format!("write bridge: {e}"))
    }

    // Reviewer identity used when --reviewer is not passed; overridable via
    // the VELA_REVIEWER_ID environment variable.
    fn default_reviewer_id() -> String {
        std::env::var("VELA_REVIEWER_ID").unwrap_or_else(|_| "reviewer:will-blair".to_string())
    }

    // Build, validate, and persist a bridge-reviewed event for a
    // confirm/refute action. The payload is validated against the set of
    // bridge ids currently on disk before the event file is written.
    fn emit_bridge_reviewed_event(
        frontier: &Path,
        bridge_id: &str,
        status: &str,
        reviewer_id: &str,
        note: Option<&str>,
    ) -> Result<(), String> {
        let mut payload = serde_json::json!({
            "bridge_id": bridge_id,
            "status": status,
        });
        // Only attach the note when it carries non-whitespace content.
        if let Some(n) = note
            && !n.trim().is_empty()
        {
            payload["note"] = serde_json::Value::String(n.to_string());
        }
        let known_ids: Vec<String> = list_bridges(frontier)
            .unwrap_or_default()
            .into_iter()
            .map(|b| b.id)
            .collect();
        crate::events::validate_bridge_reviewed_against_state(&payload, &known_ids)?;
        let event = crate::events::new_bridge_reviewed_event(
            bridge_id,
            reviewer_id,
            "human",
            &format!("Bridge {status} by {reviewer_id}"),
            payload,
            Vec::new(),
        );
        let events_dir = frontier.join(".vela/events");
        std::fs::create_dir_all(&events_dir).map_err(|e| format!("mkdir .vela/events: {e}"))?;
        let event_path = events_dir.join(format!("{}.json", event.id));
        let data =
            serde_json::to_string_pretty(&event).map_err(|e| format!("serialize event: {e}"))?;
        std::fs::write(&event_path, format!("{data}\n")).map_err(|e| format!("write event: {e}"))
    }

    // Load every `*.json` bridge in the frontier, sorted by descending
    // finding-ref count, then by entity name for a stable display order.
    fn list_bridges(frontier: &Path) -> Result<Vec<Bridge>, String> {
        let dir = bridges_dir(frontier);
        if !dir.is_dir() {
            return Ok(Vec::new());
        }
        let mut out = Vec::new();
        for entry in std::fs::read_dir(&dir).map_err(|e| format!("read bridges/: {e}"))? {
            let entry = entry.map_err(|e| format!("read entry: {e}"))?;
            let path = entry.path();
            if path.extension().and_then(|s| s.to_str()) != Some("json") {
                continue;
            }
            let data = std::fs::read_to_string(&path).map_err(|e| format!("read {path:?}: {e}"))?;
            let b: Bridge =
                serde_json::from_str(&data).map_err(|e| format!("parse {path:?}: {e}"))?;
            out.push(b);
        }
        out.sort_by(|a, b| {
            b.finding_refs
                .len()
                .cmp(&a.finding_refs.len())
                .then(a.entity_name.cmp(&b.entity_name))
        });
        Ok(out)
    }

    match action {
        // Derive bridges by comparing two frontiers; results are written into
        // frontier A's bridge store.
        BridgesAction::Derive {
            frontier_a,
            label_a,
            frontier_b,
            label_b,
            json,
        } => {
            let a = repo::load_from_path(&frontier_a).unwrap_or_else(|e| fail_return(&e));
            let b = repo::load_from_path(&frontier_b).unwrap_or_else(|e| fail_return(&e));
            let now = chrono::Utc::now().to_rfc3339();
            let new_bridges =
                derive_bridges(&[(label_a.as_str(), &a), (label_b.as_str(), &b)], &now);

            // Re-deriving must not clobber human review: for any bridge a
            // reviewer already judged (status != Derived), keep the prior
            // status and derivation timestamp.
            let existing = list_bridges(&frontier_a).unwrap_or_default();
            let existing_by_id: HashMap<String, Bridge> =
                existing.iter().map(|b| (b.id.clone(), b.clone())).collect();
            let mut written = 0;
            let mut preserved = 0;
            let mut new_ids = Vec::new();
            for mut bridge in new_bridges {
                if let Some(prev) = existing_by_id.get(&bridge.id)
                    && prev.status != BridgeStatus::Derived
                {
                    bridge.status = prev.status;
                    bridge.derived_at = prev.derived_at.clone();
                    preserved += 1;
                }
                save_bridge(&frontier_a, &bridge).unwrap_or_else(|e| fail_return(&e));
                new_ids.push(bridge.id.clone());
                written += 1;
            }

            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&json!({
                        "ok": true,
                        "command": "bridges.derive",
                        "frontier_a": frontier_a.display().to_string(),
                        "frontier_b": frontier_b.display().to_string(),
                        "bridges_written": written,
                        "reviewer_judgments_preserved": preserved,
                        "ids": new_ids,
                    }))
                    .expect("serialize bridges.derive")
                );
                return;
            }

            println!();
            println!(
                " {}",
                format!("VELA · BRIDGES · DERIVE · {} ↔ {}", label_a, label_b)
                    .to_uppercase()
                    .dimmed()
            );
            println!(" {}", style::tick_row(60));
            println!(" {} {} bridge(s) materialized", style::ok("ok"), written);
            if preserved > 0 {
                println!(
                    " {} {} reviewer judgment(s) preserved",
                    style::ok("kept"),
                    preserved
                );
            }
            // Cap the id listing at 10 to keep the summary compact.
            for id in new_ids.iter().take(10) {
                println!(" · {id}");
            }
            if new_ids.len() > 10 {
                println!(" … and {} more", new_ids.len() - 10);
            }
            println!();
        }
        // List bridges, optionally filtered by a status name.
        BridgesAction::List {
            frontier,
            status,
            json,
        } => {
            let mut bridges = list_bridges(&frontier).unwrap_or_else(|e| fail_return(&e));
            if let Some(s) = status.as_deref() {
                let want = match s.to_lowercase().as_str() {
                    "derived" => BridgeStatus::Derived,
                    "confirmed" => BridgeStatus::Confirmed,
                    "refuted" => BridgeStatus::Refuted,
                    other => fail_return(&format!(
                        "unknown bridge status '{other}' (try derived|confirmed|refuted)"
                    )),
                };
                bridges.retain(|b| b.status == want);
            }
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&json!({
                        "ok": true,
                        "command": "bridges.list",
                        "frontier": frontier.display().to_string(),
                        "count": bridges.len(),
                        "bridges": bridges,
                    }))
                    .expect("serialize bridges.list")
                );
                return;
            }
            println!();
            println!(
                " {}",
                format!("VELA · BRIDGES · LIST · {}", frontier.display())
                    .to_uppercase()
                    .dimmed()
            );
            println!(" {}", style::tick_row(60));
            println!(" {} bridge(s)", bridges.len());
            for b in &bridges {
                // Color-code the status chip: derived = pending review.
                let chip = match b.status {
                    BridgeStatus::Derived => style::warn("derived"),
                    BridgeStatus::Confirmed => style::ok("confirmed"),
                    BridgeStatus::Refuted => style::lost("refuted"),
                };
                println!();
                println!(
                    " {chip} {} {} ↔ findings:{}",
                    b.id,
                    b.entity_name,
                    b.finding_refs.len()
                );
                println!(" frontiers: {}", b.frontiers.join(", "));
                if let Some(t) = &b.tension {
                    println!(" tension: {t}");
                }
            }
            println!();
        }
        // Show one bridge in full, including its finding references.
        BridgesAction::Show {
            frontier,
            bridge_id,
            json,
        } => {
            let b = load_bridge(&frontier, &bridge_id).unwrap_or_else(|e| fail_return(&e));
            if json {
                println!("{}", serde_json::to_string_pretty(&b).expect("serialize"));
                return;
            }
            println!();
            println!(
                " {}",
                format!("VELA · BRIDGES · SHOW · {}", b.id)
                    .to_uppercase()
                    .dimmed()
            );
            println!(" {}", style::tick_row(60));
            println!(" entity: {}", b.entity_name);
            println!(" status: {:?}", b.status);
            println!(" frontiers: {}", b.frontiers.join(", "));
            if !b.frontier_ids.is_empty() {
                println!(" frontier_ids: {}", b.frontier_ids.join(", "));
            }
            if let Some(t) = &b.tension {
                println!(" tension: {t}");
            }
            println!(" derived_at: {}", b.derived_at);
            println!(" finding refs ({}):", b.finding_refs.len());
            for r in &b.finding_refs {
                let dir = r.direction.as_deref().unwrap_or("—");
                // Truncate assertion text to 72 chars for display.
                let truncated: String = r.assertion_text.chars().take(72).collect();
                println!(
                    " · [{}] {} (conf={:.2}, dir={})",
                    r.frontier, r.finding_id, r.confidence, dir
                );
                println!(" {truncated}");
            }
            println!();
        }
        // Record a reviewer's confirmation: persist the new status, then
        // best-effort append the reviewed event (errors are ignored).
        BridgesAction::Confirm {
            frontier,
            bridge_id,
            reviewer,
            note,
            json,
        } => {
            let mut b = load_bridge(&frontier, &bridge_id).unwrap_or_else(|e| fail_return(&e));
            let reviewer_id = reviewer.unwrap_or_else(default_reviewer_id);
            b.status = BridgeStatus::Confirmed;
            save_bridge(&frontier, &b).unwrap_or_else(|e| fail_return(&e));
            let _ = emit_bridge_reviewed_event(
                &frontier,
                &bridge_id,
                "confirmed",
                &reviewer_id,
                note.as_deref(),
            );
            if json {
                println!("{}", serde_json::to_string_pretty(&b).expect("serialize"));
                return;
            }
            println!();
            println!(" {} {} now confirmed", style::ok("confirmed"), b.id);
            println!();
        }
        // Record a reviewer's refutation; mirrors Confirm, including the
        // best-effort event append.
        BridgesAction::Refute {
            frontier,
            bridge_id,
            reviewer,
            note,
            json,
        } => {
            let mut b = load_bridge(&frontier, &bridge_id).unwrap_or_else(|e| fail_return(&e));
            let reviewer_id = reviewer.unwrap_or_else(default_reviewer_id);
            b.status = BridgeStatus::Refuted;
            save_bridge(&frontier, &b).unwrap_or_else(|e| fail_return(&e));
            let _ = emit_bridge_reviewed_event(
                &frontier,
                &bridge_id,
                "refuted",
                &reviewer_id,
                note.as_deref(),
            );
            if json {
                println!("{}", serde_json::to_string_pretty(&b).expect("serialize"));
                return;
            }
            println!();
            println!(" {} {} now refuted", style::lost("refuted"), b.id);
            println!();
        }
    }
}
10299
/// Dispatch for `vela federation …`: manage the peer registry and
/// synchronize frontier state with a registered peer hub.
fn cmd_federation(action: FederationAction) {
    use crate::federation::PeerHub;

    match action {
        // Register a new peer hub in the frontier's peer registry.
        FederationAction::PeerAdd {
            frontier,
            id,
            url,
            pubkey,
            note,
            json,
        } => {
            let peer = PeerHub {
                id: id.clone(),
                url: url.clone(),
                public_key: pubkey.trim().to_string(),
                added_at: chrono::Utc::now().to_rfc3339(),
                note: note.clone(),
            };
            // Validate before touching the registry (rejects malformed
            // peers up front).
            peer.validate().unwrap_or_else(|e| fail_return(&e));

            let mut project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            if project.peers.iter().any(|p| p.id == id) {
                fail(&format!("peer '{id}' already in registry"));
            }
            project.peers.push(peer.clone());
            repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));

            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&json!({
                        "ok": true,
                        "command": "federation.peer-add",
                        "frontier": frontier.display().to_string(),
                        "peer": peer,
                        "registered_count": project.peers.len(),
                    }))
                    .expect("serialize federation.peer-add")
                );
            } else {
                println!(
                    "{} peer {} (pubkey {}…) at {}",
                    style::ok("registered"),
                    id,
                    // First 16 hex chars of the pubkey as a short fingerprint.
                    &peer.public_key[..16],
                    peer.url
                );
            }
        }
        // Print the registered peers (JSON or aligned table).
        FederationAction::PeerList { frontier, json } => {
            let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&json!({
                        "ok": true,
                        "command": "federation.peer-list",
                        "frontier": frontier.display().to_string(),
                        "peers": project.peers,
                    }))
                    .expect("serialize federation.peer-list")
                );
            } else {
                println!();
                println!(
                    " {}",
                    format!("VELA · FEDERATION · PEERS · {}", frontier.display())
                        .to_uppercase()
                        .dimmed()
                );
                println!(" {}", style::tick_row(60));
                if project.peers.is_empty() {
                    println!(" (no peers registered)");
                } else {
                    for p in &project.peers {
                        let note_suffix = if p.note.is_empty() {
                            String::new()
                        } else {
                            format!(" · {}", p.note)
                        };
                        println!(
                            " {:<24} {} {}…{note_suffix}",
                            p.id,
                            p.url,
                            &p.public_key[..16]
                        );
                    }
                }
            }
        }
        // Sync with a peer: resolve its frontier (via the hub registry or a
        // direct manifest URL), then diff (dry-run) or merge and record
        // conflicts.
        FederationAction::Sync {
            frontier,
            peer_id,
            url,
            via_hub,
            vfr_id,
            allow_cross_vfr,
            dry_run,
            json,
        } => {
            use crate::federation::{self, DiscoveryResult};

            let mut project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let Some(peer) = project.peers.iter().find(|p| p.id == peer_id).cloned() else {
                fail(&format!(
                    "peer '{peer_id}' not in registry; run `vela federation peer add` first"
                ));
            };
            let local_frontier_id = project.frontier_id();

            // Safety gate: syncing against a different vfr_id than the local
            // frontier marks every peer finding as missing locally, so it
            // must be an explicit opt-in via --allow-cross-vfr.
            if via_hub
                && let Some(target) = vfr_id.as_deref()
                && target != local_frontier_id
                && !allow_cross_vfr
            {
                fail(&format!(
                    "cross-vfr sync refused: --vfr-id {target} differs from local frontier_id {local_frontier_id}. \
                     Pass --allow-cross-vfr to opt in (every peer-side finding will be recorded as a \
                     missing_locally conflict). Or omit --vfr-id to default to the local frontier id."
                ));
            }

            // Flattened discovery result: either the peer's frontier plus the
            // source it was resolved from, or one of three failure shapes.
            #[derive(Debug)]
            enum SyncOutcome {
                Resolved(crate::project::Project, String),
                BrokenLocator(String, String, u16),
                UnverifiedEntry(String, String),
                EntryNotFound(String, u16),
            }

            let outcome = if via_hub {
                // Hub path: look the vfr up in the peer's registry and verify
                // the entry signature against the registered public key.
                let target_vfr = vfr_id.clone().unwrap_or_else(|| local_frontier_id.clone());
                match federation::discover_peer_frontier(
                    &peer.url,
                    &target_vfr,
                    Some(&peer.public_key),
                ) {
                    DiscoveryResult::Resolved(p) => {
                        let src =
                            format!("{}/entries/{}", peer.url.trim_end_matches('/'), target_vfr);
                        SyncOutcome::Resolved(p, src)
                    }
                    DiscoveryResult::BrokenLocator {
                        vfr_id,
                        locator,
                        status,
                    } => SyncOutcome::BrokenLocator(vfr_id, locator, status),
                    DiscoveryResult::UnverifiedEntry { vfr_id, reason } => {
                        SyncOutcome::UnverifiedEntry(vfr_id, reason)
                    }
                    DiscoveryResult::EntryNotFound { vfr_id, status } => {
                        SyncOutcome::EntryNotFound(vfr_id, status)
                    }
                    DiscoveryResult::Unreachable { url, error } => {
                        fail(&format!("peer hub unreachable ({url}): {error}"));
                    }
                }
            } else {
                // Direct path: fetch the manifest straight from the peer,
                // defaulting to <peer>/manifest/<local_frontier_id>.json.
                let resolved_url = url.unwrap_or_else(|| {
                    let base = peer.url.trim_end_matches('/');
                    format!("{base}/manifest/{local_frontier_id}.json")
                });
                match federation::fetch_peer_frontier(&resolved_url) {
                    Ok(p) => SyncOutcome::Resolved(p, resolved_url),
                    Err(e) => fail(&format!("direct fetch failed: {e}")),
                }
            };

            // Deferred init: only the Resolved arm assigns peer_source; all
            // other arms report and `return` before it is read.
            let peer_source: String;
            let peer_state = match outcome {
                SyncOutcome::Resolved(p, src) => {
                    if !json {
                        println!(" · resolved via {src}");
                    }
                    peer_source = src;
                    p
                }
                // Registry entry verified, but the content locator it points
                // at is dead. Record a conflict unless this is a dry run.
                SyncOutcome::BrokenLocator(vfr, locator, status) => {
                    if dry_run {
                        if json {
                            println!(
                                "{}",
                                serde_json::to_string_pretty(&json!({
                                    "ok": true,
                                    "command": "federation.sync",
                                    "dry_run": true,
                                    "outcome": "broken_locator",
                                    "vfr_id": vfr,
                                    "locator": locator,
                                    "http_status": status,
                                }))
                                .expect("serialize")
                            );
                        } else {
                            println!(
                                "{} dry-run: peer entry resolved but locator dead",
                                style::warn("broken_locator")
                            );
                            println!(" vfr_id: {vfr}");
                            println!(" locator: {locator} (HTTP {status})");
                        }
                        return;
                    }
                    let report = federation::record_locator_failure(
                        &mut project,
                        &peer_id,
                        &vfr,
                        &locator,
                        status,
                    );
                    repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));
                    if json {
                        println!(
                            "{}",
                            serde_json::to_string_pretty(&json!({
                                "ok": true,
                                "command": "federation.sync",
                                "outcome": "broken_locator",
                                "report": report,
                            }))
                            .expect("serialize")
                        );
                    } else {
                        println!(
                            "{} sync recorded broken-locator conflict against {peer_id}",
                            style::warn("broken_locator")
                        );
                        println!(" vfr_id: {vfr}");
                        println!(" locator: {locator} (HTTP {status})");
                        println!(" events appended: {}", report.events_appended);
                    }
                    return;
                }
                // Registry entry signature did not verify against the peer's
                // registered key: halt and (unless dry-run) record it.
                SyncOutcome::UnverifiedEntry(vfr, reason) => {
                    if dry_run {
                        if json {
                            println!(
                                "{}",
                                serde_json::to_string_pretty(&json!({
                                    "ok": true,
                                    "command": "federation.sync",
                                    "dry_run": true,
                                    "outcome": "unverified_peer_entry",
                                    "vfr_id": vfr,
                                    "reason": reason,
                                }))
                                .expect("serialize")
                            );
                        } else {
                            println!(
                                "{} dry-run: peer entry signature did not verify",
                                style::lost("unverified_peer_entry")
                            );
                            println!(" vfr_id: {vfr}");
                            println!(" reason: {reason}");
                        }
                        return;
                    }
                    let report =
                        federation::record_unverified_entry(&mut project, &peer_id, &vfr, &reason);
                    repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));
                    if json {
                        println!(
                            "{}",
                            serde_json::to_string_pretty(&json!({
                                "ok": true,
                                "command": "federation.sync",
                                "outcome": "unverified_peer_entry",
                                "report": report,
                            }))
                            .expect("serialize")
                        );
                    } else {
                        println!(
                            "{} sync halted; peer's registry entry signature did not verify",
                            style::lost("unverified_peer_entry")
                        );
                        println!(" vfr_id: {vfr}");
                        println!(" reason: {reason}");
                    }
                    return;
                }
                // Hub reachable but does not publish this vfr_id; nothing is
                // written locally (note: JSON output reports ok=false).
                SyncOutcome::EntryNotFound(vfr, status) => {
                    if json {
                        println!(
                            "{}",
                            serde_json::to_string_pretty(&json!({
                                "ok": false,
                                "command": "federation.sync",
                                "outcome": "entry_not_found",
                                "vfr_id": vfr,
                                "http_status": status,
                            }))
                            .expect("serialize")
                        );
                    } else {
                        println!(
                            "{} peer's hub does not publish vfr_id {vfr} (HTTP {status})",
                            style::warn("entry_not_found")
                        );
                    }
                    return;
                }
            };

            // Dry run: report conflicts without mutating local state.
            if dry_run {
                let conflicts = federation::diff_frontiers(&project, &peer_state);
                if json {
                    println!(
                        "{}",
                        serde_json::to_string_pretty(&json!({
                            "ok": true,
                            "command": "federation.sync",
                            "dry_run": true,
                            "peer_id": peer_id,
                            "peer_source": peer_source,
                            "conflicts": conflicts,
                        }))
                        .expect("serialize federation.sync (dry-run)")
                    );
                } else {
                    println!(
                        "{} dry-run vs {peer_id} ({}): {} conflict(s)",
                        style::ok("ok"),
                        peer_source,
                        conflicts.len()
                    );
                    for c in &conflicts {
                        println!(" · {} {} {}", c.kind.as_str(), c.finding_id, c.detail);
                    }
                }
                return;
            }

            // Real sync: merge peer state, persist, and report the outcome.
            let report = federation::sync_with_peer(&mut project, &peer_id, &peer_state);
            repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));

            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&json!({
                        "ok": true,
                        "command": "federation.sync",
                        "peer_id": peer_id,
                        "peer_source": peer_source,
                        "report": report,
                    }))
                    .expect("serialize federation.sync")
                );
            } else {
                println!(
                    "{} synced with {} ({})",
                    style::ok("ok"),
                    peer_id,
                    peer_source
                );
                // Snapshot hashes abbreviated to 16 chars, guarding against
                // shorter-than-16 hashes.
                println!(
                    " our: {}",
                    &report.our_snapshot_hash[..16.min(report.our_snapshot_hash.len())]
                );
                println!(
                    " peer: {}",
                    &report.peer_snapshot_hash[..16.min(report.peer_snapshot_hash.len())]
                );
                println!(
                    " conflicts: {} events appended: {}",
                    report.conflicts.len(),
                    report.events_appended
                );
                for c in &report.conflicts {
                    println!(" · {} {} {}", c.kind.as_str(), c.finding_id, c.detail);
                }
            }
        }
        // Delegates to the dedicated helper below.
        FederationAction::PushResolution {
            frontier,
            conflict_event_id,
            to,
            key,
            vfr_id,
            json,
        } => {
            cmd_federation_push_resolution(frontier, conflict_event_id, to, key, vfr_id, json);
        }
        // Drop a peer from the registry; fails if the id is unknown.
        FederationAction::PeerRemove { frontier, id, json } => {
            let mut project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let before = project.peers.len();
            project.peers.retain(|p| p.id != id);
            if project.peers.len() == before {
                fail(&format!("peer '{id}' not found in registry"));
            }
            repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));

            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&json!({
                        "ok": true,
                        "command": "federation.peer-remove",
                        "frontier": frontier.display().to_string(),
                        "removed": id,
                        "remaining": project.peers.len(),
                    }))
                    .expect("serialize federation.peer-remove")
                );
            } else {
                println!(
                    "{} peer {} ({} remaining)",
                    style::ok("removed"),
                    id,
                    project.peers.len()
                );
            }
        }
    }
}
10727
/// Push a signed conflict-resolution event to a registered peer hub.
///
/// Looks up the `frontier.conflict_resolved` event paired with
/// `conflict_event_id`, signs it with the resolving actor's private key
/// (after checking the key matches the actor's registered public key), and
/// POSTs it to `<peer>/entries/<vfr>/events` with signature headers.
/// Exits non-zero when the peer refuses the push.
fn cmd_federation_push_resolution(
    frontier: PathBuf,
    conflict_event_id: String,
    to: String,
    key: Option<PathBuf>,
    vfr_id: Option<String>,
    json: bool,
) {
    use crate::canonical;
    use crate::sign;

    let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));

    // The destination must already be a registered peer.
    let Some(peer) = project.peers.iter().find(|p| p.id == to).cloned() else {
        fail(&format!(
            "peer '{to}' not in registry; run `vela federation peer-add` first"
        ));
    };

    // Find the resolution event whose payload points back at the conflict.
    let Some(resolution) = project
        .events
        .iter()
        .find(|e| {
            e.kind == "frontier.conflict_resolved"
                && e.payload.get("conflict_event_id").and_then(|v| v.as_str())
                    == Some(conflict_event_id.as_str())
        })
        .cloned()
    else {
        fail(&format!(
            "no frontier.conflict_resolved event paired with conflict {conflict_event_id} in {}",
            frontier.display()
        ));
    };

    // The event's actor must be in the local actor registry so we can check
    // the signing key against a known public key.
    let actor_id = resolution.actor.id.clone();
    let Some(actor) = project.actors.iter().find(|a| a.id == actor_id) else {
        fail(&format!(
            "resolution event's actor.id ({actor_id}) is not in the frontier's actor registry; \
             register the reviewer with `vela actor add` before pushing"
        ));
    };

    // Key resolution order: explicit --key, then a per-actor key file under
    // ~/.config/vela/keys/ (actor id with ':' and '/' sanitized to '_'),
    // then the shared private.key fallback.
    let key_path = key.unwrap_or_else(|| {
        let home = std::env::var("HOME").unwrap_or_default();
        let base = PathBuf::from(home)
            .join(".config")
            .join("vela")
            .join("keys");
        let safe_id = actor.id.replace([':', '/'], "_");
        let by_actor = base.join(format!("{safe_id}.key"));
        if by_actor.exists() {
            by_actor
        } else {
            base.join("private.key")
        }
    });

    let signing_key = sign::load_signing_key_from_path(&key_path).unwrap_or_else(|e| {
        fail_return(&format!(
            "load private key from {}: {e}",
            key_path.display()
        ))
    });
    // Refuse to sign with a key that does not belong to the resolving actor.
    let pubkey_hex = sign::pubkey_hex(&signing_key);
    if !pubkey_hex.eq_ignore_ascii_case(&actor.public_key) {
        fail(&format!(
            "private key at {} does not match actor {}'s registered public key. \
             Loaded pubkey {}, expected {}.",
            key_path.display(),
            actor.id,
            &pubkey_hex[..16],
            &actor.public_key[..16]
        ));
    }

    let signature_hex = sign::sign_event(&resolution, &signing_key)
        .unwrap_or_else(|e| fail_return(&format!("sign event: {e}")));

    // The POSTed body is the event minus its embedded signature (the
    // signature travels in a header instead); the canonicalization pass is
    // a sanity check that the body round-trips deterministically.
    let mut body = resolution.clone();
    body.signature = None;
    let body_value =
        serde_json::to_value(&body).unwrap_or_else(|e| fail_return(&format!("serialize: {e}")));
    let _canonical_check = canonical::to_canonical_bytes(&body_value)
        .unwrap_or_else(|e| fail_return(&format!("canonicalize: {e}")));

    let target_vfr = vfr_id.unwrap_or_else(|| project.frontier_id());
    let url = format!(
        "{}/entries/{}/events",
        peer.url.trim_end_matches('/'),
        target_vfr
    );

    // NOTE(review): the blocking HTTP request runs on a dedicated thread —
    // presumably so reqwest's blocking client is never created on an async
    // runtime thread (it panics there); confirm against the callers.
    let url_owned = url.clone();
    let pubkey_owned = pubkey_hex.clone();
    let signature_owned = signature_hex.clone();
    let body_owned = body_value.clone();
    let response: Result<(u16, String), String> = std::thread::spawn(move || {
        let client = reqwest::blocking::Client::new();
        let resp = client
            .post(&url_owned)
            .header("X-Vela-Signer-Pubkey", &pubkey_owned)
            .header("X-Vela-Signature", &signature_owned)
            .json(&body_owned)
            .send()
            .map_err(|e| format!("HTTP POST {url_owned}: {e}"))?;
        let status = resp.status().as_u16();
        let text = resp.text().unwrap_or_default();
        Ok((status, text))
    })
    .join()
    .map_err(|_| "push thread panicked".to_string())
    .unwrap_or_else(|e| fail_return(&e));

    let (status, text) = response.unwrap_or_else(|e| fail_return(&e));
    // Tolerate non-JSON peer responses by wrapping them as {"raw": …}.
    let parsed: serde_json::Value =
        serde_json::from_str(&text).unwrap_or_else(|_| json!({ "raw": text }));

    // 200/201/202 all count as accepted.
    let accepted = matches!(status, 200..=202);
    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": accepted,
                "command": "federation.push-resolution",
                "frontier": frontier.display().to_string(),
                "peer_id": to,
                "url": url,
                "conflict_event_id": conflict_event_id,
                "event_id": resolution.id,
                "actor_id": actor.id,
                "http_status": status,
                "response": parsed,
            }))
            .expect("serialize federation.push-resolution")
        );
    } else if accepted {
        println!(
            "{} resolution {} pushed to {} (HTTP {})",
            style::ok("ok"),
            &resolution.id[..16.min(resolution.id.len())],
            to,
            status
        );
        println!(" url: {url}");
        println!(" signer: {} (actor {})", &pubkey_hex[..16], actor.id);
    } else {
        println!("{} push refused (HTTP {})", style::lost("rejected"), status);
        println!(" url: {url}");
        println!(" response: {text}");
        std::process::exit(1);
    }
}
10904
/// Dispatch for `vela queue …`: inspect, clear, or sign-and-apply the
/// queue of pending actions (default path via `queue::default_queue_path`).
fn cmd_queue(action: QueueAction) {
    use crate::queue;
    match action {
        // Print queued actions (JSON or human-readable listing).
        QueueAction::List { queue_file, json } => {
            let path = queue_file.unwrap_or_else(queue::default_queue_path);
            let q = queue::load(&path).unwrap_or_else(|e| fail_return(&e));
            if json {
                let payload = json!({
                    "ok": true,
                    "command": "queue.list",
                    "queue_file": path.display().to_string(),
                    "schema": q.schema,
                    "actions": q.actions,
                });
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload).expect("failed to serialize queue.list")
                );
            } else {
                println!();
                println!(
                    " {}",
                    format!("VELA · QUEUE · LIST · {}", path.display())
                        .to_uppercase()
                        .dimmed()
                );
                println!(" {}", style::tick_row(60));
                if q.actions.is_empty() {
                    println!(" (queue is empty)");
                } else {
                    for (idx, action) in q.actions.iter().enumerate() {
                        println!(
                            " [{idx}] {} → {} queued {}",
                            action.kind,
                            action.frontier.display(),
                            action.queued_at
                        );
                    }
                }
            }
        }
        // Drop every queued action and report how many were removed.
        QueueAction::Clear { queue_file, json } => {
            let path = queue_file.unwrap_or_else(queue::default_queue_path);
            let dropped = queue::clear(&path).unwrap_or_else(|e| fail_return(&e));
            if json {
                let payload = json!({
                    "ok": true,
                    "command": "queue.clear",
                    "queue_file": path.display().to_string(),
                    "dropped": dropped,
                });
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize queue.clear")
                );
            } else {
                println!("{} dropped {dropped} queued action(s)", style::ok("ok"));
            }
        }
        // Sign and apply queued actions. Each action is confirmed
        // interactively unless --yes-to-all; declined or failed actions stay
        // in the queue, successfully applied ones are removed.
        QueueAction::Sign {
            actor,
            key,
            queue_file,
            yes_to_all,
            json,
        } => {
            let path = queue_file.unwrap_or_else(queue::default_queue_path);
            let q = queue::load(&path).unwrap_or_else(|e| fail_return(&e));
            if q.actions.is_empty() {
                if json {
                    println!("{}", json!({"ok": true, "signed": 0, "remaining": 0}));
                } else {
                    println!("{} queue is empty", style::ok("ok"));
                }
                return;
            }
            // The key file holds a hex-encoded 32-byte Ed25519 private key.
            let key_hex = std::fs::read_to_string(&key)
                .map(|s| s.trim().to_string())
                .unwrap_or_else(|e| fail_return(&format!("read key {}: {e}", key.display())));
            let signing_key = parse_signing_key(&key_hex);
            let mut signed_count = 0usize;
            let mut remaining = Vec::new();
            for action in q.actions.iter() {
                // Declined actions are kept for a later run.
                if !yes_to_all && !confirm_action(action) {
                    remaining.push(action.clone());
                    continue;
                }
                match sign_and_apply(&signing_key, &actor, action) {
                    Ok(report) => {
                        signed_count += 1;
                        if !json {
                            println!(
                                "{} {} on {} → {}",
                                style::ok("signed"),
                                action.kind,
                                action.frontier.display(),
                                report
                            );
                        }
                    }
                    Err(error) => {
                        // Failures are not dropped: the action stays queued
                        // so it can be retried.
                        remaining.push(action.clone());
                        if !json {
                            eprintln!(
                                "{} {} on {}: {error}",
                                style::warn("failed"),
                                action.kind,
                                action.frontier.display()
                            );
                        }
                    }
                }
            }
            // Rewrite the queue with only the unprocessed actions.
            queue::replace_actions(&path, remaining.clone()).unwrap_or_else(|e| fail_return(&e));
            if json {
                let payload = json!({
                    "ok": true,
                    "command": "queue.sign",
                    "signed": signed_count,
                    "remaining": remaining.len(),
                });
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload).expect("failed to serialize queue.sign")
                );
            } else {
                println!(
                    "{} signed {signed_count} action(s); {} remaining in queue",
                    style::ok("ok"),
                    remaining.len()
                );
            }
        }
    }
}
11046
11047fn parse_signing_key(hex_str: &str) -> ed25519_dalek::SigningKey {
11048 let bytes = hex::decode(hex_str)
11049 .unwrap_or_else(|e| fail_return(&format!("invalid private-key hex: {e}")));
11050 let key_bytes: [u8; 32] = bytes
11051 .try_into()
11052 .unwrap_or_else(|_| fail_return("private key must be 32 bytes"));
11053 ed25519_dalek::SigningKey::from_bytes(&key_bytes)
11054}
11055
11056fn confirm_action(action: &crate::queue::QueuedAction) -> bool {
11057 use std::io::{self, BufRead, Write};
11058 let mut stdout = io::stdout().lock();
11059 let _ = writeln!(
11060 stdout,
11061 " sign {} on {}? [y/N] ",
11062 action.kind,
11063 action.frontier.display()
11064 );
11065 let _ = stdout.flush();
11066 drop(stdout);
11067 let stdin = io::stdin();
11068 let mut line = String::new();
11069 if stdin.lock().read_line(&mut line).is_err() {
11070 return false;
11071 }
11072 matches!(line.trim().to_lowercase().as_str(), "y" | "yes")
11073}
11074
/// Sign one queued action and apply it to the frontier it targets.
///
/// Dispatches on `action.kind`:
/// * `propose_review` / `propose_note` / `propose_revise_confidence` /
///   `propose_retract` — build a finding proposal from `action.args`,
///   sign it, and create-or-apply it; returns `"proposal <id>"`.
/// * `accept_proposal` / `reject_proposal` — sign a canonical review
///   preimage, then accept or reject the referenced proposal; returns
///   `"event <id>"` or `"rejected <id>"`.
///
/// Errors are plain strings: missing or ill-typed args, signing failures,
/// apply failures, or an unsupported action kind.
fn sign_and_apply(
    signing_key: &ed25519_dalek::SigningKey,
    actor: &str,
    action: &crate::queue::QueuedAction,
) -> Result<String, String> {
    use crate::events::StateTarget;
    use crate::proposals;
    let args = &action.args;
    match action.kind.as_str() {
        "propose_review" | "propose_note" | "propose_revise_confidence" | "propose_retract" => {
            // Map the queue's action kind to the proposal event kind.
            let kind = match action.kind.as_str() {
                "propose_review" => "finding.review",
                "propose_note" => "finding.note",
                "propose_revise_confidence" => "finding.confidence_revise",
                "propose_retract" => "finding.retract",
                _ => unreachable!(),
            };
            // Args required by every propose_* kind.
            let target_id = args
                .get("target_finding_id")
                .and_then(Value::as_str)
                .ok_or("target_finding_id missing")?;
            let reason = args
                .get("reason")
                .and_then(Value::as_str)
                .ok_or("reason missing")?;
            // Kind-specific proposal payload pulled from args.
            let payload = match action.kind.as_str() {
                "propose_review" => {
                    let status = args
                        .get("status")
                        .and_then(Value::as_str)
                        .ok_or("status missing")?;
                    json!({"status": status})
                }
                "propose_note" => {
                    let text = args
                        .get("text")
                        .and_then(Value::as_str)
                        .ok_or("text missing")?;
                    json!({"text": text})
                }
                "propose_revise_confidence" => {
                    let new_score = args
                        .get("new_score")
                        .and_then(Value::as_f64)
                        .ok_or("new_score missing")?;
                    json!({"new_score": new_score})
                }
                "propose_retract" => json!({}),
                _ => unreachable!(),
            };
            // Honor a queued timestamp when present; otherwise stamp "now".
            let created_at = args
                .get("created_at")
                .and_then(Value::as_str)
                .map(String::from)
                .unwrap_or_else(|| chrono::Utc::now().to_rfc3339());
            let mut proposal = proposals::new_proposal(
                kind,
                StateTarget {
                    r#type: "finding".to_string(),
                    id: target_id.to_string(),
                },
                actor,
                "human",
                reason,
                payload,
                Vec::new(),
                Vec::new(),
            );
            proposal.created_at = created_at;
            // The proposal id is content-derived, so it must be recomputed
            // after the created_at override above.
            proposal.id = proposals::proposal_id(&proposal);
            // Sign the proposal; the returned signature is discarded.
            // NOTE(review): presumably signing is needed only for key
            // validation or side effects — confirm whether the signature
            // should be attached to the proposal/frontier here.
            let _signature = crate::sign::sign_proposal(&proposal, signing_key)?;
            let result = proposals::create_or_apply(&action.frontier, proposal, false)
                .map_err(|e| format!("create_or_apply: {e}"))?;
            Ok(format!("proposal {}", result.proposal_id))
        }
        "accept_proposal" | "reject_proposal" => {
            let proposal_id = args
                .get("proposal_id")
                .and_then(Value::as_str)
                .ok_or("proposal_id missing")?;
            let reason = args
                .get("reason")
                .and_then(Value::as_str)
                .ok_or("reason missing")?;
            let timestamp = args
                .get("timestamp")
                .and_then(Value::as_str)
                .map(String::from)
                .unwrap_or_else(|| chrono::Utc::now().to_rfc3339());
            // Canonical preimage of the review decision that gets signed.
            let preimage = json!({
                "action": if action.kind == "accept_proposal" { "accept" } else { "reject" },
                "proposal_id": proposal_id,
                "reviewer_id": actor,
                "reason": reason,
                "timestamp": timestamp,
            });
            let bytes = crate::canonical::to_canonical_bytes(&preimage)?;
            use ed25519_dalek::Signer;
            // NOTE(review): the hex signature is computed but never stored
            // or transmitted — confirm whether accept/reject should record it.
            let _signature = hex::encode(signing_key.sign(&bytes).to_bytes());
            if action.kind == "accept_proposal" {
                let event_id =
                    crate::proposals::accept_at_path(&action.frontier, proposal_id, actor, reason)
                        .map_err(|e| format!("accept_at_path: {e}"))?;
                Ok(format!("event {event_id}"))
            } else {
                crate::proposals::reject_at_path(&action.frontier, proposal_id, actor, reason)
                    .map_err(|e| format!("reject_at_path: {e}"))?;
                Ok(format!("rejected {proposal_id}"))
            }
        }
        other => Err(format!("unsupported queued action kind '{other}'")),
    }
}
11196
11197fn cmd_entity(action: EntityAction) {
11209 use crate::entity_resolve;
11210 match action {
11211 EntityAction::Resolve {
11212 frontier,
11213 force,
11214 json,
11215 } => {
11216 let mut p = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
11217 let report = entity_resolve::resolve_frontier(&mut p, force);
11218 repo::save_to_path(&frontier, &p).unwrap_or_else(|e| fail_return(&e));
11219 if json {
11220 println!(
11221 "{}",
11222 serde_json::to_string_pretty(&serde_json::json!({
11223 "ok": true,
11224 "command": "entity.resolve",
11225 "frontier_path": frontier.display().to_string(),
11226 "report": report,
11227 }))
11228 .expect("serialize")
11229 );
11230 } else {
11231 println!(
11232 "{} resolved {} of {} entities ({} already, {} unresolved) across {} findings",
11233 style::ok("entity"),
11234 report.resolved,
11235 report.total_entities,
11236 report.already_resolved,
11237 report.unresolved_count,
11238 report.findings_touched,
11239 );
11240 let unresolved_summary: std::collections::BTreeSet<&str> = report
11241 .per_finding
11242 .iter()
11243 .flat_map(|f| f.unresolved.iter().map(String::as_str))
11244 .collect();
11245 if !unresolved_summary.is_empty() {
11246 let take = unresolved_summary.iter().take(8).collect::<Vec<_>>();
11247 println!(
11248 " unresolved (first {}): {}",
11249 take.len(),
11250 take.iter().copied().cloned().collect::<Vec<_>>().join(", ")
11251 );
11252 }
11253 }
11254 }
11255 EntityAction::List { json } => {
11256 let entries: Vec<serde_json::Value> = entity_resolve::iter_bundled()
11257 .map(|(name, etype, source, id)| {
11258 serde_json::json!({
11259 "canonical_name": name,
11260 "entity_type": etype,
11261 "source": source,
11262 "id": id,
11263 })
11264 })
11265 .collect();
11266 if json {
11267 println!(
11268 "{}",
11269 serde_json::to_string_pretty(&serde_json::json!({
11270 "ok": true,
11271 "command": "entity.list",
11272 "count": entries.len(),
11273 "entries": entries,
11274 }))
11275 .expect("serialize")
11276 );
11277 } else {
11278 println!("{} {} bundled entries", style::ok("entity"), entries.len());
11279 for e in &entries {
11280 println!(
11281 " {:32} {:18} {} {}",
11282 e["canonical_name"].as_str().unwrap_or("?"),
11283 e["entity_type"].as_str().unwrap_or("?"),
11284 e["source"].as_str().unwrap_or("?"),
11285 e["id"].as_str().unwrap_or("?"),
11286 );
11287 }
11288 }
11289 }
11290 }
11291}
11292
/// Handle `vela link …` subcommands. Currently only `link add`, which
/// attaches a typed link from one finding to a local or cross-frontier
/// target, with an optional best-effort HTTP sanity check on cross-frontier
/// targets.
fn cmd_link(action: LinkAction) {
    use crate::bundle::{Link, LinkRef};
    match action {
        LinkAction::Add {
            frontier,
            from,
            to,
            r#type,
            note,
            inferred_by,
            no_check_target,
            json,
        } => {
            // Validate the link type and who inferred the link.
            validate_enum_arg("--type", &r#type, bundle::VALID_LINK_TYPES);
            if !["compiler", "reviewer", "author"].contains(&inferred_by.as_str()) {
                fail(&format!(
                    "invalid --inferred-by '{inferred_by}'. Valid: compiler, reviewer, author"
                ));
            }
            // `--to` is either a local finding id (vf_<hex>) or a
            // cross-frontier reference (vf_<hex>@vfr_<hex>).
            let parsed = LinkRef::parse(&to).unwrap_or_else(|e| {
                fail(&format!(
                    "invalid --to '{to}': {e}. Expected vf_<hex> or vf_<hex>@vfr_<hex>"
                ))
            });
            let mut p = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            // The source finding must exist in this frontier.
            let source_idx = p
                .findings
                .iter()
                .position(|f| f.id == from)
                .unwrap_or_else(|| {
                    fail_return(&format!("--from finding '{from}' not in frontier"))
                });
            // A local target must already exist in this frontier.
            if let LinkRef::Local { vf_id } = &parsed
                && !p.findings.iter().any(|f| &f.id == vf_id)
            {
                fail(&format!(
                    "local --to target '{vf_id}' not in frontier; add the target finding first"
                ));
            }
            // A cross-frontier target must reference a declared dependency.
            if let LinkRef::Cross { vfr_id, .. } = &parsed
                && p.dep_for_vfr(vfr_id).is_none()
            {
                fail(&format!(
                    "cross-frontier --to references vfr_id '{vfr_id}' but no matching dep is declared. Run `vela frontier add-dep {vfr_id} --locator <url> --snapshot <hash>` first."
                ));
            }

            // Best-effort check of a cross-frontier target: fetch the dep
            // frontier from its http(s) locator and warn (never fail) if the
            // target finding is superseded or missing. Skipped when
            // --no-check-target is set or the locator is not http(s).
            let mut target_warning: Option<String> = None;
            if let LinkRef::Cross {
                vfr_id: target_vfr,
                vf_id: target_vf,
            } = &parsed
                && !no_check_target
                && let Some(dep) = p.dep_for_vfr(target_vfr)
                && let Some(locator) = dep.locator.as_deref()
                && (locator.starts_with("http://") || locator.starts_with("https://"))
            {
                let client = reqwest::blocking::Client::builder()
                    .timeout(std::time::Duration::from_secs(15))
                    .build()
                    .ok();
                // Any network or parse failure silently skips the check.
                if let Some(client) = client
                    && let Ok(resp) = client.get(locator).send()
                    && resp.status().is_success()
                    && let Ok(dep_project) = resp.json::<crate::project::Project>()
                {
                    if let Some(target_finding) =
                        dep_project.findings.iter().find(|f| &f.id == target_vf)
                    {
                        if target_finding.flags.superseded {
                            target_warning = Some(format!(
                                "warn · cross-frontier target '{target_vf}' in '{target_vfr}' has flags.superseded = true. \
You may be linking to outdated wording. Pull --transitive and inspect the supersedes chain to find the current finding. \
Use --no-check-target to skip this check."
                            ));
                        }
                    } else {
                        target_warning = Some(format!(
                            "warn · cross-frontier target '{target_vf}' not found in dep '{target_vfr}' (fetched from {locator}). \
The target may have been removed or never existed in the pinned snapshot."
                        ));
                    }
                }
            }

            // Record the link on the source finding and persist.
            let now = chrono::Utc::now().to_rfc3339_opts(chrono::SecondsFormat::Secs, true);
            let link = Link {
                target: to.clone(),
                link_type: r#type.clone(),
                note: note.clone(),
                inferred_by: inferred_by.clone(),
                created_at: now,
                mechanism: None,
            };
            p.findings[source_idx].links.push(link);
            project::recompute_stats(&mut p);
            repo::save_to_path(&frontier, &p).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "link.add",
                "frontier": frontier.display().to_string(),
                "from": from,
                "to": to,
                "type": r#type,
                "cross_frontier": parsed.is_cross_frontier(),
            });
            if json {
                // Attach the warning (if any) to a copy of the payload.
                let mut p2 = payload.clone();
                if let Some(w) = &target_warning
                    && let serde_json::Value::Object(m) = &mut p2
                {
                    m.insert(
                        "target_warning".to_string(),
                        serde_json::Value::String(w.clone()),
                    );
                }
                println!(
                    "{}",
                    serde_json::to_string_pretty(&p2).expect("failed to serialize link.add")
                );
            } else {
                println!(
                    "{} {} --[{}]--> {}{}",
                    style::ok("link"),
                    from,
                    r#type,
                    to,
                    if parsed.is_cross_frontier() {
                        " (cross-frontier)"
                    } else {
                        ""
                    }
                );
                if let Some(w) = target_warning {
                    println!(" {w}");
                }
            }
        }
    }
}
11438
/// Handle the `vela frontier …` subcommands: scaffold a new frontier,
/// materialize its repo layout, manage and refresh cross-frontier
/// dependencies, and run the weekly diff report.
fn cmd_frontier(action: FrontierAction) {
    use crate::project::ProjectDependency;
    use crate::repo;
    match action {
        // `frontier new` — write an empty frontier file to disk.
        FrontierAction::New {
            path,
            name,
            description,
            force,
            json,
        } => {
            // Refuse to clobber an existing file unless --force is given.
            if path.exists() && !force {
                fail(&format!(
                    "{} already exists; pass --force to overwrite",
                    path.display()
                ));
            }
            let now = chrono::Utc::now().to_rfc3339_opts(chrono::SecondsFormat::Secs, true);
            // A fresh project: current schema/compiler identifiers and
            // empty collections everywhere.
            let project = project::Project {
                vela_version: project::VELA_SCHEMA_VERSION.to_string(),
                schema: project::VELA_SCHEMA_URL.to_string(),
                frontier_id: None,
                project: project::ProjectMeta {
                    name: name.clone(),
                    description: description.clone(),
                    compiled_at: now,
                    compiler: project::VELA_COMPILER_VERSION.to_string(),
                    papers_processed: 0,
                    errors: 0,
                    dependencies: Vec::new(),
                },
                stats: project::ProjectStats::default(),
                findings: Vec::new(),
                sources: Vec::new(),
                evidence_atoms: Vec::new(),
                condition_records: Vec::new(),
                review_events: Vec::new(),
                confidence_updates: Vec::new(),
                events: Vec::new(),
                proposals: Vec::new(),
                proof_state: proposals::ProofState::default(),
                signatures: Vec::new(),
                actors: Vec::new(),
                replications: Vec::new(),
                datasets: Vec::new(),
                code_artifacts: Vec::new(),
                artifacts: Vec::new(),
                predictions: Vec::new(),
                resolutions: Vec::new(),
                peers: Vec::new(),
                negative_results: Vec::new(),
                trajectories: Vec::new(),
            };
            repo::save_to_path(&path, &project).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "frontier.new",
                "path": path.display().to_string(),
                "name": name,
                "schema": project::VELA_SCHEMA_URL,
                "vela_version": env!("CARGO_PKG_VERSION"),
                "next_steps": [
                    "vela finding add <path> --assertion '...' --author 'reviewer:you' --apply",
                    "vela sign generate-keypair --out keys",
                    "vela actor add <path> reviewer:you --pubkey \"$(cat keys/public.key)\"",
                    "vela registry publish <path> --owner reviewer:you --key keys/private.key --locator <url> --to https://vela-hub.fly.dev",
                ],
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize frontier.new")
                );
            } else {
                println!(
                    "{} scaffolded frontier '{name}' at {}",
                    style::ok("frontier"),
                    path.display()
                );
                // Human mode mirrors the JSON "next_steps" list.
                println!(" next steps:");
                println!(
                    " 1. vela finding add {} --assertion '...' --author 'reviewer:you' --apply",
                    path.display()
                );
                println!(" 2. vela sign generate-keypair --out keys");
                println!(
                    " 3. vela actor add {} reviewer:you --pubkey \"$(cat keys/public.key)\"",
                    path.display()
                );
                println!(
                    " 4. vela registry publish {} --owner reviewer:you --key keys/private.key --locator <url> --to https://vela-hub.fly.dev",
                    path.display()
                );
            }
        }
        // `frontier materialize` — delegate to frontier_repo.
        FrontierAction::Materialize { frontier, json } => {
            let payload = frontier_repo::materialize(&frontier).unwrap_or_else(|e| fail_return(&e));
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize frontier materialize")
                );
            } else {
                println!(
                    "{} materialized frontier repo at {}",
                    style::ok("frontier"),
                    frontier.display()
                );
            }
        }
        // `frontier add-dep` — declare a cross-frontier dependency.
        FrontierAction::AddDep {
            frontier,
            vfr_id,
            locator,
            snapshot,
            name,
            json,
        } => {
            let mut p = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            // Duplicate declarations are rejected; the user must remove the
            // existing dep first.
            if p.project
                .dependencies
                .iter()
                .any(|d| d.vfr_id.as_deref() == Some(&vfr_id))
            {
                fail(&format!(
                    "cross-frontier dependency '{vfr_id}' already declared; remove it first via `vela frontier remove-dep`"
                ));
            }
            let dep = ProjectDependency {
                name: name.unwrap_or_else(|| vfr_id.clone()),
                source: "vela.hub".into(),
                version: None,
                pinned_hash: None,
                vfr_id: Some(vfr_id.clone()),
                locator: Some(locator.clone()),
                pinned_snapshot_hash: Some(snapshot.clone()),
            };
            p.project.dependencies.push(dep);
            repo::save_to_path(&frontier, &p).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "frontier.add-dep",
                "frontier": frontier.display().to_string(),
                "vfr_id": vfr_id,
                "locator": locator,
                "pinned_snapshot_hash": snapshot,
                "declared_count": p.project.dependencies.len(),
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize frontier.add-dep")
                );
            } else {
                println!(
                    "{} declared cross-frontier dep {vfr_id}",
                    style::ok("frontier")
                );
                println!(" locator: {locator}");
                println!(" snapshot: {snapshot}");
            }
        }
        // `frontier list-deps` — show all declared dependencies.
        FrontierAction::ListDeps { frontier, json } => {
            let p = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let deps: Vec<&ProjectDependency> = p.project.dependencies.iter().collect();
            if json {
                let payload = json!({
                    "ok": true,
                    "command": "frontier.list-deps",
                    "frontier": frontier.display().to_string(),
                    "count": deps.len(),
                    "dependencies": deps,
                });
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize frontier.list-deps")
                );
            } else {
                println!();
                println!(
                    " {}",
                    format!("VELA · FRONTIER · LIST-DEPS · {}", frontier.display())
                        .to_uppercase()
                        .dimmed()
                );
                println!(" {}", style::tick_row(60));
                if deps.is_empty() {
                    println!(" (no dependencies declared)");
                } else {
                    for d in &deps {
                        // Distinguish runtime cross-frontier deps from
                        // compile-time ones.
                        let kind = if d.is_cross_frontier() {
                            "cross-frontier"
                        } else {
                            "compile-time"
                        };
                        println!(" · {} [{kind}]", d.name);
                        if let Some(v) = &d.vfr_id {
                            println!(" vfr_id: {v}");
                        }
                        if let Some(l) = &d.locator {
                            println!(" locator: {l}");
                        }
                        if let Some(s) = &d.pinned_snapshot_hash {
                            println!(" snapshot: {s}");
                        }
                    }
                }
            }
        }
        // `frontier remove-dep` — drop a dependency, but only when no
        // finding still links into it.
        FrontierAction::RemoveDep {
            frontier,
            vfr_id,
            json,
        } => {
            let mut p = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            // Refuse removal while any finding still carries a cross-frontier
            // link into this dep — removing it would orphan the link.
            for f in &p.findings {
                for l in &f.links {
                    if let Ok(crate::bundle::LinkRef::Cross { vfr_id: ref v, .. }) =
                        crate::bundle::LinkRef::parse(&l.target)
                        && v == &vfr_id
                    {
                        fail(&format!(
                            "cannot remove dep '{vfr_id}': finding {} still links to it via {}",
                            f.id, l.target
                        ));
                    }
                }
            }
            let before = p.project.dependencies.len();
            p.project
                .dependencies
                .retain(|d| d.vfr_id.as_deref() != Some(&vfr_id));
            let removed = before - p.project.dependencies.len();
            if removed == 0 {
                fail(&format!("no cross-frontier dependency '{vfr_id}' found"));
            }
            repo::save_to_path(&frontier, &p).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "frontier.remove-dep",
                "frontier": frontier.display().to_string(),
                "vfr_id": vfr_id,
                "removed": removed,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize frontier.remove-dep")
                );
            } else {
                println!(
                    "{} removed cross-frontier dep {vfr_id}",
                    style::ok("frontier")
                );
            }
        }
        // `frontier refresh-deps` — re-pin each declared cross-frontier dep
        // to the hub's latest snapshot hash.
        FrontierAction::RefreshDeps {
            frontier,
            from,
            dry_run,
            json,
        } => {
            let mut p = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let cross_deps: Vec<String> = p
                .project
                .dependencies
                .iter()
                .filter_map(|d| d.vfr_id.clone())
                .collect();
            // Nothing to refresh: report an empty summary and stop.
            if cross_deps.is_empty() {
                if json {
                    println!(
                        "{}",
                        serde_json::to_string_pretty(&json!({
                            "ok": true,
                            "command": "frontier.refresh-deps",
                            "frontier": frontier.display().to_string(),
                            "from": from,
                            "dry_run": dry_run,
                            "deps": [],
                            "summary": { "total": 0, "refreshed": 0, "unchanged": 0, "missing": 0, "unreachable": 0 },
                        })).expect("serialize")
                    );
                } else {
                    println!(
                        "{} no cross-frontier deps declared in {}",
                        style::ok("frontier"),
                        frontier.display()
                    );
                }
                return;
            }
            let client = reqwest::blocking::Client::builder()
                .timeout(std::time::Duration::from_secs(20))
                .build()
                .unwrap_or_else(|e| fail_return(&format!("http client init failed: {e}")));
            let base = from.trim_end_matches('/');
            // Minimal slice of the hub entry payload we need.
            #[derive(serde::Deserialize)]
            struct HubEntry {
                latest_snapshot_hash: String,
            }
            let mut per_dep: Vec<serde_json::Value> = Vec::new();
            let (mut refreshed, mut unchanged, mut missing, mut unreachable) =
                (0u32, 0u32, 0u32, 0u32);
            // One GET per dep; each outcome is classified as missing (404),
            // unreachable (transport/HTTP/parse error), unchanged, or
            // refreshed.
            for vfr in &cross_deps {
                let url = format!("{base}/entries/{vfr}");
                let resp = client.get(&url).send();
                let outcome = match resp {
                    Ok(r) if r.status().as_u16() == 404 => {
                        missing += 1;
                        json!({ "vfr_id": vfr, "status": "missing", "url": url })
                    }
                    Ok(r) if !r.status().is_success() => {
                        unreachable += 1;
                        json!({ "vfr_id": vfr, "status": "unreachable", "http_status": r.status().as_u16() })
                    }
                    Err(e) => {
                        unreachable += 1;
                        json!({ "vfr_id": vfr, "status": "unreachable", "error": e.to_string() })
                    }
                    Ok(r) => match r.json::<HubEntry>() {
                        Err(e) => {
                            unreachable += 1;
                            json!({ "vfr_id": vfr, "status": "unreachable", "error": format!("invalid hub response: {e}") })
                        }
                        Ok(entry) => {
                            // Re-locate the dep by vfr_id before mutating it.
                            match p
                                .project
                                .dependencies
                                .iter()
                                .position(|d| d.vfr_id.as_deref() == Some(vfr.as_str()))
                            {
                                None => {
                                    unreachable += 1;
                                    json!({ "vfr_id": vfr, "status": "unreachable", "error": "dep disappeared mid-scan" })
                                }
                                Some(idx) => {
                                    let local_pin =
                                        p.project.dependencies[idx].pinned_snapshot_hash.clone();
                                    let new_pin = entry.latest_snapshot_hash;
                                    if local_pin.as_deref() == Some(new_pin.as_str()) {
                                        unchanged += 1;
                                        json!({ "vfr_id": vfr, "status": "unchanged", "snapshot": new_pin })
                                    } else {
                                        // Dry-run reports the change without
                                        // mutating the pin.
                                        if !dry_run {
                                            p.project.dependencies[idx].pinned_snapshot_hash =
                                                Some(new_pin.clone());
                                        }
                                        refreshed += 1;
                                        json!({
                                            "vfr_id": vfr,
                                            "status": "refreshed",
                                            "old_snapshot": local_pin,
                                            "new_snapshot": new_pin,
                                        })
                                    }
                                }
                            }
                        }
                    },
                };
                per_dep.push(outcome);
            }
            // Only write the file back when something actually changed.
            if !dry_run && refreshed > 0 {
                repo::save_to_path(&frontier, &p).unwrap_or_else(|e| fail_return(&e));
            }
            let payload = json!({
                "ok": true,
                "command": "frontier.refresh-deps",
                "frontier": frontier.display().to_string(),
                "from": from,
                "dry_run": dry_run,
                "deps": per_dep,
                "summary": {
                    "total": cross_deps.len(),
                    "refreshed": refreshed,
                    "unchanged": unchanged,
                    "missing": missing,
                    "unreachable": unreachable,
                },
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize frontier.refresh-deps")
                );
            } else {
                let mode = if dry_run { " (dry-run)" } else { "" };
                println!(
                    "{} refresh-deps{mode} · {} total · {refreshed} refreshed · {unchanged} unchanged · {missing} missing · {unreachable} unreachable",
                    style::ok("frontier"),
                    cross_deps.len()
                );
                for d in &per_dep {
                    let vfr = d["vfr_id"].as_str().unwrap_or("?");
                    let status = d["status"].as_str().unwrap_or("?");
                    match status {
                        // Snapshot hashes are shown truncated to 16 chars.
                        "refreshed" => println!(
                            " {vfr} refreshed {} → {}",
                            d["old_snapshot"]
                                .as_str()
                                .unwrap_or("(none)")
                                .chars()
                                .take(16)
                                .collect::<String>(),
                            d["new_snapshot"]
                                .as_str()
                                .unwrap_or("?")
                                .chars()
                                .take(16)
                                .collect::<String>(),
                        ),
                        "unchanged" => println!(" {vfr} unchanged"),
                        "missing" => println!(" {vfr} missing on hub"),
                        _ => println!(" {vfr} unreachable"),
                    }
                }
            }
        }
        // `frontier diff` — delegate to the diff report.
        FrontierAction::Diff {
            frontier,
            since,
            week,
            json,
        } => cmd_frontier_diff(&frontier, since.as_deref(), week.as_deref(), json),
    }
}
11874
11875fn cmd_repo(action: RepoAction) {
11876 match action {
11877 RepoAction::Status { frontier, json } => {
11878 let payload = frontier_repo::repo_status(&frontier).unwrap_or_else(|e| fail_return(&e));
11879 if json {
11880 println!(
11881 "{}",
11882 serde_json::to_string_pretty(&payload)
11883 .expect("failed to serialize repo status")
11884 );
11885 } else {
11886 let summary = payload.get("summary").unwrap_or(&Value::Null);
11887 let freshness = payload.get("freshness").unwrap_or(&Value::Null);
11888 println!("vela repo status");
11889 println!(" frontier: {}", frontier.display());
11890 println!(
11891 " events: {}",
11892 summary
11893 .get("accepted_events")
11894 .and_then(Value::as_u64)
11895 .unwrap_or_default()
11896 );
11897 println!(
11898 " open proposals: {}",
11899 summary
11900 .get("open_proposals")
11901 .and_then(Value::as_u64)
11902 .unwrap_or_default()
11903 );
11904 println!(
11905 " state: {}",
11906 freshness
11907 .get("materialized_state")
11908 .and_then(Value::as_str)
11909 .unwrap_or("unknown")
11910 );
11911 println!(
11912 " proof: {}",
11913 freshness
11914 .get("proof")
11915 .and_then(Value::as_str)
11916 .unwrap_or("unknown")
11917 );
11918 }
11919 }
11920 RepoAction::Doctor { frontier, json } => {
11921 let payload = frontier_repo::repo_doctor(&frontier).unwrap_or_else(|e| fail_return(&e));
11922 if json {
11923 println!(
11924 "{}",
11925 serde_json::to_string_pretty(&payload)
11926 .expect("failed to serialize repo doctor")
11927 );
11928 } else {
11929 let ok = payload.get("ok").and_then(Value::as_bool) == Some(true);
11930 let issues = payload
11931 .get("issues")
11932 .and_then(Value::as_array)
11933 .map_or(0, Vec::len);
11934 println!("vela repo doctor");
11935 println!(" frontier: {}", frontier.display());
11936 println!(" status: {}", if ok { "ok" } else { "needs attention" });
11937 println!(" issues: {issues}");
11938 }
11939 }
11940 }
11941}
11942
11943fn cmd_proof_verify(frontier: &Path, json_output: bool) {
11944 let payload = frontier_repo::proof_verify(frontier).unwrap_or_else(|e| fail_return(&e));
11945 if json_output {
11946 println!(
11947 "{}",
11948 serde_json::to_string_pretty(&payload).expect("failed to serialize proof verify")
11949 );
11950 if payload.get("ok").and_then(Value::as_bool) != Some(true) {
11951 std::process::exit(1);
11952 }
11953 } else {
11954 let ok = payload.get("ok").and_then(Value::as_bool) == Some(true);
11955 println!("vela proof verify");
11956 println!(" frontier: {}", frontier.display());
11957 println!(" status: {}", if ok { "ok" } else { "failed" });
11958 if let Some(issues) = payload.get("issues").and_then(Value::as_array) {
11959 for issue in issues {
11960 if let Some(message) = issue.get("message").and_then(Value::as_str) {
11961 println!(" issue: {message}");
11962 }
11963 }
11964 }
11965 if !ok {
11966 std::process::exit(1);
11967 }
11968 }
11969}
11970
11971fn cmd_proof_explain(frontier: &Path) {
11972 let text = frontier_repo::proof_explain(frontier).unwrap_or_else(|e| fail_return(&e));
11973 print!("{text}");
11974}
11975
/// Compute and print what changed in a frontier over a time window.
///
/// Window selection: `--since <rfc3339>` takes precedence over
/// `--week YYYY-Www`; with neither given, the current ISO week is used.
/// Findings created inside `[start, end)` count as "added" (and as new
/// contradictions when contested or of assertion type "tension");
/// findings only updated inside the window count as "updated".
/// `cumulative` counts every finding created before the window's end.
fn cmd_frontier_diff(frontier: &Path, since: Option<&str>, week: Option<&str>, json: bool) {
    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));

    let now = chrono::Utc::now();
    // Resolve the reporting window; --since is open-ended up to "now" and
    // carries no ISO-week label.
    let (window_start, window_end, week_label): (
        chrono::DateTime<chrono::Utc>,
        chrono::DateTime<chrono::Utc>,
        Option<String>,
    ) = if let Some(s) = since {
        let parsed = chrono::DateTime::parse_from_rfc3339(s)
            .map(|d| d.with_timezone(&chrono::Utc))
            .unwrap_or_else(|e| fail_return(&format!("invalid --since timestamp '{s}': {e}")));
        (parsed, now, None)
    } else {
        let key = week
            .map(str::to_owned)
            .unwrap_or_else(|| iso_week_key_for(now.date_naive()));
        let (start, end) = iso_week_bounds(&key)
            .unwrap_or_else(|e| fail_return(&format!("invalid --week '{key}': {e}")));
        (start, end, Some(key))
    };

    // Classify every finding relative to the window.
    let mut added: Vec<&crate::bundle::FindingBundle> = Vec::new();
    let mut updated: Vec<&crate::bundle::FindingBundle> = Vec::new();
    let mut new_contradictions: Vec<&crate::bundle::FindingBundle> = Vec::new();
    let mut cumulative: usize = 0;

    for f in &project.findings {
        // Unparseable timestamps are silently treated as absent.
        let created = chrono::DateTime::parse_from_rfc3339(&f.created)
            .map(|d| d.with_timezone(&chrono::Utc))
            .ok();
        let updated_ts = f
            .updated
            .as_deref()
            .and_then(|u| chrono::DateTime::parse_from_rfc3339(u).ok())
            .map(|d| d.with_timezone(&chrono::Utc));

        if let Some(c) = created
            && c < window_end
        {
            cumulative += 1;
        }

        // Created inside the window: counted only as "added" — the
        // `continue` prevents double-counting a same-window update.
        if let Some(c) = created
            && c >= window_start
            && c < window_end
        {
            added.push(f);
            let is_tension = f.flags.contested || f.assertion.assertion_type == "tension";
            if is_tension {
                new_contradictions.push(f);
            }
            continue;
        }
        if let Some(u) = updated_ts
            && u >= window_start
            && u < window_end
        {
            updated.push(f);
        }
    }

    // Compact JSON summary rows for a list of findings.
    let summary_for = |list: &[&crate::bundle::FindingBundle]| -> Vec<serde_json::Value> {
        list.iter()
            .map(|f| {
                json!({
                    "id": f.id,
                    "assertion": f.assertion.text,
                    "evidence_type": f.evidence.evidence_type,
                    "confidence": f.confidence.score,
                    "doi": f.provenance.doi,
                    "pmid": f.provenance.pmid,
                })
            })
            .collect()
    };

    let payload = json!({
        "ok": true,
        "command": "frontier.diff",
        "frontier": frontier.display().to_string(),
        "frontier_id": project.frontier_id,
        "window": {
            "start": window_start.to_rfc3339_opts(chrono::SecondsFormat::Secs, true),
            "end": window_end.to_rfc3339_opts(chrono::SecondsFormat::Secs, true),
            "iso_week": week_label,
        },
        "totals": {
            "added": added.len(),
            "updated": updated.len(),
            "new_contradictions": new_contradictions.len(),
            "cumulative_claims": cumulative,
        },
        "added": summary_for(&added),
        "updated": summary_for(&updated),
        "new_contradictions": summary_for(&new_contradictions),
    });

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&payload).expect("failed to serialize frontier.diff")
        );
        return;
    }

    // Human-readable report: header, totals, then per-finding lists.
    let label = week_label
        .clone()
        .unwrap_or_else(|| format!("since {}", window_start.format("%Y-%m-%d %H:%M UTC")));
    println!();
    println!(
        " {}",
        format!("VELA · FRONTIER · DIFF · {label}")
            .to_uppercase()
            .dimmed()
    );
    println!(" {}", style::tick_row(60));
    println!(
        " range: {} → {}",
        window_start.format("%Y-%m-%d %H:%M"),
        window_end.format("%Y-%m-%d %H:%M")
    );
    println!(" added: {}", added.len());
    println!(" updated: {}", updated.len());
    println!(" contradictions: {}", new_contradictions.len());
    println!(" cumulative: {cumulative}");
    if added.is_empty() && updated.is_empty() {
        println!();
        println!(" (quiet window — no findings added or updated)");
    } else {
        println!();
        println!(" added:");
        for f in &added {
            println!(
                " · {} {}",
                f.id.dimmed(),
                truncate(&f.assertion.text, 88)
            );
        }
        if !updated.is_empty() {
            println!();
            println!(" updated:");
            for f in &updated {
                println!(
                    " · {} {}",
                    f.id.dimmed(),
                    truncate(&f.assertion.text, 88)
                );
            }
        }
    }
}
12139
/// Truncate `s` to at most `n` characters (Unicode scalar values),
/// appending a single `…` when text was cut so the result still fits
/// within `n`.
///
/// Returns `s` unchanged when it already fits. For `n == 0` the result is
/// empty — the previous implementation returned a lone `…` (length 1),
/// which exceeded the requested width.
fn truncate(s: &str, n: usize) -> String {
    // Nothing can fit in a zero-width budget, not even the ellipsis.
    if n == 0 {
        return String::new();
    }
    if s.chars().count() <= n {
        s.to_string()
    } else {
        // Keep n-1 characters and spend the last slot on the ellipsis.
        let mut out: String = s.chars().take(n - 1).collect();
        out.push('…');
        out
    }
}
12149
12150fn iso_week_key_for(d: chrono::NaiveDate) -> String {
12152 use chrono::Datelike;
12153 let iso = d.iso_week();
12154 format!("{:04}-W{:02}", iso.year(), iso.week())
12155}
12156
12157fn iso_week_bounds(
12160 key: &str,
12161) -> Result<(chrono::DateTime<chrono::Utc>, chrono::DateTime<chrono::Utc>), String> {
12162 let (year_str, week_str) = key
12163 .split_once("-W")
12164 .ok_or_else(|| format!("expected YYYY-Www, got '{key}'"))?;
12165 let year: i32 = year_str
12166 .parse()
12167 .map_err(|e| format!("bad year in '{key}': {e}"))?;
12168 let week: u32 = week_str
12169 .parse()
12170 .map_err(|e| format!("bad week in '{key}': {e}"))?;
12171 let monday = chrono::NaiveDate::from_isoywd_opt(year, week, chrono::Weekday::Mon)
12172 .ok_or_else(|| format!("invalid ISO week: {key}"))?;
12173 let next_monday = monday + chrono::Duration::days(7);
12174 let start = monday.and_hms_opt(0, 0, 0).expect("00:00 valid").and_utc();
12175 let end = next_monday
12176 .and_hms_opt(0, 0, 0)
12177 .expect("00:00 valid")
12178 .and_utc();
12179 Ok((start, end))
12180}
12181
12182fn cmd_registry(action: RegistryAction) {
12187 use crate::registry;
12188 let default_registry = || -> PathBuf {
12189 let home = std::env::var("HOME").unwrap_or_else(|_| ".".to_string());
12190 PathBuf::from(home)
12191 .join(".vela")
12192 .join("registry")
12193 .join("entries.json")
12194 };
12195 match action {
12196 RegistryAction::DependsOn { vfr_id, from, json } => {
12197 let base = from.trim_end_matches('/');
12198 let url = format!("{base}/entries/{vfr_id}/depends-on");
12199 let client = reqwest::blocking::Client::builder()
12200 .timeout(std::time::Duration::from_secs(30))
12201 .build()
12202 .unwrap_or_else(|e| fail_return(&format!("http client init: {e}")));
12203 let resp = client
12204 .get(&url)
12205 .send()
12206 .unwrap_or_else(|e| fail_return(&format!("GET {url}: {e}")));
12207 if !resp.status().is_success() {
12208 fail(&format!("GET {url}: HTTP {}", resp.status()));
12209 }
12210 let body: serde_json::Value = resp
12211 .json()
12212 .unwrap_or_else(|e| fail_return(&format!("parse response: {e}")));
12213 if json {
12214 println!(
12215 "{}",
12216 serde_json::to_string_pretty(&body).expect("serialize")
12217 );
12218 } else {
12219 let dependents = body
12220 .get("dependents")
12221 .and_then(|v| v.as_array())
12222 .cloned()
12223 .unwrap_or_default();
12224 let count = dependents.len();
12225 println!(
12226 "{} {count} {} on {vfr_id}",
12227 style::ok("registry"),
12228 if count == 1 {
12229 "frontier depends"
12230 } else {
12231 "frontiers depend"
12232 },
12233 );
12234 for e in &dependents {
12235 let v = e.get("vfr_id").and_then(|v| v.as_str()).unwrap_or("?");
12236 let n = e.get("name").and_then(|v| v.as_str()).unwrap_or("?");
12237 let o = e
12238 .get("owner_actor_id")
12239 .and_then(|v| v.as_str())
12240 .unwrap_or("?");
12241 println!(" {v} {n} ({o})");
12242 }
12243 }
12244 }
12245 RegistryAction::Mirror {
12246 vfr_id,
12247 from,
12248 to,
12249 json,
12250 } => {
12251 let src_base = from.trim_end_matches('/');
12252 let dst_base = to.trim_end_matches('/');
12253 let src_url = format!("{src_base}/entries/{vfr_id}");
12254 let dst_url = format!("{dst_base}/entries");
12255 let client = reqwest::blocking::Client::builder()
12256 .timeout(std::time::Duration::from_secs(30))
12257 .build()
12258 .unwrap_or_else(|e| fail_return(&format!("http client init: {e}")));
12259
12260 let entry: serde_json::Value = client
12261 .get(&src_url)
12262 .send()
12263 .unwrap_or_else(|e| fail_return(&format!("GET {src_url}: {e}")))
12264 .error_for_status()
12265 .unwrap_or_else(|e| fail_return(&format!("GET {src_url}: {e}")))
12266 .json()
12267 .unwrap_or_else(|e| fail_return(&format!("parse {src_url}: {e}")));
12268
12269 let resp = client
12270 .post(&dst_url)
12271 .header("content-type", "application/json")
12272 .body(
12273 serde_json::to_vec(&entry)
12274 .unwrap_or_else(|e| fail_return(&format!("serialize: {e}"))),
12275 )
12276 .send()
12277 .unwrap_or_else(|e| fail_return(&format!("POST {dst_url}: {e}")));
12278 let status = resp.status();
12279 if !status.is_success() {
12280 let body = resp.text().unwrap_or_default();
12281 fail(&format!(
12282 "POST {dst_url}: HTTP {status}: {}",
12283 body.chars().take(300).collect::<String>()
12284 ));
12285 }
12286 let body: serde_json::Value = resp
12287 .json()
12288 .unwrap_or_else(|e| fail_return(&format!("parse POST response: {e}")));
12289 let duplicate = body
12290 .get("duplicate")
12291 .and_then(serde_json::Value::as_bool)
12292 .unwrap_or(false);
12293 let payload = json!({
12294 "ok": true,
12295 "command": "registry.mirror",
12296 "vfr_id": vfr_id,
12297 "from": src_base,
12298 "to": dst_base,
12299 "duplicate_on_destination": duplicate,
12300 "destination_response": body,
12301 });
12302 if json {
12303 println!(
12304 "{}",
12305 serde_json::to_string_pretty(&payload).expect("serialize")
12306 );
12307 } else {
12308 println!(
12309 "{} mirrored {vfr_id} from {src_base} → {dst_base}{}",
12310 style::ok("registry"),
12311 if duplicate {
12312 " (duplicate; signature already known)"
12313 } else {
12314 " (fresh insert)"
12315 }
12316 );
12317 }
12318 }
12319 RegistryAction::List { from, json } => {
12320 let (label, registry_data) = match &from {
12323 Some(loc) if loc.starts_with("http") => (
12324 loc.clone(),
12325 registry::load_any(loc).unwrap_or_else(|e| fail_return(&e)),
12326 ),
12327 Some(loc) => {
12328 let p = registry::resolve_local(loc).unwrap_or_else(|e| fail_return(&e));
12329 (
12330 p.display().to_string(),
12331 registry::load_local(&p).unwrap_or_else(|e| fail_return(&e)),
12332 )
12333 }
12334 None => {
12335 let p = default_registry();
12336 (
12337 p.display().to_string(),
12338 registry::load_local(&p).unwrap_or_else(|e| fail_return(&e)),
12339 )
12340 }
12341 };
12342 let r = registry_data;
12343 let path_label = label;
12344 if json {
12345 let payload = json!({
12346 "ok": true,
12347 "command": "registry.list",
12348 "registry": path_label,
12349 "entry_count": r.entries.len(),
12350 "entries": r.entries,
12351 });
12352 println!(
12353 "{}",
12354 serde_json::to_string_pretty(&payload)
12355 .expect("failed to serialize registry.list")
12356 );
12357 } else {
12358 println!();
12359 println!(
12360 " {}",
12361 format!("VELA · REGISTRY · LIST · {}", path_label)
12362 .to_uppercase()
12363 .dimmed()
12364 );
12365 println!(" {}", style::tick_row(60));
12366 if r.entries.is_empty() {
12367 println!(" (registry is empty)");
12368 } else {
12369 for entry in &r.entries {
12370 println!(
12371 " {} {} ({}) by {} published {}",
12372 entry.vfr_id,
12373 entry.name,
12374 entry.network_locator,
12375 entry.owner_actor_id,
12376 entry.signed_publish_at
12377 );
12378 }
12379 }
12380 }
12381 }
12382 RegistryAction::Publish {
12383 frontier,
12384 owner,
12385 key,
12386 locator,
12387 to,
12388 json,
12389 } => {
12390 let key_hex = std::fs::read_to_string(&key)
12393 .map(|s| s.trim().to_string())
12394 .unwrap_or_else(|e| fail_return(&format!("read key {}: {e}", key.display())));
12395 let signing_key = parse_signing_key(&key_hex);
12396 let derived = hex::encode(signing_key.verifying_key().to_bytes());
12397
12398 let mut frontier_data =
12400 repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
12401
12402 let pubkey = match frontier_data.actors.iter().find(|actor| actor.id == owner) {
12403 Some(actor) => actor.public_key.clone(),
12404 None => {
12405 eprintln!(
12413 " vela registry publish · auto-registering actor {owner} (derived pubkey {})",
12414 &derived[..16]
12415 );
12416 frontier_data.actors.push(sign::ActorRecord {
12417 id: owner.clone(),
12418 public_key: derived.clone(),
12419 algorithm: "ed25519".to_string(),
12420 created_at: chrono::Utc::now().to_rfc3339(),
12421 tier: None,
12422 orcid: None,
12423 access_clearance: None,
12424 });
12425 repo::save_to_path(&frontier, &frontier_data)
12426 .unwrap_or_else(|e| fail_return(&format!("save actor: {e}")));
12427 derived.clone()
12428 }
12429 };
12430
12431 let snapshot_hash = events::snapshot_hash(&frontier_data);
12435 let event_log_hash = events::event_log_hash(&frontier_data.events);
12436 let vfr_id = frontier_data.frontier_id();
12437 let name = frontier_data.project.name.clone();
12438
12439 if derived != pubkey {
12441 fail(&format!(
12442 "private key does not match registered pubkey for owner '{owner}'"
12443 ));
12444 }
12445
12446 let to_is_remote = matches!(
12454 to.as_deref(),
12455 Some(loc) if loc.starts_with("http://") || loc.starts_with("https://")
12456 );
12457 let resolved_locator = match locator {
12458 Some(l) => l,
12459 None => {
12460 if to_is_remote {
12461 let hub = to.as_deref().unwrap().trim_end_matches('/');
12462 let hub_root = hub.trim_end_matches("/entries");
12463 format!("{hub_root}/entries/{vfr_id}/snapshot")
12464 } else {
12465 fail_return(
12466 "--locator is required for local publishes; pass e.g. \
12467 --locator file:///path/to/frontier.json or an HTTPS URL.",
12468 )
12469 }
12470 }
12471 };
12472
12473 let mut entry = registry::RegistryEntry {
12474 schema: registry::ENTRY_SCHEMA.to_string(),
12475 vfr_id: vfr_id.clone(),
12476 name: name.clone(),
12477 owner_actor_id: owner.clone(),
12478 owner_pubkey: pubkey,
12479 latest_snapshot_hash: snapshot_hash,
12480 latest_event_log_hash: event_log_hash,
12481 network_locator: resolved_locator,
12482 signed_publish_at: chrono::Utc::now().to_rfc3339(),
12483 signature: String::new(),
12484 };
12485 entry.signature =
12486 registry::sign_entry(&entry, &signing_key).unwrap_or_else(|e| fail_return(&e));
12487
12488 let (registry_label, duplicate) = if to_is_remote {
12489 let hub_url = to.clone().unwrap();
12490 let resp = registry::publish_remote(&entry, &hub_url, Some(&frontier_data))
12494 .unwrap_or_else(|e| fail_return(&e));
12495 (hub_url, resp.duplicate)
12496 } else {
12497 let registry_path = match &to {
12498 Some(loc) => registry::resolve_local(loc).unwrap_or_else(|e| fail_return(&e)),
12499 None => default_registry(),
12500 };
12501 registry::publish_entry(®istry_path, entry.clone())
12502 .unwrap_or_else(|e| fail_return(&e));
12503 (registry_path.display().to_string(), false)
12504 };
12505
12506 let payload = json!({
12507 "ok": true,
12508 "command": "registry.publish",
12509 "registry": registry_label,
12510 "vfr_id": vfr_id,
12511 "name": name,
12512 "owner": owner,
12513 "snapshot_hash": entry.latest_snapshot_hash,
12514 "event_log_hash": entry.latest_event_log_hash,
12515 "signed_publish_at": entry.signed_publish_at,
12516 "signature": entry.signature,
12517 "duplicate": duplicate,
12518 });
12519 if json {
12520 println!(
12521 "{}",
12522 serde_json::to_string_pretty(&payload)
12523 .expect("failed to serialize registry.publish")
12524 );
12525 } else {
12526 let dup_suffix = if duplicate { " (duplicate, no-op)" } else { "" };
12527 println!(
12528 "{} published {vfr_id} → {}{}",
12529 style::ok("registry"),
12530 registry_label,
12531 dup_suffix
12532 );
12533 println!(" snapshot: {}", entry.latest_snapshot_hash);
12534 println!(" event_log: {}", entry.latest_event_log_hash);
12535 println!(" signature: {}…", &entry.signature[..16]);
12536 }
12537 }
12538 RegistryAction::Pull {
12539 vfr_id,
12540 from,
12541 out,
12542 transitive,
12543 depth,
12544 json,
12545 } => {
12546 let (registry_label, registry_data) = match &from {
12550 Some(loc) if loc.starts_with("http") => (
12551 loc.clone(),
12552 registry::load_any(loc).unwrap_or_else(|e| fail_return(&e)),
12553 ),
12554 Some(loc) => {
12555 let p = registry::resolve_local(loc).unwrap_or_else(|e| fail_return(&e));
12556 (
12557 p.display().to_string(),
12558 registry::load_local(&p).unwrap_or_else(|e| fail_return(&e)),
12559 )
12560 }
12561 None => {
12562 let p = default_registry();
12563 (
12564 p.display().to_string(),
12565 registry::load_local(&p).unwrap_or_else(|e| fail_return(&e)),
12566 )
12567 }
12568 };
12569 let entry = registry::find_latest(®istry_data, &vfr_id)
12570 .unwrap_or_else(|| fail_return(&format!("{vfr_id} not found in registry")));
12571
12572 if transitive {
12573 let result = registry::pull_transitive(®istry_data, &vfr_id, &out, depth)
12577 .unwrap_or_else(|e| fail_return(&format!("transitive pull failed: {e}")));
12578
12579 let dep_paths_json: serde_json::Value = serde_json::Value::Object(
12580 result
12581 .deps
12582 .iter()
12583 .map(|(k, v)| (k.clone(), serde_json::json!(v.display().to_string())))
12584 .collect(),
12585 );
12586 let payload = json!({
12587 "ok": true,
12588 "command": "registry.pull",
12589 "registry": registry_label,
12590 "vfr_id": vfr_id,
12591 "transitive": true,
12592 "depth": depth,
12593 "out_dir": out.display().to_string(),
12594 "primary": result.primary_path.display().to_string(),
12595 "verified": result.verified,
12596 "deps": dep_paths_json,
12597 });
12598 if json {
12599 println!(
12600 "{}",
12601 serde_json::to_string_pretty(&payload)
12602 .expect("failed to serialize registry.pull")
12603 );
12604 } else {
12605 println!(
12606 "{} pulled {vfr_id} (transitive) → {}",
12607 style::ok("registry"),
12608 out.display()
12609 );
12610 println!(" verified {} frontier(s):", result.verified.len());
12611 for v in &result.verified {
12612 println!(" · {v}");
12613 }
12614 println!(" every cross-frontier dependency's pinned snapshot hash matched");
12615 }
12616 return;
12617 }
12618
12619 registry::fetch_frontier_to_prefer_event_hub(&entry, from.as_deref(), &out)
12622 .unwrap_or_else(|e| fail_return(&format!("fetch frontier: {e}")));
12623 registry::verify_pull(&entry, &out).unwrap_or_else(|e| {
12624 let _ = std::fs::remove_file(&out);
12625 fail_return(&format!("pull verification failed: {e}"))
12626 });
12627
12628 let payload = json!({
12629 "ok": true,
12630 "command": "registry.pull",
12631 "registry": registry_label,
12632 "vfr_id": vfr_id,
12633 "out": out.display().to_string(),
12634 "snapshot_hash": entry.latest_snapshot_hash,
12635 "event_log_hash": entry.latest_event_log_hash,
12636 "verified": true,
12637 });
12638 if json {
12639 println!(
12640 "{}",
12641 serde_json::to_string_pretty(&payload)
12642 .expect("failed to serialize registry.pull")
12643 );
12644 } else {
12645 println!(
12646 "{} pulled {vfr_id} → {}",
12647 style::ok("registry"),
12648 out.display()
12649 );
12650 println!(" verified snapshot+event_log hashes match registry; signature ok");
12651 }
12652 }
12653 }
12654}
12655
12656fn print_stats_json(path: &Path) {
12657 let frontier = load_frontier_or_fail(path);
12658 let source_hash = hash_path_or_fail(path);
12659 let payload = json!({
12660 "ok": true,
12661 "command": "stats",
12662 "schema_version": project::VELA_SCHEMA_VERSION,
12663 "frontier": {
12664 "name": &frontier.project.name,
12665 "description": &frontier.project.description,
12666 "source": path.display().to_string(),
12667 "hash": format!("sha256:{source_hash}"),
12668 "compiled_at": &frontier.project.compiled_at,
12669 "compiler": &frontier.project.compiler,
12670 "papers_processed": frontier.project.papers_processed,
12671 "errors": frontier.project.errors,
12672 },
12673 "stats": frontier.stats,
12674 "proposals": proposals::summary(&frontier),
12675 "proof_state": frontier.proof_state,
12676 });
12677 println!(
12678 "{}",
12679 serde_json::to_string_pretty(&payload).expect("failed to serialize stats")
12680 );
12681}
12682
12683fn cmd_search(
12684 source: Option<&Path>,
12685 query: &str,
12686 entity: Option<&str>,
12687 assertion_type: Option<&str>,
12688 all: Option<&Path>,
12689 limit: usize,
12690 json_output: bool,
12691) {
12692 if let Some(dir) = all {
12693 search::run_all(dir, query, entity, assertion_type, limit);
12694 return;
12695 }
12696 let Some(src) = source else {
12697 fail("Provide --source <frontier> or --all <directory>.");
12698 };
12699 if json_output {
12700 let results = search::search(src, query, entity, assertion_type, limit);
12701 let loaded = load_frontier_or_fail(src);
12702 let source_hash = hash_path_or_fail(src);
12703 let payload = json!({
12704 "ok": true,
12705 "command": "search",
12706 "schema_version": project::VELA_SCHEMA_VERSION,
12707 "query": query,
12708 "frontier": {
12709 "name": &loaded.project.name,
12710 "source": src.display().to_string(),
12711 "hash": format!("sha256:{source_hash}"),
12712 },
12713 "filters": {
12714 "entity": entity,
12715 "assertion_type": assertion_type,
12716 "limit": limit,
12717 },
12718 "count": results.len(),
12719 "results": results.iter().map(|result| json!({
12720 "id": &result.id,
12721 "score": result.score,
12722 "assertion": &result.assertion,
12723 "assertion_type": &result.assertion_type,
12724 "confidence": result.confidence,
12725 "entities": &result.entities,
12726 "doi": &result.doi,
12727 })).collect::<Vec<_>>()
12728 });
12729 println!(
12730 "{}",
12731 serde_json::to_string_pretty(&payload).expect("failed to serialize search results")
12732 );
12733 } else {
12734 search::run(src, query, entity, assertion_type, limit);
12735 }
12736}
12737
12738fn cmd_tensions(source: &Path, both_high: bool, cross_domain: bool, top: usize, json_output: bool) {
12739 let frontier = load_frontier_or_fail(source);
12740 let result = tensions::analyze(&frontier, both_high, cross_domain, top);
12741 if json_output {
12742 let source_hash = hash_path_or_fail(source);
12743 let payload = json!({
12744 "ok": true,
12745 "command": "tensions",
12746 "schema_version": project::VELA_SCHEMA_VERSION,
12747 "frontier": {
12748 "name": &frontier.project.name,
12749 "source": source.display().to_string(),
12750 "hash": format!("sha256:{source_hash}"),
12751 },
12752 "filters": {
12753 "both_high": both_high,
12754 "cross_domain": cross_domain,
12755 "top": top,
12756 },
12757 "count": result.len(),
12758 "tensions": result.iter().map(|t| json!({
12759 "score": t.score,
12760 "resolved": t.resolved,
12761 "superseding_id": &t.superseding_id,
12762 "finding_a": {
12763 "id": &t.finding_a.id,
12764 "assertion": &t.finding_a.assertion,
12765 "confidence": t.finding_a.confidence,
12766 "assertion_type": &t.finding_a.assertion_type,
12767 "citation_count": t.finding_a.citation_count,
12768 "contradicts_count": t.finding_a.contradicts_count,
12769 },
12770 "finding_b": {
12771 "id": &t.finding_b.id,
12772 "assertion": &t.finding_b.assertion,
12773 "confidence": t.finding_b.confidence,
12774 "assertion_type": &t.finding_b.assertion_type,
12775 "citation_count": t.finding_b.citation_count,
12776 "contradicts_count": t.finding_b.contradicts_count,
12777 }
12778 })).collect::<Vec<_>>()
12779 });
12780 println!(
12781 "{}",
12782 serde_json::to_string_pretty(&payload).expect("failed to serialize tensions")
12783 );
12784 } else {
12785 tensions::print_tensions(&result);
12786 }
12787}
12788
12789fn cmd_gaps(action: GapsAction) {
12790 match action {
12791 GapsAction::Rank {
12792 frontier,
12793 top,
12794 domain,
12795 json,
12796 } => cmd_gap_rank(&frontier, top, domain.as_deref(), json),
12797 }
12798}
12799
12800fn cmd_gap_rank(frontier_path: &Path, top: usize, domain: Option<&str>, json_output: bool) {
12801 let frontier = load_frontier_or_fail(frontier_path);
12802 let mut ranked = frontier
12803 .findings
12804 .iter()
12805 .filter(|finding| finding.flags.gap || finding.flags.negative_space)
12806 .filter(|finding| {
12807 domain.is_none_or(|domain| {
12808 finding
12809 .assertion
12810 .text
12811 .to_lowercase()
12812 .contains(&domain.to_lowercase())
12813 || finding
12814 .assertion
12815 .entities
12816 .iter()
12817 .any(|entity| entity.name.to_lowercase().contains(&domain.to_lowercase()))
12818 })
12819 })
12820 .map(|finding| {
12821 let dependency_count = frontier
12822 .findings
12823 .iter()
12824 .flat_map(|candidate| candidate.links.iter())
12825 .filter(|link| link.target == finding.id)
12826 .count();
12827 let score = dependency_count as f64 + finding.confidence.score;
12828 json!({
12829 "id": &finding.id,
12830 "kind": "candidate_gap_review_lead",
12831 "assertion": &finding.assertion.text,
12832 "score": score,
12833 "dependency_count": dependency_count,
12834 "confidence": finding.confidence.score,
12835 "evidence_type": &finding.evidence.evidence_type,
12836 "entities": finding.assertion.entities.iter().map(|e| &e.name).collect::<Vec<_>>(),
12837 "recommended_action": "Review source scope and missing evidence before treating this as an experiment target.",
12838 "caveats": ["Candidate gap rankings are review leads, not guaranteed underexplored areas or experiment targets."],
12839 })
12840 })
12841 .collect::<Vec<_>>();
12842 ranked.sort_by(|a, b| {
12843 b.get("score")
12844 .and_then(Value::as_f64)
12845 .partial_cmp(&a.get("score").and_then(Value::as_f64))
12846 .unwrap_or(std::cmp::Ordering::Equal)
12847 });
12848 ranked.truncate(top);
12849 if json_output {
12850 let source_hash = hash_path_or_fail(frontier_path);
12851 let payload = json!({
12852 "ok": true,
12853 "command": "gaps rank",
12854 "schema_version": project::VELA_SCHEMA_VERSION,
12855 "frontier": {
12856 "name": &frontier.project.name,
12857 "source": frontier_path.display().to_string(),
12858 "hash": format!("sha256:{source_hash}"),
12859 },
12860 "filters": {
12861 "top": top,
12862 "domain": domain,
12863 },
12864 "count": ranked.len(),
12865 "ranking_label": "candidate gap review leads",
12866 "caveats": ["These rankings are navigation signals over flagged findings, not scientific conclusions."],
12867 "review_leads": ranked.clone(),
12868 "gaps": ranked,
12869 });
12870 println!(
12871 "{}",
12872 serde_json::to_string_pretty(&payload).expect("failed to serialize gap ranking")
12873 );
12874 } else {
12875 println!();
12876 println!(" {}", "CANDIDATE GAP REVIEW LEADS".dimmed());
12877 println!(" {}", style::tick_row(60));
12878 println!(" review source scope; these are not guaranteed experiment targets.");
12879 println!();
12880 for (idx, gap) in ranked.iter().enumerate() {
12881 println!(
12882 " {}. [{}] score={} {}",
12883 idx + 1,
12884 gap["id"].as_str().unwrap_or("?"),
12885 gap["score"].as_f64().unwrap_or(0.0),
12886 gap["assertion"].as_str().unwrap_or("")
12887 );
12888 }
12889 }
12890}
12891
12892async fn cmd_bridge(inputs: &[PathBuf], check_novelty: bool, top_n: usize) {
12893 if inputs.len() < 2 {
12894 fail("need at least 2 frontier files for bridge detection.");
12895 }
12896 println!();
12897 println!(" {}", "VELA · BRIDGE · V0.36.0".dimmed());
12898 println!(" {}", style::tick_row(60));
12899 println!(" loading {} frontiers...", inputs.len());
12900 let mut named_projects = Vec::<(String, project::Project)>::new();
12901 let mut total_findings = 0;
12902 for path in inputs {
12903 let frontier = load_frontier_or_fail(path);
12904 let name = path
12905 .file_stem()
12906 .unwrap_or_default()
12907 .to_string_lossy()
12908 .to_string();
12909 println!(" {} · {} findings", name, frontier.stats.findings);
12910 total_findings += frontier.stats.findings;
12911 named_projects.push((name, frontier));
12912 }
12913 let refs = named_projects
12914 .iter()
12915 .map(|(name, frontier)| (name.as_str(), frontier))
12916 .collect::<Vec<_>>();
12917 let mut bridges = bridge::detect_bridges(&refs);
12918 if check_novelty && !bridges.is_empty() {
12919 let client = Client::new();
12920 let check_count = bridges.len().min(top_n);
12921 println!(" running rough PubMed prior-art checks for top {check_count} bridges...");
12922 for bridge_item in bridges.iter_mut().take(check_count) {
12923 let query = bridge::novelty_query(&bridge_item.entity_name, bridge_item);
12924 match bridge::check_novelty(&client, &query).await {
12925 Ok(count) => bridge_item.pubmed_count = Some(count),
12926 Err(e) => eprintln!(
12927 " {} prior-art check failed for {}: {e}",
12928 style::err_prefix(),
12929 bridge_item.entity_name
12930 ),
12931 }
12932 tokio::time::sleep(std::time::Duration::from_millis(350)).await;
12933 }
12934 }
12935 print!("{}", bridge::format_report(&bridges, total_findings));
12936}
12937
/// Parsed CLI arguments for `vela bench`; consumed by `cmd_bench`.
struct BenchArgs {
    /// Frontier to benchmark in single-task modes; defaults to
    /// frontiers/bbb-alzheimer.json when absent.
    frontier: Option<PathBuf>,
    /// Gold file for the finding-level benchmark mode.
    gold: Option<PathBuf>,
    /// Gold file for the entity-level benchmark mode.
    entity_gold: Option<PathBuf>,
    /// Gold file for the link-level benchmark mode.
    link_gold: Option<PathBuf>,
    /// Suite definition; also used by --suite-ready (defaults to
    /// benchmarks/suites/bbb-core.json there).
    suite: Option<PathBuf>,
    /// When true, only report whether the suite is ready and exit.
    suite_ready: bool,
    /// Minimum F1 gate; defaults to 0.05 unless no_thresholds is set.
    min_f1: Option<f64>,
    /// Optional minimum precision gate.
    min_precision: Option<f64>,
    /// Optional minimum recall gate.
    min_recall: Option<f64>,
    /// Disables all threshold gates when true.
    no_thresholds: bool,
    /// Emit machine-readable JSON instead of the pretty report.
    json: bool,
}
12951
12952fn cmd_agent_bench(
12957 gold: &Path,
12958 candidate: &Path,
12959 sources: Option<&Path>,
12960 threshold: Option<f64>,
12961 report_path: Option<&Path>,
12962 json_out: bool,
12963) {
12964 let input = crate::agent_bench::BenchInput {
12965 gold_path: gold.to_path_buf(),
12966 candidate_path: candidate.to_path_buf(),
12967 sources: sources.map(Path::to_path_buf),
12968 threshold: threshold.unwrap_or(0.0),
12969 };
12970 let report = match crate::agent_bench::run(input) {
12971 Ok(r) => r,
12972 Err(e) => {
12973 eprintln!("{} bench failed: {e}", style::err_prefix());
12974 std::process::exit(1);
12975 }
12976 };
12977
12978 let json = serde_json::to_string_pretty(&report).unwrap_or_default();
12979 if let Some(path) = report_path
12980 && let Err(e) = std::fs::write(path, &json)
12981 {
12982 eprintln!(
12983 "{} failed to write report to {}: {e}",
12984 style::err_prefix(),
12985 path.display()
12986 );
12987 }
12988
12989 if json_out {
12990 println!("{json}");
12991 } else {
12992 println!();
12993 println!(" {}", "VELA · BENCH · AGENT STATE-UPDATE".dimmed());
12994 println!(" {}", style::tick_row(60));
12995 print!("{}", crate::agent_bench::render_pretty(&report));
12996 println!();
12997 }
12998
12999 if !report.pass {
13000 std::process::exit(1);
13001 }
13002}
13003
13004fn cmd_bench(args: BenchArgs) {
13005 if args.suite_ready {
13006 let suite_path = args
13007 .suite
13008 .unwrap_or_else(|| PathBuf::from("benchmarks/suites/bbb-core.json"));
13009 let payload =
13010 benchmark::suite_ready_report(&suite_path).unwrap_or_else(|e| fail_return(&e));
13011 println!(
13012 "{}",
13013 serde_json::to_string_pretty(&payload).expect("failed to serialize suite-ready report")
13014 );
13015 if payload.get("ok").and_then(Value::as_bool) != Some(true) {
13016 std::process::exit(1);
13017 }
13018 return;
13019 }
13020 if let Some(suite_path) = args.suite {
13021 let payload = benchmark::run_suite(&suite_path).unwrap_or_else(|e| fail_return(&e));
13022 if args.json {
13023 println!(
13024 "{}",
13025 serde_json::to_string_pretty(&payload)
13026 .expect("failed to serialize benchmark suite")
13027 );
13028 } else {
13029 let ok = payload.get("ok").and_then(Value::as_bool) == Some(true);
13030 let metrics = payload.get("metrics").unwrap_or(&Value::Null);
13031 println!();
13032 println!(" {}", "VELA · BENCH · SUITE".dimmed());
13033 println!(" {}", style::tick_row(60));
13034 println!(" suite: {}", suite_path.display());
13035 println!(
13036 " status: {}",
13037 if ok {
13038 style::ok("pass")
13039 } else {
13040 style::lost("fail")
13041 }
13042 );
13043 println!(
13044 " tasks: {}/{} passed",
13045 metrics
13046 .get("tasks_passed")
13047 .and_then(Value::as_u64)
13048 .unwrap_or(0),
13049 metrics
13050 .get("tasks_total")
13051 .and_then(Value::as_u64)
13052 .unwrap_or(0)
13053 );
13054 }
13055 if payload.get("ok").and_then(Value::as_bool) != Some(true) {
13056 std::process::exit(1);
13057 }
13058 return;
13059 }
13060
13061 let frontier = args
13062 .frontier
13063 .unwrap_or_else(|| PathBuf::from("frontiers/bbb-alzheimer.json"));
13064 let thresholds = benchmark::BenchmarkThresholds {
13065 min_f1: if args.no_thresholds {
13066 None
13067 } else {
13068 args.min_f1.or(Some(0.05))
13069 },
13070 min_precision: if args.no_thresholds {
13071 None
13072 } else {
13073 args.min_precision
13074 },
13075 min_recall: if args.no_thresholds {
13076 None
13077 } else {
13078 args.min_recall
13079 },
13080 ..Default::default()
13081 };
13082 if let Some(path) = args.link_gold {
13083 print_benchmark_or_exit(benchmark::task_envelope(
13084 &frontier,
13085 None,
13086 benchmark::BenchmarkMode::Link,
13087 Some(&path),
13088 &thresholds,
13089 None,
13090 ));
13091 } else if let Some(path) = args.entity_gold {
13092 print_benchmark_or_exit(benchmark::task_envelope(
13093 &frontier,
13094 None,
13095 benchmark::BenchmarkMode::Entity,
13096 Some(&path),
13097 &thresholds,
13098 None,
13099 ));
13100 } else if let Some(path) = args.gold {
13101 if args.json {
13102 print_benchmark_or_exit(benchmark::task_envelope(
13103 &frontier,
13104 None,
13105 benchmark::BenchmarkMode::Finding,
13106 Some(&path),
13107 &thresholds,
13108 None,
13109 ));
13110 } else {
13111 benchmark::run(&frontier, &path, false);
13112 }
13113 } else {
13114 fail("Provide --suite, --gold, --entity-gold, or --link-gold.");
13115 }
13116}
13117
13118fn print_benchmark_or_exit(result: Result<Value, String>) {
13119 let payload = result.unwrap_or_else(|e| fail_return(&e));
13120 println!(
13121 "{}",
13122 serde_json::to_string_pretty(&payload).expect("failed to serialize benchmark report")
13123 );
13124 if payload.get("ok").and_then(Value::as_bool) != Some(true) {
13125 std::process::exit(1);
13126 }
13127}
13128
13129fn cmd_packet(action: PacketAction) {
13130 let (result, json_output) = match action {
13131 PacketAction::Inspect { path, json } => (packet::inspect(&path), json),
13132 PacketAction::Validate { path, json } => (packet::validate(&path), json),
13133 };
13134 match result {
13135 Ok(output) if json_output => {
13136 println!(
13137 "{}",
13138 serde_json::to_string_pretty(&json!({
13139 "ok": true,
13140 "command": "packet",
13141 "result": output,
13142 }))
13143 .expect("failed to serialize packet response")
13144 );
13145 }
13146 Ok(output) => println!("{output}"),
13147 Err(e) => fail(&e),
13148 }
13149}
13150
13151fn cmd_verify(path: &Path, json_output: bool) {
13156 let result = packet::validate(path);
13157 match result {
13158 Ok(output) if json_output => {
13159 println!(
13160 "{}",
13161 serde_json::to_string_pretty(&json!({
13162 "ok": true,
13163 "command": "verify",
13164 "result": output,
13165 }))
13166 .expect("failed to serialize verify response")
13167 );
13168 }
13169 Ok(output) => {
13170 println!("{output}");
13171 println!(
13172 "\nverify: ok\n every file in the manifest matched its claimed sha256.\n pull this packet on another machine, run the same command, see the same line."
13173 );
13174 }
13175 Err(e) => fail(&e),
13176 }
13177}
13178
13179fn cmd_init(path: &Path, name: &str, template: &str, initialize_git: bool, json_output: bool) {
13180 if path.join(".vela").exists() {
13181 fail(&format!(
13182 "already initialized: {} exists",
13183 path.join(".vela").display()
13184 ));
13185 }
13186 let payload = frontier_repo::initialize(
13187 path,
13188 frontier_repo::InitOptions {
13189 name,
13190 template,
13191 initialize_git,
13192 },
13193 )
13194 .unwrap_or_else(|e| fail_return(&e));
13195 if json_output {
13196 println!(
13197 "{}",
13198 serde_json::to_string_pretty(&payload).expect("failed to serialize init report")
13199 );
13200 } else {
13201 println!(
13202 "{} initialized frontier repository in {}",
13203 style::ok("ok"),
13204 path.display()
13205 );
13206 }
13207}
13208
13209fn cmd_quickstart(
13216 path: &Path,
13217 name: &str,
13218 reviewer: &str,
13219 assertion: Option<&str>,
13220 keys_out: Option<&Path>,
13221 json_output: bool,
13222) {
13223 use std::process::Command;
13224
13225 if path.join(".vela").exists() {
13226 fail(&format!(
13227 "already initialized: {} exists",
13228 path.join(".vela").display()
13229 ));
13230 }
13231
13232 let exe = std::env::current_exe()
13233 .unwrap_or_else(|e| fail_return(&format!("cannot locate current executable: {e}")));
13234 let keys_dir = keys_out
13235 .map(Path::to_path_buf)
13236 .unwrap_or_else(|| path.join("keys"));
13237 let assertion_text =
13238 assertion.unwrap_or("Quickstart placeholder claim. Replace with your real assertion.");
13239
13240 let run_step = |label: &str, args: &[&str]| -> std::process::Output {
13241 let out = Command::new(&exe)
13242 .args(args)
13243 .output()
13244 .unwrap_or_else(|e| fail_return(&format!("{label}: failed to spawn: {e}")));
13245 if !out.status.success() {
13246 let stderr = String::from_utf8_lossy(&out.stderr);
13247 fail(&format!("{label} failed:\n{stderr}"));
13248 }
13249 out
13250 };
13251
13252 run_step(
13254 "init",
13255 &[
13256 "init",
13257 path.to_string_lossy().as_ref(),
13258 "--name",
13259 name,
13260 "--no-git",
13261 "--json",
13262 ],
13263 );
13264
13265 let keys_out_str = keys_dir.to_string_lossy().into_owned();
13267 let keypair_out = run_step(
13268 "sign.generate-keypair",
13269 &[
13270 "sign",
13271 "generate-keypair",
13272 "--out",
13273 keys_out_str.as_ref(),
13274 "--json",
13275 ],
13276 );
13277 let keypair_json: serde_json::Value = serde_json::from_slice(&keypair_out.stdout)
13278 .unwrap_or_else(|e| fail_return(&format!("sign.generate-keypair: bad json: {e}")));
13279 let public_key = keypair_json
13280 .get("public_key")
13281 .and_then(|v| v.as_str())
13282 .unwrap_or_else(|| fail_return("sign.generate-keypair: missing public_key in output"))
13283 .to_string();
13284
13285 run_step(
13287 "actor.add",
13288 &[
13289 "actor",
13290 "add",
13291 path.to_string_lossy().as_ref(),
13292 reviewer,
13293 "--pubkey",
13294 public_key.as_str(),
13295 "--json",
13296 ],
13297 );
13298
13299 let finding_out = run_step(
13301 "finding.add",
13302 &[
13303 "finding",
13304 "add",
13305 path.to_string_lossy().as_ref(),
13306 "--assertion",
13307 assertion_text,
13308 "--author",
13309 reviewer,
13310 "--apply",
13311 "--json",
13312 ],
13313 );
13314 let finding_json: serde_json::Value = serde_json::from_slice(&finding_out.stdout)
13315 .unwrap_or_else(|e| fail_return(&format!("finding.add: bad json: {e}")));
13316 let finding_id = finding_json
13317 .get("finding_id")
13318 .and_then(|v| v.as_str())
13319 .map(str::to_string);
13320
13321 if json_output {
13322 let payload = json!({
13323 "ok": true,
13324 "command": "quickstart",
13325 "frontier": path.display().to_string(),
13326 "name": name,
13327 "reviewer": reviewer,
13328 "public_key": public_key,
13329 "keys_dir": keys_dir.display().to_string(),
13330 "finding_id": finding_id,
13331 "next_steps": [
13332 format!("vela serve {}", path.display()),
13333 format!("vela ingest {} <paper.pdf|doi:...>", path.display()),
13334 format!("vela log {}", path.display()),
13335 ],
13336 });
13337 println!(
13338 "{}",
13339 serde_json::to_string_pretty(&payload).expect("failed to serialize quickstart")
13340 );
13341 return;
13342 }
13343
13344 println!();
13345 println!(
13346 " {}",
13347 format!("VELA · QUICKSTART · {}", path.display())
13348 .to_uppercase()
13349 .dimmed()
13350 );
13351 println!(" {}", style::tick_row(60));
13352 println!(" frontier: {}", path.display());
13353 println!(" name: {name}");
13354 println!(" reviewer: {reviewer}");
13355 println!(" keys: {}", keys_dir.display());
13356 println!(" pubkey: {}…", &public_key[..16]);
13357 if let Some(id) = finding_id.as_deref() {
13358 println!(" finding: {id}");
13359 }
13360 println!();
13361 println!(" {}", style::ok("done"));
13362 println!(" next:");
13363 println!(" vela serve {}", path.display());
13364 println!(
13365 " vela ingest {} <paper.pdf|doi:10.xxx|pmid:xxx>",
13366 path.display()
13367 );
13368 println!(" vela log {}", path.display());
13369 println!();
13370}
13371
13372fn cmd_import(frontier_path: &Path, into: Option<&Path>) {
13373 let frontier = repo::load_from_path(frontier_path).unwrap_or_else(|e| fail_return(&e));
13374 let target = into
13375 .map(Path::to_path_buf)
13376 .unwrap_or_else(|| PathBuf::from(frontier.project.name.replace(' ', "-").to_lowercase()));
13377 repo::init_repo(&target, &frontier).unwrap_or_else(|e| fail(&e));
13378 println!(
13379 "{} {} findings · {}",
13380 style::ok("imported"),
13381 frontier.findings.len(),
13382 target.display()
13383 );
13384}
13385
13386fn cmd_locator_repair(
13387 path: &Path,
13388 atom_id: &str,
13389 locator_override: Option<&str>,
13390 reviewer: &str,
13391 reason: &str,
13392 apply: bool,
13393 json_output: bool,
13394) {
13395 let report = state::repair_evidence_atom_locator(
13396 path,
13397 atom_id,
13398 locator_override,
13399 reviewer,
13400 reason,
13401 apply,
13402 )
13403 .unwrap_or_else(|e| fail_return(&e));
13404 print_state_report(&report, json_output);
13405}
13406
13407async fn cmd_source_fetch(
13412 identifier: &str,
13413 cache_root: Option<&Path>,
13414 out_path: Option<&Path>,
13415 refresh: bool,
13416 _json_output: bool,
13417) {
13418 use sha2::{Digest, Sha256};
13419
13420 let normalized = normalize_source_identifier(identifier);
13421 let cache_path = cache_root.map(|root| {
13422 let hash = format!("{:x}", Sha256::digest(normalized.as_bytes()));
13423 root.join("sources")
13424 .join("cache")
13425 .join(format!("{hash}.json"))
13426 });
13427
13428 if !refresh
13429 && let Some(p) = cache_path.as_ref()
13430 && p.is_file()
13431 {
13432 let body = std::fs::read_to_string(p)
13433 .unwrap_or_else(|e| fail_return(&format!("read cache {}: {e}", p.display())));
13434 emit_source_fetch_result(&body, out_path);
13435 return;
13436 }
13437
13438 let result = fetch_source_metadata(&normalized).await;
13439 let json = match result {
13440 Ok(value) => serde_json::to_string_pretty(&value)
13441 .unwrap_or_else(|e| fail_return(&format!("serialize fetched record: {e}"))),
13442 Err(e) => fail_return(&format!("source-fetch '{identifier}': {e}")),
13443 };
13444
13445 if let Some(p) = cache_path.as_ref() {
13446 if let Some(parent) = p.parent() {
13447 std::fs::create_dir_all(parent)
13448 .unwrap_or_else(|e| fail_return(&format!("mkdir {}: {e}", parent.display())));
13449 }
13450 std::fs::write(p, &json)
13451 .unwrap_or_else(|e| fail_return(&format!("write cache {}: {e}", p.display())));
13452 }
13453 emit_source_fetch_result(&json, out_path);
13454}
13455
13456fn emit_source_fetch_result(body: &str, out_path: Option<&Path>) {
13457 if let Some(p) = out_path {
13458 if let Some(parent) = p.parent() {
13459 let _ = std::fs::create_dir_all(parent);
13460 }
13461 std::fs::write(p, body)
13462 .unwrap_or_else(|e| fail_return(&format!("write {}: {e}", p.display())));
13463 } else {
13464 println!("{body}");
13465 }
13466}
13467
/// Normalizes a raw source identifier into the canonical prefixed form
/// used throughout vela (`doi:`, `pmid:`, `nct:`, `pmc:`).
///
/// - Already-prefixed identifiers pass through unchanged (trimmed).
/// - Bare DOIs (starting with "10.") gain a `doi:` prefix.
/// - NCT trial ids are uppercased and stored without the "NCT" prefix.
/// - All-ASCII-digit strings are treated as PubMed ids.
/// - Anything else is returned trimmed but otherwise untouched.
fn normalize_source_identifier(raw: &str) -> String {
    let trimmed = raw.trim();
    if trimmed.starts_with("doi:")
        || trimmed.starts_with("pmid:")
        || trimmed.starts_with("nct:")
        || trimmed.starts_with("pmc:")
    {
        return trimmed.to_string();
    }
    if trimmed.starts_with("10.") {
        return format!("doi:{trimmed}");
    }
    if trimmed.starts_with("NCT") || trimmed.starts_with("nct") {
        // Bug fix: the previous `.split_at(0).0` always produced an empty
        // string, so every NCT identifier normalized to a bare "nct:" with
        // the trial number silently dropped.
        return format!("nct:{}", trimmed.to_uppercase().trim_start_matches("NCT"));
    }
    if trimmed.chars().all(|c| c.is_ascii_digit()) {
        return format!("pmid:{trimmed}");
    }
    trimmed.to_string()
}
13496
13497async fn fetch_source_metadata(normalized: &str) -> Result<Value, String> {
13498 let client = Client::builder()
13499 .user_agent("vela/0.66.0 (vela-source-fetch; +https://vela-science.fly.dev)")
13500 .timeout(std::time::Duration::from_secs(30))
13501 .build()
13502 .map_err(|e| format!("client build: {e}"))?;
13503 if let Some(rest) = normalized.strip_prefix("doi:") {
13504 let mut record = fetch_via_crossref(&client, rest).await?;
13511 let crossref_abstract = record
13512 .get("abstract")
13513 .and_then(|v| v.as_str())
13514 .unwrap_or("");
13515 if crossref_abstract.is_empty()
13516 && let Some(pmid) = resolve_doi_to_pmid(&client, rest).await
13517 && let Ok(pubmed_record) = fetch_via_pubmed(&client, &pmid).await
13518 {
13519 let pubmed_abstract = pubmed_record
13520 .get("abstract")
13521 .and_then(|v| v.as_str())
13522 .unwrap_or("")
13523 .to_string();
13524 if !pubmed_abstract.is_empty()
13525 && let Some(obj) = record.as_object_mut()
13526 {
13527 obj.insert("abstract".to_string(), Value::String(pubmed_abstract));
13528 obj.insert(
13529 "abstract_source".to_string(),
13530 Value::String(format!("pubmed:{pmid}")),
13531 );
13532 }
13533 }
13534 return Ok(record);
13535 }
13536 if let Some(rest) = normalized.strip_prefix("pmid:") {
13537 return fetch_via_pubmed(&client, rest).await;
13538 }
13539 if let Some(rest) = normalized.strip_prefix("nct:") {
13540 return fetch_via_ctgov(&client, rest).await;
13541 }
13542 Err(format!(
13543 "unsupported source identifier '{normalized}'. Use doi:, pmid:, or nct: prefix."
13544 ))
13545}
13546
13547async fn resolve_doi_to_pmid(client: &Client, doi: &str) -> Option<String> {
13551 let url = format!(
13552 "https://eutils.ncbi.nlm.nih.gov/entrez/eutils/esearch.fcgi?db=pubmed&term={}[doi]&retmode=json",
13553 urlencoding::encode(doi)
13554 );
13555 let resp = client.get(&url).send().await.ok()?;
13556 if !resp.status().is_success() {
13557 return None;
13558 }
13559 let body: Value = resp.json().await.ok()?;
13560 let id_list = body.pointer("/esearchresult/idlist")?.as_array()?;
13561 if id_list.len() != 1 {
13562 return None;
13565 }
13566 id_list.first()?.as_str().map(|s| s.to_string())
13567}
13568
13569async fn fetch_via_crossref(client: &Client, doi: &str) -> Result<Value, String> {
13570 let url = format!("https://api.crossref.org/works/{doi}");
13571 let resp = client
13572 .get(&url)
13573 .send()
13574 .await
13575 .map_err(|e| format!("crossref get: {e}"))?;
13576 if !resp.status().is_success() {
13577 return Err(format!("crossref returned {}", resp.status()));
13578 }
13579 let body: Value = resp
13580 .json()
13581 .await
13582 .map_err(|e| format!("crossref json: {e}"))?;
13583 let work = body.get("message").cloned().unwrap_or(Value::Null);
13584 let title = work
13585 .get("title")
13586 .and_then(|v| v.as_array())
13587 .and_then(|a| a.first())
13588 .and_then(|v| v.as_str())
13589 .unwrap_or("")
13590 .to_string();
13591 let abstract_html = work
13592 .get("abstract")
13593 .and_then(|v| v.as_str())
13594 .unwrap_or("")
13595 .to_string();
13596 let abstract_text = strip_jats_tags(&abstract_html);
13597 let year = work
13598 .get("issued")
13599 .and_then(|v| v.get("date-parts"))
13600 .and_then(|v| v.as_array())
13601 .and_then(|a| a.first())
13602 .and_then(|v| v.as_array())
13603 .and_then(|a| a.first())
13604 .and_then(|v| v.as_i64());
13605 let journal = work
13606 .get("container-title")
13607 .and_then(|v| v.as_array())
13608 .and_then(|a| a.first())
13609 .and_then(|v| v.as_str())
13610 .unwrap_or("")
13611 .to_string();
13612 let authors = work
13613 .get("author")
13614 .and_then(|v| v.as_array())
13615 .map(|arr| {
13616 arr.iter()
13617 .filter_map(|a| {
13618 let given = a.get("given").and_then(|v| v.as_str()).unwrap_or("");
13619 let family = a.get("family").and_then(|v| v.as_str()).unwrap_or("");
13620 let combined = format!("{given} {family}").trim().to_string();
13621 if combined.is_empty() {
13622 None
13623 } else {
13624 Some(combined)
13625 }
13626 })
13627 .collect::<Vec<_>>()
13628 })
13629 .unwrap_or_default();
13630 Ok(json!({
13631 "schema": "vela.source_fetch.v0.1",
13632 "identifier": format!("doi:{doi}"),
13633 "source": "crossref",
13634 "title": title,
13635 "abstract": abstract_text,
13636 "year": year,
13637 "journal": journal,
13638 "authors": authors,
13639 "retrieved_at": chrono::Utc::now().to_rfc3339(),
13640 }))
13641}
13642
13643async fn fetch_via_pubmed(client: &Client, pmid: &str) -> Result<Value, String> {
13644 let url = format!(
13645 "https://eutils.ncbi.nlm.nih.gov/entrez/eutils/efetch.fcgi?db=pubmed&id={pmid}&retmode=xml"
13646 );
13647 let resp = client
13648 .get(&url)
13649 .send()
13650 .await
13651 .map_err(|e| format!("pubmed get: {e}"))?;
13652 if !resp.status().is_success() {
13653 return Err(format!("pubmed returned {}", resp.status()));
13654 }
13655 let xml = resp.text().await.map_err(|e| format!("pubmed text: {e}"))?;
13656 let title = extract_xml_text(&xml, "<ArticleTitle>", "</ArticleTitle>");
13657 let abstract_text = extract_xml_text(&xml, "<AbstractText>", "</AbstractText>");
13658 let year = extract_xml_text(&xml, "<Year>", "</Year>")
13659 .parse::<i64>()
13660 .ok();
13661 let journal = extract_xml_text(&xml, "<Title>", "</Title>");
13662 Ok(json!({
13663 "schema": "vela.source_fetch.v0.1",
13664 "identifier": format!("pmid:{pmid}"),
13665 "source": "pubmed",
13666 "title": title,
13667 "abstract": abstract_text,
13668 "year": year,
13669 "journal": journal,
13670 "authors": Vec::<String>::new(),
13671 "retrieved_at": chrono::Utc::now().to_rfc3339(),
13672 }))
13673}
13674
13675async fn fetch_via_ctgov(client: &Client, nct: &str) -> Result<Value, String> {
13676 let nct_clean = nct.trim();
13677 let nct_id = if nct_clean.starts_with("NCT") || nct_clean.starts_with("nct") {
13678 nct_clean.to_uppercase()
13679 } else {
13680 format!("NCT{nct_clean}")
13681 };
13682 let url = format!("https://clinicaltrials.gov/api/v2/studies/{nct_id}");
13683 let resp = client
13684 .get(&url)
13685 .send()
13686 .await
13687 .map_err(|e| format!("ctgov get: {e}"))?;
13688 if !resp.status().is_success() {
13689 return Err(format!("ctgov returned {}", resp.status()));
13690 }
13691 let body: Value = resp.json().await.map_err(|e| format!("ctgov json: {e}"))?;
13692 let title = body
13693 .pointer("/protocolSection/identificationModule/briefTitle")
13694 .and_then(|v| v.as_str())
13695 .unwrap_or("")
13696 .to_string();
13697 let abstract_text = body
13698 .pointer("/protocolSection/descriptionModule/briefSummary")
13699 .and_then(|v| v.as_str())
13700 .unwrap_or("")
13701 .to_string();
13702 let phase = body
13703 .pointer("/protocolSection/designModule/phases")
13704 .and_then(|v| v.as_array())
13705 .and_then(|a| a.first())
13706 .and_then(|v| v.as_str())
13707 .unwrap_or("")
13708 .to_string();
13709 Ok(json!({
13710 "schema": "vela.source_fetch.v0.1",
13711 "identifier": format!("nct:{nct_id}"),
13712 "source": "clinicaltrials.gov",
13713 "title": title,
13714 "abstract": abstract_text,
13715 "year": Value::Null,
13716 "journal": phase,
13717 "authors": Vec::<String>::new(),
13718 "retrieved_at": chrono::Utc::now().to_rfc3339(),
13719 }))
13720}
13721
/// Returns the trimmed text between the first occurrence of `open` and the
/// next occurrence of `close`, or an empty string when either is absent.
fn extract_xml_text(xml: &str, open: &str, close: &str) -> String {
    xml.find(open)
        .map(|start| &xml[start + open.len()..])
        .and_then(|rest| rest.find(close).map(|end| rest[..end].trim().to_string()))
        .unwrap_or_default()
}
13731
/// Strips angle-bracket markup (JATS/HTML tags) from a string and
/// collapses all runs of whitespace to single spaces.
fn strip_jats_tags(html: &str) -> String {
    let mut text = String::with_capacity(html.len());
    let mut inside_tag = false;
    for ch in html.chars() {
        if ch == '<' {
            inside_tag = true;
        } else if ch == '>' {
            inside_tag = false;
        } else if !inside_tag {
            text.push(ch);
        }
    }
    text.split_whitespace().collect::<Vec<_>>().join(" ")
}
13745
13746fn cmd_span_repair(
13747 path: &Path,
13748 finding_id: &str,
13749 section: &str,
13750 text: &str,
13751 reviewer: &str,
13752 reason: &str,
13753 apply: bool,
13754 json_output: bool,
13755) {
13756 let report =
13757 state::repair_finding_span(path, finding_id, section, text, reviewer, reason, apply)
13758 .unwrap_or_else(|e| fail_return(&e));
13759 print_state_report(&report, json_output);
13760}
13761
13762#[allow(clippy::too_many_arguments)]
13763fn cmd_entity_resolve(
13764 path: &Path,
13765 finding_id: &str,
13766 entity_name: &str,
13767 source: &str,
13768 id: &str,
13769 confidence: f64,
13770 matched_name: Option<&str>,
13771 resolution_method: &str,
13772 reviewer: &str,
13773 reason: &str,
13774 apply: bool,
13775 json_output: bool,
13776) {
13777 let report = state::resolve_finding_entity(
13778 path,
13779 finding_id,
13780 entity_name,
13781 source,
13782 id,
13783 confidence,
13784 matched_name,
13785 resolution_method,
13786 reviewer,
13787 reason,
13788 apply,
13789 )
13790 .unwrap_or_else(|e| fail_return(&e));
13791 print_state_report(&report, json_output);
13792}
13793
/// Propagates a correction (retraction or confidence reduction) from one
/// finding to its dependents, then recomputes stats and saves the frontier.
fn cmd_propagate(
    path: &Path,
    retract: Option<String>,
    reduce_confidence: Option<String>,
    to: Option<f64>,
    output: Option<&Path>,
) {
    let mut frontier = load_frontier_or_fail(path);
    // Exactly one of --retract / --reduce-confidence selects the action;
    // --reduce-confidence additionally requires a --to score in [0, 1].
    let (finding_id, action, label) = if let Some(id) = retract {
        (id, propagate::PropagationAction::Retracted, "retraction")
    } else if let Some(id) = reduce_confidence {
        let score = to.unwrap_or_else(|| fail_return("--reduce-confidence requires --to <score>"));
        if !(0.0..=1.0).contains(&score) {
            fail("--to must be between 0.0 and 1.0");
        }
        (
            id,
            propagate::PropagationAction::ConfidenceReduced { new_score: score },
            "confidence reduction",
        )
    } else {
        fail("specify --retract <id> or --reduce-confidence <id> --to <score>");
    };
    // Validate the target finding exists before mutating anything.
    if !frontier.findings.iter().any(|f| f.id == finding_id) {
        fail(&format!("finding not found: {finding_id}"));
    }
    let result = propagate::propagate_correction(&mut frontier, &finding_id, action);
    // Clone required: `result` is still borrowed below by print_result.
    frontier.review_events.extend(result.events.clone());
    project::recompute_stats(&mut frontier);
    propagate::print_result(&result, label, &finding_id);
    // Overwrite the input frontier in place unless --output was given.
    let out = output.unwrap_or(path);
    repo::save_to_path(out, &frontier).expect("Failed to save frontier");
    println!(" output: {}", out.display());
}
13832
/// Prints an MCP server configuration snippet for the user's client,
/// pointing at a single frontier file, a --frontiers directory, or the
/// default "frontier.json".
fn cmd_mcp_setup(source: Option<&Path>, frontiers: Option<&Path>) {
    // `source` takes precedence over `frontiers`; both absent falls back
    // to the default frontier file.
    let (args, source_desc) = match (source, frontiers) {
        (Some(path), _) => (
            format!(r#""serve", "{}""#, path.display()),
            path.display().to_string(),
        ),
        (None, Some(path)) => (
            format!(r#""serve", "--frontiers", "{}""#, path.display()),
            path.display().to_string(),
        ),
        (None, None) => (
            r#""serve", "frontier.json""#.to_string(),
            "frontier.json".to_string(),
        ),
    };
    println!(
        r#"Add this MCP server configuration to your client:

{{
  "mcpServers": {{
    "vela": {{
      "command": "vela",
      "args": [{args}]
    }}
  }}
}}

Source: {source_desc}"#
    );
}
13860
13861fn parse_entities(input: &str) -> Vec<(String, String)> {
13862 if input.trim().is_empty() {
13863 return Vec::new();
13864 }
13865 input
13866 .split(',')
13867 .filter_map(|pair| {
13868 let parts = pair.trim().splitn(2, ':').collect::<Vec<_>>();
13869 if parts.len() == 2 {
13870 Some((parts[0].trim().to_string(), parts[1].trim().to_string()))
13871 } else {
13872 eprintln!(
13873 "{} skipping malformed entity '{}'",
13874 style::warn("warn"),
13875 pair.trim()
13876 );
13877 None
13878 }
13879 })
13880 .collect()
13881}
13882
13883fn parse_evidence_spans(inputs: &[String]) -> Vec<Value> {
13884 inputs
13885 .iter()
13886 .filter_map(|input| {
13887 let trimmed = input.trim();
13888 if trimmed.is_empty() {
13889 return None;
13890 }
13891 if trimmed.starts_with('{') {
13892 match serde_json::from_str::<Value>(trimmed) {
13893 Ok(value @ Value::Object(_)) => return Some(value),
13894 Ok(_) | Err(_) => {
13895 eprintln!(
13896 "{} evidence span JSON should be an object; storing as text",
13897 style::warn("warn")
13898 );
13899 }
13900 }
13901 }
13902 Some(json!({
13903 "section": "curator_source",
13904 "text": trimmed,
13905 }))
13906 })
13907 .collect()
13908}
13909
13910fn hash_path(path: &Path) -> Result<String, String> {
13911 let mut hasher = Sha256::new();
13912 if path.is_file() {
13913 let bytes = std::fs::read(path)
13914 .map_err(|e| format!("Failed to read {} for hashing: {e}", path.display()))?;
13915 hasher.update(&bytes);
13916 } else if path.is_dir() {
13917 let mut files = Vec::new();
13918 collect_hash_files(path, path, &mut files)?;
13919 files.sort();
13920 for rel in files {
13921 hasher.update(rel.to_string_lossy().as_bytes());
13922 let bytes = std::fs::read(path.join(&rel))
13923 .map_err(|e| format!("Failed to read {} for hashing: {e}", rel.display()))?;
13924 hasher.update(bytes);
13925 }
13926 } else {
13927 return Err(format!("Cannot hash missing path {}", path.display()));
13928 }
13929 Ok(format!("{:x}", hasher.finalize()))
13930}
13931
13932fn load_frontier_or_fail(path: &Path) -> project::Project {
13933 repo::load_from_path(path).unwrap_or_else(|e| {
13934 fail_return(&format!(
13935 "Failed to load frontier '{}': {e}",
13936 path.display()
13937 ))
13938 })
13939}
13940
13941fn hash_path_or_fail(path: &Path) -> String {
13942 hash_path(path).unwrap_or_else(|e| {
13943 fail_return(&format!(
13944 "Failed to hash frontier '{}': {e}",
13945 path.display()
13946 ))
13947 })
13948}
13949
/// Recursively collects every regular file under `dir` as a path relative
/// to `root`, appending to `files`. Order is whatever `read_dir` yields;
/// callers sort before hashing.
fn collect_hash_files(root: &Path, dir: &Path, files: &mut Vec<PathBuf>) -> Result<(), String> {
    let entries =
        std::fs::read_dir(dir).map_err(|e| format!("Failed to read {}: {e}", dir.display()))?;
    for entry in entries {
        let path = entry
            .map_err(|e| format!("Failed to read directory entry: {e}"))?
            .path();
        if path.is_dir() {
            collect_hash_files(root, &path, files)?;
        } else if path.is_file() {
            let rel = path.strip_prefix(root).map_err(|e| e.to_string())?;
            files.push(rel.to_path_buf());
        }
    }
    Ok(())
}
13968
13969fn schema_error_suggestion(error: &str) -> &'static str {
13970 if schema_error_action(error).is_some() {
13971 "Run `vela normalize` to repair deterministic frontier state."
13972 } else {
13973 "Inspect and correct the referenced frontier field."
13974 }
13975}
13976
13977fn schema_error_fix(error: &str) -> bool {
13978 schema_error_action(error).is_some()
13979}
13980
/// Maps a schema validation error message to a normalize action id, or
/// `None` when no deterministic repair applies.
fn schema_error_action(error: &str) -> Option<&'static str> {
    // Errors about stats or metadata fields are fixed by renormalizing.
    const METADATA_MARKERS: [&str; 5] = [
        "stats.findings",
        "stats.links",
        "Invalid compiler",
        "Invalid vela_version",
        "Invalid schema",
    ];
    if METADATA_MARKERS.iter().any(|marker| error.contains(marker)) {
        Some("normalize_metadata_and_stats")
    } else if error.contains("does not match content-address") {
        Some("rewrite_ids")
    } else {
        None
    }
}
13995
13996fn build_repair_plan(diagnostics: &[Value]) -> Vec<Value> {
13997 let mut actions = std::collections::BTreeMap::<String, usize>::new();
13998 for diagnostic in diagnostics {
13999 if let Some(action) = diagnostic.get("normalize_action").and_then(Value::as_str) {
14000 *actions.entry(action.to_string()).or_default() += 1;
14001 }
14002 }
14003 actions
14004 .into_iter()
14005 .map(|(action, count)| {
14006 let command = if action == "rewrite_ids" {
14007 "vela normalize <frontier> --write --rewrite-ids --id-map id-map.json"
14008 } else {
14009 "vela normalize <frontier> --write"
14010 };
14011 json!({
14012 "action": action,
14013 "count": count,
14014 "command": command,
14015 })
14016 })
14017 .collect()
14018}
14019
14020fn cmd_integrity(frontier: &Path, json: bool) {
14021 let report = state_integrity::analyze_path(frontier).unwrap_or_else(|e| fail_return(&e));
14022 if json {
14023 println!(
14024 "{}",
14025 serde_json::to_string_pretty(&report).expect("failed to serialize integrity report")
14026 );
14027 } else {
14028 println!("vela integrity");
14029 println!(" frontier: {}", frontier.display());
14030 println!(" status: {}", report.status);
14031 println!(" proof freshness: {}", report.proof_freshness);
14032 println!(" structural errors: {}", report.structural_errors.len());
14033 for error in report.structural_errors.iter().take(8) {
14034 println!(" - {}: {}", error.rule_id, error.message);
14035 }
14036 }
14037}
14038
14039fn cmd_impact(frontier: &Path, finding_id: &str, depth: Option<usize>, json: bool) {
14040 let report =
14041 impact::analyze_path(frontier, finding_id, depth).unwrap_or_else(|e| fail_return(&e));
14042 if json {
14043 println!(
14044 "{}",
14045 serde_json::to_string_pretty(&report).expect("failed to serialize impact report")
14046 );
14047 } else {
14048 println!("vela impact");
14049 println!(" finding: {}", report.target.id);
14050 println!(" frontier: {}", report.frontier.vfr_id);
14051 println!(" direct dependents: {}", report.summary.direct_dependents);
14052 println!(" downstream: {}", report.summary.total_downstream);
14053 println!(" open proposals: {}", report.summary.open_proposals);
14054 println!(" accepted events: {}", report.summary.accepted_events);
14055 println!(" proof: {}", report.summary.proof_status);
14056 }
14057}
14058
/// Computes and prints the discord assignment (per-finding disagreement
/// kinds) for a frontier, optionally filtered to findings exhibiting one
/// specific kind. Output is JSON or a text summary with a histogram.
fn cmd_discord(frontier: &Path, json: bool, kind_filter: Option<&str>) {
    use crate::discord::DiscordKind;
    use crate::discord_compute::compute_discord_assignment;

    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
    let assignment = compute_discord_assignment(&project);
    let support = assignment.frontier_support();

    // One row per finding carrying any discord, optionally narrowed to
    // findings that exhibit the requested kind.
    let mut rows: Vec<(String, Vec<String>)> = Vec::new();
    for context in support.iter() {
        let set = assignment.get(context);
        let kinds: Vec<String> = set.iter().map(|k| k.as_str().to_string()).collect();
        if let Some(filter) = kind_filter
            && !kinds.iter().any(|k| k == filter)
        {
            continue;
        }
        rows.push((context.clone(), kinds));
    }

    // Histogram over the full assignment (not the filtered rows):
    // kind -> number of findings exhibiting it. Zero-count kinds omitted.
    let mut histogram: std::collections::BTreeMap<&'static str, usize> =
        std::collections::BTreeMap::new();
    for kind in DiscordKind::ALL {
        let count = assignment
            .iter()
            .filter(|(_, set)| set.contains(*kind))
            .count();
        if count > 0 {
            histogram.insert(kind.as_str(), count);
        }
    }

    let total_findings = project.findings.len();
    let frontier_id = project
        .frontier_id
        .clone()
        .unwrap_or_else(|| String::from("<unknown>"));

    if json {
        let row_value = |row: &(String, Vec<String>)| {
            serde_json::json!({
                "finding_id": row.0,
                "discord_kinds": row.1,
            })
        };
        let report = serde_json::json!({
            "frontier_id": frontier_id,
            "total_findings": total_findings,
            "frontier_support_size": support.len(),
            "filtered_row_count": rows.len(),
            "filter_kind": kind_filter,
            "histogram": histogram,
            "rows": rows.iter().map(row_value).collect::<Vec<_>>(),
        });
        println!(
            "{}",
            serde_json::to_string_pretty(&report).expect("serialize discord report")
        );
        return;
    }

    println!("vela discord");
    println!(" frontier: {frontier_id}");
    println!(" total findings: {total_findings}");
    println!(
        " frontier support (any discord): {} of {}",
        support.len(),
        total_findings
    );
    if let Some(k) = kind_filter {
        println!(" filter: kind = {k}");
    }
    println!();
    if histogram.is_empty() {
        println!(" no discord detected.");
    } else {
        println!(" discord histogram:");
        for (k, n) in &histogram {
            println!(" {n:>4} {k}");
        }
    }
    if !rows.is_empty() {
        println!();
        println!(" findings with discord (showing up to 50):");
        // Cap text output at 50 rows; the JSON path returns everything.
        for (fid, kinds) in rows.iter().take(50) {
            println!(" {fid} · {}", kinds.join(", "));
        }
        if rows.len() > 50 {
            println!(" ... and {} more", rows.len() - 50);
        }
    }
}
14156
/// Fallback signal report used when the frontier cannot be loaded:
/// zero signals, an "unavailable" status, and a caveat explaining why.
fn empty_signal_report() -> signals::SignalReport {
    signals::SignalReport {
        schema: "vela.signals.v0".to_string(),
        frontier: "unavailable".to_string(),
        signals: Vec::new(),
        review_queue: Vec::new(),
        proof_readiness: signals::ProofReadiness {
            status: "unavailable".to_string(),
            blockers: 0,
            warnings: 0,
            caveats: vec!["Frontier could not be loaded for signal analysis.".to_string()],
        },
    }
}
14171
14172fn print_signal_summary(report: &signals::SignalReport, strict: bool) {
14173 println!();
14174 println!(" {}", "SIGNALS".dimmed());
14175 println!(" {}", style::tick_row(60));
14176 println!(" total signals: {}", report.signals.len());
14177 println!(" proof readiness: {}", report.proof_readiness.status);
14178 if !report.review_queue.is_empty() {
14179 println!(" review queue: {} items", report.review_queue.len());
14180 }
14181 if strict && report.proof_readiness.status != "ready" {
14182 println!(
14183 " {} proof readiness has blocking signals.",
14184 style::lost("strict check failed")
14185 );
14186 }
14187}
14188
14189fn append_packet_json_file(
14190 packet_dir: &Path,
14191 relative_path: &str,
14192 value: &Value,
14193) -> Result<(), String> {
14194 let content = serde_json::to_vec_pretty(value)
14195 .map_err(|e| format!("Failed to serialize packet JSON file: {e}"))?;
14196 let path = packet_dir.join(relative_path);
14197 if let Some(parent) = path.parent() {
14198 std::fs::create_dir_all(parent)
14199 .map_err(|e| format!("Failed to create {}: {e}", parent.display()))?;
14200 }
14201 std::fs::write(&path, &content)
14202 .map_err(|e| format!("Failed to write {}: {e}", path.display()))?;
14203 let entry = json!({
14204 "path": relative_path,
14205 "sha256": hex::encode(Sha256::digest(&content)),
14206 "bytes": content.len(),
14207 });
14208
14209 for manifest_name in ["manifest.json", "packet.lock.json"] {
14210 let manifest_path = packet_dir.join(manifest_name);
14211 let data = std::fs::read_to_string(&manifest_path)
14212 .map_err(|e| format!("Failed to read {}: {e}", manifest_path.display()))?;
14213 let mut manifest: Value = serde_json::from_str(&data)
14214 .map_err(|e| format!("Failed to parse {}: {e}", manifest_path.display()))?;
14215 let array_key = if manifest_name == "manifest.json" {
14216 "included_files"
14217 } else {
14218 "files"
14219 };
14220 let files = manifest
14221 .get_mut(array_key)
14222 .and_then(Value::as_array_mut)
14223 .ok_or_else(|| format!("{} missing {array_key} array", manifest_path.display()))?;
14224 files.retain(|file| {
14225 file.get("path")
14226 .and_then(Value::as_str)
14227 .is_none_or(|path| path != relative_path)
14228 });
14229 files.push(entry.clone());
14230 std::fs::write(
14231 &manifest_path,
14232 serde_json::to_vec_pretty(&manifest)
14233 .map_err(|e| format!("Failed to serialize {}: {e}", manifest_path.display()))?,
14234 )
14235 .map_err(|e| format!("Failed to write {}: {e}", manifest_path.display()))?;
14236 }
14237
14238 let lock_path = packet_dir.join("packet.lock.json");
14239 let lock_content = std::fs::read(&lock_path)
14240 .map_err(|e| format!("Failed to read {}: {e}", lock_path.display()))?;
14241 let lock_entry = json!({
14242 "path": "packet.lock.json",
14243 "sha256": hex::encode(Sha256::digest(&lock_content)),
14244 "bytes": lock_content.len(),
14245 });
14246 let manifest_path = packet_dir.join("manifest.json");
14247 let data = std::fs::read_to_string(&manifest_path)
14248 .map_err(|e| format!("Failed to read {}: {e}", manifest_path.display()))?;
14249 let mut manifest: Value = serde_json::from_str(&data)
14250 .map_err(|e| format!("Failed to parse {}: {e}", manifest_path.display()))?;
14251 let files = manifest
14252 .get_mut("included_files")
14253 .and_then(Value::as_array_mut)
14254 .ok_or_else(|| format!("{} missing included_files array", manifest_path.display()))?;
14255 files.retain(|file| {
14256 file.get("path")
14257 .and_then(Value::as_str)
14258 .is_none_or(|path| path != "packet.lock.json")
14259 });
14260 files.push(lock_entry);
14261 std::fs::write(
14262 &manifest_path,
14263 serde_json::to_vec_pretty(&manifest)
14264 .map_err(|e| format!("Failed to serialize {}: {e}", manifest_path.display()))?,
14265 )
14266 .map_err(|e| format!("Failed to write {}: {e}", manifest_path.display()))?;
14267 Ok(())
14268}
14269
14270fn print_tool_check_report(report: &Value) {
14271 let summary = report.get("summary").unwrap_or(&Value::Null);
14272 let frontier = report.get("frontier").unwrap_or(&Value::Null);
14273 println!();
14274 println!(" {}", "VELA · SERVE · CHECK-TOOLS".dimmed());
14275 println!(" {}", style::tick_row(60));
14276 println!(
14277 "frontier: {}",
14278 frontier
14279 .get("name")
14280 .and_then(Value::as_str)
14281 .unwrap_or("unknown")
14282 );
14283 println!(
14284 "findings: {}",
14285 frontier
14286 .get("findings")
14287 .and_then(Value::as_u64)
14288 .unwrap_or_default()
14289 );
14290 println!(
14291 "checks: {} passed, {} failed",
14292 summary
14293 .get("passed")
14294 .and_then(Value::as_u64)
14295 .unwrap_or_default(),
14296 summary
14297 .get("failed")
14298 .and_then(Value::as_u64)
14299 .unwrap_or_default()
14300 );
14301 if let Some(tools) = report.get("tools").and_then(Value::as_array) {
14302 let names = tools
14303 .iter()
14304 .filter_map(Value::as_str)
14305 .collect::<Vec<_>>()
14306 .join(", ");
14307 println!("tools: {names}");
14308 }
14309 if let Some(checks) = report.get("checks").and_then(Value::as_array) {
14310 for check in checks {
14311 let status = if check.get("ok").and_then(Value::as_bool) == Some(true) {
14312 style::ok("ok")
14313 } else {
14314 style::lost("lost")
14315 };
14316 println!(
14317 " {} {}",
14318 status,
14319 check
14320 .get("tool")
14321 .and_then(Value::as_str)
14322 .unwrap_or("unknown")
14323 );
14324 }
14325 }
14326}
14327
14328fn print_state_report(report: &state::StateCommandReport, json_output: bool) {
14329 if json_output {
14330 println!(
14331 "{}",
14332 serde_json::to_string_pretty(report).expect("failed to serialize state command report")
14333 );
14334 } else {
14335 println!("{}", report.message);
14336 println!(" frontier: {}", report.frontier);
14337 println!(" finding: {}", report.finding_id);
14338 println!(" proposal: {}", report.proposal_id);
14339 println!(" status: {}", report.proposal_status);
14340 if let Some(event_id) = &report.applied_event_id {
14341 println!(" event: {}", event_id);
14342 }
14343 println!(" wrote: {}", report.wrote_to);
14344 }
14345}
14346
14347fn print_history(payload: &Value) {
14348 let finding = payload.get("finding").unwrap_or(&Value::Null);
14349 println!("vela history");
14350 println!(
14351 " finding: {}",
14352 finding
14353 .get("id")
14354 .and_then(Value::as_str)
14355 .unwrap_or("unknown")
14356 );
14357 println!(
14358 " assertion: {}",
14359 finding
14360 .get("assertion")
14361 .and_then(Value::as_str)
14362 .unwrap_or("")
14363 );
14364 println!(
14365 " confidence: {:.3}",
14366 finding
14367 .get("confidence")
14368 .and_then(Value::as_f64)
14369 .unwrap_or_default()
14370 );
14371 let reviews = payload
14372 .get("review_events")
14373 .and_then(Value::as_array)
14374 .map_or(0, Vec::len);
14375 let updates = payload
14376 .get("confidence_updates")
14377 .and_then(Value::as_array)
14378 .map_or(0, Vec::len);
14379 let annotations = finding
14380 .get("annotations")
14381 .and_then(Value::as_array)
14382 .map_or(0, Vec::len);
14383 let sources = payload
14384 .get("sources")
14385 .and_then(Value::as_array)
14386 .map_or(0, Vec::len);
14387 let atoms = payload
14388 .get("evidence_atoms")
14389 .and_then(Value::as_array)
14390 .map_or(0, Vec::len);
14391 let conditions = payload
14392 .get("condition_records")
14393 .and_then(Value::as_array)
14394 .map_or(0, Vec::len);
14395 let proposals = payload
14396 .get("proposals")
14397 .and_then(Value::as_array)
14398 .map_or(0, Vec::len);
14399 let events = payload
14400 .get("events")
14401 .and_then(Value::as_array)
14402 .map_or(0, Vec::len);
14403 println!(" review events: {reviews}");
14404 println!(" confidence updates: {updates}");
14405 println!(" annotations: {annotations}");
14406 println!(" sources: {sources}");
14407 println!(" evidence atoms: {atoms}");
14408 println!(" condition records: {conditions}");
14409 println!(" proposals: {proposals}");
14410 println!(" canonical events: {events}");
14411 if let Some(status) = payload
14412 .get("proof_state")
14413 .and_then(|value| value.get("latest_packet"))
14414 .and_then(|value| value.get("status"))
14415 .and_then(Value::as_str)
14416 {
14417 println!(" proof state: {status}");
14418 }
14419 if let Some(events) = payload.get("review_events").and_then(Value::as_array) {
14420 for event in events.iter().take(8) {
14421 println!(
14422 " - {} {} {}",
14423 event
14424 .get("reviewed_at")
14425 .and_then(Value::as_str)
14426 .unwrap_or(""),
14427 event.get("id").and_then(Value::as_str).unwrap_or(""),
14428 event.get("reason").and_then(Value::as_str).unwrap_or("")
14429 );
14430 }
14431 }
14432}
14433
/// Serializable record of one proof-trace run, emitted as JSON via `Serialize`.
/// NOTE(review): field meanings below are inferred from the names only — the
/// producer of this struct is not visible in this chunk; confirm against it.
#[derive(Debug, Serialize)]
pub struct ProofTrace {
    // Version tag of the trace format itself.
    pub trace_version: String,
    // The CLI invocation that produced the trace, one token per element.
    pub command: Vec<String>,
    // Input the trace was computed from, plus its content hash.
    pub source: String,
    pub source_hash: String,
    pub schema_version: String,
    // Artifacts that were checked as part of this run.
    pub checked_artifacts: Vec<String>,
    // Optional benchmark results; absent when no benchmark was run.
    pub benchmark: Option<Value>,
    pub packet_manifest: String,
    pub packet_validation: String,
    // Free-text caveats attached to the result.
    pub caveats: Vec<String>,
    pub status: String,
    // Where the trace was written on disk.
    pub trace_path: String,
}
14449
/// Allowlist of first-position subcommands accepted by the released CLI.
/// `run_from_args` rejects any first argument not in this list (after its own
/// fast paths) before handing control to clap; see `is_science_subcommand`.
const SCIENCE_SUBCOMMANDS: &[&str] = &[
    "compile-notes",
    "compile-code",
    "compile-data",
    "review-pending",
    "find-tensions",
    "plan-experiments",
    "scout",
    "check",
    "normalize",
    "integrity",
    "impact",
    "discord",
    "quickstart",
    "proof",
    "repo",
    "serve",
    "stats",
    "search",
    "tensions",
    "gaps",
    "bridge",
    "export",
    "packet",
    "bench",
    "conformance",
    "version",
    "sign",
    "actor",
    "frontier",
    "queue",
    "registry",
    "init",
    "import",
    "diff",
    "proposals",
    "finding",
    "link",
    "entity",
    "review",
    "note",
    "caveat",
    "revise",
    "reject",
    "history",
    "import-events",
    "retract",
    "propagate",
    "replicate",
    "replications",
    "dataset-add",
    "datasets",
    "code-add",
    "code-artifacts",
    "artifact-add",
    "artifact-to-state",
    "bridge-kit",
    "source-adapter",
    "runtime-adapter",
    "artifacts",
    "artifact-audit",
    "decision-brief",
    "trial-summary",
    "source-verification",
    "source-ingest-plan",
    "clinical-trial-import",
    "negative-result-add",
    "negative-results",
    "trajectory-create",
    "trajectory-step",
    "trajectories",
    "tier-set",
    "locator-repair",
    "span-repair",
    "entity-resolve",
    "entity-add",
    "source-fetch",
    "predict",
    "resolve",
    "predictions",
    "predictions-expire",
    "calibration",
    "consensus",
    "federation",
    "causal",
    "status",
    "log",
    "inbox",
    "ask",
    "bridges",
    "workbench",
    "verify",
    "ingest",
    "propose",
    "accept",
    "attest",
    "lineage",
    "carina",
    "atlas",
    "constellation",
];
14583
14584pub fn is_science_subcommand(name: &str) -> bool {
14585 SCIENCE_SUBCOMMANDS.contains(&name)
14586}
14587
/// Print the full "advanced" help screen: the strict command surface grouped
/// by workflow, plus quick-start recipes. Shown by `run_from_args` for
/// `vela help advanced`; the shorter session help is `print_session_help`.
fn print_strict_help() {
    println!(
        r#"Vela {}
Version control for scientific state.

Usage:
  vela <COMMAND>

Core flow (v0.74):
  init             Initialize a split frontier repo
  ingest           Ingest a paper, dataset, or Carina packet (dispatches by file type)
  propose          Create a finding.review proposal
  diff             Preview a `vpr_*` proposal, or compare two frontier files
  accept           Apply a proposal under reviewer authority
  attest           Sign findings under your private key
  log              Recent canonical state events
  lineage          State-transition replay for one finding
  serve            Local Workbench (findings, evidence, diff, lineage)

Read-only inspection:
  check            Validate a frontier, repo, or proof packet
  integrity        Check accepted frontier state integrity
  impact           Report downstream finding impact
  normalize        Apply deterministic frontier-state repairs
  proof            Export and validate a proof packet
  repo             Inspect split frontier repository status and shape
  stats            Show frontier statistics
  search           Search findings
  tensions         List candidate contradictions and tensions
  gaps             Inspect and rank candidate gap review leads
  bridge           Find candidate cross-domain connections

Advanced (proposal-creation, agent inboxes, federation):
  scout            Run Literature Scout against a folder of PDFs (writes proposals)
  compile-notes    Run Notes Compiler against a Markdown vault (writes proposals)
  compile-code     Run Code & Notebook Analyst against a research repo (writes proposals)
  compile-data     Run Datasets agent against a folder of CSV/TSV data (writes proposals)
  review-pending   Run Reviewer Agent: score every pending proposal (writes notes)
  find-tensions    Run Contradiction Finder: surface real contradictions among findings
  plan-experiments Run Experiment Planner: propose experiments for open questions / hypotheses
  export           Export frontier artifacts
  packet           Inspect or validate proof packets
  bench            Run deterministic benchmark gates
  conformance      Run protocol conformance vectors
  sign             Optional signing and signature verification
  runtime-adapter
                   Normalize external runtime exports into reviewable proposals
  version          Show version information
  import           Import frontier.json into a .vela repo
  proposals        Inspect, validate, export, import, accept, or reject write proposals
  artifact-to-state
                   Import a Carina artifact packet as reviewable proposals
  bridge-kit
                   Validate Carina artifact packets before importing runtime output
  source-adapter
                   Run reviewed source adapters into artifact-to-state proposals
  finding          Add or manage finding bundles as frontier state
  link             Add typed links between findings (incl. cross-frontier vf_at-vfr targets)
  entity           Resolve unresolved entities against a bundled common-entity table (v0.19)
  frontier         Scaffold (`new`), materialize, and manage frontier metadata + deps
  actor            Register Ed25519 publisher identities in a frontier
  registry         Publish, list, or pull frontiers (open hub at https://vela-hub.fly.dev)
  review           Create a review proposal or review interactively
  note             Add a lightweight note to a finding
  caveat           Create an explicit caveat proposal
  revise           Create a confidence revision proposal
  reject           Create a rejection proposal
  history          Show state-transition history for one finding (v0.74 alias: `lineage`)
  import-events    Import review/state events from a packet or JSON file
  retract          Create a retraction proposal
  propagate        Simulate impact over declared dependency links
  artifact-add     Register a content-addressed artifact
  artifacts        List content-addressed artifacts
  artifact-audit   Audit artifact locators, hashes, references, and profiles
  decision-brief   Show the validated decision brief projection
  trial-summary    Show the validated trial outcome projection
  source-verification  Show the validated source verification projection
  source-ingest-plan   Show the validated source ingest plan
  clinical-trial-import  Import a ClinicalTrials.gov record as an artifact
  locator-repair   Mechanically repair an evidence atom's missing source locator
  span-repair      Mechanically repair a finding's missing evidence span
  entity-resolve   Resolve a finding entity to a canonical id
  source-fetch     Fetch metadata + abstract for a doi:/pmid:/nct: source
  atlas            Compose multiple frontiers into a domain-level Atlas (vat_*) (v0.78+)
  constellation    Compose multiple Atlases into a cross-domain Constellation (vco_*) (v0.82+)

Quick start (the demo):
  vela init demo --name "Your bounded question"
  vela ingest paper.pdf --frontier demo
  vela propose demo <vf_id> --status accepted --reason "..." --reviewer reviewer:you --apply
  vela diff <vpr_id> --frontier demo
  vela accept demo <vpr_id> --reviewer reviewer:you --reason "applied"
  vela serve --path demo

Substrate health:
  vela frontier materialize my-frontier --json
  vela repo status my-frontier --json
  vela proof verify my-frontier --json
  vela check my-frontier --strict --json

Monolithic frontier file:
  vela frontier new frontier.json --name "Your bounded question"
  vela finding add frontier.json --assertion "..." --author "reviewer:demo" --apply
  vela check frontier.json --json
  FINDING_ID=$(jq -r '.findings[0].id' frontier.json)
  vela review frontier.json "$FINDING_ID" --status contested --reason "Mouse-only evidence" --reviewer reviewer:demo --apply

Publish your own frontier (see docs/PUBLISHING.md):
  vela frontier new ./frontier.json --name "Your bounded question"
  vela finding add ./frontier.json --assertion "..." --author "reviewer:you" --apply
  vela sign generate-keypair --out keys
  vela actor add ./frontier.json reviewer:you --pubkey "$(cat keys/public.key)"
  vela registry publish ./frontier.json --owner reviewer:you --key keys/private.key \
      --to https://vela-hub.fly.dev
"#,
        env!("CARGO_PKG_VERSION")
    );
}
14706
/// Signature of the async Literature Scout entry point. The binary registers
/// a concrete implementation at startup via `register_scout_handler`; the
/// boxed future keeps this module decoupled from the agent implementation.
pub type ScoutHandler = fn(
    folder: PathBuf,
    frontier: PathBuf,
    backend: Option<String>,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

// Process-wide, write-once registration slot for the scout handler.
static SCOUT_HANDLER: OnceLock<ScoutHandler> = OnceLock::new();

/// Install the Literature Scout handler. `OnceLock::set` keeps the first
/// value, so registrations after the first are silently ignored.
pub fn register_scout_handler(handler: ScoutHandler) {
    let _ = SCOUT_HANDLER.set(handler);
}
14731
/// Signature of the async `atlas` init handler (composes frontiers into a
/// domain-level Atlas). Registered by the binary at startup.
pub type AtlasInitHandler = fn(
    atlases_root: PathBuf,
    name: String,
    domain: String,
    scope_note: Option<String>,
    frontiers: Vec<PathBuf>,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

// Write-once slot; `OnceLock::set` keeps the first registered handler.
static ATLAS_INIT_HANDLER: OnceLock<AtlasInitHandler> = OnceLock::new();

/// Install the atlas-init handler; later registrations are ignored.
pub fn register_atlas_init_handler(handler: AtlasInitHandler) {
    let _ = ATLAS_INIT_HANDLER.set(handler);
}

/// Signature of the async atlas materialize handler.
pub type AtlasMaterializeHandler =
    fn(atlases_root: PathBuf, name: String, json: bool) -> Pin<Box<dyn Future<Output = ()> + Send>>;

static ATLAS_MATERIALIZE_HANDLER: OnceLock<AtlasMaterializeHandler> = OnceLock::new();

/// Install the atlas-materialize handler; later registrations are ignored.
pub fn register_atlas_materialize_handler(handler: AtlasMaterializeHandler) {
    let _ = ATLAS_MATERIALIZE_HANDLER.set(handler);
}

/// Signature of the async atlas serve handler (local server on `port`,
/// optionally opening the browser).
pub type AtlasServeHandler = fn(
    atlases_root: PathBuf,
    name: String,
    port: u16,
    open_browser: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

static ATLAS_SERVE_HANDLER: OnceLock<AtlasServeHandler> = OnceLock::new();

/// Install the atlas-serve handler; later registrations are ignored.
pub fn register_atlas_serve_handler(handler: AtlasServeHandler) {
    let _ = ATLAS_SERVE_HANDLER.set(handler);
}

/// Signature of the async atlas update handler (add frontiers by path,
/// remove members by vfr id).
pub type AtlasUpdateHandler = fn(
    atlases_root: PathBuf,
    name: String,
    add_frontier: Vec<PathBuf>,
    remove_vfr_id: Vec<String>,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

static ATLAS_UPDATE_HANDLER: OnceLock<AtlasUpdateHandler> = OnceLock::new();

/// Install the atlas-update handler; later registrations are ignored.
pub fn register_atlas_update_handler(handler: AtlasUpdateHandler) {
    let _ = ATLAS_UPDATE_HANDLER.set(handler);
}
14794
/// Signature of the async `constellation` init handler (composes Atlases into
/// a cross-domain Constellation). Registered by the binary at startup.
pub type ConstellationInitHandler = fn(
    constellations_root: PathBuf,
    name: String,
    scope_note: Option<String>,
    atlases: Vec<PathBuf>,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

// Write-once slot; `OnceLock::set` keeps the first registered handler.
static CONSTELLATION_INIT_HANDLER: OnceLock<ConstellationInitHandler> = OnceLock::new();

/// Install the constellation-init handler; later registrations are ignored.
pub fn register_constellation_init_handler(handler: ConstellationInitHandler) {
    let _ = CONSTELLATION_INIT_HANDLER.set(handler);
}

/// Signature of the async constellation materialize handler.
pub type ConstellationMaterializeHandler = fn(
    constellations_root: PathBuf,
    name: String,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

static CONSTELLATION_MATERIALIZE_HANDLER: OnceLock<ConstellationMaterializeHandler> =
    OnceLock::new();

/// Install the constellation-materialize handler; later registrations are ignored.
pub fn register_constellation_materialize_handler(handler: ConstellationMaterializeHandler) {
    let _ = CONSTELLATION_MATERIALIZE_HANDLER.set(handler);
}

/// Signature of the async constellation serve handler (local server on
/// `port`, optionally opening the browser).
pub type ConstellationServeHandler = fn(
    constellations_root: PathBuf,
    name: String,
    port: u16,
    open_browser: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

static CONSTELLATION_SERVE_HANDLER: OnceLock<ConstellationServeHandler> = OnceLock::new();

/// Install the constellation-serve handler; later registrations are ignored.
pub fn register_constellation_serve_handler(handler: ConstellationServeHandler) {
    let _ = CONSTELLATION_SERVE_HANDLER.set(handler);
}
14837
/// Signature of the async Notes Compiler entry point (`compile-notes`):
/// scans a Markdown vault and writes proposals against `frontier`.
pub type NotesHandler = fn(
    vault: PathBuf,
    frontier: PathBuf,
    backend: Option<String>,
    max_files: Option<usize>,
    max_items_per_category: Option<usize>,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

// Write-once slot; `OnceLock::set` keeps the first registered handler.
static NOTES_HANDLER: OnceLock<NotesHandler> = OnceLock::new();

/// Install the notes-compiler handler; later registrations are ignored.
pub fn register_notes_handler(handler: NotesHandler) {
    let _ = NOTES_HANDLER.set(handler);
}

/// Signature of the async Code & Notebook Analyst entry point (`compile-code`).
pub type CodeHandler = fn(
    root: PathBuf,
    frontier: PathBuf,
    backend: Option<String>,
    max_files: Option<usize>,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

static CODE_HANDLER: OnceLock<CodeHandler> = OnceLock::new();

/// Install the code-analyst handler; later registrations are ignored.
pub fn register_code_handler(handler: CodeHandler) {
    let _ = CODE_HANDLER.set(handler);
}

/// Signature of the async Datasets agent entry point (`compile-data`).
pub type DatasetsHandler = fn(
    root: PathBuf,
    frontier: PathBuf,
    backend: Option<String>,
    sample_rows: Option<usize>,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

static DATASETS_HANDLER: OnceLock<DatasetsHandler> = OnceLock::new();

/// Install the datasets handler; later registrations are ignored.
pub fn register_datasets_handler(handler: DatasetsHandler) {
    let _ = DATASETS_HANDLER.set(handler);
}
14891
/// Signature of the async Reviewer Agent entry point (`review-pending`):
/// scores pending proposals in batches of `batch_size`.
pub type ReviewerHandler = fn(
    frontier: PathBuf,
    backend: Option<String>,
    max_proposals: Option<usize>,
    batch_size: usize,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

// Write-once slot; `OnceLock::set` keeps the first registered handler.
static REVIEWER_HANDLER: OnceLock<ReviewerHandler> = OnceLock::new();

/// Install the reviewer-agent handler; later registrations are ignored.
pub fn register_reviewer_handler(handler: ReviewerHandler) {
    let _ = REVIEWER_HANDLER.set(handler);
}

/// Signature of the async Contradiction Finder entry point (`find-tensions`).
pub type TensionsHandler = fn(
    frontier: PathBuf,
    backend: Option<String>,
    max_findings: Option<usize>,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

static TENSIONS_HANDLER: OnceLock<TensionsHandler> = OnceLock::new();

/// Install the contradiction-finder handler; later registrations are ignored.
pub fn register_tensions_handler(handler: TensionsHandler) {
    let _ = TENSIONS_HANDLER.set(handler);
}

/// Signature of the async Experiment Planner entry point (`plan-experiments`).
pub type ExperimentsHandler = fn(
    frontier: PathBuf,
    backend: Option<String>,
    max_findings: Option<usize>,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

static EXPERIMENTS_HANDLER: OnceLock<ExperimentsHandler> = OnceLock::new();

/// Install the experiment-planner handler; later registrations are ignored.
pub fn register_experiments_handler(handler: ExperimentsHandler) {
    let _ = EXPERIMENTS_HANDLER.set(handler);
}
14940
/// Locate the nearest `.vela/` repo by checking the current working directory
/// and then each ancestor in turn. Returns the directory that contains the
/// `.vela/` marker, or `None` when no ancestor has one (or cwd is unreadable).
fn find_vela_repo() -> Option<PathBuf> {
    let start = std::env::current_dir().ok()?;
    // `ancestors()` yields `start` itself first, then each parent up to the
    // filesystem root — the same order the original loop-and-pop used.
    start
        .ancestors()
        .find(|dir| dir.join(".vela").is_dir())
        .map(Path::to_path_buf)
}
14967
/// Print the short, session-oriented help screen (default for `vela help`);
/// the exhaustive listing lives in `print_strict_help` (`vela help advanced`).
fn print_session_help() {
    println!();
    println!(
        "  Vela {} · Version control for scientific state.",
        env!("CARGO_PKG_VERSION")
    );
    println!();
    println!("  USAGE");
    println!("    vela                 Open a session against the nearest .vela/ repo");
    println!("    vela <command>       Run a specific subcommand");
    println!("    vela help advanced   Full subcommand list (30+ commands)");
    println!();
    println!("  CORE FLOW (v0.74)");
    println!("    init                 Initialize a split frontier repo");
    println!("    ingest <path>        Ingest a paper, dataset, or Carina packet");
    println!("    propose              Create a finding.review proposal");
    println!("    diff <vpr_id>        Preview a pending proposal vs current frontier");
    println!("    accept <vpr_id>      Apply a proposal under reviewer authority");
    println!("    attest               Sign findings under your private key");
    println!("    log                  Recent canonical state events");
    println!("    lineage <vf_id>      State-transition replay for one finding");
    println!("    serve                Local Workbench (find, evidence, diff, lineage)");
    println!();
    println!("  DAILY ALSO-RANS");
    println!("    status               One-screen frontier health");
    println!("    inbox                Pending review proposals");
    println!("    review               Review a proposal interactively");
    println!("    ask <question>       Plain-text query against the frontier");
    println!();
    println!("  REASONING (Pearl 1 → 2 → 3)");
    println!("    causal audit                     Per-finding identifiability");
    println!("    causal effect <src> --on <tgt>   Pairwise back-door / front-door");
    println!("    causal counterfactual <src> --target <tgt> --set-to <0..1>");
    println!();
    println!("  COMPOSITION");
    println!("    bridge <a> <b>       Cross-frontier hypotheses");
    println!("    consensus <vf>       Field consensus over similar claims");
    println!();
    println!("  PUBLISH");
    println!("    registry publish     Push a signed manifest to the hub");
    println!("    federation peer-add  Federate with another hub");
    println!();
    println!("  In session, type a single letter for a quick verb, or any");
    println!("  question in plain text. `q` or `exit` quits.");
    println!();
}
15014
/// Render the interactive session landing screen: frontier identity, finding/
/// event/proposal counts, a causal-audit summary, bridge and replication
/// tallies, and the single-letter verb menu handled by `run_session_verb`.
fn print_session_dashboard(project: &crate::project::Project, repo_path: &Path) {
    use crate::causal_reasoning::{audit_frontier, summarize_audit};

    let label = frontier_label(project);
    let vfr = project.frontier_id();
    // Truncate the frontier id to 16 chars for the one-line header.
    let vfr_short = vfr.chars().take(16).collect::<String>();

    // Tally proposals still awaiting review, grouped by proposal kind.
    let mut pending = 0usize;
    let mut by_kind: std::collections::BTreeMap<String, usize> = std::collections::BTreeMap::new();
    for p in &project.proposals {
        if p.status == "pending_review" {
            pending += 1;
            *by_kind.entry(p.kind.clone()).or_insert(0) += 1;
        }
    }

    let audit = audit_frontier(project);
    let audit_summary = summarize_audit(&audit);

    // Count bridge records stored as individual JSON files under
    // .vela/bridges; unparsable files still count toward the total.
    let bridges_dir = repo_path.join(".vela/bridges");
    let mut bridge_total = 0usize;
    let mut bridge_confirmed = 0usize;
    let mut bridge_derived = 0usize;
    if bridges_dir.is_dir()
        && let Ok(entries) = std::fs::read_dir(&bridges_dir)
    {
        for entry in entries.flatten() {
            let path = entry.path();
            if path.extension().and_then(|s| s.to_str()) != Some("json") {
                continue;
            }
            bridge_total += 1;
            if let Ok(data) = std::fs::read_to_string(&path)
                && let Ok(b) = serde_json::from_str::<crate::bridge::Bridge>(&data)
            {
                match b.status {
                    crate::bridge::BridgeStatus::Confirmed => bridge_confirmed += 1,
                    crate::bridge::BridgeStatus::Derived => bridge_derived += 1,
                    _ => {}
                }
            }
        }
    }

    // Distinct findings with at least one successful replication, plus the
    // number of failed replication attempts.
    let mut targets_with_success = std::collections::HashSet::new();
    let mut failed_replications = 0usize;
    for r in &project.replications {
        if r.outcome == "replicated" {
            targets_with_success.insert(r.target_finding.clone());
        } else if r.outcome == "failed" {
            failed_replications += 1;
        }
    }

    println!();
    // Header shows the compiler version without its "vela/" prefix.
    let version = crate::project::VELA_COMPILER_VERSION
        .strip_prefix("vela/")
        .unwrap_or(crate::project::VELA_COMPILER_VERSION);
    println!(
        "  {}",
        format!("VELA · {version} · {label}")
            .to_uppercase()
            .dimmed()
    );
    println!("  {}", style::tick_row(60));
    println!(
        "  vfr_id {}…   repo {}",
        vfr_short,
        repo_path.display()
    );
    println!(
        "  findings {:>4}   events {}   proposals pending {}",
        project.findings.len(),
        project.events.len(),
        pending
    );

    // Conditional attention lines: only shown when there is something to act on.
    if pending > 0 {
        let parts: Vec<String> = by_kind.iter().map(|(k, n)| format!("{n} {k}")).collect();
        println!("  {} · {}", style::warn("inbox"), parts.join("  "));
    }
    if audit_summary.underidentified > 0 || audit_summary.conditional > 0 {
        println!(
            "  {} · {} underidentified · {} conditional",
            if audit_summary.underidentified > 0 {
                style::lost("audit")
            } else {
                style::warn("audit")
            },
            audit_summary.underidentified,
            audit_summary.conditional,
        );
    }
    if bridge_total > 0 {
        println!(
            "  {} · {} total · {} confirmed · {} awaiting review",
            style::ok("bridges"),
            bridge_total,
            bridge_confirmed,
            bridge_derived
        );
    }
    if !project.replications.is_empty() {
        println!(
            "  {} · {} records · {} findings replicated · {} failed",
            style::ok("replications"),
            project.replications.len(),
            targets_with_success.len(),
            failed_replications,
        );
    }

    println!();
    println!("  type a verb or ask anything:");
    println!("    a  audit problems     i  inbox (pending)    b  bridges");
    println!("    g  causal graph       l  log (recent)       c  counterfactuals");
    println!("    s  refresh status     h  help (more verbs)  q  quit");
    println!();
}
15134
/// Dispatch a single-letter (or word) session verb against the repo at
/// `repo_path`. Returns `true` when the input was recognized and handled;
/// `false` tells the caller (`run_session`) to treat the input as a
/// free-text question instead.
fn run_session_verb(verb: &str, repo_path: &Path) -> bool {
    match verb {
        // Causal audit restricted to problematic findings.
        "a" | "audit" => {
            let action = CausalAction::Audit {
                frontier: repo_path.to_path_buf(),
                problems_only: true,
                json: false,
            };
            cmd_causal(action);
            true
        }
        // Proposals still awaiting review.
        "i" | "inbox" => {
            let action = ProposalAction::List {
                frontier: repo_path.to_path_buf(),
                status: Some("pending_review".into()),
                json: false,
            };
            cmd_proposals(action);
            true
        }
        "b" | "bridges" => {
            let action = BridgesAction::List {
                frontier: repo_path.to_path_buf(),
                status: None,
                json: false,
            };
            cmd_bridges(action);
            true
        }
        "g" | "graph" => {
            let action = CausalAction::Graph {
                frontier: repo_path.to_path_buf(),
                node: None,
                json: false,
            };
            cmd_causal(action);
            true
        }
        // Last 10 canonical events, all kinds, human-readable.
        "l" | "log" => {
            cmd_log(repo_path, 10, None, false);
            true
        }
        // List dependency/support edges that carry a mechanism annotation —
        // the pairs eligible for `vela causal counterfactual`.
        "c" | "counterfactual" | "counterfactuals" => {
            let project = match repo::load_from_path(repo_path) {
                Ok(p) => p,
                Err(e) => {
                    eprintln!("{} {e}", style::err_prefix());
                    // Recognized verb even though loading failed.
                    return true;
                }
            };
            println!();
            println!("  {}", "VELA · COUNTERFACTUAL · LIVE PAIRS".dimmed());
            println!("  {}", style::tick_row(60));
            let mut pairs = 0usize;
            for child in &project.findings {
                for link in &child.links {
                    if !matches!(link.link_type.as_str(), "depends" | "supports") {
                        continue;
                    }
                    if link.mechanism.is_none() {
                        continue;
                    }
                    // Strip any "<prefix>:" from the link target to get the
                    // parent id; targets without a ':' are used as-is.
                    let parent = link
                        .target
                        .split_once(':')
                        .map_or(link.target.as_str(), |(_, r)| r);
                    pairs += 1;
                    // Only the first 10 pairs are printed; the rest are counted.
                    if pairs <= 10 {
                        println!("    · do({parent}) → {}", child.id);
                    }
                }
            }
            if pairs == 0 {
                println!("  no mechanism-annotated edges found.");
                println!("  add a mechanism via the link's `mechanism` field; see /counterfactual");
            } else {
                println!();
                println!("  {pairs} live pair(s). Run with:");
                println!("    vela causal counterfactual <repo> <src> --target <tgt> --set-to 0.5");
            }
            println!();
            true
        }
        // Reload the project from disk and redraw the dashboard.
        "s" | "status" | "refresh" => {
            match repo::load_from_path(repo_path) {
                Ok(p) => print_session_dashboard(&p, repo_path),
                Err(e) => eprintln!("{} {e}", style::err_prefix()),
            }
            true
        }
        "h" | "help" | "?" => {
            print_session_help();
            true
        }
        _ => false,
    }
}
15239
/// Interactive REPL entered when `vela` runs with no arguments: find the
/// nearest `.vela/` repo, show the dashboard, then loop reading lines from
/// stdin. Known verbs are dispatched via `run_session_verb`; anything else is
/// answered as a free-text question against a freshly reloaded project.
fn run_session() {
    let repo_path = match find_vela_repo() {
        Some(p) => p,
        None => {
            // No repo anywhere up the directory tree: explain and bail.
            println!();
            println!(
                "  {}",
                "VELA · NO FRONTIER FOUND IN CWD OR ANY PARENT".dimmed()
            );
            println!("  {}", style::tick_row(60));
            println!("  Run `vela init` here to create a frontier, or cd into one.");
            println!("  Or run `vela help` for the command list.");
            println!();
            return;
        }
    };

    let project = match repo::load_from_path(&repo_path) {
        Ok(p) => p,
        Err(e) => {
            eprintln!("{} failed to load .vela/ repo: {e}", style::err_prefix());
            std::process::exit(1);
        }
    };

    print_session_dashboard(&project, &repo_path);

    use std::io::{BufRead, Write};
    let stdin = std::io::stdin();
    let mut stdout = std::io::stdout();
    loop {
        print!("  > ");
        // Flush so the prompt appears before we block on read_line.
        stdout.flush().ok();
        let mut line = String::new();
        if stdin.lock().read_line(&mut line).is_err() {
            break;
        }
        let input = line.trim();
        if input.is_empty() {
            continue;
        }
        if matches!(input, "q" | "quit" | "exit") {
            break;
        }
        if run_session_verb(input, &repo_path) {
            continue;
        }
        // Unrecognized verb: reload from disk (verbs may have mutated state)
        // and answer the input as a plain-text question.
        let project = match repo::load_from_path(&repo_path) {
            Ok(p) => p,
            Err(e) => {
                eprintln!("{} {e}", style::err_prefix());
                continue;
            }
        };
        answer(&project, input, false);
    }
}
15298
/// Binary entry point. Routes the raw argv *before* clap parsing:
/// - no arguments → interactive session (`run_session`);
/// - help/version fast paths (plain text, no clap);
/// - `proof verify` / `proof explain` handled directly, exiting 2 when the
///   required frontier path argument is missing;
/// - any first argument not in `SCIENCE_SUBCOMMANDS` is rejected with exit 2;
/// - everything else falls through to the async clap-based `run_command`
///   inside a fresh tokio runtime.
pub fn run_from_args() {
    style::init();
    let args = std::env::args().collect::<Vec<_>>();
    match args.get(1).map(String::as_str) {
        None => {
            run_session();
            return;
        }
        Some("-h" | "--help" | "help") => {
            // `vela help advanced` gets the full listing; otherwise the
            // shorter session-oriented screen.
            if args.get(2).map(String::as_str) == Some("advanced") {
                print_strict_help();
            } else {
                print_session_help();
            }
            return;
        }
        Some("-V" | "--version" | "version") => {
            println!("vela {}", env!("CARGO_PKG_VERSION"));
            return;
        }
        Some("proof") if args.get(2).map(String::as_str) == Some("verify") => {
            let json = args.iter().any(|arg| arg == "--json");
            // First non-flag argument after `proof verify` is the repo path.
            let frontier = args
                .iter()
                .skip(3)
                .find(|arg| !arg.starts_with('-'))
                .map(PathBuf::from)
                .unwrap_or_else(|| {
                    eprintln!(
                        "{} proof verify requires a frontier repo",
                        style::err_prefix()
                    );
                    std::process::exit(2);
                });
            cmd_proof_verify(&frontier, json);
            return;
        }
        Some("proof") if args.get(2).map(String::as_str) == Some("explain") => {
            let frontier = args
                .iter()
                .skip(3)
                .find(|arg| !arg.starts_with('-'))
                .map(PathBuf::from)
                .unwrap_or_else(|| {
                    eprintln!(
                        "{} proof explain requires a frontier repo",
                        style::err_prefix()
                    );
                    std::process::exit(2);
                });
            cmd_proof_explain(&frontier);
            return;
        }
        Some(cmd) if !is_science_subcommand(cmd) => {
            eprintln!(
                "{} unknown or non-release command: {cmd}",
                style::err_prefix()
            );
            eprintln!("run `vela --help` for the strict v0 command surface.");
            std::process::exit(2);
        }
        Some(_) => {}
    }
    let runtime = tokio::runtime::Runtime::new().expect("failed to create tokio runtime");
    runtime.block_on(run_command());
}
15370
/// Print `message` prefixed as an error and terminate the process with
/// exit code 1. Never returns.
fn fail(message: &str) -> ! {
    eprintln!("{} {message}", style::err_prefix());
    std::process::exit(1);
}
15375
15376fn validate_enum_arg(flag: &str, value: &str, valid: &[&str]) {
15381 if !valid.contains(&value) {
15382 fail(&format!(
15383 "invalid {flag} '{value}'. Valid: {}",
15384 valid.join(", ")
15385 ));
15386 }
15387}
15388
/// Expression-position wrapper around `fail`: because `fail` diverges, this
/// can be coerced to any `T`, making it usable where a value is expected
/// (e.g. inside `unwrap_or_else` closures) without a `!`-to-`T` annotation.
fn fail_return<T>(message: &str) -> T {
    fail(message)
}