1use crate::{
2 benchmark, bridge, bundle, carina_validate, conformance, decision, diff, events, export,
3 frontier_repo, impact, lint, normalize, packet, project, propagate, proposals, repo, review,
4 search, serve, sign, signals, sources, state, state_integrity, tensions, validate,
5};
6
7use std::collections::BTreeMap;
8use std::future::Future;
9use std::path::{Path, PathBuf};
10use std::pin::Pin;
11use std::sync::OnceLock;
12
13use clap::{Parser, Subcommand};
14use colored::Colorize;
15
16use crate::cli_style as style;
17use reqwest::Client;
18use serde::Serialize;
19use serde_json::{Value, json};
20use sha2::{Digest, Sha256};
21
// Top-level argument parser for the `vela` binary.
// clap derives the full CLI from the `Commands` subcommand tree; `version`
// is populated from Cargo metadata.
#[derive(Parser)]
#[command(name = "vela", version)]
#[command(about = "Portable frontier state for science")]
struct Cli {
    // The selected subcommand; every invocation must name exactly one.
    #[command(subcommand)]
    command: Commands,
}
29
// Top-level subcommand tree for the `vela` CLI.
//
// Recurring conventions visible across variants: a positional `frontier`
// path (or `--frontier`) selects the frontier file to operate on;
// `--json` requests machine-readable output; mutating commands usually
// take `--apply` and/or `--dry-run` plus an actor identity flag
// (`--reviewer` / `--author` / `--actor` / `--by` / `--deposited-by`).
#[derive(Subcommand)]
enum Commands {
    // --- Extraction / compilation pipelines (share --backend, --dry-run) ---
    Scout {
        folder: PathBuf,
        #[arg(long)]
        frontier: PathBuf,
        #[arg(short, long)]
        backend: Option<String>,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    CompileNotes {
        vault: PathBuf,
        #[arg(long)]
        frontier: PathBuf,
        #[arg(short, long)]
        backend: Option<String>,
        #[arg(long)]
        max_files: Option<usize>,
        #[arg(long)]
        max_items_per_category: Option<usize>,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    CompileCode {
        root: PathBuf,
        #[arg(long)]
        frontier: PathBuf,
        #[arg(short, long)]
        backend: Option<String>,
        #[arg(long)]
        max_files: Option<usize>,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    ReviewPending {
        #[arg(long)]
        frontier: PathBuf,
        #[arg(short, long)]
        backend: Option<String>,
        #[arg(long)]
        max_proposals: Option<usize>,
        #[arg(long, default_value = "1")]
        batch_size: usize,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    FindTensions {
        #[arg(long)]
        frontier: PathBuf,
        #[arg(short, long)]
        backend: Option<String>,
        #[arg(long)]
        max_findings: Option<usize>,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    PlanExperiments {
        #[arg(long)]
        frontier: PathBuf,
        #[arg(short, long)]
        backend: Option<String>,
        #[arg(long)]
        max_findings: Option<usize>,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    CompileData {
        root: PathBuf,
        #[arg(long)]
        frontier: PathBuf,
        #[arg(short, long)]
        backend: Option<String>,
        #[arg(long)]
        sample_rows: Option<usize>,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    // --- Validation / inspection ---
    Check {
        source: Option<PathBuf>,
        #[arg(long)]
        schema: bool,
        #[arg(long)]
        stats: bool,
        #[arg(long)]
        conformance: bool,
        #[arg(long, default_value = "tests/conformance")]
        conformance_dir: PathBuf,
        #[arg(long)]
        all: bool,
        #[arg(long)]
        schema_only: bool,
        #[arg(long)]
        strict: bool,
        #[arg(long)]
        fix: bool,
        #[arg(long)]
        json: bool,
    },
    Integrity {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    Impact {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        depth: Option<usize>,
        #[arg(long)]
        json: bool,
    },
    Discord {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
        #[arg(long)]
        kind: Option<String>,
    },
    Normalize {
        source: PathBuf,
        #[arg(short, long)]
        out: Option<PathBuf>,
        #[arg(long)]
        write: bool,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        rewrite_ids: bool,
        #[arg(long)]
        id_map: Option<PathBuf>,
        #[arg(long)]
        resync_provenance: bool,
        #[arg(long)]
        json: bool,
    },
    Proof {
        frontier: PathBuf,
        #[arg(long, short = 'o', default_value = "proof-packet")]
        out: PathBuf,
        #[arg(long, default_value = "bbb-alzheimer")]
        template: String,
        #[arg(long)]
        gold: Option<PathBuf>,
        #[arg(long)]
        record_proof_state: bool,
        #[arg(long)]
        json: bool,
    },
    Repo {
        #[command(subcommand)]
        action: RepoAction,
    },
    Serve {
        // Positional frontier may be omitted when --frontiers or --setup
        // is supplied instead.
        #[arg(required_unless_present_any = ["frontiers", "setup"])]
        frontier: Option<PathBuf>,
        #[arg(long)]
        frontiers: Option<PathBuf>,
        #[arg(short, long)]
        backend: Option<String>,
        #[arg(long)]
        http: Option<u16>,
        #[arg(long)]
        setup: bool,
        #[arg(long)]
        check_tools: bool,
        #[arg(long)]
        json: bool,
        #[arg(long)]
        workbench: bool,
    },
    Status {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    Log {
        frontier: PathBuf,
        #[arg(long, default_value = "20")]
        limit: usize,
        #[arg(long)]
        kind: Option<String>,
        #[arg(long)]
        json: bool,
    },
    Inbox {
        frontier: PathBuf,
        #[arg(long)]
        kind: Option<String>,
        #[arg(long, default_value = "30")]
        limit: usize,
        #[arg(long)]
        json: bool,
    },
    Ask {
        frontier: PathBuf,
        // Everything after the frontier path is collected as the question.
        #[arg(trailing_var_arg = true)]
        question: Vec<String>,
        #[arg(long)]
        json: bool,
    },
    Stats {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    Search {
        query: String,
        #[arg(long)]
        source: Option<PathBuf>,
        #[arg(long)]
        entity: Option<String>,
        #[arg(long)]
        r#type: Option<String>,
        #[arg(long)]
        all: Option<PathBuf>,
        #[arg(long, default_value = "20")]
        limit: usize,
        #[arg(long)]
        json: bool,
    },
    Tensions {
        source: PathBuf,
        #[arg(long)]
        both_high: bool,
        #[arg(long)]
        cross_domain: bool,
        #[arg(long, default_value = "20")]
        top: usize,
        #[arg(long)]
        json: bool,
    },
    Gaps {
        #[command(subcommand)]
        action: GapsAction,
    },
    Bridge {
        #[arg(required = true)]
        inputs: Vec<PathBuf>,
        // Explicit-value bool flag: parsed as `--novelty true|false`,
        // defaulting to true (ArgAction::Set, not a presence flag).
        #[arg(long, default_value = "true", action = clap::ArgAction::Set)]
        novelty: bool,
        #[arg(long, default_value = "30")]
        top: usize,
    },
    Export {
        frontier: PathBuf,
        #[arg(short, long, default_value = "csv")]
        format: String,
        #[arg(short, long)]
        output: Option<PathBuf>,
    },
    Packet {
        #[command(subcommand)]
        action: PacketAction,
    },
    Verify {
        path: PathBuf,
        #[arg(long)]
        json: bool,
    },
    Bench {
        frontier: Option<PathBuf>,
        #[arg(long)]
        gold: Option<PathBuf>,
        #[arg(long)]
        candidate: Option<PathBuf>,
        #[arg(long)]
        sources: Option<PathBuf>,
        #[arg(long)]
        threshold: Option<f64>,
        #[arg(long)]
        report: Option<PathBuf>,
        #[arg(long)]
        entity_gold: Option<PathBuf>,
        #[arg(long)]
        link_gold: Option<PathBuf>,
        #[arg(long)]
        suite: Option<PathBuf>,
        #[arg(long)]
        suite_ready: bool,
        #[arg(long)]
        min_f1: Option<f64>,
        #[arg(long)]
        min_precision: Option<f64>,
        #[arg(long)]
        min_recall: Option<f64>,
        #[arg(long)]
        no_thresholds: bool,
        #[arg(long)]
        json: bool,
    },
    Conformance {
        #[arg(default_value = "tests/conformance")]
        dir: PathBuf,
    },
    Version,
    // --- Nested subcommand groups ---
    Sign {
        #[command(subcommand)]
        action: SignAction,
    },
    Actor {
        #[command(subcommand)]
        action: ActorAction,
    },
    Federation {
        #[command(subcommand)]
        action: FederationAction,
    },
    Causal {
        #[command(subcommand)]
        action: CausalAction,
    },
    Frontier {
        #[command(subcommand)]
        action: FrontierAction,
    },
    Queue {
        #[command(subcommand)]
        action: QueueAction,
    },
    Registry {
        #[command(subcommand)]
        action: RegistryAction,
    },
    Init {
        #[arg(default_value = ".")]
        path: PathBuf,
        #[arg(long, default_value = "unnamed")]
        name: String,
        #[arg(long, default_value = "default")]
        template: String,
        #[arg(long)]
        no_git: bool,
        #[arg(long)]
        json: bool,
    },
    Quickstart {
        #[arg(default_value = "demo")]
        path: PathBuf,
        #[arg(long, default_value = "Quickstart frontier")]
        name: String,
        #[arg(long, default_value = "reviewer:you")]
        reviewer: String,
        #[arg(long)]
        assertion: Option<String>,
        #[arg(long)]
        keys_out: Option<PathBuf>,
        #[arg(long)]
        json: bool,
    },
    Import {
        frontier: PathBuf,
        #[arg(long)]
        into: Option<PathBuf>,
    },
    Diff {
        target: String,
        frontier_b: Option<PathBuf>,
        #[arg(long)]
        frontier: Option<PathBuf>,
        #[arg(long, default_value = "reviewer:preview")]
        reviewer: String,
        #[arg(long)]
        json: bool,
        #[arg(long)]
        quiet: bool,
    },
    Proposals {
        #[command(subcommand)]
        action: ProposalAction,
    },
    ArtifactToState {
        frontier: PathBuf,
        packet: PathBuf,
        #[arg(long)]
        actor: String,
        #[arg(long)]
        apply_artifacts: bool,
        #[arg(long)]
        json: bool,
    },
    BridgeKit {
        #[command(subcommand)]
        action: BridgeKitAction,
    },
    SourceAdapter {
        #[command(subcommand)]
        action: SourceAdapterAction,
    },
    RuntimeAdapter {
        #[command(subcommand)]
        action: RuntimeAdapterAction,
    },
    Finding {
        #[command(subcommand)]
        command: FindingCommands,
    },
    Link {
        #[command(subcommand)]
        action: LinkAction,
    },
    Workbench {
        #[arg(default_value = ".")]
        path: PathBuf,
        #[arg(long, default_value_t = 3850)]
        port: u16,
        #[arg(long)]
        no_open: bool,
    },
    Bridges {
        #[command(subcommand)]
        action: BridgesAction,
    },
    Entity {
        #[command(subcommand)]
        action: EntityAction,
    },
    // --- Per-finding review/annotation operations ---
    Review {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        status: Option<String>,
        #[arg(long)]
        reason: Option<String>,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    Note {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        text: String,
        #[arg(long)]
        author: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    Caveat {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        text: String,
        #[arg(long)]
        author: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    Revise {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        confidence: f64,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    Reject {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    History {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        json: bool,
        #[arg(long, value_name = "RFC3339_TIMESTAMP")]
        as_of: Option<String>,
    },
    ImportEvents {
        source: PathBuf,
        #[arg(long)]
        into: PathBuf,
        #[arg(long)]
        json: bool,
    },
    Retract {
        source: PathBuf,
        finding_id: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    EntityAdd {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        entity: String,
        #[arg(long)]
        entity_type: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    EntityResolve {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        entity: String,
        #[arg(long)]
        source: String,
        #[arg(long)]
        id: String,
        #[arg(long)]
        confidence: f64,
        #[arg(long)]
        matched_name: Option<String>,
        #[arg(long, default_value = "manual")]
        resolution_method: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    SourceFetch {
        identifier: String,
        #[arg(long)]
        cache: Option<PathBuf>,
        #[arg(long)]
        out: Option<PathBuf>,
        #[arg(long)]
        refresh: bool,
        #[arg(long)]
        json: bool,
    },
    SpanRepair {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        section: String,
        #[arg(long)]
        text: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    LocatorRepair {
        frontier: PathBuf,
        atom_id: String,
        #[arg(long)]
        locator: Option<String>,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    Propagate {
        frontier: PathBuf,
        #[arg(long)]
        retract: Option<String>,
        #[arg(long)]
        reduce_confidence: Option<String>,
        #[arg(long)]
        to: Option<f64>,
        #[arg(short, long)]
        output: Option<PathBuf>,
    },
    // --- Replication / evidence-deposit records ---
    Replicate {
        frontier: PathBuf,
        target: String,
        #[arg(long)]
        outcome: String,
        #[arg(long)]
        by: String,
        #[arg(long)]
        conditions: String,
        #[arg(long)]
        source_title: String,
        #[arg(long)]
        doi: Option<String>,
        #[arg(long)]
        pmid: Option<String>,
        #[arg(long)]
        sample_size: Option<String>,
        #[arg(long, default_value = "")]
        note: String,
        #[arg(long)]
        previous_attempt: Option<String>,
        #[arg(long, default_value_t = false)]
        no_cascade: bool,
        #[arg(long)]
        json: bool,
    },
    Replications {
        frontier: PathBuf,
        #[arg(long)]
        target: Option<String>,
        #[arg(long)]
        json: bool,
    },
    DatasetAdd {
        frontier: PathBuf,
        #[arg(long)]
        name: String,
        #[arg(long)]
        version: Option<String>,
        #[arg(long)]
        content_hash: String,
        #[arg(long)]
        url: Option<String>,
        #[arg(long)]
        license: Option<String>,
        #[arg(long)]
        source_title: String,
        #[arg(long)]
        doi: Option<String>,
        #[arg(long)]
        row_count: Option<u64>,
        #[arg(long)]
        json: bool,
    },
    Datasets {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    CodeAdd {
        frontier: PathBuf,
        #[arg(long)]
        language: String,
        #[arg(long)]
        repo_url: Option<String>,
        #[arg(long)]
        commit: Option<String>,
        #[arg(long)]
        path: String,
        #[arg(long)]
        content_hash: String,
        #[arg(long)]
        line_start: Option<u32>,
        #[arg(long)]
        line_end: Option<u32>,
        #[arg(long)]
        entry_point: Option<String>,
        #[arg(long)]
        json: bool,
    },
    CodeArtifacts {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    ArtifactAdd {
        frontier: PathBuf,
        #[arg(long)]
        kind: String,
        #[arg(long)]
        name: String,
        #[arg(long)]
        file: Option<PathBuf>,
        #[arg(long)]
        url: Option<String>,
        #[arg(long)]
        content_hash: Option<String>,
        #[arg(long)]
        media_type: Option<String>,
        #[arg(long)]
        license: Option<String>,
        #[arg(long)]
        source_title: Option<String>,
        #[arg(long)]
        source_url: Option<String>,
        #[arg(long)]
        doi: Option<String>,
        #[arg(long)]
        target: Vec<String>,
        #[arg(long)]
        metadata: Vec<String>,
        #[arg(long, default_value = "public")]
        access_tier: String,
        #[arg(long, default_value = "reviewer:manual")]
        deposited_by: String,
        #[arg(long, default_value = "artifact deposit")]
        reason: String,
        #[arg(long)]
        json: bool,
    },
    Artifacts {
        frontier: PathBuf,
        #[arg(long)]
        target: Option<String>,
        #[arg(long)]
        json: bool,
    },
    ArtifactAudit {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    DecisionBrief {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    TrialSummary {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    SourceVerification {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    SourceIngestPlan {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    ClinicalTrialImport {
        frontier: PathBuf,
        nct_id: String,
        #[arg(long)]
        input_json: Option<PathBuf>,
        #[arg(long)]
        target: Vec<String>,
        #[arg(long, default_value = "reviewer:manual")]
        deposited_by: String,
        #[arg(long, default_value = "clinical trial record import")]
        reason: String,
        #[arg(long, default_value = "ClinicalTrials.gov public record")]
        license: String,
        #[arg(long)]
        json: bool,
    },
    NegativeResultAdd {
        frontier: PathBuf,
        #[arg(long)]
        kind: String,
        #[arg(long)]
        deposited_by: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        conditions_text: String,
        #[arg(long, default_value = "")]
        notes: String,
        #[arg(long)]
        target: Vec<String>,
        #[arg(long)]
        endpoint: Option<String>,
        #[arg(long)]
        intervention: Option<String>,
        #[arg(long)]
        comparator: Option<String>,
        #[arg(long)]
        population: Option<String>,
        #[arg(long)]
        n_enrolled: Option<u32>,
        #[arg(long)]
        power: Option<f64>,
        #[arg(long)]
        ci_lower: Option<f64>,
        #[arg(long)]
        ci_upper: Option<f64>,
        #[arg(long)]
        effect_size_threshold: Option<f64>,
        #[arg(long)]
        registry_id: Option<String>,
        #[arg(long)]
        reagent: Option<String>,
        #[arg(long)]
        observation: Option<String>,
        #[arg(long)]
        attempts: Option<u32>,
        #[arg(long)]
        source_title: String,
        #[arg(long)]
        doi: Option<String>,
        #[arg(long)]
        url: Option<String>,
        #[arg(long)]
        year: Option<i32>,
        #[arg(long)]
        json: bool,
    },
    NegativeResults {
        frontier: PathBuf,
        #[arg(long)]
        target: Option<String>,
        #[arg(long)]
        json: bool,
    },
    TrajectoryCreate {
        frontier: PathBuf,
        #[arg(long)]
        deposited_by: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        target: Vec<String>,
        #[arg(long, default_value = "")]
        notes: String,
        #[arg(long)]
        json: bool,
    },
    TrajectoryStep {
        frontier: PathBuf,
        trajectory_id: String,
        #[arg(long)]
        kind: String,
        #[arg(long)]
        description: String,
        #[arg(long)]
        actor: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        reference: Vec<String>,
        #[arg(long)]
        json: bool,
    },
    Trajectories {
        frontier: PathBuf,
        #[arg(long)]
        target: Option<String>,
        #[arg(long)]
        json: bool,
    },
    TierSet {
        frontier: PathBuf,
        #[arg(long)]
        object_type: String,
        #[arg(long)]
        object_id: String,
        #[arg(long)]
        tier: String,
        #[arg(long)]
        actor: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        json: bool,
    },
    // --- Prediction / calibration ---
    Predict {
        frontier: PathBuf,
        #[arg(long)]
        by: String,
        #[arg(long)]
        claim: String,
        #[arg(long)]
        criterion: String,
        #[arg(long)]
        resolves_by: Option<String>,
        #[arg(long)]
        confidence: f64,
        #[arg(long, default_value = "")]
        target: String,
        #[arg(long, default_value = "affirmed")]
        outcome: String,
        #[arg(long, default_value = "")]
        conditions: String,
        #[arg(long)]
        json: bool,
    },
    Resolve {
        frontier: PathBuf,
        prediction: String,
        #[arg(long)]
        outcome: String,
        #[arg(long)]
        matched: bool,
        #[arg(long)]
        by: String,
        #[arg(long, default_value = "1.0")]
        confidence: f64,
        #[arg(long, default_value = "")]
        source_title: String,
        #[arg(long)]
        doi: Option<String>,
        #[arg(long)]
        json: bool,
    },
    Predictions {
        frontier: PathBuf,
        #[arg(long)]
        by: Option<String>,
        #[arg(long)]
        open: bool,
        #[arg(long)]
        json: bool,
    },
    Calibration {
        frontier: PathBuf,
        #[arg(long)]
        actor: Option<String>,
        #[arg(long)]
        json: bool,
    },
    PredictionsExpire {
        frontier: PathBuf,
        #[arg(long)]
        now: Option<String>,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    Consensus {
        frontier: PathBuf,
        target: String,
        #[arg(long, default_value = "composite")]
        weighting: String,
        #[arg(long)]
        causal_claim: Option<String>,
        #[arg(long)]
        causal_grade_min: Option<String>,
        #[arg(long)]
        json: bool,
    },

    Ingest {
        path: String,
        #[arg(long)]
        frontier: PathBuf,
        #[arg(short, long)]
        backend: Option<String>,
        #[arg(long)]
        actor: Option<String>,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },

    Propose {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        status: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },

    Accept {
        frontier: PathBuf,
        proposal_id: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        json: bool,
    },

    Attest {
        frontier: PathBuf,
        #[arg(long)]
        event: Option<String>,
        #[arg(long)]
        attester: Option<String>,
        #[arg(long)]
        scope_note: Option<String>,
        #[arg(long)]
        proof_id: Option<String>,
        #[arg(long)]
        signature: Option<String>,
        #[arg(long)]
        key: Option<PathBuf>,
        #[arg(long)]
        json: bool,
    },

    Lineage {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long, value_name = "RFC3339_TIMESTAMP")]
        as_of: Option<String>,
        #[arg(long)]
        json: bool,
    },

    Carina {
        #[command(subcommand)]
        action: CarinaAction,
    },

    Atlas {
        #[command(subcommand)]
        action: AtlasAction,
    },

    Constellation {
        #[command(subcommand)]
        action: ConstellationAction,
    },
}
1757
// Subcommands under `vela atlas`: create, materialize, serve, and update
// an atlas built from one or more frontiers (stored under --atlases-root).
#[derive(Subcommand)]
enum AtlasAction {
    Init {
        name: String,
        // Comma-delimited and/or repeated; at least one value per use.
        #[arg(long, value_delimiter = ',', num_args = 1..)]
        frontiers: Vec<PathBuf>,
        #[arg(long, default_value = "general")]
        domain: String,
        #[arg(long)]
        scope_note: Option<String>,
        #[arg(long, default_value = "atlases")]
        atlases_root: PathBuf,
        #[arg(long)]
        json: bool,
    },
    Materialize {
        name: String,
        #[arg(long, default_value = "atlases")]
        atlases_root: PathBuf,
        #[arg(long)]
        json: bool,
    },
    Serve {
        name: String,
        #[arg(long, default_value = "atlases")]
        atlases_root: PathBuf,
        #[arg(long, default_value_t = 3848)]
        port: u16,
        #[arg(long)]
        no_open: bool,
    },
    Update {
        name: String,
        #[arg(long, value_delimiter = ',')]
        add_frontier: Vec<PathBuf>,
        #[arg(long, value_delimiter = ',')]
        remove_vfr_id: Vec<String>,
        #[arg(long, default_value = "atlases")]
        atlases_root: PathBuf,
        #[arg(long)]
        json: bool,
    },
}
1834
// Subcommands under `vela constellation`: like `AtlasAction`, but one
// level up — a constellation is built from atlases rather than frontiers.
#[derive(Subcommand)]
enum ConstellationAction {
    Init {
        name: String,
        #[arg(long, value_delimiter = ',', num_args = 1..)]
        atlases: Vec<PathBuf>,
        #[arg(long)]
        scope_note: Option<String>,
        #[arg(long, default_value = "constellations")]
        constellations_root: PathBuf,
        #[arg(long)]
        json: bool,
    },
    Materialize {
        name: String,
        #[arg(long, default_value = "constellations")]
        constellations_root: PathBuf,
        #[arg(long)]
        json: bool,
    },
    Serve {
        name: String,
        #[arg(long, default_value = "constellations")]
        constellations_root: PathBuf,
        #[arg(long, default_value_t = 3849)]
        port: u16,
        #[arg(long)]
        no_open: bool,
    },
}
1879
// Subcommands under `vela carina` (handled by the `carina_validate` module).
#[derive(Subcommand)]
enum CarinaAction {
    Validate {
        path: PathBuf,
        #[arg(long)]
        primitive: Option<String>,
        #[arg(long)]
        json: bool,
    },
    List {
        #[arg(long)]
        json: bool,
    },
    // Print the schema for a single named primitive.
    Schema { primitive: String },
}
1909
// Subcommands under `vela packet`: inspect or validate a packet file.
#[derive(Subcommand)]
enum PacketAction {
    Inspect {
        path: PathBuf,
        #[arg(long)]
        json: bool,
    },
    Validate {
        path: PathBuf,
        #[arg(long)]
        json: bool,
    },
}
1925
// Subcommands under `vela sign`: key generation, signing, verification,
// and per-finding signature thresholds.
#[derive(Subcommand)]
enum SignAction {
    GenerateKeypair {
        #[arg(long, default_value = ".vela/keys")]
        out: PathBuf,
        #[arg(long)]
        json: bool,
    },
    Apply {
        frontier: PathBuf,
        #[arg(long)]
        private_key: PathBuf,
        #[arg(long)]
        json: bool,
    },
    Verify {
        frontier: PathBuf,
        #[arg(long)]
        public_key: Option<PathBuf>,
        #[arg(long)]
        json: bool,
    },
    ThresholdSet {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        to: u32,
        #[arg(long)]
        json: bool,
    },
}
1966
// Subcommands under `vela actor`: register and list actors on a frontier.
#[derive(Subcommand)]
enum ActorAction {
    Add {
        frontier: PathBuf,
        id: String,
        #[arg(long)]
        pubkey: String,
        #[arg(long)]
        tier: Option<String>,
        #[arg(long)]
        orcid: Option<String>,
        #[arg(long)]
        clearance: Option<String>,
        #[arg(long)]
        json: bool,
    },
    List {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
}
2003
// Subcommands under `vela causal`: audit, query, and intervene on the
// frontier's causal graph.
#[derive(Subcommand)]
enum CausalAction {
    Audit {
        frontier: PathBuf,
        #[arg(long)]
        problems_only: bool,
        #[arg(long)]
        json: bool,
    },
    Effect {
        frontier: PathBuf,
        source: String,
        // Target node/variable the effect of `source` is queried on.
        #[arg(long)]
        on: String,
        #[arg(long)]
        json: bool,
    },
    Graph {
        frontier: PathBuf,
        #[arg(long)]
        node: Option<String>,
        #[arg(long)]
        json: bool,
    },
    Counterfactual {
        frontier: PathBuf,
        intervene_on: String,
        #[arg(long)]
        set_to: f64,
        #[arg(long)]
        target: String,
        #[arg(long)]
        json: bool,
    },
}
2070
// Subcommands under `vela bridges`: derive bridges between two frontiers,
// then list/show/confirm/refute them.
#[derive(Subcommand)]
enum BridgesAction {
    Derive {
        frontier_a: PathBuf,
        #[arg(long, default_value = "a")]
        label_a: String,
        frontier_b: PathBuf,
        #[arg(long, default_value = "b")]
        label_b: String,
        #[arg(long)]
        json: bool,
    },
    List {
        frontier: PathBuf,
        #[arg(long)]
        status: Option<String>,
        #[arg(long)]
        json: bool,
    },
    Show {
        frontier: PathBuf,
        bridge_id: String,
        #[arg(long)]
        json: bool,
    },
    Confirm {
        frontier: PathBuf,
        bridge_id: String,
        #[arg(long)]
        reviewer: Option<String>,
        #[arg(long)]
        note: Option<String>,
        #[arg(long)]
        json: bool,
    },
    Refute {
        frontier: PathBuf,
        bridge_id: String,
        #[arg(long)]
        reviewer: Option<String>,
        #[arg(long)]
        note: Option<String>,
        #[arg(long)]
        json: bool,
    },
}
2138
2139#[derive(Subcommand)]
2140enum FederationAction {
2141 PeerAdd {
2145 frontier: PathBuf,
2146 id: String,
2148 #[arg(long)]
2150 url: String,
2151 #[arg(long)]
2153 pubkey: String,
2154 #[arg(long, default_value = "")]
2156 note: String,
2157 #[arg(long)]
2158 json: bool,
2159 },
2160 PeerList {
2162 frontier: PathBuf,
2163 #[arg(long)]
2164 json: bool,
2165 },
2166 PeerRemove {
2170 frontier: PathBuf,
2171 id: String,
2172 #[arg(long)]
2173 json: bool,
2174 },
2175 Sync {
2192 frontier: PathBuf,
2193 peer_id: String,
2195 #[arg(long)]
2197 url: Option<String>,
2198 #[arg(long)]
2202 via_hub: bool,
2203 #[arg(long)]
2206 vfr_id: Option<String>,
2207 #[arg(long)]
2214 allow_cross_vfr: bool,
2215 #[arg(long)]
2217 dry_run: bool,
2218 #[arg(long)]
2219 json: bool,
2220 },
2221 PushResolution {
2234 frontier: PathBuf,
2235 conflict_event_id: String,
2239 #[arg(long = "to")]
2241 to: String,
2242 #[arg(long)]
2246 key: Option<PathBuf>,
2247 #[arg(long)]
2250 vfr_id: Option<String>,
2251 #[arg(long)]
2252 json: bool,
2253 },
2254}
2255
// Subcommands under `vela frontier`: create/materialize a frontier and
// manage its VFR dependencies; `Diff` reports changes over a time window.
#[derive(Subcommand)]
enum FrontierAction {
    New {
        path: PathBuf,
        #[arg(long)]
        name: String,
        #[arg(long, default_value = "")]
        description: String,
        #[arg(long)]
        force: bool,
        #[arg(long)]
        json: bool,
    },
    Materialize {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    AddDep {
        frontier: PathBuf,
        vfr_id: String,
        #[arg(long)]
        locator: String,
        #[arg(long)]
        snapshot: String,
        #[arg(long)]
        name: Option<String>,
        #[arg(long)]
        json: bool,
    },
    ListDeps {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    RemoveDep {
        frontier: PathBuf,
        vfr_id: String,
        #[arg(long)]
        json: bool,
    },
    RefreshDeps {
        frontier: PathBuf,
        // Registry/hub endpoint to refresh dependency snapshots from.
        #[arg(long, default_value = "https://vela-hub.fly.dev")]
        from: String,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    Diff {
        frontier: PathBuf,
        #[arg(long)]
        since: Option<String>,
        #[arg(long)]
        week: Option<String>,
        #[arg(long)]
        json: bool,
    },
}
2366
// Subcommands under `vela repo`: status and diagnostics for a frontier repo.
#[derive(Subcommand)]
enum RepoAction {
    Status {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    Doctor {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
}
2386
// Subcommands under `vela queue`: list, sign, or clear queued items
// (queue file location optional — presumably a default is resolved
// elsewhere when --queue-file is omitted; confirm against the handler).
#[derive(Subcommand)]
enum QueueAction {
    List {
        #[arg(long)]
        queue_file: Option<PathBuf>,
        #[arg(long)]
        json: bool,
    },
    Sign {
        #[arg(long)]
        actor: String,
        #[arg(long)]
        key: PathBuf,
        #[arg(long)]
        queue_file: Option<PathBuf>,
        // `--all` is accepted as an alias for `--yes-to-all`.
        #[arg(long, alias = "all")]
        yes_to_all: bool,
        #[arg(long)]
        json: bool,
    },
    Clear {
        #[arg(long)]
        queue_file: Option<PathBuf>,
        #[arg(long)]
        json: bool,
    },
}
2426
// Subcommands under `vela registry`: publish, discover, mirror, and pull
// frontiers by VFR id against a registry/hub endpoint.
#[derive(Subcommand)]
enum RegistryAction {
    List {
        #[arg(long)]
        from: Option<String>,
        #[arg(long)]
        json: bool,
    },
    Publish {
        frontier: PathBuf,
        #[arg(long)]
        owner: String,
        #[arg(long)]
        key: PathBuf,
        #[arg(long)]
        locator: Option<String>,
        #[arg(long)]
        to: Option<String>,
        #[arg(long)]
        json: bool,
    },
    DependsOn {
        vfr_id: String,
        #[arg(long, default_value = "https://vela-hub.fly.dev")]
        from: String,
        #[arg(long)]
        json: bool,
    },
    Mirror {
        vfr_id: String,
        #[arg(long)]
        from: String,
        #[arg(long)]
        to: String,
        #[arg(long)]
        json: bool,
    },
    Pull {
        vfr_id: String,
        #[arg(long)]
        from: Option<String>,
        #[arg(long)]
        out: PathBuf,
        // With --transitive, dependencies are pulled too, bounded by --depth.
        #[arg(long)]
        transitive: bool,
        #[arg(long, default_value = "4")]
        depth: usize,
        #[arg(long)]
        json: bool,
    },
}
2519
// Subcommands under `vela gaps`: rank top gaps, optionally per domain.
#[derive(Subcommand)]
enum GapsAction {
    Rank {
        frontier: PathBuf,
        #[arg(long, default_value = "10")]
        top: usize,
        #[arg(long)]
        domain: Option<String>,
        #[arg(long)]
        json: bool,
    },
}
2533
// Subcommands under `vela link`: add a typed link between two findings.
#[derive(Subcommand)]
enum LinkAction {
    Add {
        frontier: PathBuf,
        #[arg(long)]
        from: String,
        #[arg(long)]
        to: String,
        // `type` is a Rust keyword, hence the raw identifier.
        #[arg(long, default_value = "supports")]
        r#type: String,
        #[arg(long, default_value = "")]
        note: String,
        #[arg(long, default_value = "reviewer")]
        inferred_by: String,
        #[arg(long)]
        no_check_target: bool,
        #[arg(long)]
        json: bool,
    },
}
2572
// Subcommands under `vela entity`: resolve entities on a frontier, or list.
#[derive(Subcommand)]
enum EntityAction {
    Resolve {
        frontier: PathBuf,
        #[arg(long)]
        force: bool,
        #[arg(long)]
        json: bool,
    },
    List {
        #[arg(long)]
        json: bool,
    },
}
2595
// Subcommands for `vela finding`.
// NOTE: plain `//` comments (not `///`) so clap's generated --help is unchanged.
#[derive(Subcommand)]
enum FindingCommands {
    // `vela finding add`: draft (and optionally apply) a new finding.
    // The handler validates `--type`, `--evidence-type` and `--source-type`
    // against the enumerations in `bundle`, and each entity type against
    // `bundle::VALID_ENTITY_TYPES`.
    Add {
        // Positional path to the frontier to modify.
        frontier: PathBuf,
        // `--assertion`: the finding's assertion text (required).
        #[arg(long)]
        assertion: String,
        // `--type`: assertion type (default "mechanism"); raw identifier
        // because `type` is a Rust keyword — the CLI flag is `--type`.
        #[arg(long, default_value = "mechanism")]
        r#type: String,
        // `--source`: human-readable source label (default "manual finding").
        #[arg(long, default_value = "manual finding")]
        source: String,
        // `--source-type`: provenance source type (default "expert_assertion").
        #[arg(long, default_value = "expert_assertion")]
        source_type: String,
        // `--author`: who is adding the finding (required).
        #[arg(long)]
        author: String,
        // `--confidence`: initial confidence (default 0.3).
        #[arg(long, default_value = "0.3")]
        confidence: f64,
        // `--evidence-type`: kind of evidence (default "theoretical").
        #[arg(long, default_value = "theoretical")]
        evidence_type: String,
        // `--entities`: entity list as a single string; parsed by
        // `parse_entities` in the handler into (name, type) pairs.
        #[arg(long, default_value = "")]
        entities: String,
        // `--entities-reviewed`: mark the entity list as human-reviewed.
        #[arg(long)]
        entities_reviewed: bool,
        // `--evidence-span`: repeatable; parsed by `parse_evidence_spans`.
        #[arg(long)]
        evidence_span: Vec<String>,
        // `--gap`: flag the finding as a knowledge gap.
        #[arg(long)]
        gap: bool,
        // `--negative-space`: flag the finding as negative space.
        #[arg(long)]
        negative_space: bool,
        // Optional bibliographic provenance fields.
        #[arg(long)]
        doi: Option<String>,
        #[arg(long)]
        pmid: Option<String>,
        #[arg(long)]
        year: Option<i32>,
        #[arg(long)]
        journal: Option<String>,
        #[arg(long)]
        url: Option<String>,
        // `--source-authors`: semicolon-separated list; the handler splits
        // on ';', trims, and drops empty entries.
        #[arg(long)]
        source_authors: Option<String>,
        // `--conditions-text`: free-text experimental conditions.
        #[arg(long)]
        conditions_text: Option<String>,
        // `--species`: semicolon-separated list, split like --source-authors.
        #[arg(long)]
        species: Option<String>,
        // Study-context boolean flags.
        #[arg(long)]
        in_vivo: bool,
        #[arg(long)]
        in_vitro: bool,
        #[arg(long)]
        human_data: bool,
        #[arg(long)]
        clinical_trial: bool,
        // `--json`: emit machine-readable JSON output.
        #[arg(long)]
        json: bool,
        // `--apply`: actually apply the change instead of drafting only.
        #[arg(long)]
        apply: bool,
    },
    // `vela finding supersede`: replace an existing finding with a new one.
    // Intentionally narrower than Add: the handler hardcodes
    // entities_reviewed=false, empty evidence spans, gap=false and
    // negative_space=false for the superseding draft.
    Supersede {
        // Positional path to the frontier to modify.
        frontier: PathBuf,
        // Positional id of the finding being superseded.
        old_id: String,
        // `--assertion`: assertion text of the replacement finding.
        #[arg(long)]
        assertion: String,
        // `--type`: assertion type (default "mechanism"); see Add.
        #[arg(long, default_value = "mechanism")]
        r#type: String,
        // `--source`: source label (default "manual finding").
        #[arg(long, default_value = "manual finding")]
        source: String,
        // `--source-type`: provenance type (default "expert_assertion").
        #[arg(long, default_value = "expert_assertion")]
        source_type: String,
        // `--author`: who is superseding (required).
        #[arg(long)]
        author: String,
        // `--reason`: why the old finding is being superseded (required).
        #[arg(long)]
        reason: String,
        // `--confidence`: default 0.5 here (vs 0.3 for Add).
        #[arg(long, default_value = "0.5")]
        confidence: f64,
        // `--evidence-type`: default "experimental" here (vs "theoretical").
        #[arg(long, default_value = "experimental")]
        evidence_type: String,
        // `--entities`: parsed by `parse_entities`, as in Add.
        #[arg(long, default_value = "")]
        entities: String,
        // Optional bibliographic provenance fields (as in Add).
        #[arg(long)]
        doi: Option<String>,
        #[arg(long)]
        pmid: Option<String>,
        #[arg(long)]
        year: Option<i32>,
        #[arg(long)]
        journal: Option<String>,
        #[arg(long)]
        url: Option<String>,
        // `--source-authors`: semicolon-separated (split in handler).
        #[arg(long)]
        source_authors: Option<String>,
        #[arg(long)]
        conditions_text: Option<String>,
        // `--species`: semicolon-separated (split in handler).
        #[arg(long)]
        species: Option<String>,
        // Study-context boolean flags.
        #[arg(long)]
        in_vivo: bool,
        #[arg(long)]
        in_vitro: bool,
        #[arg(long)]
        human_data: bool,
        #[arg(long)]
        clinical_trial: bool,
        // `--json`: emit machine-readable JSON output.
        #[arg(long)]
        json: bool,
        // `--apply`: apply the supersession instead of drafting only.
        #[arg(long)]
        apply: bool,
    },
    // `vela finding causal-set`: set a finding's causal claim/grade.
    // The handler validates `--claim` against bundle::VALID_CAUSAL_CLAIMS
    // and `--grade` against bundle::VALID_CAUSAL_EVIDENCE_GRADES.
    CausalSet {
        // Positional path to the frontier to modify.
        frontier: PathBuf,
        // Positional id of the finding to update.
        finding_id: String,
        // `--claim`: causal claim label (required).
        #[arg(long)]
        claim: String,
        // `--grade`: optional causal evidence grade.
        #[arg(long)]
        grade: Option<String>,
        // `--actor`: who is making the change (required).
        #[arg(long)]
        actor: String,
        // `--reason`: justification for the change (required).
        #[arg(long)]
        reason: String,
        // `--json`: emit machine-readable JSON output.
        #[arg(long)]
        json: bool,
    },
}
2786
// Subcommands for `vela proposals`.
// NOTE: plain `//` comments (not `///`) so clap's generated --help is unchanged.
#[derive(Subcommand)]
enum ProposalAction {
    // `vela proposals list`: list proposals in a frontier.
    List {
        // Positional path to the frontier.
        frontier: PathBuf,
        // `--status`: optional status filter.
        #[arg(long)]
        status: Option<String>,
        // `--json`: emit machine-readable JSON output.
        #[arg(long)]
        json: bool,
    },
    // `vela proposals show`: show one proposal by id.
    Show {
        frontier: PathBuf,
        // Positional proposal id.
        proposal_id: String,
        #[arg(long)]
        json: bool,
    },
    // `vela proposals preview`: preview the effect of applying a proposal.
    Preview {
        frontier: PathBuf,
        proposal_id: String,
        // `--reviewer`: identity used for the preview (default "reviewer:preview").
        #[arg(long, default_value = "reviewer:preview")]
        reviewer: String,
        #[arg(long)]
        json: bool,
    },
    // `vela proposals import`: import proposals from a file into a frontier.
    Import {
        frontier: PathBuf,
        // Positional path to the proposals file to import.
        source: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // `vela proposals validate`: validate a proposals file without a frontier.
    Validate {
        source: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // `vela proposals export`: export proposals to a file.
    Export {
        frontier: PathBuf,
        // Positional output path.
        output: PathBuf,
        // `--status`: optional status filter for which proposals to export.
        #[arg(long)]
        status: Option<String>,
        #[arg(long)]
        json: bool,
    },
    // `vela proposals accept`: accept (and apply) a proposal.
    Accept {
        frontier: PathBuf,
        proposal_id: String,
        // `--reviewer`: who is accepting (required).
        #[arg(long)]
        reviewer: String,
        // `--reason`: justification (required).
        #[arg(long)]
        reason: String,
        #[arg(long)]
        json: bool,
    },
    // `vela proposals reject`: reject a proposal; mirrors Accept's arguments.
    Reject {
        frontier: PathBuf,
        proposal_id: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        json: bool,
    },
}
2858
// Subcommands for `vela source-adapter`.
// NOTE: plain `//` comments (not `///`) so clap's generated --help is unchanged.
#[derive(Subcommand)]
enum SourceAdapterAction {
    // `vela source-adapter run`: run a named source adapter against a frontier.
    Run {
        // Positional path to the frontier.
        frontier: PathBuf,
        // Positional name of the adapter to run.
        adapter: String,
        // `--actor`: identity performing the run (required).
        #[arg(long)]
        actor: String,
        // `--entry`: repeatable flag; each occurrence appends one entry.
        // Field is named `entries` but the CLI flag is `--entry`.
        #[arg(long = "entry")]
        entries: Vec<String>,
        // `--priority`: optional priority selector.
        #[arg(long)]
        priority: Option<String>,
        // `--include-excluded`: also process entries normally excluded.
        #[arg(long)]
        include_excluded: bool,
        // `--allow-partial`: tolerate partial results instead of failing.
        #[arg(long)]
        allow_partial: bool,
        // `--dry-run`: report what would happen without writing.
        #[arg(long)]
        dry_run: bool,
        // `--input-dir`: optional directory of input files for the adapter.
        #[arg(long)]
        input_dir: Option<PathBuf>,
        // `--apply-artifacts`: also apply produced artifacts to the frontier.
        #[arg(long)]
        apply_artifacts: bool,
        // `--json`: emit machine-readable JSON output.
        #[arg(long)]
        json: bool,
    },
}
2896
// Subcommands for `vela runtime-adapter`.
// NOTE: plain `//` comments (not `///`) so clap's generated --help is unchanged.
#[derive(Subcommand)]
enum RuntimeAdapterAction {
    // `vela runtime-adapter run`: run a named runtime adapter on one input file.
    Run {
        // Positional path to the frontier.
        frontier: PathBuf,
        // Positional name of the adapter to run.
        adapter: String,
        // `--input`: path to the input file (required; unlike the source
        // adapter, which takes an optional --input-dir).
        #[arg(long)]
        input: PathBuf,
        // `--actor`: identity performing the run (required).
        #[arg(long)]
        actor: String,
        // `--dry-run`: report what would happen without writing.
        #[arg(long)]
        dry_run: bool,
        // `--apply-artifacts`: also apply produced artifacts to the frontier.
        #[arg(long)]
        apply_artifacts: bool,
        // `--json`: emit machine-readable JSON output.
        #[arg(long)]
        json: bool,
    },
}
2922
// Subcommands for `vela bridge-kit`.
// NOTE: plain `//` comments (not `///`) so clap's generated --help is unchanged.
#[derive(Subcommand)]
enum BridgeKitAction {
    // `vela bridge-kit validate`: validate a bridge-kit source file.
    Validate {
        // Positional path to the file to validate.
        source: PathBuf,
        // `--json`: emit machine-readable JSON output.
        #[arg(long)]
        json: bool,
    },
}
2934
2935pub async fn run_command() {
2936 dotenvy::dotenv().ok();
2937
2938 match Cli::parse().command {
2939 Commands::Scout {
2940 folder,
2941 frontier,
2942 backend,
2943 dry_run,
2944 json,
2945 } => {
2946 cmd_scout(&folder, &frontier, backend.as_deref(), dry_run, json).await;
2947 }
2948 Commands::CompileNotes {
2949 vault,
2950 frontier,
2951 backend,
2952 max_files,
2953 max_items_per_category,
2954 dry_run,
2955 json,
2956 } => {
2957 cmd_compile_notes(
2958 &vault,
2959 &frontier,
2960 backend.as_deref(),
2961 max_files,
2962 max_items_per_category,
2963 dry_run,
2964 json,
2965 )
2966 .await;
2967 }
2968 Commands::CompileCode {
2969 root,
2970 frontier,
2971 backend,
2972 max_files,
2973 dry_run,
2974 json,
2975 } => {
2976 cmd_compile_code(
2977 &root,
2978 &frontier,
2979 backend.as_deref(),
2980 max_files,
2981 dry_run,
2982 json,
2983 )
2984 .await;
2985 }
2986 Commands::CompileData {
2987 root,
2988 frontier,
2989 backend,
2990 sample_rows,
2991 dry_run,
2992 json,
2993 } => {
2994 cmd_compile_data(
2995 &root,
2996 &frontier,
2997 backend.as_deref(),
2998 sample_rows,
2999 dry_run,
3000 json,
3001 )
3002 .await;
3003 }
3004 Commands::ReviewPending {
3005 frontier,
3006 backend,
3007 max_proposals,
3008 batch_size,
3009 dry_run,
3010 json,
3011 } => {
3012 cmd_review_pending(
3013 &frontier,
3014 backend.as_deref(),
3015 max_proposals,
3016 batch_size,
3017 dry_run,
3018 json,
3019 )
3020 .await;
3021 }
3022 Commands::FindTensions {
3023 frontier,
3024 backend,
3025 max_findings,
3026 dry_run,
3027 json,
3028 } => {
3029 cmd_find_tensions(&frontier, backend.as_deref(), max_findings, dry_run, json).await;
3030 }
3031 Commands::PlanExperiments {
3032 frontier,
3033 backend,
3034 max_findings,
3035 dry_run,
3036 json,
3037 } => {
3038 cmd_plan_experiments(&frontier, backend.as_deref(), max_findings, dry_run, json).await;
3039 }
3040 Commands::Check {
3041 source,
3042 schema,
3043 stats,
3044 conformance,
3045 conformance_dir,
3046 all,
3047 schema_only,
3048 strict,
3049 fix,
3050 json,
3051 } => cmd_check(
3052 source.as_deref(),
3053 schema,
3054 stats,
3055 conformance,
3056 &conformance_dir,
3057 all,
3058 schema_only,
3059 strict,
3060 fix,
3061 json,
3062 ),
3063 Commands::Integrity { frontier, json } => cmd_integrity(&frontier, json),
3064 Commands::Impact {
3065 frontier,
3066 finding_id,
3067 depth,
3068 json,
3069 } => cmd_impact(&frontier, &finding_id, depth, json),
3070 Commands::Discord {
3071 frontier,
3072 json,
3073 kind,
3074 } => cmd_discord(&frontier, json, kind.as_deref()),
3075 Commands::Normalize {
3076 source,
3077 out,
3078 write,
3079 dry_run,
3080 rewrite_ids,
3081 id_map,
3082 resync_provenance,
3083 json,
3084 } => cmd_normalize(
3085 &source,
3086 out.as_deref(),
3087 write,
3088 dry_run,
3089 rewrite_ids,
3090 id_map.as_deref(),
3091 resync_provenance,
3092 json,
3093 ),
3094 Commands::Proof {
3095 frontier,
3096 out,
3097 template,
3098 gold,
3099 record_proof_state,
3100 json,
3101 } => cmd_proof(
3102 &frontier,
3103 &out,
3104 &template,
3105 gold.as_deref(),
3106 record_proof_state,
3107 json,
3108 ),
3109 Commands::Repo { action } => cmd_repo(action),
3110 Commands::Serve {
3111 frontier,
3112 frontiers,
3113 backend,
3114 http,
3115 setup,
3116 check_tools,
3117 json,
3118 workbench,
3119 } => {
3120 if setup {
3121 cmd_mcp_setup(frontier.as_deref(), frontiers.as_deref());
3122 } else if check_tools {
3123 let source =
3124 serve::ProjectSource::from_args(frontier.as_deref(), frontiers.as_deref());
3125 match serve::check_tools(source) {
3126 Ok(report) => {
3127 if json {
3128 println!(
3129 "{}",
3130 serde_json::to_string_pretty(&report)
3131 .expect("failed to serialize tool check report")
3132 );
3133 } else {
3134 print_tool_check_report(&report);
3135 }
3136 }
3137 Err(e) => fail(&format!("Tool check failed: {e}")),
3138 }
3139 } else {
3140 let source =
3141 serve::ProjectSource::from_args(frontier.as_deref(), frontiers.as_deref());
3142 let resolved_port = if workbench {
3144 Some(http.unwrap_or(3848))
3145 } else {
3146 http
3147 };
3148 if let Some(port) = resolved_port {
3149 serve::run_http(source, backend.as_deref(), port, workbench).await;
3150 } else {
3151 serve::run(source, backend.as_deref()).await;
3152 }
3153 }
3154 }
3155 Commands::Status { frontier, json } => cmd_status(&frontier, json),
3156 Commands::Log {
3157 frontier,
3158 limit,
3159 kind,
3160 json,
3161 } => cmd_log(&frontier, limit, kind.as_deref(), json),
3162 Commands::Inbox {
3163 frontier,
3164 kind,
3165 limit,
3166 json,
3167 } => cmd_inbox(&frontier, kind.as_deref(), limit, json),
3168 Commands::Ask {
3169 frontier,
3170 question,
3171 json,
3172 } => cmd_ask(&frontier, &question.join(" "), json),
3173 Commands::Stats { frontier, json } => {
3174 if json {
3175 print_stats_json(&frontier);
3176 } else {
3177 cmd_stats(&frontier);
3178 }
3179 }
3180 Commands::Search {
3181 source,
3182 query,
3183 entity,
3184 r#type,
3185 all,
3186 limit,
3187 json,
3188 } => cmd_search(
3189 source.as_deref(),
3190 &query,
3191 entity.as_deref(),
3192 r#type.as_deref(),
3193 all.as_deref(),
3194 limit,
3195 json,
3196 ),
3197 Commands::Tensions {
3198 source,
3199 both_high,
3200 cross_domain,
3201 top,
3202 json,
3203 } => cmd_tensions(&source, both_high, cross_domain, top, json),
3204 Commands::Gaps { action } => cmd_gaps(action),
3205 Commands::Bridge {
3206 inputs,
3207 novelty,
3208 top,
3209 } => cmd_bridge(&inputs, novelty, top).await,
3210 Commands::Export {
3211 frontier,
3212 format,
3213 output,
3214 } => export::run(&frontier, &format, output.as_deref()),
3215 Commands::Packet { action } => cmd_packet(action),
3216 Commands::Verify { path, json } => cmd_verify(&path, json),
3217 Commands::Bench {
3218 frontier,
3219 gold,
3220 candidate,
3221 sources,
3222 threshold,
3223 report,
3224 entity_gold,
3225 link_gold,
3226 suite,
3227 suite_ready,
3228 min_f1,
3229 min_precision,
3230 min_recall,
3231 no_thresholds,
3232 json,
3233 } => {
3234 if let Some(cand) = candidate.clone() {
3239 let Some(g) = gold.clone() else {
3240 eprintln!(
3241 "{} `vela bench --candidate <…>` requires `--gold <…>`",
3242 style::err_prefix()
3243 );
3244 std::process::exit(2);
3245 };
3246 cmd_agent_bench(
3247 &g,
3248 &cand,
3249 sources.as_deref(),
3250 threshold,
3251 report.as_deref(),
3252 json,
3253 );
3254 } else {
3255 cmd_bench(BenchArgs {
3256 frontier,
3257 gold,
3258 entity_gold,
3259 link_gold,
3260 suite,
3261 suite_ready,
3262 min_f1,
3263 min_precision,
3264 min_recall,
3265 no_thresholds,
3266 json,
3267 });
3268 }
3269 }
3270 Commands::Conformance { dir } => {
3271 let _ = conformance::run(&dir);
3272 }
3273 Commands::Version => println!("vela 0.36.0"),
3274 Commands::Sign { action } => cmd_sign(action),
3275 Commands::Actor { action } => cmd_actor(action),
3276 Commands::Federation { action } => cmd_federation(action),
3277 Commands::Causal { action } => cmd_causal(action),
3278 Commands::Frontier { action } => cmd_frontier(action),
3279 Commands::Queue { action } => cmd_queue(action),
3280 Commands::Registry { action } => cmd_registry(action),
3281 Commands::Init {
3282 path,
3283 name,
3284 template,
3285 no_git,
3286 json,
3287 } => cmd_init(&path, &name, &template, !no_git, json),
3288 Commands::Quickstart {
3289 path,
3290 name,
3291 reviewer,
3292 assertion,
3293 keys_out,
3294 json,
3295 } => cmd_quickstart(
3296 &path,
3297 &name,
3298 &reviewer,
3299 assertion.as_deref(),
3300 keys_out.as_deref(),
3301 json,
3302 ),
3303 Commands::Import { frontier, into } => cmd_import(&frontier, into.as_deref()),
3304 Commands::Diff {
3305 target,
3306 frontier_b,
3307 frontier,
3308 reviewer,
3309 json,
3310 quiet,
3311 } => {
3312 if target.starts_with("vpr_") {
3317 let frontier_root = frontier
3318 .clone()
3319 .or_else(|| frontier_b.clone())
3320 .unwrap_or_else(|| std::path::PathBuf::from("."));
3321 let preview = proposals::preview_at_path(&frontier_root, &target, &reviewer)
3322 .unwrap_or_else(|e| fail_return(&e));
3323 let payload = json!({
3324 "ok": true,
3325 "command": "diff.proposal",
3326 "frontier": frontier_root.display().to_string(),
3327 "proposal_id": target,
3328 "preview": preview,
3329 });
3330 if json {
3331 println!(
3332 "{}",
3333 serde_json::to_string_pretty(&payload)
3334 .expect("failed to serialize diff preview")
3335 );
3336 } else {
3337 println!("vela diff · proposal preview");
3338 println!(" proposal: {}", target);
3339 println!(" kind: {}", preview.kind);
3340 println!(
3341 " findings: {} -> {}",
3342 preview.findings_before, preview.findings_after
3343 );
3344 println!(
3345 " artifacts: {} -> {}",
3346 preview.artifacts_before, preview.artifacts_after
3347 );
3348 println!(
3349 " events: {} -> {}",
3350 preview.events_before, preview.events_after
3351 );
3352 if !preview.changed_findings.is_empty() {
3353 println!(
3354 " findings changed: {}",
3355 preview.changed_findings.join(", ")
3356 );
3357 }
3358 }
3359 } else {
3360 let frontier_a = std::path::PathBuf::from(&target);
3361 let b = frontier_b.unwrap_or_else(|| {
3362 fail_return(
3363 "diff: two-frontier mode needs a second positional path; for proposal preview pass a `vpr_*` id",
3364 )
3365 });
3366 diff::run(&frontier_a, &b, json, quiet);
3367 }
3368 }
3369 Commands::Proposals { action } => cmd_proposals(action),
3370 Commands::ArtifactToState {
3371 frontier,
3372 packet,
3373 actor,
3374 apply_artifacts,
3375 json,
3376 } => cmd_artifact_to_state(&frontier, &packet, &actor, apply_artifacts, json),
3377 Commands::BridgeKit { action } => cmd_bridge_kit(action),
3378 Commands::SourceAdapter { action } => cmd_source_adapter(action).await,
3379 Commands::RuntimeAdapter { action } => cmd_runtime_adapter(action),
3380 Commands::Link { action } => cmd_link(action),
3381 Commands::Workbench {
3382 path,
3383 port,
3384 no_open,
3385 } => {
3386 if let Err(e) = crate::workbench::run(path, port, !no_open).await {
3387 fail(&e);
3388 }
3389 }
3390 Commands::Bridges { action } => cmd_bridges(action),
3391 Commands::Entity { action } => cmd_entity(action),
3392 Commands::Finding { command } => match command {
3393 FindingCommands::Add {
3394 frontier,
3395 assertion,
3396 r#type,
3397 source,
3398 source_type,
3399 author,
3400 confidence,
3401 evidence_type,
3402 entities,
3403 entities_reviewed,
3404 evidence_span,
3405 gap,
3406 negative_space,
3407 doi,
3408 pmid,
3409 year,
3410 journal,
3411 url,
3412 source_authors,
3413 conditions_text,
3414 species,
3415 in_vivo,
3416 in_vitro,
3417 human_data,
3418 clinical_trial,
3419 json,
3420 apply,
3421 } => {
3422 validate_enum_arg("--type", &r#type, bundle::VALID_ASSERTION_TYPES);
3423 validate_enum_arg(
3424 "--evidence-type",
3425 &evidence_type,
3426 bundle::VALID_EVIDENCE_TYPES,
3427 );
3428 validate_enum_arg(
3429 "--source-type",
3430 &source_type,
3431 bundle::VALID_PROVENANCE_SOURCE_TYPES,
3432 );
3433 let parsed_entities = parse_entities(&entities);
3434 let parsed_evidence_spans = parse_evidence_spans(&evidence_span);
3435 for (name, etype) in &parsed_entities {
3436 if !bundle::VALID_ENTITY_TYPES.contains(&etype.as_str()) {
3437 fail(&format!(
3438 "invalid entity type '{}' for '{}'. Valid: {}",
3439 etype,
3440 name,
3441 bundle::VALID_ENTITY_TYPES.join(", "),
3442 ));
3443 }
3444 }
3445 let parsed_source_authors = source_authors
3446 .map(|s| {
3447 s.split(';')
3448 .map(|a| a.trim().to_string())
3449 .filter(|a| !a.is_empty())
3450 .collect()
3451 })
3452 .unwrap_or_default();
3453 let parsed_species = species
3454 .map(|s| {
3455 s.split(';')
3456 .map(|a| a.trim().to_string())
3457 .filter(|a| !a.is_empty())
3458 .collect()
3459 })
3460 .unwrap_or_default();
3461 let report = state::add_finding(
3462 &frontier,
3463 state::FindingDraftOptions {
3464 text: assertion,
3465 assertion_type: r#type,
3466 source,
3467 source_type,
3468 author,
3469 confidence,
3470 evidence_type,
3471 entities: parsed_entities,
3472 doi,
3473 pmid,
3474 year,
3475 journal,
3476 url,
3477 source_authors: parsed_source_authors,
3478 conditions_text,
3479 species: parsed_species,
3480 in_vivo,
3481 in_vitro,
3482 human_data,
3483 clinical_trial,
3484 entities_reviewed,
3485 evidence_spans: parsed_evidence_spans,
3486 gap,
3487 negative_space,
3488 },
3489 apply,
3490 )
3491 .unwrap_or_else(|e| fail_return(&e));
3492 print_state_report(&report, json);
3493 }
3494 FindingCommands::Supersede {
3495 frontier,
3496 old_id,
3497 assertion,
3498 r#type,
3499 source,
3500 source_type,
3501 author,
3502 reason,
3503 confidence,
3504 evidence_type,
3505 entities,
3506 doi,
3507 pmid,
3508 year,
3509 journal,
3510 url,
3511 source_authors,
3512 conditions_text,
3513 species,
3514 in_vivo,
3515 in_vitro,
3516 human_data,
3517 clinical_trial,
3518 json,
3519 apply,
3520 } => {
3521 validate_enum_arg("--type", &r#type, bundle::VALID_ASSERTION_TYPES);
3522 validate_enum_arg(
3523 "--evidence-type",
3524 &evidence_type,
3525 bundle::VALID_EVIDENCE_TYPES,
3526 );
3527 validate_enum_arg(
3528 "--source-type",
3529 &source_type,
3530 bundle::VALID_PROVENANCE_SOURCE_TYPES,
3531 );
3532 let parsed_entities = parse_entities(&entities);
3533 for (name, etype) in &parsed_entities {
3534 if !bundle::VALID_ENTITY_TYPES.contains(&etype.as_str()) {
3535 fail(&format!(
3536 "invalid entity type '{}' for '{}'. Valid: {}",
3537 etype,
3538 name,
3539 bundle::VALID_ENTITY_TYPES.join(", "),
3540 ));
3541 }
3542 }
3543 let parsed_source_authors = source_authors
3544 .map(|s| {
3545 s.split(';')
3546 .map(|a| a.trim().to_string())
3547 .filter(|a| !a.is_empty())
3548 .collect()
3549 })
3550 .unwrap_or_default();
3551 let parsed_species = species
3552 .map(|s| {
3553 s.split(';')
3554 .map(|a| a.trim().to_string())
3555 .filter(|a| !a.is_empty())
3556 .collect()
3557 })
3558 .unwrap_or_default();
3559 let report = state::supersede_finding(
3560 &frontier,
3561 &old_id,
3562 &reason,
3563 state::FindingDraftOptions {
3564 text: assertion,
3565 assertion_type: r#type,
3566 source,
3567 source_type,
3568 author,
3569 confidence,
3570 evidence_type,
3571 entities: parsed_entities,
3572 doi,
3573 pmid,
3574 year,
3575 journal,
3576 url,
3577 source_authors: parsed_source_authors,
3578 conditions_text,
3579 species: parsed_species,
3580 in_vivo,
3581 in_vitro,
3582 human_data,
3583 clinical_trial,
3584 entities_reviewed: false,
3585 evidence_spans: Vec::new(),
3586 gap: false,
3587 negative_space: false,
3588 },
3589 apply,
3590 )
3591 .unwrap_or_else(|e| fail_return(&e));
3592 print_state_report(&report, json);
3593 }
3594 FindingCommands::CausalSet {
3595 frontier,
3596 finding_id,
3597 claim,
3598 grade,
3599 actor,
3600 reason,
3601 json,
3602 } => {
3603 if !bundle::VALID_CAUSAL_CLAIMS.contains(&claim.as_str()) {
3604 fail(&format!(
3605 "invalid --claim '{claim}'; valid: {:?}",
3606 bundle::VALID_CAUSAL_CLAIMS
3607 ));
3608 }
3609 if let Some(g) = grade.as_deref()
3610 && !bundle::VALID_CAUSAL_EVIDENCE_GRADES.contains(&g)
3611 {
3612 fail(&format!(
3613 "invalid --grade '{g}'; valid: {:?}",
3614 bundle::VALID_CAUSAL_EVIDENCE_GRADES
3615 ));
3616 }
3617 let report = state::set_causal(
3618 &frontier,
3619 &finding_id,
3620 &claim,
3621 grade.as_deref(),
3622 &actor,
3623 &reason,
3624 )
3625 .unwrap_or_else(|e| fail_return(&e));
3626 print_state_report(&report, json);
3627 }
3628 },
3629 Commands::Review {
3630 frontier,
3631 finding_id,
3632 status,
3633 reason,
3634 reviewer,
3635 apply,
3636 json,
3637 } => {
3638 let status = status.unwrap_or_else(|| fail_return("--status is required for review"));
3639 let reason = reason.unwrap_or_else(|| fail_return("--reason is required for review"));
3640 let report = state::review_finding(
3641 &frontier,
3642 &finding_id,
3643 state::ReviewOptions {
3644 status,
3645 reason,
3646 reviewer,
3647 },
3648 apply,
3649 )
3650 .unwrap_or_else(|e| fail_return(&e));
3651 print_state_report(&report, json);
3652 }
3653 Commands::Note {
3654 frontier,
3655 finding_id,
3656 text,
3657 author,
3658 apply,
3659 json,
3660 } => {
3661 let report = state::add_note(&frontier, &finding_id, &text, &author, apply)
3662 .unwrap_or_else(|e| fail_return(&e));
3663 print_state_report(&report, json);
3664 }
3665 Commands::Caveat {
3666 frontier,
3667 finding_id,
3668 text,
3669 author,
3670 apply,
3671 json,
3672 } => {
3673 let report = state::caveat_finding(&frontier, &finding_id, &text, &author, apply)
3674 .unwrap_or_else(|e| fail_return(&e));
3675 print_state_report(&report, json);
3676 }
3677 Commands::Revise {
3678 frontier,
3679 finding_id,
3680 confidence,
3681 reason,
3682 reviewer,
3683 apply,
3684 json,
3685 } => {
3686 let report = state::revise_confidence(
3687 &frontier,
3688 &finding_id,
3689 state::ReviseOptions {
3690 confidence,
3691 reason,
3692 reviewer,
3693 },
3694 apply,
3695 )
3696 .unwrap_or_else(|e| fail_return(&e));
3697 print_state_report(&report, json);
3698 }
3699 Commands::Reject {
3700 frontier,
3701 finding_id,
3702 reason,
3703 reviewer,
3704 apply,
3705 json,
3706 } => {
3707 let report = state::reject_finding(&frontier, &finding_id, &reviewer, &reason, apply)
3708 .unwrap_or_else(|e| fail_return(&e));
3709 print_state_report(&report, json);
3710 }
3711 Commands::History {
3712 frontier,
3713 finding_id,
3714 json,
3715 as_of,
3716 } => {
3717 let payload = state::history_as_of(&frontier, &finding_id, as_of.as_deref())
3718 .unwrap_or_else(|e| fail_return(&e));
3719 if json {
3720 println!(
3721 "{}",
3722 serde_json::to_string_pretty(&payload)
3723 .expect("failed to serialize history response")
3724 );
3725 } else {
3726 print_history(&payload);
3727 }
3728 }
3729 Commands::ImportEvents { source, into, json } => {
3730 let report =
3731 review::import_review_events(&source, &into).unwrap_or_else(|e| fail_return(&e));
3732 if json {
3733 println!(
3734 "{}",
3735 serde_json::to_string_pretty(&json!({
3736 "ok": true,
3737 "command": "import-events",
3738 "source": report.source,
3739 "target": into.display().to_string(),
3740 "summary": {
3741 "imported": report.imported,
3742 "new": report.new,
3743 "duplicate": report.duplicate,
3744 "canonical_events_imported": report.events_imported,
3745 "canonical_events_new": report.events_new,
3746 "canonical_events_duplicate": report.events_duplicate,
3747 }
3748 }))
3749 .expect("failed to serialize import-events response")
3750 );
3751 } else {
3752 println!("{report}");
3753 }
3754 }
3755 Commands::Retract {
3756 source,
3757 finding_id,
3758 reason,
3759 reviewer,
3760 apply,
3761 json,
3762 } => {
3763 let report = state::retract_finding(&source, &finding_id, &reviewer, &reason, apply)
3764 .unwrap_or_else(|e| fail_return(&e));
3765 print_state_report(&report, json);
3766 }
3767 Commands::LocatorRepair {
3768 frontier,
3769 atom_id,
3770 locator,
3771 reviewer,
3772 reason,
3773 apply,
3774 json,
3775 } => {
3776 cmd_locator_repair(
3777 &frontier,
3778 &atom_id,
3779 locator.as_deref(),
3780 &reviewer,
3781 &reason,
3782 apply,
3783 json,
3784 );
3785 }
3786 Commands::SourceFetch {
3787 identifier,
3788 cache,
3789 out,
3790 refresh,
3791 json,
3792 } => {
3793 cmd_source_fetch(&identifier, cache.as_deref(), out.as_deref(), refresh, json).await;
3794 }
3795 Commands::SpanRepair {
3796 frontier,
3797 finding_id,
3798 section,
3799 text,
3800 reviewer,
3801 reason,
3802 apply,
3803 json,
3804 } => {
3805 cmd_span_repair(
3806 &frontier,
3807 &finding_id,
3808 §ion,
3809 &text,
3810 &reviewer,
3811 &reason,
3812 apply,
3813 json,
3814 );
3815 }
3816 Commands::EntityAdd {
3817 frontier,
3818 finding_id,
3819 entity,
3820 entity_type,
3821 reviewer,
3822 reason,
3823 apply,
3824 json,
3825 } => {
3826 let report = state::add_finding_entity(
3827 &frontier,
3828 &finding_id,
3829 &entity,
3830 &entity_type,
3831 &reviewer,
3832 &reason,
3833 apply,
3834 )
3835 .unwrap_or_else(|e| fail_return(&e));
3836 print_state_report(&report, json);
3837 }
3838 Commands::EntityResolve {
3839 frontier,
3840 finding_id,
3841 entity,
3842 source,
3843 id,
3844 confidence,
3845 matched_name,
3846 resolution_method,
3847 reviewer,
3848 reason,
3849 apply,
3850 json,
3851 } => {
3852 cmd_entity_resolve(
3853 &frontier,
3854 &finding_id,
3855 &entity,
3856 &source,
3857 &id,
3858 confidence,
3859 matched_name.as_deref(),
3860 &resolution_method,
3861 &reviewer,
3862 &reason,
3863 apply,
3864 json,
3865 );
3866 }
3867 Commands::Propagate {
3868 frontier,
3869 retract,
3870 reduce_confidence,
3871 to,
3872 output,
3873 } => cmd_propagate(&frontier, retract, reduce_confidence, to, output.as_deref()),
3874 Commands::Replicate {
3875 frontier,
3876 target,
3877 outcome,
3878 by,
3879 conditions,
3880 source_title,
3881 doi,
3882 pmid,
3883 sample_size,
3884 note,
3885 previous_attempt,
3886 no_cascade,
3887 json,
3888 } => cmd_replicate(
3889 &frontier,
3890 &target,
3891 &outcome,
3892 &by,
3893 &conditions,
3894 &source_title,
3895 doi.as_deref(),
3896 pmid.as_deref(),
3897 sample_size.as_deref(),
3898 ¬e,
3899 previous_attempt.as_deref(),
3900 no_cascade,
3901 json,
3902 ),
3903 Commands::Replications {
3904 frontier,
3905 target,
3906 json,
3907 } => cmd_replications(&frontier, target.as_deref(), json),
3908 Commands::DatasetAdd {
3909 frontier,
3910 name,
3911 version,
3912 content_hash,
3913 url,
3914 license,
3915 source_title,
3916 doi,
3917 row_count,
3918 json,
3919 } => cmd_dataset_add(
3920 &frontier,
3921 &name,
3922 version.as_deref(),
3923 &content_hash,
3924 url.as_deref(),
3925 license.as_deref(),
3926 &source_title,
3927 doi.as_deref(),
3928 row_count,
3929 json,
3930 ),
3931 Commands::Datasets { frontier, json } => cmd_datasets(&frontier, json),
3932 Commands::CodeAdd {
3933 frontier,
3934 language,
3935 repo_url,
3936 commit,
3937 path,
3938 content_hash,
3939 line_start,
3940 line_end,
3941 entry_point,
3942 json,
3943 } => cmd_code_add(
3944 &frontier,
3945 &language,
3946 repo_url.as_deref(),
3947 commit.as_deref(),
3948 &path,
3949 &content_hash,
3950 line_start,
3951 line_end,
3952 entry_point.as_deref(),
3953 json,
3954 ),
3955 Commands::CodeArtifacts { frontier, json } => cmd_code_artifacts(&frontier, json),
3956 Commands::ArtifactAdd {
3957 frontier,
3958 kind,
3959 name,
3960 file,
3961 url,
3962 content_hash,
3963 media_type,
3964 license,
3965 source_title,
3966 source_url,
3967 doi,
3968 target,
3969 metadata,
3970 access_tier,
3971 deposited_by,
3972 reason,
3973 json,
3974 } => cmd_artifact_add(
3975 &frontier,
3976 &kind,
3977 &name,
3978 file.as_deref(),
3979 url.as_deref(),
3980 content_hash.as_deref(),
3981 media_type.as_deref(),
3982 license.as_deref(),
3983 source_title.as_deref(),
3984 source_url.as_deref(),
3985 doi.as_deref(),
3986 target,
3987 metadata,
3988 &access_tier,
3989 &deposited_by,
3990 &reason,
3991 json,
3992 ),
3993 Commands::Artifacts {
3994 frontier,
3995 target,
3996 json,
3997 } => cmd_artifacts(&frontier, target.as_deref(), json),
3998 Commands::ArtifactAudit { frontier, json } => cmd_artifact_audit(&frontier, json),
3999 Commands::DecisionBrief { frontier, json } => cmd_decision_brief(&frontier, json),
4000 Commands::TrialSummary { frontier, json } => cmd_trial_summary(&frontier, json),
4001 Commands::SourceVerification { frontier, json } => cmd_source_verification(&frontier, json),
4002 Commands::SourceIngestPlan { frontier, json } => cmd_source_ingest_plan(&frontier, json),
4003 Commands::ClinicalTrialImport {
4004 frontier,
4005 nct_id,
4006 input_json,
4007 target,
4008 deposited_by,
4009 reason,
4010 license,
4011 json,
4012 } => {
4013 cmd_clinical_trial_import(
4014 &frontier,
4015 &nct_id,
4016 input_json.as_deref(),
4017 target,
4018 &deposited_by,
4019 &reason,
4020 &license,
4021 json,
4022 )
4023 .await
4024 }
4025 Commands::NegativeResultAdd {
4026 frontier,
4027 kind,
4028 deposited_by,
4029 reason,
4030 conditions_text,
4031 notes,
4032 target,
4033 endpoint,
4034 intervention,
4035 comparator,
4036 population,
4037 n_enrolled,
4038 power,
4039 ci_lower,
4040 ci_upper,
4041 effect_size_threshold,
4042 registry_id,
4043 reagent,
4044 observation,
4045 attempts,
4046 source_title,
4047 doi,
4048 url,
4049 year,
4050 json,
4051 } => cmd_negative_result_add(
4052 &frontier,
4053 &kind,
4054 &deposited_by,
4055 &reason,
4056 &conditions_text,
4057 ¬es,
4058 target,
4059 endpoint.as_deref(),
4060 intervention.as_deref(),
4061 comparator.as_deref(),
4062 population.as_deref(),
4063 n_enrolled,
4064 power,
4065 ci_lower,
4066 ci_upper,
4067 effect_size_threshold,
4068 registry_id.as_deref(),
4069 reagent.as_deref(),
4070 observation.as_deref(),
4071 attempts,
4072 &source_title,
4073 doi.as_deref(),
4074 url.as_deref(),
4075 year,
4076 json,
4077 ),
4078 Commands::NegativeResults {
4079 frontier,
4080 target,
4081 json,
4082 } => cmd_negative_results(&frontier, target.as_deref(), json),
4083 Commands::TrajectoryCreate {
4084 frontier,
4085 deposited_by,
4086 reason,
4087 target,
4088 notes,
4089 json,
4090 } => cmd_trajectory_create(&frontier, &deposited_by, &reason, target, ¬es, json),
4091 Commands::TrajectoryStep {
4092 frontier,
4093 trajectory_id,
4094 kind,
4095 description,
4096 actor,
4097 reason,
4098 reference,
4099 json,
4100 } => cmd_trajectory_step(
4101 &frontier,
4102 &trajectory_id,
4103 &kind,
4104 &description,
4105 &actor,
4106 &reason,
4107 reference,
4108 json,
4109 ),
4110 Commands::Trajectories {
4111 frontier,
4112 target,
4113 json,
4114 } => cmd_trajectories(&frontier, target.as_deref(), json),
4115 Commands::TierSet {
4116 frontier,
4117 object_type,
4118 object_id,
4119 tier,
4120 actor,
4121 reason,
4122 json,
4123 } => cmd_tier_set(
4124 &frontier,
4125 &object_type,
4126 &object_id,
4127 &tier,
4128 &actor,
4129 &reason,
4130 json,
4131 ),
4132 Commands::Predict {
4133 frontier,
4134 by,
4135 claim,
4136 criterion,
4137 resolves_by,
4138 confidence,
4139 target,
4140 outcome,
4141 conditions,
4142 json,
4143 } => cmd_predict(
4144 &frontier,
4145 &by,
4146 &claim,
4147 &criterion,
4148 resolves_by.as_deref(),
4149 confidence,
4150 &target,
4151 &outcome,
4152 &conditions,
4153 json,
4154 ),
4155 Commands::Resolve {
4156 frontier,
4157 prediction,
4158 outcome,
4159 matched,
4160 by,
4161 confidence,
4162 source_title,
4163 doi,
4164 json,
4165 } => cmd_resolve(
4166 &frontier,
4167 &prediction,
4168 &outcome,
4169 matched,
4170 &by,
4171 confidence,
4172 &source_title,
4173 doi.as_deref(),
4174 json,
4175 ),
4176 Commands::Predictions {
4177 frontier,
4178 by,
4179 open,
4180 json,
4181 } => cmd_predictions(&frontier, by.as_deref(), open, json),
4182 Commands::Calibration {
4183 frontier,
4184 actor,
4185 json,
4186 } => cmd_calibration(&frontier, actor.as_deref(), json),
4187 Commands::PredictionsExpire {
4188 frontier,
4189 now,
4190 dry_run,
4191 json,
4192 } => cmd_predictions_expire(&frontier, now.as_deref(), dry_run, json),
4193 Commands::Consensus {
4194 frontier,
4195 target,
4196 weighting,
4197 causal_claim,
4198 causal_grade_min,
4199 json,
4200 } => cmd_consensus(
4201 &frontier,
4202 &target,
4203 &weighting,
4204 causal_claim.as_deref(),
4205 causal_grade_min.as_deref(),
4206 json,
4207 ),
4208
4209 Commands::Ingest {
4212 path,
4213 frontier,
4214 backend,
4215 actor,
4216 dry_run,
4217 json,
4218 } => {
4219 cmd_ingest(
4220 &path,
4221 &frontier,
4222 backend.as_deref(),
4223 actor.as_deref(),
4224 dry_run,
4225 json,
4226 )
4227 .await
4228 }
4229
4230 Commands::Propose {
4231 frontier,
4232 finding_id,
4233 status,
4234 reason,
4235 reviewer,
4236 apply,
4237 json,
4238 } => {
4239 let options = state::ReviewOptions {
4242 status: status.clone(),
4243 reason: reason.clone(),
4244 reviewer: reviewer.clone(),
4245 };
4246 let report = state::review_finding(&frontier, &finding_id, options, apply)
4247 .unwrap_or_else(|e| fail_return(&e));
4248 print_state_report(&report, json);
4249 }
4250
4251 Commands::Accept {
4252 frontier,
4253 proposal_id,
4254 reviewer,
4255 reason,
4256 json,
4257 } => {
4258 let event_id = proposals::accept_at_path(&frontier, &proposal_id, &reviewer, &reason)
4260 .unwrap_or_else(|e| fail_return(&e));
4261 let payload = json!({
4262 "ok": true,
4263 "command": "accept",
4264 "frontier": frontier.display().to_string(),
4265 "proposal_id": proposal_id,
4266 "reviewer": reviewer,
4267 "applied_event_id": event_id,
4268 });
4269 if json {
4270 println!(
4271 "{}",
4272 serde_json::to_string_pretty(&payload)
4273 .expect("failed to serialize accept response")
4274 );
4275 } else {
4276 println!(
4277 "{} accepted and applied proposal {}",
4278 style::ok("ok"),
4279 proposal_id
4280 );
4281 println!(" event: {}", event_id);
4282 }
4283 }
4284
4285 Commands::Attest {
4286 frontier,
4287 event,
4288 attester,
4289 scope_note,
4290 proof_id,
4291 signature,
4292 key,
4293 json,
4294 } => {
4295 if let Some(target_event_id) = event {
4299 let attester_id = attester.unwrap_or_else(|| {
4300 fail_return("attest: --attester is required in per-event mode")
4301 });
4302 let scope = scope_note.unwrap_or_else(|| {
4303 fail_return("attest: --scope-note is required in per-event mode")
4304 });
4305 let attestation_event_id = state::record_attestation(
4306 &frontier,
4307 &target_event_id,
4308 &attester_id,
4309 &scope,
4310 proof_id.as_deref(),
4311 signature.as_deref(),
4312 )
4313 .unwrap_or_else(|e| fail_return(&e));
4314 if json {
4315 let payload = json!({
4316 "ok": true,
4317 "command": "attest.event",
4318 "frontier": frontier.display().to_string(),
4319 "target_event_id": target_event_id,
4320 "attestation_event_id": attestation_event_id,
4321 "attester_id": attester_id,
4322 });
4323 println!(
4324 "{}",
4325 serde_json::to_string_pretty(&payload)
4326 .expect("failed to serialize attest.event response")
4327 );
4328 } else {
4329 println!(
4330 "{} attested {} by {} ({})",
4331 style::ok("ok"),
4332 target_event_id,
4333 attester_id,
4334 attestation_event_id
4335 );
4336 }
4337 return;
4338 }
4339 let key_path = key.unwrap_or_else(|| {
4341 fail_return(
4342 "attest: --key is required in frontier-wide mode (or pass --event for per-event mode)",
4343 )
4344 });
4345 let count =
4346 sign::sign_frontier(&frontier, &key_path).unwrap_or_else(|e| fail_return(&e));
4347 let payload = json!({
4348 "ok": true,
4349 "command": "attest",
4350 "frontier": frontier.display().to_string(),
4351 "private_key": key_path.display().to_string(),
4352 "signed": count,
4353 });
4354 if json {
4355 println!(
4356 "{}",
4357 serde_json::to_string_pretty(&payload)
4358 .expect("failed to serialize attest response")
4359 );
4360 } else {
4361 println!(
4362 "{} {count} findings in {}",
4363 style::ok("attested"),
4364 frontier.display()
4365 );
4366 }
4367 }
4368
4369 Commands::Lineage {
4370 frontier,
4371 finding_id,
4372 as_of,
4373 json,
4374 } => {
4375 let payload = state::history_as_of(&frontier, &finding_id, as_of.as_deref())
4377 .unwrap_or_else(|e| fail_return(&e));
4378 if json {
4379 println!(
4380 "{}",
4381 serde_json::to_string_pretty(&payload)
4382 .expect("failed to serialize lineage response")
4383 );
4384 } else {
4385 print_history(&payload);
4386 }
4387 }
4388
4389 Commands::Carina { action } => cmd_carina(action),
4390
4391 Commands::Atlas { action } => cmd_atlas(action).await,
4392
4393 Commands::Constellation { action } => cmd_constellation(action).await,
4394 }
4395}
4396
4397async fn cmd_atlas(action: AtlasAction) {
4402 match action {
4403 AtlasAction::Init {
4404 name,
4405 frontiers,
4406 domain,
4407 scope_note,
4408 atlases_root,
4409 json,
4410 } => match ATLAS_INIT_HANDLER.get() {
4411 Some(handler) => {
4412 handler(atlases_root, name, domain, scope_note, frontiers, json).await;
4413 }
4414 None => fail("vela atlas init: handler not registered (built without vela-atlas)"),
4415 },
4416 AtlasAction::Materialize {
4417 name,
4418 atlases_root,
4419 json,
4420 } => match ATLAS_MATERIALIZE_HANDLER.get() {
4421 Some(handler) => handler(atlases_root, name, json).await,
4422 None => fail("vela atlas materialize: handler not registered"),
4423 },
4424 AtlasAction::Serve {
4425 name,
4426 atlases_root,
4427 port,
4428 no_open,
4429 } => {
4430 match ATLAS_SERVE_HANDLER.get() {
4434 Some(handler) => handler(atlases_root, name, port, !no_open).await,
4435 None => fail("vela atlas serve: handler not registered"),
4436 }
4437 }
4438 AtlasAction::Update {
4439 name,
4440 add_frontier,
4441 remove_vfr_id,
4442 atlases_root,
4443 json,
4444 } => match ATLAS_UPDATE_HANDLER.get() {
4445 Some(handler) => {
4446 handler(atlases_root, name, add_frontier, remove_vfr_id, json).await;
4447 }
4448 None => fail("vela atlas update: handler not registered"),
4449 },
4450 }
4451}
4452
4453async fn cmd_constellation(action: ConstellationAction) {
4457 match action {
4458 ConstellationAction::Init {
4459 name,
4460 atlases,
4461 scope_note,
4462 constellations_root,
4463 json,
4464 } => match CONSTELLATION_INIT_HANDLER.get() {
4465 Some(handler) => {
4466 handler(constellations_root, name, scope_note, atlases, json).await;
4467 }
4468 None => fail(
4469 "vela constellation init: handler not registered (built without vela-constellation)",
4470 ),
4471 },
4472 ConstellationAction::Materialize {
4473 name,
4474 constellations_root,
4475 json,
4476 } => match CONSTELLATION_MATERIALIZE_HANDLER.get() {
4477 Some(handler) => handler(constellations_root, name, json).await,
4478 None => fail("vela constellation materialize: handler not registered"),
4479 },
4480 ConstellationAction::Serve {
4481 name,
4482 constellations_root,
4483 port,
4484 no_open,
4485 } => match CONSTELLATION_SERVE_HANDLER.get() {
4486 Some(handler) => handler(constellations_root, name, port, !no_open).await,
4487 None => fail("vela constellation serve: handler not registered"),
4488 },
4489 }
4490}
4491
4492fn cmd_carina(action: CarinaAction) {
4495 match action {
4496 CarinaAction::List { json } => {
4497 if json {
4498 println!(
4499 "{}",
4500 serde_json::to_string_pretty(&json!({
4501 "ok": true,
4502 "command": "carina.list",
4503 "primitives": carina_validate::PRIMITIVE_NAMES,
4504 }))
4505 .expect("failed to serialize carina.list")
4506 );
4507 } else {
4508 println!("Carina primitives bundled with this build:");
4509 for name in carina_validate::PRIMITIVE_NAMES {
4510 println!(" · {name}");
4511 }
4512 }
4513 }
4514 CarinaAction::Schema { primitive } => match carina_validate::schema_text(&primitive) {
4515 Some(text) => print!("{text}"),
4516 None => fail(&format!("carina: unknown primitive '{primitive}'")),
4517 },
4518 CarinaAction::Validate {
4519 path,
4520 primitive,
4521 json,
4522 } => {
4523 let text = std::fs::read_to_string(&path)
4524 .unwrap_or_else(|e| fail_return(&format!("read {}: {e}", path.display())));
4525 let value: Value = serde_json::from_str(&text)
4526 .unwrap_or_else(|e| fail_return(&format!("parse {}: {e}", path.display())));
4527 type CarinaValidateOutcome = Result<Option<&'static str>, Vec<String>>;
4533 let mut report: Vec<(String, CarinaValidateOutcome)> = Vec::new();
4534 if value.get("primitives").and_then(Value::as_object).is_some() && primitive.is_none() {
4535 let primitives = value.get("primitives").and_then(Value::as_object).unwrap();
4536 for (key, child) in primitives {
4537 let outcome = carina_validate::validate(key, child)
4538 .map(|()| carina_validate::detect_primitive(child));
4539 report.push((key.clone(), outcome));
4540 }
4541 } else {
4542 let outcome = match primitive.as_deref() {
4543 Some(name) => carina_validate::validate(name, &value).map(|()| {
4544 carina_validate::PRIMITIVE_NAMES
4545 .iter()
4546 .copied()
4547 .find(|p| *p == name)
4548 }),
4549 None => carina_validate::validate_auto(&value).map(Some),
4550 };
4551 let label = primitive.clone().unwrap_or_else(|| "<auto>".to_string());
4552 report.push((label, outcome));
4553 }
4554
4555 let total = report.len();
4556 let pass = report.iter().filter(|(_, r)| r.is_ok()).count();
4557 let fail = total - pass;
4558
4559 if json {
4560 let entries: Vec<Value> = report
4561 .iter()
4562 .map(|(label, r)| match r {
4563 Ok(name) => json!({
4564 "key": label,
4565 "primitive": name,
4566 "ok": true,
4567 }),
4568 Err(errs) => json!({
4569 "key": label,
4570 "ok": false,
4571 "errors": errs,
4572 }),
4573 })
4574 .collect();
4575 println!(
4576 "{}",
4577 serde_json::to_string_pretty(&json!({
4578 "ok": fail == 0,
4579 "command": "carina.validate",
4580 "file": path.display().to_string(),
4581 "total": total,
4582 "passed": pass,
4583 "failed": fail,
4584 "entries": entries,
4585 }))
4586 .expect("failed to serialize carina.validate")
4587 );
4588 } else {
4589 for (label, r) in &report {
4590 match r {
4591 Ok(Some(name)) => println!(" {} {label} (as {name})", style::ok("ok")),
4592 Ok(None) => println!(" {} {label}", style::ok("ok")),
4593 Err(errs) => {
4594 println!(" {} {label}", style::lost("fail"));
4595 for e in errs {
4596 println!(" {e}");
4597 }
4598 }
4599 }
4600 }
4601 println!();
4602 if fail == 0 {
4603 println!("{} {pass}/{total} valid", style::ok("carina.validate"));
4604 } else {
4605 println!(
4606 "{} {pass}/{total} valid · {fail} failed",
4607 style::lost("carina.validate")
4608 );
4609 }
4610 }
4611
4612 if fail > 0 {
4613 std::process::exit(1);
4614 }
4615 }
4616 }
4617}
4618
4619fn cmd_consensus(
4622 frontier: &Path,
4623 target: &str,
4624 weighting_str: &str,
4625 causal_claim: Option<&str>,
4626 causal_grade_min: Option<&str>,
4627 json: bool,
4628) {
4629 use crate::bundle::{CausalClaim, CausalEvidenceGrade};
4630
4631 if !target.starts_with("vf_") {
4632 fail(&format!("target `{target}` is not a vf_ finding id"));
4633 }
4634 let scheme =
4635 crate::aggregate::WeightingScheme::parse(weighting_str).unwrap_or_else(|e| fail_return(&e));
4636
4637 let parsed_claim = match causal_claim {
4638 None => None,
4639 Some("correlation") => Some(CausalClaim::Correlation),
4640 Some("mediation") => Some(CausalClaim::Mediation),
4641 Some("intervention") => Some(CausalClaim::Intervention),
4642 Some(other) => fail_return(&format!(
4643 "invalid --causal-claim '{other}'; valid: correlation | mediation | intervention"
4644 )),
4645 };
4646 let parsed_grade = match causal_grade_min {
4647 None => None,
4648 Some("theoretical") => Some(CausalEvidenceGrade::Theoretical),
4649 Some("observational") => Some(CausalEvidenceGrade::Observational),
4650 Some("quasi_experimental") => Some(CausalEvidenceGrade::QuasiExperimental),
4651 Some("rct") => Some(CausalEvidenceGrade::Rct),
4652 Some(other) => fail_return(&format!(
4653 "invalid --causal-grade-min '{other}'; valid: theoretical | observational | quasi_experimental | rct"
4654 )),
4655 };
4656 let filter = crate::aggregate::AggregateFilter {
4657 causal_claim: parsed_claim,
4658 causal_grade_min: parsed_grade,
4659 };
4660 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
4661
4662 let result = crate::aggregate::consensus_for_with_filter(&project, target, scheme, &filter)
4663 .unwrap_or_else(|| fail_return(&format!("target `{target}` not in frontier")));
4664
4665 if json {
4666 println!(
4667 "{}",
4668 serde_json::to_string_pretty(&result).expect("serialize consensus")
4669 );
4670 return;
4671 }
4672
4673 println!();
4674 println!(
4675 " {}",
4676 format!(
4677 "VELA · CONSENSUS · {} ({})",
4678 result.target, result.weighting
4679 )
4680 .to_uppercase()
4681 .dimmed()
4682 );
4683 println!(" {}", style::tick_row(60));
4684 println!(
4685 " target: {}",
4686 truncate(&result.target_assertion, 80)
4687 );
4688 println!(" similar findings: {}", result.n_findings);
4689 println!(
4690 " consensus: {:.3} ({:.3} – {:.3} 95% credible)",
4691 result.consensus_confidence, result.credible_interval_lo, result.credible_interval_hi
4692 );
4693 println!();
4694 println!(" constituents (sorted by weight):");
4695 let mut sorted = result.constituents.clone();
4696 sorted.sort_by(|a, b| {
4697 b.weight
4698 .partial_cmp(&a.weight)
4699 .unwrap_or(std::cmp::Ordering::Equal)
4700 });
4701 for c in sorted.iter().take(10) {
4702 let repls = if c.n_replications > 0 {
4703 format!(" ({}r {}f)", c.n_replicated, c.n_failed_replications)
4704 } else {
4705 String::new()
4706 };
4707 println!(
4708 " · w={:.2} raw={:.2} adj={:.2}{}",
4709 c.weight, c.raw_score, c.adjusted_score, repls
4710 );
4711 println!(" {}", truncate(&c.assertion_text, 88));
4712 }
4713 if result.constituents.len() > 10 {
4714 println!(" ... ({} more)", result.constituents.len() - 10);
4715 }
4716}
4717
4718fn parse_expected_outcome(s: &str) -> Result<crate::bundle::ExpectedOutcome, String> {
4724 let trimmed = s.trim();
4725 if trimmed.eq_ignore_ascii_case("affirmed") {
4726 return Ok(crate::bundle::ExpectedOutcome::Affirmed);
4727 }
4728 if trimmed.eq_ignore_ascii_case("falsified") {
4729 return Ok(crate::bundle::ExpectedOutcome::Falsified);
4730 }
4731 if let Some(rest) = trimmed.strip_prefix("cat:") {
4732 return Ok(crate::bundle::ExpectedOutcome::Categorical {
4733 value: rest.to_string(),
4734 });
4735 }
4736 if let Some(rest) = trimmed.strip_prefix("quant:") {
4737 let (vt, units) = rest.split_once(' ').unwrap_or((rest, ""));
4738 let (val_s, tol_s) = vt
4739 .split_once('±')
4740 .or_else(|| vt.split_once("+/-"))
4741 .ok_or_else(|| format!("expected `quant:VALUE±TOL UNITS`, got `quant:{rest}`"))?;
4742 let value: f64 = val_s
4743 .parse()
4744 .map_err(|e| format!("bad quant value `{val_s}`: {e}"))?;
4745 let tolerance: f64 = tol_s
4746 .parse()
4747 .map_err(|e| format!("bad quant tolerance `{tol_s}`: {e}"))?;
4748 return Ok(crate::bundle::ExpectedOutcome::Quantitative {
4749 value,
4750 tolerance,
4751 units: units.to_string(),
4752 });
4753 }
4754 Err(format!(
4755 "unknown outcome `{s}`; expected one of: affirmed | falsified | quant:V±T units | cat:label"
4756 ))
4757}
4758
4759#[allow(clippy::too_many_arguments)]
4761fn cmd_predict(
4762 frontier: &Path,
4763 by: &str,
4764 claim: &str,
4765 criterion: &str,
4766 resolves_by: Option<&str>,
4767 confidence: f64,
4768 target_csv: &str,
4769 outcome: &str,
4770 conditions_text: &str,
4771 json: bool,
4772) {
4773 if !(0.0..=1.0).contains(&confidence) {
4774 fail(&format!("confidence must be in [0, 1]; got {confidence}"));
4775 }
4776 let expected = parse_expected_outcome(outcome).unwrap_or_else(|e| fail_return(&e));
4777
4778 let mut project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
4779
4780 let targets: Vec<String> = target_csv
4781 .split(',')
4782 .map(|s| s.trim().to_string())
4783 .filter(|s| !s.is_empty())
4784 .collect();
4785 for t in &targets {
4786 if !t.starts_with("vf_") {
4787 fail(&format!("target `{t}` is not a vf_ id"));
4788 }
4789 if !project.findings.iter().any(|f| f.id == *t) {
4790 fail(&format!("target `{t}` not present in frontier"));
4791 }
4792 }
4793
4794 let lower = conditions_text.to_lowercase();
4795 let conditions = crate::bundle::Conditions {
4796 text: conditions_text.to_string(),
4797 species_verified: Vec::new(),
4798 species_unverified: Vec::new(),
4799 in_vitro: lower.contains("in vitro"),
4800 in_vivo: lower.contains("in vivo"),
4801 human_data: lower.contains("human") || lower.contains("clinical"),
4802 clinical_trial: lower.contains("clinical trial") || lower.contains("phase "),
4803 concentration_range: None,
4804 duration: None,
4805 age_group: None,
4806 cell_type: None,
4807 };
4808
4809 let prediction = crate::bundle::Prediction::new(
4810 claim.to_string(),
4811 targets,
4812 None,
4813 resolves_by.map(|s| s.to_string()),
4814 criterion.to_string(),
4815 expected,
4816 by.to_string(),
4817 confidence,
4818 conditions,
4819 );
4820
4821 if project.predictions.iter().any(|p| p.id == prediction.id) {
4822 if json {
4823 println!(
4824 "{}",
4825 serde_json::to_string_pretty(&json!({
4826 "ok": false,
4827 "command": "predict",
4828 "reason": "prediction_already_exists",
4829 "id": prediction.id,
4830 }))
4831 .expect("serialize")
4832 );
4833 } else {
4834 println!(
4835 "{} prediction {} already exists in {}; skipping.",
4836 style::warn("predict"),
4837 prediction.id,
4838 frontier.display()
4839 );
4840 }
4841 return;
4842 }
4843
4844 let new_id = prediction.id.clone();
4845 project.predictions.push(prediction);
4846 repo::save_to_path(frontier, &project).unwrap_or_else(|e| fail_return(&e));
4847
4848 if json {
4849 println!(
4850 "{}",
4851 serde_json::to_string_pretty(&json!({
4852 "ok": true,
4853 "command": "predict",
4854 "id": new_id,
4855 "made_by": by,
4856 "confidence": confidence,
4857 "frontier": frontier.display().to_string(),
4858 }))
4859 .expect("serialize predict result")
4860 );
4861 } else {
4862 println!();
4863 println!(
4864 " {}",
4865 format!("VELA · PREDICT · {}", new_id)
4866 .to_uppercase()
4867 .dimmed()
4868 );
4869 println!(" {}", style::tick_row(60));
4870 println!(" by: {by}");
4871 println!(" confidence: {confidence:.3}");
4872 if let Some(d) = resolves_by {
4873 println!(" resolves by: {d}");
4874 }
4875 println!(" outcome: {outcome}");
4876 println!(" claim: {}", truncate(claim, 88));
4877 println!();
4878 println!(
4879 " {} prediction recorded in {}",
4880 style::ok("ok"),
4881 frontier.display()
4882 );
4883 }
4884}
4885
/// Record a resolution for an existing `vpred_` prediction in the frontier.
///
/// Validates the prediction id prefix and the confidence range, builds a
/// minimal `Evidence` record from `source_title`, and appends the resolution
/// unless one with the same id already exists (duplicate resolutions are
/// skipped without modifying the frontier).
#[allow(clippy::too_many_arguments)]
fn cmd_resolve(
    frontier: &Path,
    prediction_id: &str,
    actual_outcome: &str,
    matched: bool,
    by: &str,
    confidence: f64,
    source_title: &str,
    doi: Option<&str>,
    json: bool,
) {
    // Ids are namespaced; only vpred_ ids denote predictions.
    if !prediction_id.starts_with("vpred_") {
        fail(&format!("prediction `{prediction_id}` is not a vpred_ id"));
    }
    if !(0.0..=1.0).contains(&confidence) {
        fail(&format!("confidence must be in [0, 1]; got {confidence}"));
    }
    let mut project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
    if !project.predictions.iter().any(|p| p.id == prediction_id) {
        fail(&format!(
            "prediction `{prediction_id}` not present in frontier"
        ));
    }

    // Minimal evidence record for the resolution event; the source title, when
    // given, is attached as a single evidence span.
    let evidence = crate::bundle::Evidence {
        evidence_type: "experimental".to_string(),
        model_system: String::new(),
        species: None,
        method: "prediction_resolution".to_string(),
        sample_size: None,
        effect_size: None,
        p_value: None,
        replicated: false,
        replication_count: None,
        evidence_spans: if source_title.is_empty() {
            Vec::new()
        } else {
            vec![serde_json::json!({"text": source_title})]
        },
    };

    // NOTE(review): `doi` is accepted by the CLI but discarded here — the
    // binding below only silences the unused-parameter warning. Consider
    // recording it on the evidence/provenance; confirm intent with the team.
    let _ = doi; let resolution = crate::bundle::Resolution::new(
        prediction_id.to_string(),
        actual_outcome.to_string(),
        matched,
        by.to_string(),
        evidence,
        confidence,
    );

    // Duplicate resolution id: report and skip without saving.
    if project.resolutions.iter().any(|r| r.id == resolution.id) {
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "ok": false,
                    "command": "resolve",
                    "reason": "resolution_already_exists",
                    "id": resolution.id,
                }))
                .expect("serialize")
            );
        } else {
            println!(
                "{} resolution {} already exists in {}; skipping.",
                style::warn("resolve"),
                resolution.id,
                frontier.display()
            );
        }
        return;
    }

    let new_id = resolution.id.clone();
    project.resolutions.push(resolution);
    repo::save_to_path(frontier, &project).unwrap_or_else(|e| fail_return(&e));

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "resolve",
                "id": new_id,
                "prediction": prediction_id,
                "matched": matched,
                "frontier": frontier.display().to_string(),
            }))
            .expect("serialize resolve result")
        );
    } else {
        println!();
        println!(
            " {}",
            format!("VELA · RESOLVE · {}", new_id)
                .to_uppercase()
                .dimmed()
        );
        println!(" {}", style::tick_row(60));
        println!(" prediction: {prediction_id}");
        println!(
            " matched: {}",
            if matched {
                style::ok("yes")
            } else {
                style::lost("no")
            }
        );
        println!(" by: {by}");
        println!(" outcome: {}", truncate(actual_outcome, 80));
        println!();
        println!(
            " {} resolution recorded in {}",
            style::ok("ok"),
            frontier.display()
        );
    }
}
5012
5013fn cmd_predictions(frontier: &Path, by: Option<&str>, open: bool, json: bool) {
5015 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
5016
5017 let resolved_ids: std::collections::HashSet<&str> = project
5018 .resolutions
5019 .iter()
5020 .map(|r| r.prediction_id.as_str())
5021 .collect();
5022
5023 let mut filtered: Vec<&crate::bundle::Prediction> = project
5024 .predictions
5025 .iter()
5026 .filter(|p| by.is_none_or(|b| p.made_by == b))
5027 .filter(|p| !open || !resolved_ids.contains(p.id.as_str()))
5028 .collect();
5029 filtered.sort_by(|a, b| {
5030 a.resolves_by
5031 .as_deref()
5032 .unwrap_or("9999")
5033 .cmp(b.resolves_by.as_deref().unwrap_or("9999"))
5034 });
5035
5036 if json {
5037 let payload: Vec<serde_json::Value> = filtered
5038 .iter()
5039 .map(|p| {
5040 json!({
5041 "id": p.id,
5042 "claim_text": p.claim_text,
5043 "made_by": p.made_by,
5044 "confidence": p.confidence,
5045 "predicted_at": p.predicted_at,
5046 "resolves_by": p.resolves_by,
5047 "expected_outcome": p.expected_outcome,
5048 "resolved": resolved_ids.contains(p.id.as_str()),
5049 })
5050 })
5051 .collect();
5052 println!(
5053 "{}",
5054 serde_json::to_string_pretty(&json!({
5055 "ok": true,
5056 "command": "predictions",
5057 "frontier": frontier.display().to_string(),
5058 "count": payload.len(),
5059 "predictions": payload,
5060 }))
5061 .expect("serialize predictions")
5062 );
5063 return;
5064 }
5065
5066 println!();
5067 println!(
5068 " {}",
5069 format!("VELA · PREDICTIONS · {}", frontier.display())
5070 .to_uppercase()
5071 .dimmed()
5072 );
5073 println!(" {}", style::tick_row(60));
5074 if filtered.is_empty() {
5075 println!(" (no predictions matching filters)");
5076 return;
5077 }
5078 for p in &filtered {
5079 let resolved = resolved_ids.contains(p.id.as_str());
5080 let chip = if resolved {
5081 style::ok("resolved")
5082 } else {
5083 style::warn("open")
5084 };
5085 let deadline = p.resolves_by.as_deref().unwrap_or("(no deadline)");
5086 println!(
5087 " · {} {} by {} → {}",
5088 p.id.dimmed(),
5089 chip,
5090 p.made_by,
5091 deadline,
5092 );
5093 println!(" claim: {}", truncate(&p.claim_text, 90));
5094 println!(" confidence: {:.2}", p.confidence);
5095 }
5096}
5097
5098fn cmd_predictions_expire(frontier: &Path, now_override: Option<&str>, dry_run: bool, json: bool) {
5103 use chrono::DateTime;
5104
5105 let now_dt = match now_override {
5106 Some(s) => DateTime::parse_from_rfc3339(s)
5107 .map(|dt| dt.with_timezone(&chrono::Utc))
5108 .unwrap_or_else(|e| fail_return(&format!("invalid --now '{s}': {e}"))),
5109 None => chrono::Utc::now(),
5110 };
5111
5112 let mut project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
5113 if dry_run {
5114 let mut probe = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
5116 let report = crate::calibration::expire_overdue_predictions(&mut probe, now_dt);
5117 if json {
5118 println!(
5119 "{}",
5120 serde_json::to_string_pretty(&json!({
5121 "ok": true,
5122 "command": "predictions.expire",
5123 "dry_run": true,
5124 "report": report,
5125 }))
5126 .expect("serialize predictions.expire (dry-run)")
5127 );
5128 } else {
5129 println!(
5130 "{} dry-run @ {}: {} would expire, {} already expired, {} resolved, {} still open",
5131 style::ok("ok"),
5132 report.now,
5133 report.newly_expired.len(),
5134 report.already_expired.len(),
5135 report.already_resolved.len(),
5136 report.still_open.len(),
5137 );
5138 for id in &report.newly_expired {
5139 println!(" · {id}");
5140 }
5141 }
5142 return;
5143 }
5144
5145 let report = crate::calibration::expire_overdue_predictions(&mut project, now_dt);
5146 repo::save_to_path(frontier, &project).unwrap_or_else(|e| fail_return(&e));
5147
5148 if json {
5149 println!(
5150 "{}",
5151 serde_json::to_string_pretty(&json!({
5152 "ok": true,
5153 "command": "predictions.expire",
5154 "report": report,
5155 }))
5156 .expect("serialize predictions.expire")
5157 );
5158 } else {
5159 println!(
5160 "{} @ {}: {} newly expired, {} already expired, {} resolved, {} still open",
5161 style::ok("expired"),
5162 report.now,
5163 report.newly_expired.len(),
5164 report.already_expired.len(),
5165 report.already_resolved.len(),
5166 report.still_open.len(),
5167 );
5168 for id in &report.newly_expired {
5169 println!(" · {id}");
5170 }
5171 }
5172}
5173
5174fn cmd_calibration(frontier: &Path, actor: Option<&str>, json: bool) {
5175 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
5176 let records = match actor {
5177 Some(a) => {
5178 crate::calibration::calibration_for_actor(a, &project.predictions, &project.resolutions)
5179 .map(|r| vec![r])
5180 .unwrap_or_default()
5181 }
5182 None => crate::calibration::calibration_records(&project.predictions, &project.resolutions),
5183 };
5184
5185 if json {
5186 println!(
5187 "{}",
5188 serde_json::to_string_pretty(&json!({
5189 "ok": true,
5190 "command": "calibration",
5191 "frontier": frontier.display().to_string(),
5192 "filter_actor": actor,
5193 "records": records,
5194 }))
5195 .expect("serialize calibration")
5196 );
5197 return;
5198 }
5199
5200 println!();
5201 println!(
5202 " {}",
5203 format!("VELA · CALIBRATION · {}", frontier.display())
5204 .to_uppercase()
5205 .dimmed()
5206 );
5207 println!(" {}", style::tick_row(60));
5208 if records.is_empty() {
5209 println!(" (no calibration records)");
5210 return;
5211 }
5212 for r in &records {
5213 println!(" · {}", r.actor);
5214 println!(
5215 " predictions: {} resolved: {} hits: {}",
5216 r.n_predictions, r.n_resolved, r.n_hit
5217 );
5218 match r.hit_rate {
5219 Some(h) => println!(" hit rate: {:.1}%", h * 100.0),
5220 None => println!(" hit rate: n/a"),
5221 }
5222 match r.brier_score {
5223 Some(b) => println!(
5224 " brier: {:.4} (lower is better; 0.25 = chance)",
5225 b
5226 ),
5227 None => println!(" brier: n/a"),
5228 }
5229 match r.log_score {
5230 Some(l) => println!(
5231 " log score: {:.4} (higher is better; 0 = perfect)",
5232 l
5233 ),
5234 None => println!(" log score: n/a"),
5235 }
5236 }
5237}
5238
/// Register an external dataset in the frontier with provenance metadata.
///
/// Builds a `data_release` provenance record (extraction stamped as
/// `manual_curation` at the current crate version), constructs the dataset,
/// and appends it unless one with the same id already exists — duplicates are
/// reported and skipped without modifying the frontier, so the command is
/// safe to re-run.
#[allow(clippy::too_many_arguments)]
fn cmd_dataset_add(
    frontier: &Path,
    name: &str,
    version: Option<&str>,
    content_hash: &str,
    url: Option<&str>,
    license: Option<&str>,
    source_title: &str,
    doi: Option<&str>,
    row_count: Option<u64>,
    json: bool,
) {
    let mut project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));

    // Provenance for the dataset's source publication/release. Fields not
    // captured by the CLI (authors, year, journal, …) are left empty/None.
    let provenance = crate::bundle::Provenance {
        source_type: "data_release".to_string(),
        doi: doi.map(|s| s.to_string()),
        pmid: None,
        pmc: None,
        openalex_id: None,
        url: url.map(|s| s.to_string()),
        title: source_title.to_string(),
        authors: Vec::new(),
        year: None,
        journal: None,
        license: license.map(|s| s.to_string()),
        publisher: None,
        funders: Vec::new(),
        extraction: crate::bundle::Extraction {
            method: "manual_curation".to_string(),
            model: None,
            model_version: None,
            // Stamp when and by which build this entry was curated.
            extracted_at: chrono::Utc::now().to_rfc3339(),
            extractor_version: env!("CARGO_PKG_VERSION").to_string(),
        },
        review: None,
        citation_count: None,
    };

    let mut dataset = crate::bundle::Dataset::new(
        name.to_string(),
        version.map(|s| s.to_string()),
        content_hash.to_string(),
        url.map(|s| s.to_string()),
        license.map(|s| s.to_string()),
        provenance,
    );
    // row_count is not a constructor argument; set it after construction.
    dataset.row_count = row_count;

    // Duplicate dataset id: report and skip without saving.
    if project.datasets.iter().any(|d| d.id == dataset.id) {
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "ok": false,
                    "command": "dataset.add",
                    "reason": "dataset_already_exists",
                    "id": dataset.id,
                }))
                .expect("serialize")
            );
        } else {
            println!(
                "{} dataset {} already exists in {}; skipping.",
                style::warn("dataset"),
                dataset.id,
                frontier.display()
            );
        }
        return;
    }

    let new_id = dataset.id.clone();
    project.datasets.push(dataset);
    repo::save_to_path(frontier, &project).unwrap_or_else(|e| fail_return(&e));

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "dataset.add",
                "id": new_id,
                "name": name,
                "version": version,
                "frontier": frontier.display().to_string(),
            }))
            .expect("failed to serialize dataset.add result")
        );
    } else {
        println!();
        println!(
            " {}",
            format!("VELA · DATASET · {}", new_id)
                .to_uppercase()
                .dimmed()
        );
        println!(" {}", style::tick_row(60));
        println!(" name: {name}");
        if let Some(v) = version {
            println!(" version: {v}");
        }
        println!(" content_hash: {content_hash}");
        if let Some(u) = url {
            println!(" url: {u}");
        }
        println!(" source: {source_title}");
        println!();
        println!(
            " {} dataset recorded in {}",
            style::ok("ok"),
            frontier.display()
        );
    }
}
5356
/// Deposit a negative result (null finding / failed replication) into the
/// frontier.
///
/// `kind` selects the shape of the record:
/// - `"registered_trial"`: requires --endpoint, --intervention, --comparator,
///   --population, --n-enrolled, --power, --ci-lower and --ci-upper;
///   --effect-size-threshold and --registry-id stay optional.
/// - `"exploratory"`: requires --reagent, --observation and --attempts.
///
/// A missing required flag, or any other `kind` value, aborts the process
/// via `fail_return`. On success the deposit report is printed as pretty
/// JSON (`--json`) or as a styled confirmation.
#[allow(clippy::too_many_arguments)]
fn cmd_negative_result_add(
    frontier: &Path,
    kind: &str,
    deposited_by: &str,
    reason: &str,
    conditions_text: &str,
    notes: &str,
    targets: Vec<String>,
    endpoint: Option<&str>,
    intervention: Option<&str>,
    comparator: Option<&str>,
    population: Option<&str>,
    n_enrolled: Option<u32>,
    power: Option<f64>,
    ci_lower: Option<f64>,
    ci_upper: Option<f64>,
    effect_size_threshold: Option<f64>,
    registry_id: Option<&str>,
    reagent: Option<&str>,
    observation: Option<&str>,
    attempts: Option<u32>,
    source_title: &str,
    doi: Option<&str>,
    url: Option<&str>,
    year: Option<i32>,
    json: bool,
) {
    // Assemble the typed kind from the flat CLI flags, enforcing the
    // per-kind required arguments as we go.
    let nr_kind = match kind {
        "registered_trial" => {
            let endpoint =
                endpoint.unwrap_or_else(|| fail_return("--endpoint required for registered_trial"));
            let intervention = intervention
                .unwrap_or_else(|| fail_return("--intervention required for registered_trial"));
            let comparator = comparator
                .unwrap_or_else(|| fail_return("--comparator required for registered_trial"));
            let population = population
                .unwrap_or_else(|| fail_return("--population required for registered_trial"));
            let n_enrolled = n_enrolled
                .unwrap_or_else(|| fail_return("--n-enrolled required for registered_trial"));
            let power =
                power.unwrap_or_else(|| fail_return("--power required for registered_trial"));
            let ci_lower =
                ci_lower.unwrap_or_else(|| fail_return("--ci-lower required for registered_trial"));
            let ci_upper =
                ci_upper.unwrap_or_else(|| fail_return("--ci-upper required for registered_trial"));
            crate::bundle::NegativeResultKind::RegisteredTrial {
                endpoint: endpoint.to_string(),
                intervention: intervention.to_string(),
                comparator: comparator.to_string(),
                population: population.to_string(),
                n_enrolled,
                power,
                effect_size_ci: (ci_lower, ci_upper),
                effect_size_threshold,
                registry_id: registry_id.map(|s| s.to_string()),
            }
        }
        "exploratory" => {
            // NOTE(review): trial-only flags (e.g. --effect-size-threshold)
            // passed alongside kind=exploratory are silently ignored here.
            let reagent =
                reagent.unwrap_or_else(|| fail_return("--reagent required for exploratory"));
            let observation = observation
                .unwrap_or_else(|| fail_return("--observation required for exploratory"));
            let attempts =
                attempts.unwrap_or_else(|| fail_return("--attempts required for exploratory"));
            crate::bundle::NegativeResultKind::Exploratory {
                reagent: reagent.to_string(),
                observation: observation.to_string(),
                attempts,
            }
        }
        other => fail_return(&format!(
            "--kind must be 'registered_trial' or 'exploratory', got '{other}'"
        )),
    };

    // Minimal conditions record: only the free-text description comes from
    // the CLI; the clinical_trial flag follows directly from `kind`.
    let conditions = crate::bundle::Conditions {
        text: conditions_text.to_string(),
        species_verified: Vec::new(),
        species_unverified: Vec::new(),
        in_vitro: false,
        in_vivo: false,
        human_data: false,
        clinical_trial: matches!(kind, "registered_trial"),
        concentration_range: None,
        duration: None,
        age_group: None,
        cell_type: None,
    };

    // Provenance for a manually curated deposit; the extraction block records
    // the deposit time and the vela version that wrote it.
    let provenance = crate::bundle::Provenance {
        source_type: if matches!(kind, "registered_trial") {
            "clinical_trial".to_string()
        } else {
            "lab_notebook".to_string()
        },
        doi: doi.map(|s| s.to_string()),
        pmid: None,
        pmc: None,
        openalex_id: None,
        url: url.map(|s| s.to_string()),
        title: source_title.to_string(),
        authors: Vec::new(),
        year,
        journal: None,
        license: None,
        publisher: None,
        funders: Vec::new(),
        extraction: crate::bundle::Extraction {
            method: "manual_curation".to_string(),
            model: None,
            model_version: None,
            extracted_at: chrono::Utc::now().to_rfc3339(),
            extractor_version: env!("CARGO_PKG_VERSION").to_string(),
        },
        review: None,
        citation_count: None,
    };

    // Persist through the event-sourced state layer; aborts on failure.
    let report = state::add_negative_result(
        frontier,
        nr_kind,
        targets,
        deposited_by,
        conditions,
        provenance,
        notes,
        reason,
    )
    .unwrap_or_else(|e| fail_return(&e));

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&report).expect("serialize report")
        );
    } else {
        println!();
        println!(
            "  {}",
            format!("VELA · NEGATIVE-RESULT · {}", report.finding_id)
                .to_uppercase()
                .dimmed()
        );
        println!("  {}", style::tick_row(60));
        println!("  kind: {kind}");
        println!("  deposited_by: {deposited_by}");
        if let Some(ev) = &report.applied_event_id {
            println!("  event: {ev}");
        }
        println!(
            "  {} negative_result deposited in {}",
            style::ok("ok"),
            frontier.display()
        );
    }
}
5519
5520fn cmd_negative_results(frontier: &Path, target: Option<&str>, json: bool) {
5523 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
5524 let filtered: Vec<&crate::bundle::NegativeResult> = project
5525 .negative_results
5526 .iter()
5527 .filter(|nr| {
5528 target
5529 .map(|t| nr.target_findings.iter().any(|f| f == t))
5530 .unwrap_or(true)
5531 })
5532 .collect();
5533
5534 if json {
5535 println!(
5536 "{}",
5537 serde_json::to_string_pretty(&json!({
5538 "ok": true,
5539 "command": "negative_results",
5540 "frontier": frontier.display().to_string(),
5541 "count": filtered.len(),
5542 "negative_results": filtered,
5543 }))
5544 .expect("serialize negative_results")
5545 );
5546 return;
5547 }
5548
5549 if filtered.is_empty() {
5550 println!(" no negative_results in {}", frontier.display());
5551 return;
5552 }
5553
5554 println!();
5555 println!(
5556 " {} ({})",
5557 "VELA · NEGATIVE RESULTS".dimmed(),
5558 filtered.len()
5559 );
5560 println!(" {}", style::tick_row(60));
5561 for nr in &filtered {
5562 let kind_label = match &nr.kind {
5563 crate::bundle::NegativeResultKind::RegisteredTrial {
5564 endpoint, power, ..
5565 } => format!("trial · {endpoint} · power {power:.2}"),
5566 crate::bundle::NegativeResultKind::Exploratory {
5567 reagent, attempts, ..
5568 } => format!("exploratory · {reagent} · {attempts} attempts"),
5569 };
5570 let retracted = if nr.retracted { " [retracted]" } else { "" };
5571 let review = nr
5572 .review_state
5573 .as_ref()
5574 .map(|s| format!(" [{s:?}]"))
5575 .unwrap_or_default();
5576 println!(" {}{}{}", nr.id, retracted, review);
5577 println!(" {kind_label}");
5578 if !nr.target_findings.is_empty() {
5579 println!(" targets: {}", nr.target_findings.join(", "));
5580 }
5581 }
5582 println!();
5583}
5584
5585#[allow(clippy::too_many_arguments)]
5587fn cmd_tier_set(
5588 frontier: &Path,
5589 object_type: &str,
5590 object_id: &str,
5591 tier: &str,
5592 actor: &str,
5593 reason: &str,
5594 json: bool,
5595) {
5596 let parsed_tier =
5597 crate::access_tier::AccessTier::parse(tier).unwrap_or_else(|e| fail_return(&e));
5598 let report = state::set_tier(frontier, object_type, object_id, parsed_tier, actor, reason)
5599 .unwrap_or_else(|e| fail_return(&e));
5600
5601 if json {
5602 println!(
5603 "{}",
5604 serde_json::to_string_pretty(&report).expect("serialize tier-set report")
5605 );
5606 } else {
5607 println!();
5608 println!(
5609 " {}",
5610 format!("VELA · TIER · {}", object_id)
5611 .to_uppercase()
5612 .dimmed()
5613 );
5614 println!(" {}", style::tick_row(60));
5615 println!(" object_type: {object_type}");
5616 println!(" new_tier: {}", parsed_tier.canonical());
5617 println!(" actor: {actor}");
5618 if let Some(ev) = &report.applied_event_id {
5619 println!(" event: {ev}");
5620 }
5621 println!(" {} tier set in {}", style::ok("ok"), frontier.display());
5622 }
5623}
5624
5625#[allow(clippy::too_many_arguments)]
5627fn cmd_trajectory_create(
5628 frontier: &Path,
5629 deposited_by: &str,
5630 reason: &str,
5631 targets: Vec<String>,
5632 notes: &str,
5633 json: bool,
5634) {
5635 let report = state::create_trajectory(frontier, targets, deposited_by, notes, reason)
5636 .unwrap_or_else(|e| fail_return(&e));
5637
5638 if json {
5639 println!(
5640 "{}",
5641 serde_json::to_string_pretty(&report).expect("serialize trajectory report")
5642 );
5643 } else {
5644 println!();
5645 println!(
5646 " {}",
5647 format!("VELA · TRAJECTORY · {}", report.finding_id)
5648 .to_uppercase()
5649 .dimmed()
5650 );
5651 println!(" {}", style::tick_row(60));
5652 println!(" deposited_by: {deposited_by}");
5653 if let Some(ev) = &report.applied_event_id {
5654 println!(" event: {ev}");
5655 }
5656 println!(
5657 " {} trajectory opened in {}",
5658 style::ok("ok"),
5659 frontier.display()
5660 );
5661 }
5662}
5663
5664#[allow(clippy::too_many_arguments)]
5666fn cmd_trajectory_step(
5667 frontier: &Path,
5668 trajectory_id: &str,
5669 kind: &str,
5670 description: &str,
5671 actor: &str,
5672 reason: &str,
5673 references: Vec<String>,
5674 json: bool,
5675) {
5676 let parsed_kind = match kind {
5677 "hypothesis" => crate::bundle::TrajectoryStepKind::Hypothesis,
5678 "tried" => crate::bundle::TrajectoryStepKind::Tried,
5679 "ruled_out" => crate::bundle::TrajectoryStepKind::RuledOut,
5680 "observed" => crate::bundle::TrajectoryStepKind::Observed,
5681 "refined" => crate::bundle::TrajectoryStepKind::Refined,
5682 other => fail_return(&format!(
5683 "--kind must be one of hypothesis|tried|ruled_out|observed|refined, got '{other}'"
5684 )),
5685 };
5686 let report = state::append_trajectory_step(
5687 frontier,
5688 trajectory_id,
5689 parsed_kind,
5690 description,
5691 actor,
5692 references,
5693 reason,
5694 )
5695 .unwrap_or_else(|e| fail_return(&e));
5696
5697 if json {
5698 println!(
5699 "{}",
5700 serde_json::to_string_pretty(&report).expect("serialize step report")
5701 );
5702 } else {
5703 println!();
5704 println!(
5705 " {}",
5706 format!("VELA · STEP · {}", report.finding_id)
5707 .to_uppercase()
5708 .dimmed()
5709 );
5710 println!(" {}", style::tick_row(60));
5711 println!(" trajectory: {trajectory_id}");
5712 println!(" kind: {kind}");
5713 println!(" actor: {actor}");
5714 println!(
5715 " {} step appended in {}",
5716 style::ok("ok"),
5717 frontier.display()
5718 );
5719 }
5720}
5721
/// List trajectories in the frontier, optionally filtered to those that
/// target a specific finding id. With `--json` the filtered set is emitted
/// as pretty JSON; otherwise each trajectory is printed with its steps.
fn cmd_trajectories(frontier: &Path, target: Option<&str>, json: bool) {
    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
    // No `target` filter means "keep everything".
    let filtered: Vec<&crate::bundle::Trajectory> = project
        .trajectories
        .iter()
        .filter(|t| {
            target
                .map(|tg| t.target_findings.iter().any(|f| f == tg))
                .unwrap_or(true)
        })
        .collect();

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "trajectories",
                "frontier": frontier.display().to_string(),
                "count": filtered.len(),
                "trajectories": filtered,
            }))
            .expect("serialize trajectories")
        );
        return;
    }

    if filtered.is_empty() {
        println!("  no trajectories in {}", frontier.display());
        return;
    }

    println!();
    println!("  {} ({})", "VELA · TRAJECTORIES".dimmed(), filtered.len());
    println!("  {}", style::tick_row(60));
    for t in &filtered {
        let retracted = if t.retracted { " [retracted]" } else { "" };
        let review = t
            .review_state
            .as_ref()
            .map(|s| format!(" [{s:?}]"))
            .unwrap_or_default();
        println!("  {}{}{}", t.id, retracted, review);
        println!(
            "    {} step(s){}",
            t.steps.len(),
            if t.target_findings.is_empty() {
                String::new()
            } else {
                format!(" · targets: {}", t.target_findings.join(", "))
            }
        );
        // One line per step: the kind keyword plus a description preview
        // capped at 80 characters.
        for step in &t.steps {
            let label = match step.kind {
                crate::bundle::TrajectoryStepKind::Hypothesis => "hypothesis",
                crate::bundle::TrajectoryStepKind::Tried => "tried",
                crate::bundle::TrajectoryStepKind::RuledOut => "ruled_out",
                crate::bundle::TrajectoryStepKind::Observed => "observed",
                crate::bundle::TrajectoryStepKind::Refined => "refined",
            };
            let preview: String = step.description.chars().take(80).collect();
            println!("      [{label}] {preview}");
        }
    }
    println!();
}
5789
5790fn cmd_datasets(frontier: &Path, json: bool) {
5792 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
5793 if json {
5794 println!(
5795 "{}",
5796 serde_json::to_string_pretty(&json!({
5797 "ok": true,
5798 "command": "datasets",
5799 "frontier": frontier.display().to_string(),
5800 "count": project.datasets.len(),
5801 "datasets": project.datasets,
5802 }))
5803 .expect("serialize datasets")
5804 );
5805 return;
5806 }
5807 println!();
5808 println!(
5809 " {}",
5810 format!("VELA · DATASETS · {}", frontier.display())
5811 .to_uppercase()
5812 .dimmed()
5813 );
5814 println!(" {}", style::tick_row(60));
5815 if project.datasets.is_empty() {
5816 println!(" (no datasets registered)");
5817 return;
5818 }
5819 for ds in &project.datasets {
5820 let v = ds
5821 .version
5822 .as_deref()
5823 .map(|s| format!("@{s}"))
5824 .unwrap_or_default();
5825 println!(" · {} {}{}", ds.id.dimmed(), ds.name, v);
5826 if let Some(u) = &ds.url {
5827 println!(" url: {}", truncate(u, 80));
5828 }
5829 println!(" hash: {}", truncate(&ds.content_hash, 80));
5830 }
5831}
5832
/// Register a code artifact (a file, or a line range within a file, in some
/// repository) in the frontier.
///
/// A `(line_start, line_end)` pair becomes the range; `line_start` alone
/// collapses to a single-line range. If an artifact with the same id is
/// already present, the command reports that and leaves the frontier
/// untouched; otherwise the project is saved back with the new entry.
#[allow(clippy::too_many_arguments)]
fn cmd_code_add(
    frontier: &Path,
    language: &str,
    repo_url: Option<&str>,
    commit: Option<&str>,
    path: &str,
    content_hash: &str,
    line_start: Option<u32>,
    line_end: Option<u32>,
    entry_point: Option<&str>,
    json: bool,
) {
    let mut project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));

    // NOTE(review): `(None, Some(_))` falls through to `None`, so a lone
    // --line-end is silently dropped — confirm this is intended.
    let line_range = match (line_start, line_end) {
        (Some(a), Some(b)) => Some((a, b)),
        (Some(a), None) => Some((a, a)),
        _ => None,
    };

    let artifact = crate::bundle::CodeArtifact::new(
        language.to_string(),
        repo_url.map(|s| s.to_string()),
        commit.map(|s| s.to_string()),
        path.to_string(),
        line_range,
        content_hash.to_string(),
        entry_point.map(|s| s.to_string()),
    );

    // Duplicate ids are skipped rather than duplicated (id presumably
    // derived from the artifact's fields — see CodeArtifact::new).
    if project.code_artifacts.iter().any(|c| c.id == artifact.id) {
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "ok": false,
                    "command": "code.add",
                    "reason": "artifact_already_exists",
                    "id": artifact.id,
                }))
                .expect("serialize")
            );
        } else {
            println!(
                "{} code artifact {} already exists in {}; skipping.",
                style::warn("code"),
                artifact.id,
                frontier.display()
            );
        }
        return;
    }

    // Append and persist; the id is copied first because the artifact is
    // moved into the project.
    let new_id = artifact.id.clone();
    project.code_artifacts.push(artifact);
    repo::save_to_path(frontier, &project).unwrap_or_else(|e| fail_return(&e));

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "code.add",
                "id": new_id,
                "language": language,
                "path": path,
                "frontier": frontier.display().to_string(),
            }))
            .expect("failed to serialize code.add result")
        );
    } else {
        println!();
        println!(
            "  {}",
            format!("VELA · CODE · {}", new_id).to_uppercase().dimmed()
        );
        println!("  {}", style::tick_row(60));
        println!("  language: {language}");
        if let Some(r) = repo_url {
            println!("  repo: {r}");
        }
        if let Some(c) = commit {
            println!("  commit: {c}");
        }
        println!("  path: {path}");
        if let Some((a, b)) = line_range {
            println!("  lines: {a}-{b}");
        }
        println!("  content_hash: {content_hash}");
        println!();
        println!(
            "  {} code artifact recorded in {}",
            style::ok("ok"),
            frontier.display()
        );
    }
}
5932
5933fn cmd_code_artifacts(frontier: &Path, json: bool) {
5935 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
5936 if json {
5937 println!(
5938 "{}",
5939 serde_json::to_string_pretty(&json!({
5940 "ok": true,
5941 "command": "code-artifacts",
5942 "frontier": frontier.display().to_string(),
5943 "count": project.code_artifacts.len(),
5944 "code_artifacts": project.code_artifacts,
5945 }))
5946 .expect("serialize code-artifacts")
5947 );
5948 return;
5949 }
5950 println!();
5951 println!(
5952 " {}",
5953 format!("VELA · CODE · {}", frontier.display())
5954 .to_uppercase()
5955 .dimmed()
5956 );
5957 println!(" {}", style::tick_row(60));
5958 if project.code_artifacts.is_empty() {
5959 println!(" (no code artifacts registered)");
5960 return;
5961 }
5962 for c in &project.code_artifacts {
5963 let lr = c
5964 .line_range
5965 .map(|(a, b)| format!(":{a}-{b}"))
5966 .unwrap_or_default();
5967 println!(" · {} {} {}{}", c.id.dimmed(), c.language, c.path, lr);
5968 if let Some(r) = &c.repo_url {
5969 println!(" repo: {}", truncate(r, 80));
5970 }
5971 if let Some(g) = &c.git_commit {
5972 println!(" commit: {g}");
5973 }
5974 }
5975}
5976
5977fn sha256_for_bytes(bytes: &[u8]) -> String {
5978 format!("sha256:{}", hex::encode(Sha256::digest(bytes)))
5979}
5980
/// Return the hex portion of a "sha256:<hex>" content hash; input without
/// the prefix is assumed to already be bare hex and passes through.
fn sha256_hex_part(content_hash: &str) -> &str {
    match content_hash.strip_prefix("sha256:") {
        Some(hex) => hex,
        None => content_hash,
    }
}
5984
/// Store `bytes` as a content-addressed blob inside the enclosing vela repo
/// and return the repo-relative locator for it.
///
/// Returns `None` when `frontier` does not resolve to a full vela repo
/// checkout; callers then fall back to another storage mode. Directory
/// creation or write failures abort the process via `fail`.
fn artifact_blob_locator(frontier: &Path, content_hash: &str, bytes: &[u8]) -> Option<String> {
    // Blobs are only materialized inside a real `.vela` repo.
    let Ok(repo::VelaSource::VelaRepo(root)) = repo::detect(frontier) else {
        return None;
    };
    // Path is keyed by the sha256 hex of the supplied content hash.
    let hex = sha256_hex_part(content_hash);
    let rel = format!(".vela/artifact-blobs/sha256/{hex}");
    let path = root.join(&rel);
    if let Some(parent) = path.parent() {
        std::fs::create_dir_all(parent).unwrap_or_else(|e| {
            fail(&format!(
                "Failed to create artifact blob directory {}: {e}",
                parent.display()
            ))
        });
    }
    // Content-addressed: an existing file at this path is assumed to hold
    // the same bytes and is not rewritten (NOTE(review): nor re-verified).
    if !path.is_file() {
        std::fs::write(&path, bytes)
            .unwrap_or_else(|e| fail(&format!("Failed to write artifact blob: {e}")));
    }
    Some(rel)
}
6006
6007fn parse_metadata_pairs(pairs: Vec<String>) -> BTreeMap<String, Value> {
6008 let mut out = BTreeMap::new();
6009 for pair in pairs {
6010 let Some((key, value)) = pair.split_once('=') else {
6011 fail(&format!("--metadata must be key=value, got {pair:?}"));
6012 };
6013 let key = key.trim();
6014 if key.is_empty() {
6015 fail("--metadata key must be non-empty");
6016 }
6017 out.insert(key.to_string(), Value::String(value.trim().to_string()));
6018 }
6019 out
6020}
6021
/// Map a CLI artifact `kind` onto the provenance `source_type` vocabulary
/// used by the bundle format; unknown kinds fall back to "database_record".
fn artifact_source_type(kind: &str) -> &'static str {
    const KIND_TO_SOURCE: [(&str, &str); 6] = [
        ("clinical_trial_record", "clinical_trial"),
        ("protocol", "clinical_trial"),
        ("dataset", "data_release"),
        ("model_output", "model_output"),
        ("registry_record", "database_record"),
        ("lab_file", "lab_notebook"),
    ];
    KIND_TO_SOURCE
        .iter()
        .find(|(k, _)| *k == kind)
        .map(|(_, source)| *source)
        .unwrap_or("database_record")
}
6032
/// Build a `Provenance` record for a manually deposited artifact.
///
/// `source_type` is derived from the artifact `kind` via
/// `artifact_source_type`; fields the CLI does not collect (authors,
/// journal, ids, …) are left empty. The extraction block marks this as an
/// "artifact_deposit" performed now by the current vela version.
fn artifact_provenance(
    kind: &str,
    title: &str,
    url: Option<&str>,
    doi: Option<&str>,
    license: Option<&str>,
) -> crate::bundle::Provenance {
    crate::bundle::Provenance {
        source_type: artifact_source_type(kind).to_string(),
        doi: doi.map(str::to_string),
        pmid: None,
        pmc: None,
        openalex_id: None,
        url: url.map(str::to_string),
        title: title.to_string(),
        authors: Vec::new(),
        year: None,
        journal: None,
        license: license.map(str::to_string),
        publisher: None,
        funders: Vec::new(),
        extraction: crate::bundle::Extraction {
            method: "artifact_deposit".to_string(),
            model: None,
            model_version: None,
            extracted_at: chrono::Utc::now().to_rfc3339(),
            extractor_version: env!("CARGO_PKG_VERSION").to_string(),
        },
        review: None,
        citation_count: None,
    }
}
6065
/// Deposit a generic artifact into the frontier.
///
/// Storage resolution:
/// - `--file`: bytes are read and hashed; a supplied `--content-hash` must
///   match them. The blob is stored content-addressed inside the repo
///   ("local_blob") when possible, otherwise the local path is recorded
///   ("local_file").
/// - no file but `--url`: recorded as "remote" with the URL as locator.
/// - neither: `--content-hash` is mandatory and the entry stays a bare
///   "pointer".
///
/// Persists through the event-sourced state layer; failures abort via
/// `fail`/`fail_return`.
#[allow(clippy::too_many_arguments)]
fn cmd_artifact_add(
    frontier: &Path,
    kind: &str,
    name: &str,
    file: Option<&Path>,
    url: Option<&str>,
    content_hash: Option<&str>,
    media_type: Option<&str>,
    license: Option<&str>,
    source_title: Option<&str>,
    source_url: Option<&str>,
    doi: Option<&str>,
    target: Vec<String>,
    metadata: Vec<String>,
    access_tier: &str,
    deposited_by: &str,
    reason: &str,
    json_out: bool,
) {
    let tier =
        crate::access_tier::AccessTier::parse(access_tier).unwrap_or_else(|e| fail_return(&e));
    // Defaults for the no-file, no-url case; overwritten below as the
    // storage mode is resolved.
    let mut size_bytes = None;
    let mut storage_mode = "pointer".to_string();
    let mut locator = url.map(str::to_string);
    let mut computed_hash = content_hash.map(str::to_string);

    if let Some(path) = file {
        let bytes = std::fs::read(path)
            .unwrap_or_else(|e| fail(&format!("Failed to read artifact file: {e}")));
        let actual_hash = sha256_for_bytes(&bytes);
        // A caller-supplied hash must agree with the file's actual bytes
        // (compared on the hex part, case-insensitively).
        if let Some(expected) = content_hash {
            let expected_hex = sha256_hex_part(expected);
            let actual_hex = sha256_hex_part(&actual_hash);
            if !expected_hex.eq_ignore_ascii_case(actual_hex) {
                fail(&format!(
                    "--content-hash does not match file bytes: expected {expected}, got {actual_hash}"
                ));
            }
        }
        size_bytes = Some(bytes.len() as u64);
        computed_hash = Some(actual_hash.clone());
        // Prefer a content-addressed blob inside the repo; otherwise keep a
        // reference to the local path.
        if let Some(rel) = artifact_blob_locator(frontier, &actual_hash, &bytes) {
            storage_mode = "local_blob".to_string();
            locator = Some(rel);
        } else {
            storage_mode = "local_file".to_string();
            locator = Some(path.display().to_string());
        }
    }

    // Without file bytes the hash cannot be computed, so it must be given.
    let Some(content_hash) = computed_hash else {
        fail("Provide --content-hash unless --file is present.");
    };
    let content_hash_for_print = content_hash.clone();
    if file.is_none() && url.is_some() {
        storage_mode = "remote".to_string();
    }

    // Source metadata falls back to the deposit's own fields.
    let source_url_effective = source_url.or(url);
    let source_title = source_title.unwrap_or(name);
    let provenance = artifact_provenance(kind, source_title, source_url_effective, doi, license);
    let metadata = parse_metadata_pairs(metadata);
    let artifact = crate::bundle::Artifact::new(
        kind.to_string(),
        name.to_string(),
        content_hash,
        size_bytes,
        media_type.map(str::to_string),
        storage_mode,
        locator,
        source_url_effective.map(str::to_string),
        license.map(str::to_string),
        target,
        provenance,
        metadata,
        tier,
    )
    .unwrap_or_else(|e| fail_return(&e));

    let artifact_id = artifact.id.clone();
    let report = state::add_artifact(frontier, artifact, deposited_by, reason)
        .unwrap_or_else(|e| fail_return(&e));

    if json_out {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "artifact.add",
                "id": artifact_id,
                "frontier": frontier.display().to_string(),
                "event": report.applied_event_id,
            }))
            .expect("serialize artifact.add")
        );
    } else {
        println!();
        println!(
            "  {}",
            format!("VELA · ARTIFACT · {}", artifact_id)
                .to_uppercase()
                .dimmed()
        );
        println!("  {}", style::tick_row(60));
        println!("  kind: {kind}");
        println!("  name: {name}");
        println!("  hash: {content_hash_for_print}");
        println!(
            "  {} artifact recorded in {}",
            style::ok("ok"),
            frontier.display()
        );
    }
}
6181
6182fn cmd_artifacts(frontier: &Path, target: Option<&str>, json_out: bool) {
6183 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
6184 let filtered: Vec<&crate::bundle::Artifact> = project
6185 .artifacts
6186 .iter()
6187 .filter(|artifact| {
6188 target
6189 .map(|t| artifact.target_findings.iter().any(|f| f == t))
6190 .unwrap_or(true)
6191 })
6192 .collect();
6193
6194 if json_out {
6195 println!(
6196 "{}",
6197 serde_json::to_string_pretty(&json!({
6198 "ok": true,
6199 "command": "artifacts",
6200 "frontier": frontier.display().to_string(),
6201 "count": filtered.len(),
6202 "artifacts": filtered,
6203 }))
6204 .expect("serialize artifacts")
6205 );
6206 return;
6207 }
6208
6209 println!();
6210 println!(
6211 " {}",
6212 format!("VELA · ARTIFACTS · {}", frontier.display())
6213 .to_uppercase()
6214 .dimmed()
6215 );
6216 println!(" {}", style::tick_row(60));
6217 if filtered.is_empty() {
6218 println!(" (no artifacts registered)");
6219 return;
6220 }
6221 for artifact in filtered {
6222 println!(
6223 " · {} {} · {}",
6224 artifact.id.dimmed(),
6225 artifact.kind,
6226 artifact.name
6227 );
6228 if let Some(locator) = &artifact.locator {
6229 println!(" locator: {}", truncate(locator, 88));
6230 }
6231 if !artifact.target_findings.is_empty() {
6232 println!(" targets: {}", artifact.target_findings.join(", "));
6233 }
6234 }
6235}
6236
/// Run the artifact integrity audit over the frontier and print a summary.
///
/// In both JSON and styled modes the process exits with status 1 when the
/// audit reports any issue, so scripts can gate on the exit code.
fn cmd_artifact_audit(frontier: &Path, json_out: bool) {
    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
    let audit = crate::artifact_audit::audit_artifacts(frontier, &project);
    if json_out {
        println!(
            "{}",
            serde_json::to_string_pretty(&audit).expect("serialize artifact audit")
        );
        if !audit.ok {
            std::process::exit(1);
        }
        return;
    }

    println!();
    println!(
        "  {}",
        format!("VELA · ARTIFACT AUDIT · {}", frontier.display())
            .to_uppercase()
            .dimmed()
    );
    println!("  {}", style::tick_row(60));
    println!("  artifacts: {}", audit.artifact_count);
    println!("  checked local blobs: {}", audit.checked_local_blobs);
    println!("  local blob bytes: {}", audit.local_blob_bytes);
    if !audit.by_kind.is_empty() {
        // Compact comma-separated "kind:count" summary.
        let kinds = audit
            .by_kind
            .iter()
            .map(|(kind, count)| format!("{kind}:{count}"))
            .collect::<Vec<_>>()
            .join(", ");
        println!("  kinds: {kinds}");
    }
    if audit.ok {
        println!("  {} artifact audit passed.", style::ok("ok"));
        return;
    }
    // One line per failed check, then a non-zero exit.
    for issue in &audit.issues {
        println!(
            "  {} {} {}: {}",
            style::lost("invalid"),
            issue.id,
            issue.field,
            issue.message
        );
    }
    std::process::exit(1);
}
6286
/// Print the decision-brief projection: one entry per decision question,
/// with its short answer, caveat, supporting/tension/gap findings, linked
/// artifacts, and what would change the answer.
///
/// Exits with status 1 when the projection is missing or invalid (after
/// printing the issues in styled mode, or the raw report in JSON mode).
fn cmd_decision_brief(frontier: &Path, json_out: bool) {
    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
    let report = decision::load_decision_brief(frontier, &project);
    if json_out {
        println!(
            "{}",
            serde_json::to_string_pretty(&report).expect("serialize decision brief report")
        );
        if !report.ok {
            std::process::exit(1);
        }
        return;
    }
    println!();
    println!(
        "  {}",
        format!("VELA · DECISION BRIEF · {}", project.project.name)
            .to_uppercase()
            .dimmed()
    );
    println!("  {}", style::tick_row(60));
    if !report.ok {
        print_projection_issues(&report.issues, report.error.as_deref());
        std::process::exit(1);
    }
    // Invariant: `ok` reports always carry a projection.
    let brief = report
        .projection
        .as_ref()
        .expect("ok decision report carries projection");
    for question in &brief.questions {
        println!("  · {} · {}", question.id.dimmed(), question.title);
        println!("    answer: {}", wrap_line(&question.short_answer, 82));
        println!("    caveat: {}", wrap_line(&question.caveat, 82));
        println!("    support: {}", question.supporting_findings.join(", "));
        if !question.tension_findings.is_empty() {
            println!("    tensions: {}", question.tension_findings.join(", "));
        }
        if !question.gap_findings.is_empty() {
            println!("    gaps: {}", question.gap_findings.join(", "));
        }
        if !question.artifact_ids.is_empty() {
            println!("    artifacts: {}", question.artifact_ids.join(", "));
        }
        println!(
            "    would change: {}",
            wrap_line(&question.what_would_change_this_answer, 82)
        );
    }
}
6336
/// Print the trial-outcomes projection: one row per trial, with population,
/// endpoint, cognitive/biomarker/safety results, regulatory status, and any
/// linked finding/artifact ids.
///
/// Exits with status 1 when the projection is missing or invalid.
fn cmd_trial_summary(frontier: &Path, json_out: bool) {
    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
    let report = decision::load_trial_outcomes(frontier, &project);
    if json_out {
        println!(
            "{}",
            serde_json::to_string_pretty(&report).expect("serialize trial summary report")
        );
        if !report.ok {
            std::process::exit(1);
        }
        return;
    }
    println!();
    println!(
        "  {}",
        format!("VELA · TRIAL SUMMARY · {}", project.project.name)
            .to_uppercase()
            .dimmed()
    );
    println!("  {}", style::tick_row(60));
    if !report.ok {
        print_projection_issues(&report.issues, report.error.as_deref());
        std::process::exit(1);
    }
    // Invariant: `ok` reports always carry a projection.
    let outcomes = report
        .projection
        .as_ref()
        .expect("ok trial report carries projection");
    for row in &outcomes.rows {
        println!("  · {} · {} ({})", row.id.dimmed(), row.program, row.drug);
        println!("    population: {}", wrap_line(&row.population, 82));
        println!("    endpoint: {}", wrap_line(&row.primary_endpoint, 82));
        println!("    cognition: {}", wrap_line(&row.cognitive_result, 82));
        println!("    biomarker: {}", wrap_line(&row.biomarker_result, 82));
        println!("    risk: {}", wrap_line(&row.aria_or_safety_result, 82));
        println!("    status: {}", wrap_line(&row.regulatory_status, 82));
        if !row.finding_ids.is_empty() {
            println!("    findings: {}", row.finding_ids.join(", "));
        }
        if !row.artifact_ids.is_empty() {
            println!("    artifacts: {}", row.artifact_ids.join(", "));
        }
    }
}
6382
6383fn cmd_source_verification(frontier: &Path, json_out: bool) {
6384 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
6385 let report = decision::load_source_verification(frontier, &project);
6386 if json_out {
6387 println!(
6388 "{}",
6389 serde_json::to_string_pretty(&report).expect("serialize source verification report")
6390 );
6391 if !report.ok {
6392 std::process::exit(1);
6393 }
6394 return;
6395 }
6396 println!();
6397 println!(
6398 " {}",
6399 format!("VELA · SOURCE VERIFICATION · {}", project.project.name)
6400 .to_uppercase()
6401 .dimmed()
6402 );
6403 println!(" {}", style::tick_row(60));
6404 if !report.ok {
6405 print_projection_issues(&report.issues, report.error.as_deref());
6406 std::process::exit(1);
6407 }
6408 let verification = report
6409 .projection
6410 .as_ref()
6411 .expect("ok source verification report carries projection");
6412 println!(" verified_at: {}", verification.verified_at);
6413 for source in &verification.sources {
6414 println!(" · {} · {}", source.id.dimmed(), source.title);
6415 println!(" agency: {}", source.agency);
6416 println!(" url: {}", truncate(&source.url, 88));
6417 println!(" status: {}", wrap_line(&source.current_status, 82));
6418 }
6419}
6420
/// Print the source-ingest-plan projection: one entry per planned source
/// with category, priority, ingest status, locator and intended use, plus
/// any already-ingested artifact id and linked findings.
///
/// Exits with status 1 when the projection is missing or invalid.
fn cmd_source_ingest_plan(frontier: &Path, json_out: bool) {
    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
    let report = decision::load_source_ingest_plan(frontier, &project);
    if json_out {
        println!(
            "{}",
            serde_json::to_string_pretty(&report).expect("serialize source ingest plan report")
        );
        if !report.ok {
            std::process::exit(1);
        }
        return;
    }
    println!();
    println!(
        "  {}",
        format!("VELA · SOURCE INGEST PLAN · {}", project.project.name)
            .to_uppercase()
            .dimmed()
    );
    println!("  {}", style::tick_row(60));
    if !report.ok {
        print_projection_issues(&report.issues, report.error.as_deref());
        std::process::exit(1);
    }
    // Invariant: `ok` reports always carry a projection.
    let plan = report
        .projection
        .as_ref()
        .expect("ok source ingest plan report carries projection");
    println!("  verified_at: {}", plan.verified_at);
    println!("  entries: {}", plan.entries.len());
    for entry in &plan.entries {
        println!(
            "  · {} · {} · {} · {}",
            entry.id.dimmed(),
            entry.category,
            entry.priority,
            entry.ingest_status
        );
        println!("    name: {}", wrap_line(&entry.name, 82));
        println!("    locator: {}", truncate(&entry.locator, 88));
        println!("    use: {}", wrap_line(&entry.target_use, 82));
        if let Some(id) = &entry.current_frontier_artifact_id {
            println!("    artifact: {id}");
        }
        if !entry.target_findings.is_empty() {
            println!("    findings: {}", entry.target_findings.join(", "));
        }
    }
}
6471
6472fn print_projection_issues(issues: &[decision::ProjectionIssue], error: Option<&str>) {
6473 if let Some(error) = error {
6474 println!(" {} {error}", style::lost("unavailable"));
6475 }
6476 for issue in issues {
6477 println!(
6478 " {} {}: {}",
6479 style::lost("invalid"),
6480 issue.path,
6481 issue.message
6482 );
6483 }
6484}
6485
/// Soft-wrap `text` to roughly `max_chars` columns for styled CLI output.
///
/// Text that already fits is returned untouched (whitespace preserved).
/// Longer text is re-flowed word by word; continuation lines are prefixed
/// with a fixed indent so they line up under the label printed before the
/// first line. A single word longer than `max_chars` is kept whole.
fn wrap_line(text: &str, max_chars: usize) -> String {
    if text.chars().count() <= max_chars {
        return text.to_string();
    }
    let mut wrapped = String::new();
    let mut width = 0usize;
    for word in text.split_whitespace() {
        let word_width = word.chars().count();
        // Wrap only when the word (plus a joining space) would overflow a
        // non-empty line; the indent itself is not counted toward width.
        if width > 0 && width + 1 + word_width > max_chars {
            wrapped.push('\n');
            wrapped.push_str("            ");
            wrapped.push_str(word);
            width = word_width;
        } else {
            if width > 0 {
                wrapped.push(' ');
                width += 1;
            }
            wrapped.push_str(word);
            width += word_width;
        }
    }
    wrapped
}
6510
6511fn clinical_str<'a>(study: &'a Value, pointer: &str) -> Option<&'a str> {
6512 study.pointer(pointer).and_then(Value::as_str)
6513}
6514
6515fn clinical_string_array(study: &Value, pointer: &str) -> Vec<String> {
6516 study
6517 .pointer(pointer)
6518 .and_then(Value::as_array)
6519 .map(|items| {
6520 items
6521 .iter()
6522 .filter_map(Value::as_str)
6523 .map(str::to_string)
6524 .collect()
6525 })
6526 .unwrap_or_default()
6527}
6528
6529fn clinical_named_array(study: &Value, pointer: &str, field: &str) -> Vec<String> {
6530 study
6531 .pointer(pointer)
6532 .and_then(Value::as_array)
6533 .map(|items| {
6534 items
6535 .iter()
6536 .filter_map(|item| item.get(field).and_then(Value::as_str))
6537 .map(str::to_string)
6538 .collect()
6539 })
6540 .unwrap_or_default()
6541}
6542
6543fn insert_string_vec_metadata(
6544 metadata: &mut BTreeMap<String, Value>,
6545 key: &str,
6546 values: Vec<String>,
6547) {
6548 if values.is_empty() {
6549 return;
6550 }
6551 metadata.insert(
6552 key.to_string(),
6553 Value::Array(values.into_iter().map(Value::String).collect()),
6554 );
6555}
6556
/// Import a ClinicalTrials.gov study record into the frontier as a
/// `clinical_trial_record` artifact.
///
/// The study JSON is read from `input_json` when supplied, otherwise
/// fetched from the ClinicalTrials.gov v2 API for `nct_id`. It is
/// canonicalized (stable byte form), content-hashed, stored as a local
/// blob when possible, enriched with trial metadata, and appended to
/// frontier state via `state::add_artifact`. Output is JSON when
/// `json_out` is set, otherwise a human-readable summary.
///
/// Any failure aborts the process through `fail`/`fail_return`.
async fn cmd_clinical_trial_import(
    frontier: &Path,
    nct_id: &str,
    input_json: Option<&Path>,
    target: Vec<String>,
    deposited_by: &str,
    reason: &str,
    license: &str,
    json_out: bool,
) {
    let api_url = format!("https://clinicaltrials.gov/api/v2/studies/{nct_id}");
    // Prefer a local file when given; otherwise fetch over HTTP.
    let raw = if let Some(path) = input_json {
        std::fs::read_to_string(path)
            .unwrap_or_else(|e| fail(&format!("Failed to read ClinicalTrials.gov JSON: {e}")))
    } else {
        let response = reqwest::get(&api_url).await.unwrap_or_else(|e| {
            fail(&format!(
                "Failed to fetch ClinicalTrials.gov record {nct_id}: {e}"
            ))
        });
        // Treat HTTP error statuses (404, 5xx, ...) as fetch failures too.
        let response = response.error_for_status().unwrap_or_else(|e| {
            fail(&format!(
                "Failed to fetch ClinicalTrials.gov record {nct_id}: {e}"
            ))
        });
        response.text().await.unwrap_or_else(|e| {
            fail(&format!(
                "Failed to read ClinicalTrials.gov record {nct_id}: {e}"
            ))
        })
    };
    let study: Value = serde_json::from_str(&raw)
        .unwrap_or_else(|e| fail(&format!("Failed to parse ClinicalTrials.gov JSON: {e}")));
    // Canonical bytes make the sha256 deterministic regardless of key order
    // or whitespace in the fetched JSON.
    let canonical_bytes = crate::canonical::to_canonical_bytes(&study)
        .unwrap_or_else(|e| fail(&format!("Failed to canonicalize trial JSON: {e}")));
    let content_hash = sha256_for_bytes(&canonical_bytes);
    // Prefer a local blob locator; fall back to the remote API URL when the
    // blob store declines (e.g. not writable).
    let locator = artifact_blob_locator(frontier, &content_hash, &canonical_bytes)
        .unwrap_or_else(|| api_url.clone());
    let storage_mode = if locator.starts_with(".vela/") {
        "local_blob"
    } else {
        "remote"
    };

    // Use the NCT id embedded in the record when present; the caller-supplied
    // id is only a fallback.
    let parsed_nct = clinical_str(&study, "/protocolSection/identificationModule/nctId")
        .unwrap_or(nct_id)
        .to_string();
    // Title preference: brief title, then official title, then the NCT id.
    let title = clinical_str(&study, "/protocolSection/identificationModule/briefTitle")
        .or_else(|| {
            clinical_str(
                &study,
                "/protocolSection/identificationModule/officialTitle",
            )
        })
        .unwrap_or(nct_id);
    let public_url = format!("https://clinicaltrials.gov/study/{parsed_nct}");
    let mut metadata = BTreeMap::new();
    metadata.insert("nct_id".to_string(), Value::String(parsed_nct.clone()));
    metadata.insert(
        "source_api".to_string(),
        Value::String("clinicaltrials.gov-v2".to_string()),
    );
    metadata.insert(
        "retrieved_at".to_string(),
        Value::String(chrono::Utc::now().to_rfc3339()),
    );
    // Optional scalar fields: recorded only when present in the study JSON.
    for (key, pointer) in [
        (
            "overall_status",
            "/protocolSection/statusModule/overallStatus",
        ),
        (
            "start_date",
            "/protocolSection/statusModule/startDateStruct/date",
        ),
        (
            "completion_date",
            "/protocolSection/statusModule/completionDateStruct/date",
        ),
    ] {
        if let Some(value) = clinical_str(&study, pointer) {
            metadata.insert(key.to_string(), Value::String(value.to_string()));
        }
    }
    // Optional list fields: insert_string_vec_metadata skips empty lists.
    insert_string_vec_metadata(
        &mut metadata,
        "phases",
        clinical_string_array(&study, "/protocolSection/designModule/phases"),
    );
    insert_string_vec_metadata(
        &mut metadata,
        "conditions",
        clinical_string_array(&study, "/protocolSection/conditionsModule/conditions"),
    );
    insert_string_vec_metadata(
        &mut metadata,
        "interventions",
        clinical_named_array(
            &study,
            "/protocolSection/armsInterventionsModule/interventions",
            "name",
        ),
    );
    insert_string_vec_metadata(
        &mut metadata,
        "primary_outcomes",
        clinical_named_array(
            &study,
            "/protocolSection/outcomesModule/primaryOutcomes",
            "measure",
        ),
    );
    if let Some(has_results) = study.get("hasResults").and_then(Value::as_bool) {
        metadata.insert("has_results".to_string(), Value::Bool(has_results));
    }

    let provenance = artifact_provenance(
        "clinical_trial_record",
        title,
        Some(&public_url),
        None,
        Some(license),
    );
    let artifact = crate::bundle::Artifact::new(
        "clinical_trial_record",
        title.to_string(),
        content_hash,
        Some(canonical_bytes.len() as u64),
        Some("application/json".to_string()),
        storage_mode.to_string(),
        Some(locator),
        Some(public_url.clone()),
        Some(license.to_string()),
        target,
        provenance,
        metadata,
        crate::access_tier::AccessTier::Public,
    )
    .unwrap_or_else(|e| fail_return(&e));
    let artifact_id = artifact.id.clone();
    // Persist into frontier state, recording who deposited the record and why.
    let report = state::add_artifact(frontier, artifact, deposited_by, reason)
        .unwrap_or_else(|e| fail_return(&e));

    if json_out {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "clinical-trial-import",
                "nct_id": parsed_nct,
                "id": artifact_id,
                "frontier": frontier.display().to_string(),
                "event": report.applied_event_id,
                "source_url": public_url,
            }))
            .expect("serialize clinical-trial-import")
        );
    } else {
        println!();
        println!(
            " {}",
            format!("VELA · CLINICAL TRIAL · {}", artifact_id)
                .to_uppercase()
                .dimmed()
        );
        println!(" {}", style::tick_row(60));
        println!("  nct_id: {parsed_nct}");
        println!("  title: {}", truncate(title, 96));
        println!("  source: {public_url}");
        println!(
            "  {} trial record imported into {}",
            style::ok("ok"),
            frontier.display()
        );
    }
}
6733
/// Record a replication attempt against an existing `vf_` finding.
///
/// Validates `outcome` against `VALID_REPLICATION_OUTCOMES` and `target`
/// as a finding id present in the frontier, builds a
/// `bundle::Replication` (with keyword-derived condition flags and
/// manual-curation provenance), appends it, optionally propagates the
/// outcome to dependent findings (`no_cascade` skips this), recomputes
/// stats, and saves the frontier. Re-running with identical inputs is a
/// reported no-op when a replication with the same id already exists.
///
/// Failures abort the process via `fail`/`fail_return`.
#[allow(clippy::too_many_arguments)]
fn cmd_replicate(
    frontier: &Path,
    target: &str,
    outcome: &str,
    attempted_by: &str,
    conditions_text: &str,
    source_title: &str,
    doi: Option<&str>,
    pmid: Option<&str>,
    sample_size: Option<&str>,
    note: &str,
    previous_attempt: Option<&str>,
    no_cascade: bool,
    json: bool,
) {
    if !crate::bundle::VALID_REPLICATION_OUTCOMES.contains(&outcome) {
        fail(&format!(
            "invalid outcome '{outcome}'; valid: {:?}",
            crate::bundle::VALID_REPLICATION_OUTCOMES
        ));
    }
    if !target.starts_with("vf_") {
        fail(&format!("target '{target}' is not a vf_ finding id"));
    }

    let mut project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));

    if !project.findings.iter().any(|f| f.id == target) {
        fail(&format!(
            "target finding '{target}' not present in frontier '{}'",
            frontier.display()
        ));
    }

    // Heuristically classify the study context by keyword matching on the
    // lowercased free-text conditions description.
    let lower = conditions_text.to_lowercase();
    let conditions = crate::bundle::Conditions {
        text: conditions_text.to_string(),
        species_verified: Vec::new(),
        species_unverified: Vec::new(),
        in_vitro: lower.contains("in vitro") || lower.contains("ipsc"),
        in_vivo: lower.contains("in vivo") || lower.contains("mouse") || lower.contains("rat"),
        human_data: lower.contains("human")
            || lower.contains("clinical")
            || lower.contains("patient"),
        clinical_trial: lower.contains("clinical trial") || lower.contains("phase "),
        concentration_range: None,
        duration: None,
        age_group: None,
        cell_type: None,
    };

    let evidence = crate::bundle::Evidence {
        evidence_type: "experimental".to_string(),
        model_system: String::new(),
        species: None,
        method: "replication_attempt".to_string(),
        sample_size: sample_size.map(|s| s.to_string()),
        effect_size: None,
        p_value: None,
        // Only an exact "replicated" outcome counts as a success here;
        // "partial" and "failed" leave this false.
        replicated: outcome == "replicated",
        replication_count: None,
        evidence_spans: Vec::new(),
    };

    let provenance = crate::bundle::Provenance {
        source_type: "published_paper".to_string(),
        doi: doi.map(|s| s.to_string()),
        pmid: pmid.map(|s| s.to_string()),
        pmc: None,
        openalex_id: None,
        url: None,
        title: source_title.to_string(),
        authors: Vec::new(),
        year: None,
        journal: None,
        license: None,
        publisher: None,
        funders: Vec::new(),
        extraction: crate::bundle::Extraction {
            method: "manual_curation".to_string(),
            model: None,
            model_version: None,
            extracted_at: chrono::Utc::now().to_rfc3339(),
            extractor_version: env!("CARGO_PKG_VERSION").to_string(),
        },
        review: None,
        citation_count: None,
    };

    let mut rep = crate::bundle::Replication::new(
        target.to_string(),
        attempted_by.to_string(),
        outcome.to_string(),
        evidence,
        conditions,
        provenance,
        note.to_string(),
    );
    rep.previous_attempt = previous_attempt.map(|s| s.to_string());

    // Duplicate guard: an existing replication with the same id is reported
    // and skipped rather than inserted twice.
    // NOTE(review): this assumes Replication::new derives a deterministic id
    // from its inputs — confirm in bundle.rs.
    if project.replications.iter().any(|r| r.id == rep.id) {
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "ok": false,
                    "command": "replicate",
                    "reason": "replication_already_exists",
                    "id": rep.id,
                }))
                .expect("serialize")
            );
        } else {
            println!(
                "{} replication {} already exists in {}; skipping.",
                style::warn("replicate"),
                rep.id,
                frontier.display()
            );
        }
        return;
    }

    let new_id = rep.id.clone();
    project.replications.push(rep);

    // Cascade: propagate the replication outcome to dependent findings and
    // record the generated review events, unless explicitly disabled.
    let cascade_result = if no_cascade {
        None
    } else {
        let result = propagate::propagate_correction(
            &mut project,
            target,
            propagate::PropagationAction::ReplicationOutcome {
                outcome: outcome.to_string(),
                vrep_id: new_id.clone(),
            },
        );
        project.review_events.extend(result.events.clone());
        project::recompute_stats(&mut project);
        Some(result)
    };

    // Persist all mutations (replication + cascade effects) in one save.
    repo::save_to_path(frontier, &project).unwrap_or_else(|e| fail_return(&e));

    if json {
        let cascade_json = cascade_result.as_ref().map(|r| {
            json!({
                "affected": r.affected,
                "events": r.events.len(),
            })
        });
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "replicate",
                "id": new_id,
                "target": target,
                "outcome": outcome,
                "attempted_by": attempted_by,
                "cascade": cascade_json,
                "frontier": frontier.display().to_string(),
            }))
            .expect("failed to serialize replicate result")
        );
    } else {
        println!();
        println!(
            " {}",
            format!("VELA · REPLICATE · {}", new_id)
                .to_uppercase()
                .dimmed()
        );
        println!(" {}", style::tick_row(60));
        println!("  target: {target}");
        println!("  outcome: {outcome}");
        println!("  attempted by: {attempted_by}");
        println!("  conditions: {conditions_text}");
        println!("  source: {source_title}");
        if let Some(d) = doi {
            println!("  doi: {d}");
        }
        println!();
        println!(
            "  {} replication recorded in {}",
            style::ok("ok"),
            frontier.display()
        );
        if let Some(result) = cascade_result {
            println!(
                "  {} cascade: {} dependent(s) flagged, {} review event(s) recorded",
                style::ok("ok"),
                result.affected,
                result.events.len()
            );
        } else {
            println!("  {} cascade skipped (--no-cascade)", style::warn("info"));
        }
    }
}
6955
6956fn cmd_replications(frontier: &Path, target: Option<&str>, json: bool) {
6958 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
6959 let filtered: Vec<&crate::bundle::Replication> = project
6960 .replications
6961 .iter()
6962 .filter(|r| target.is_none_or(|t| r.target_finding == t))
6963 .collect();
6964
6965 if json {
6966 let payload = json!({
6967 "ok": true,
6968 "command": "replications",
6969 "frontier": frontier.display().to_string(),
6970 "filter_target": target,
6971 "count": filtered.len(),
6972 "replications": filtered,
6973 });
6974 println!(
6975 "{}",
6976 serde_json::to_string_pretty(&payload).expect("failed to serialize replications list")
6977 );
6978 return;
6979 }
6980
6981 println!();
6982 let header = match target {
6983 Some(t) => format!("VELA · REPLICATIONS · {t}"),
6984 None => format!("VELA · REPLICATIONS · {}", frontier.display()),
6985 };
6986 println!(" {}", header.to_uppercase().dimmed());
6987 println!(" {}", style::tick_row(60));
6988 if filtered.is_empty() {
6989 println!(" (no replications recorded)");
6990 return;
6991 }
6992 for rep in &filtered {
6993 let outcome_chip = match rep.outcome.as_str() {
6994 "replicated" => style::ok(&rep.outcome),
6995 "failed" => style::lost(&rep.outcome),
6996 "partial" => style::warn(&rep.outcome),
6997 _ => rep.outcome.clone().normal().to_string(),
6998 };
6999 println!(
7000 " · {} {} by {}",
7001 rep.id.dimmed(),
7002 outcome_chip,
7003 rep.attempted_by
7004 );
7005 println!(" target: {}", rep.target_finding);
7006 if !rep.conditions.text.is_empty() {
7007 println!(" conditions: {}", truncate(&rep.conditions.text, 80));
7008 }
7009 if !rep.provenance.title.is_empty() {
7010 println!(" source: {}", truncate(&rep.provenance.title, 80));
7011 }
7012 }
7013}
7014
/// Route `vela ingest` input to the appropriate pipeline.
///
/// Accepts either a `doi:`/`pmid:`/`nct:` URI (metadata fetch only — no
/// frontier state is written), a single file (.pdf → scout, .md/.markdown
/// → compile-notes, .csv/.tsv → compile-data, .json → artifact-to-state),
/// or a directory (each handlable file type found at the top level is
/// dispatched in sequence; a folder with none is treated as a code repo
/// and sent to compile-code). Unknown file types abort via `fail`.
async fn cmd_ingest(
    path: &str,
    frontier: &Path,
    backend: Option<&str>,
    actor: Option<&str>,
    dry_run: bool,
    json: bool,
) {
    let lowered = path.trim().to_lowercase();
    // URI form: fetch metadata only, then remind the user nothing was saved.
    if lowered.starts_with("doi:") || lowered.starts_with("pmid:") || lowered.starts_with("nct:") {
        cmd_source_fetch(path.trim(), None, None, false, json).await;
        if !json {
            eprintln!();
            eprintln!(
                "  vela ingest · note: doi:/pmid:/nct: URIs only fetch metadata; no frontier state was written."
            );
            eprintln!(
                "  next: turn this paper into a proposal with `vela finding add {} --assertion '...' --author 'reviewer:you' --apply`",
                frontier.display()
            );
        }
        return;
    }

    let p = std::path::PathBuf::from(path);
    if !p.exists() {
        fail(&format!(
            "ingest: path '{path}' does not exist (and is not a doi:/pmid:/nct: URI)"
        ));
    }

    let ext = p
        .extension()
        .and_then(|s| s.to_str())
        .map(|s| s.to_ascii_lowercase());

    // Single file: dispatch by extension.
    if p.is_file() {
        match ext.as_deref() {
            Some("pdf") => {
                cmd_scout(&p, frontier, backend, dry_run, json).await;
            }
            Some("md") | Some("markdown") => {
                cmd_compile_notes(&p, frontier, backend, None, None, dry_run, json).await;
            }
            Some("csv") | Some("tsv") => {
                cmd_compile_data(&p, frontier, backend, None, dry_run, json).await;
            }
            Some("json") => {
                let actor_id = actor.unwrap_or("agent:vela-ingest-bot");
                cmd_artifact_to_state(frontier, &p, actor_id, false, json);
            }
            other => {
                fail(&format!(
                    "ingest: unsupported file type '{}' (expected .pdf, .md, .csv, .tsv, .json, or a doi:/pmid:/nct: URI)",
                    other.unwrap_or("(none)")
                ));
            }
        }
        return;
    }

    if p.is_dir() {
        // Tally handlable file types in the top level of the directory
        // (non-recursive) to decide which pipelines to run.
        let mut pdf_count = 0usize;
        let mut md_count = 0usize;
        let mut data_count = 0usize;
        let mut json_count = 0usize;
        let mut unhandled_exts: std::collections::BTreeSet<String> =
            std::collections::BTreeSet::new();
        if let Ok(entries) = std::fs::read_dir(&p) {
            for entry in entries.flatten() {
                let path = entry.path();
                if !path.is_file() {
                    continue;
                }
                if let Some(name) = entry.file_name().to_str()
                    && let Some(dot) = name.rfind('.')
                {
                    let ext = name[dot + 1..].to_ascii_lowercase();
                    match ext.as_str() {
                        "pdf" => pdf_count += 1,
                        "md" | "markdown" => md_count += 1,
                        "csv" | "tsv" => data_count += 1,
                        "json" => json_count += 1,
                        other => {
                            // Dotfiles are ignored silently; everything else
                            // is reported as skipped at the end.
                            if !name.starts_with('.') {
                                unhandled_exts.insert(other.to_string());
                            }
                        }
                    }
                }
            }
        }

        let dispatched_types = (pdf_count > 0) as usize
            + (md_count > 0) as usize
            + (data_count > 0) as usize
            + (json_count > 0) as usize;

        if dispatched_types == 0 {
            // Nothing paper/notes/data-shaped: treat the folder as code.
            cmd_compile_code(&p, frontier, backend, None, dry_run, json).await;
            return;
        }

        if dispatched_types > 1 {
            eprintln!(
                "  vela ingest · folder has multiple handlable types; running each in sequence"
            );
            eprintln!(
                "    pdf:{pdf_count} md:{md_count} csv/tsv:{data_count} json:{json_count}"
            );
        }

        // Run each applicable pipeline over the whole folder; JSON artifacts
        // are applied per file.
        if pdf_count > 0 {
            cmd_scout(&p, frontier, backend, dry_run, json).await;
        }
        if md_count > 0 {
            cmd_compile_notes(&p, frontier, backend, None, None, dry_run, json).await;
        }
        if data_count > 0 {
            cmd_compile_data(&p, frontier, backend, None, dry_run, json).await;
        }
        if json_count > 0 {
            let actor_id = actor.unwrap_or("agent:vela-ingest-bot");
            if let Ok(entries) = std::fs::read_dir(&p) {
                for entry in entries.flatten() {
                    let path = entry.path();
                    if path.is_file()
                        && path
                            .extension()
                            .and_then(|s| s.to_str())
                            .map(|s| s.eq_ignore_ascii_case("json"))
                            .unwrap_or(false)
                    {
                        cmd_artifact_to_state(frontier, &path, actor_id, false, json);
                    }
                }
            }
        }

        if !unhandled_exts.is_empty() {
            let kinds: Vec<String> = unhandled_exts.into_iter().collect();
            eprintln!(
                "  vela ingest · skipped {} file extension(s) with no handler: {}",
                kinds.len(),
                kinds.join(", ")
            );
        }
        return;
    }

    fail(&format!(
        "ingest: path '{path}' is neither a file nor a directory"
    ));
}
7214
7215#[allow(clippy::too_many_arguments)]
7216async fn cmd_compile_data(
7218 root: &Path,
7219 frontier: &Path,
7220 backend: Option<&str>,
7221 sample_rows: Option<usize>,
7222 dry_run: bool,
7223 json_out: bool,
7224) {
7225 match DATASETS_HANDLER.get() {
7226 Some(handler) => {
7227 handler(
7228 root.to_path_buf(),
7229 frontier.to_path_buf(),
7230 backend.map(String::from),
7231 sample_rows,
7232 dry_run,
7233 json_out,
7234 )
7235 .await;
7236 }
7237 None => {
7238 eprintln!(
7239 "{} `vela compile-data` requires the vela CLI binary; the library is unwired without a registered datasets handler.",
7240 style::err_prefix()
7241 );
7242 std::process::exit(1);
7243 }
7244 }
7245}
7246
7247async fn cmd_review_pending(
7250 frontier: &Path,
7251 backend: Option<&str>,
7252 max_proposals: Option<usize>,
7253 batch_size: usize,
7254 dry_run: bool,
7255 json_out: bool,
7256) {
7257 match REVIEWER_HANDLER.get() {
7258 Some(handler) => {
7259 handler(
7260 frontier.to_path_buf(),
7261 backend.map(String::from),
7262 max_proposals,
7263 batch_size,
7264 dry_run,
7265 json_out,
7266 )
7267 .await;
7268 }
7269 None => {
7270 eprintln!(
7271 "{} `vela review-pending` requires the vela CLI binary; the library is unwired without a registered reviewer handler.",
7272 style::err_prefix()
7273 );
7274 std::process::exit(1);
7275 }
7276 }
7277}
7278
7279async fn cmd_find_tensions(
7282 frontier: &Path,
7283 backend: Option<&str>,
7284 max_findings: Option<usize>,
7285 dry_run: bool,
7286 json_out: bool,
7287) {
7288 match TENSIONS_HANDLER.get() {
7289 Some(handler) => {
7290 handler(
7291 frontier.to_path_buf(),
7292 backend.map(String::from),
7293 max_findings,
7294 dry_run,
7295 json_out,
7296 )
7297 .await;
7298 }
7299 None => {
7300 eprintln!(
7301 "{} `vela find-tensions` requires the vela CLI binary; the library is unwired without a registered tensions handler.",
7302 style::err_prefix()
7303 );
7304 std::process::exit(1);
7305 }
7306 }
7307}
7308
7309async fn cmd_plan_experiments(
7312 frontier: &Path,
7313 backend: Option<&str>,
7314 max_findings: Option<usize>,
7315 dry_run: bool,
7316 json_out: bool,
7317) {
7318 match EXPERIMENTS_HANDLER.get() {
7319 Some(handler) => {
7320 handler(
7321 frontier.to_path_buf(),
7322 backend.map(String::from),
7323 max_findings,
7324 dry_run,
7325 json_out,
7326 )
7327 .await;
7328 }
7329 None => {
7330 eprintln!(
7331 "{} `vela plan-experiments` requires the vela CLI binary; the library is unwired without a registered experiments handler.",
7332 style::err_prefix()
7333 );
7334 std::process::exit(1);
7335 }
7336 }
7337}
7338
7339async fn cmd_compile_code(
7342 root: &Path,
7343 frontier: &Path,
7344 backend: Option<&str>,
7345 max_files: Option<usize>,
7346 dry_run: bool,
7347 json_out: bool,
7348) {
7349 match CODE_HANDLER.get() {
7350 Some(handler) => {
7351 handler(
7352 root.to_path_buf(),
7353 frontier.to_path_buf(),
7354 backend.map(String::from),
7355 max_files,
7356 dry_run,
7357 json_out,
7358 )
7359 .await;
7360 }
7361 None => {
7362 eprintln!(
7363 "{} `vela compile-code` requires the vela CLI binary; the library is unwired without a registered code handler.",
7364 style::err_prefix()
7365 );
7366 std::process::exit(1);
7367 }
7368 }
7369}
7370
7371async fn cmd_compile_notes(
7376 vault: &Path,
7377 frontier: &Path,
7378 backend: Option<&str>,
7379 max_files: Option<usize>,
7380 max_items_per_category: Option<usize>,
7381 dry_run: bool,
7382 json_out: bool,
7383) {
7384 match NOTES_HANDLER.get() {
7385 Some(handler) => {
7386 handler(
7387 vault.to_path_buf(),
7388 frontier.to_path_buf(),
7389 backend.map(String::from),
7390 max_files,
7391 max_items_per_category,
7392 dry_run,
7393 json_out,
7394 )
7395 .await;
7396 }
7397 None => {
7398 eprintln!(
7399 "{} `vela compile-notes` requires the vela CLI binary; the library is unwired without a registered notes handler.",
7400 style::err_prefix()
7401 );
7402 std::process::exit(1);
7403 }
7404 }
7405}
7406
7407async fn cmd_scout(
7414 folder: &Path,
7415 frontier: &Path,
7416 backend: Option<&str>,
7417 dry_run: bool,
7418 json_out: bool,
7419) {
7420 match SCOUT_HANDLER.get() {
7421 Some(handler) => {
7422 handler(
7423 folder.to_path_buf(),
7424 frontier.to_path_buf(),
7425 backend.map(String::from),
7426 dry_run,
7427 json_out,
7428 )
7429 .await;
7430 }
7431 None => {
7432 eprintln!(
7433 "{} `vela scout` requires the vela CLI binary; the library is unwired without a registered scout handler.",
7434 style::err_prefix()
7435 );
7436 std::process::exit(1);
7437 }
7438 }
7439}
7440
/// Run `vela check`: schema validation, methodology/stats lint, event
/// replay, signature and signal reporting, and optionally the conformance
/// suite.
///
/// `--json` takes over completely and prints the machine-readable report
/// from `check_json_payload`, exiting 1 when it is not ok. Otherwise,
/// when no individual check flag is set everything runs (`run_all`).
/// The stats stage exits 1 on replay failure, or — under `strict` — when
/// the review queue is non-empty or proof readiness is not "ready".
/// `fix` is accepted but currently a no-op.
#[allow(clippy::too_many_arguments)]
fn cmd_check(
    source: Option<&Path>,
    schema: bool,
    stats: bool,
    conformance_flag: bool,
    conformance_dir: &Path,
    all: bool,
    schema_only: bool,
    strict: bool,
    fix: bool,
    json_output: bool,
) {
    if json_output {
        let Some(src) = source else {
            fail("--json requires a frontier source");
        };
        let payload = check_json_payload(src, schema_only, strict);
        println!(
            "{}",
            serde_json::to_string_pretty(&payload).expect("failed to serialize check report")
        );
        if payload.get("ok").and_then(Value::as_bool) != Some(true) {
            std::process::exit(1);
        }
        return;
    }

    // With no explicit selection flags, run every check.
    let run_all = all || (!schema && !stats && !conformance_flag && !schema_only);
    if run_all || schema || schema_only {
        let Some(src) = source else {
            fail("check requires a frontier source");
        };
        validate::run(src);
    }
    if !schema_only && (run_all || stats) {
        let Some(src) = source else {
            fail("--stats requires a frontier source");
        };
        let frontier = load_frontier_or_fail(src);
        let report = lint::lint(&frontier, None, None);
        lint::print_report(&report);
        let replay_report = events::replay_report(&frontier);
        println!("event replay: {}", replay_report.status);
        if !replay_report.conflicts.is_empty() {
            for conflict in &replay_report.conflicts {
                println!("  - {conflict}");
            }
        }
        // Signature summary only printed when at least one item is signed.
        if let Ok(signature_report) = sign::verify_frontier_data(&frontier, None)
            && signature_report.signed > 0
        {
            println!(
                "Signatures: {} valid / {} invalid / {} unsigned",
                signature_report.valid, signature_report.invalid, signature_report.unsigned
            );
        }
        let signal_report = signals::analyze(&frontier, &[]);
        print_signal_summary(&signal_report, strict);
        // Replay failure always fails; strict mode additionally fails on
        // queued reviews or a non-ready proof state.
        if !replay_report.ok
            || (strict
                && (!signal_report.review_queue.is_empty()
                    || signal_report.proof_readiness.status != "ready"))
        {
            std::process::exit(1);
        }
    }
    if run_all || conformance_flag {
        if conformance_flag || conformance_dir.is_dir() {
            conformance::run(conformance_dir);
        } else {
            eprintln!(
                "  conformance: skipped ({} not present; pass --conformance-dir <path> to point at the source repo's tests/conformance)",
                conformance_dir.display()
            );
        }
    }
    // `fix` is intentionally unused for now; kept to stabilize the CLI.
    let _ = fix;
}
7529
7530fn check_json_payload(src: &Path, schema_only: bool, strict: bool) -> Value {
7531 let report = validate::validate(src);
7532 let loaded = repo::load_from_path(src).ok();
7533 let (method_report, graph_report) = if schema_only {
7534 (None, None)
7535 } else if let Some(frontier) = loaded.as_ref() {
7536 (
7537 Some(lint::lint(frontier, None, None)),
7538 Some(lint::lint_frontier(frontier)),
7539 )
7540 } else {
7541 (None, None)
7542 };
7543 let source_hash = hash_path(src).unwrap_or_else(|_| "unavailable".to_string());
7544 let mut diagnostics = Vec::new();
7545 diagnostics.extend(report.errors.iter().map(|e| {
7546 json!({
7547 "severity": "error",
7548 "rule_id": "schema",
7549 "finding_id": null,
7550 "file": &e.file,
7551 "field_path": null,
7552 "message": &e.error,
7553 "suggestion": schema_error_suggestion(&e.error),
7554 "fixable": schema_error_fix(&e.error),
7555 "normalize_action": schema_error_action(&e.error),
7556 })
7557 }));
7558 for (check_id, lint_report) in [
7559 ("methodology", method_report.as_ref()),
7560 ("frontier_graph", graph_report.as_ref()),
7561 ] {
7562 if let Some(lint_report) = lint_report {
7563 diagnostics.extend(lint_report.diagnostics.iter().map(|d| {
7564 json!({
7565 "severity": d.severity.to_string(),
7566 "rule_id": &d.rule_id,
7567 "check": check_id,
7568 "finding_id": &d.finding_id,
7569 "field_path": null,
7570 "message": &d.message,
7571 "suggestion": &d.suggestion,
7572 "fixable": false,
7573 "normalize_action": null,
7574 })
7575 }));
7576 }
7577 }
7578 let method_errors = method_report.as_ref().map_or(0, |r| r.errors);
7579 let method_warnings = method_report.as_ref().map_or(0, |r| r.warnings);
7580 let method_infos = method_report.as_ref().map_or(0, |r| r.infos);
7581 let graph_errors = graph_report.as_ref().map_or(0, |r| r.errors);
7582 let graph_warnings = graph_report.as_ref().map_or(0, |r| r.warnings);
7583 let graph_infos = graph_report.as_ref().map_or(0, |r| r.infos);
7584 let replay_report = loaded.as_ref().map(events::replay_report);
7585 let state_integrity_report = if schema_only {
7586 loaded.as_ref().map(state_integrity::analyze)
7587 } else {
7588 state_integrity::analyze_path(src).ok()
7589 };
7590 if let Some(replay) = replay_report.as_ref()
7591 && !replay.ok
7592 {
7593 diagnostics.extend(replay.conflicts.iter().map(|conflict| {
7594 json!({
7595 "severity": "error",
7596 "rule_id": "event_replay",
7597 "check": "events",
7598 "finding_id": null,
7599 "field_path": null,
7600 "message": conflict,
7601 "suggestion": "Inspect canonical state events and repair the frontier event log before proof export.",
7602 "fixable": false,
7603 "normalize_action": null,
7604 })
7605 }));
7606 }
7607 let event_errors = replay_report
7608 .as_ref()
7609 .map_or(0, |replay| usize::from(!replay.ok));
7610 let state_integrity_errors = state_integrity_report
7611 .as_ref()
7612 .map_or(0, |report| report.structural_errors.len());
7613 let (source_registry, evidence_atoms, conditions, proposal_summary, proof_state) = loaded
7614 .as_ref()
7615 .map(|frontier| {
7616 (
7617 sources::source_summary(frontier),
7618 sources::evidence_summary(frontier),
7619 sources::condition_summary(frontier),
7620 proposals::summary(frontier),
7621 proposals::proof_state_json(&frontier.proof_state),
7622 )
7623 })
7624 .unwrap_or_else(|| {
7625 (
7626 sources::SourceRegistrySummary::default(),
7627 sources::EvidenceAtomSummary::default(),
7628 sources::ConditionSummary::default(),
7629 proposals::ProposalSummary::default(),
7630 Value::Null,
7631 )
7632 });
7633 let signature_report = loaded
7634 .as_ref()
7635 .and_then(|frontier| sign::verify_frontier_data(frontier, None).ok());
7636 if let Some(frontier) = loaded.as_ref()
7637 && !schema_only
7638 {
7639 let projection = sources::derive_projection(frontier);
7640 let existing_sources = frontier
7641 .sources
7642 .iter()
7643 .map(|source| source.id.as_str())
7644 .collect::<std::collections::BTreeSet<_>>();
7645 let existing_atoms = frontier
7646 .evidence_atoms
7647 .iter()
7648 .map(|atom| atom.id.as_str())
7649 .collect::<std::collections::BTreeSet<_>>();
7650 let existing_conditions = frontier
7651 .condition_records
7652 .iter()
7653 .map(|record| record.id.as_str())
7654 .collect::<std::collections::BTreeSet<_>>();
7655 for source in projection
7656 .sources
7657 .iter()
7658 .filter(|source| !existing_sources.contains(source.id.as_str()))
7659 {
7660 diagnostics.push(json!({
7661 "severity": "warning",
7662 "rule_id": "missing_source_record",
7663 "check": "source_registry",
7664 "finding_id": source.finding_ids.first(),
7665 "field_path": "sources",
7666 "message": format!("Source record {} is derivable but not materialized in frontier state.", source.id),
7667 "suggestion": "Run `vela normalize` to materialize source records before proof export.",
7668 "fixable": true,
7669 "normalize_action": "materialize_source_record",
7670 }));
7671 }
7672 for atom in projection
7673 .evidence_atoms
7674 .iter()
7675 .filter(|atom| !existing_atoms.contains(atom.id.as_str()))
7676 {
7677 diagnostics.push(json!({
7678 "severity": "warning",
7679 "rule_id": "missing_evidence_atom",
7680 "check": "evidence_atoms",
7681 "finding_id": atom.finding_id,
7682 "field_path": "evidence_atoms",
7683 "message": format!("Evidence atom {} is derivable but not materialized in frontier state.", atom.id),
7684 "suggestion": "Run `vela normalize` to materialize evidence atoms before proof export.",
7685 "fixable": true,
7686 "normalize_action": "materialize_evidence_atom",
7687 }));
7688 }
7689 for atom in projection
7690 .evidence_atoms
7691 .iter()
7692 .filter(|atom| atom.locator.is_none())
7693 {
7694 diagnostics.push(json!({
7695 "severity": "warning",
7696 "rule_id": "missing_evidence_locator",
7697 "check": "evidence_atoms",
7698 "finding_id": atom.finding_id,
7699 "field_path": "evidence_atoms[].locator",
7700 "message": format!("Evidence atom {} has no source locator.", atom.id),
7701 "suggestion": "Add or verify evidence spans, table rows, pages, sections, or run locators.",
7702 "fixable": false,
7703 "normalize_action": null,
7704 }));
7705 }
7706 for condition in projection
7707 .condition_records
7708 .iter()
7709 .filter(|condition| !existing_conditions.contains(condition.id.as_str()))
7710 {
7711 diagnostics.push(json!({
7712 "severity": "warning",
7713 "rule_id": "condition_record_missing",
7714 "check": "conditions",
7715 "finding_id": condition.finding_id,
7716 "field_path": "condition_records",
7717 "message": format!("Condition record {} is derivable but not materialized in frontier state.", condition.id),
7718 "suggestion": "Run `vela normalize` to materialize condition boundaries before proof export.",
7719 "fixable": true,
7720 "normalize_action": "materialize_condition_record",
7721 }));
7722 }
7723 for proposal in frontier.proposals.iter().filter(|proposal| {
7724 matches!(proposal.status.as_str(), "accepted" | "applied")
7725 && proposal
7726 .reviewed_by
7727 .as_deref()
7728 .is_none_or(proposals::is_placeholder_reviewer)
7729 }) {
7730 diagnostics.push(json!({
7731 "severity": "error",
7732 "rule_id": "reviewer_identity_missing",
7733 "check": "proposals",
7734 "finding_id": proposal.target.id,
7735 "field_path": "proposals[].reviewed_by",
7736 "message": format!("Accepted or applied proposal {} uses a missing or placeholder reviewer identity.", proposal.id),
7737 "suggestion": "Accept the proposal with a stable named reviewer id before strict proof use.",
7738 "fixable": false,
7739 "normalize_action": null,
7740 }));
7741 }
7742 }
7743 let signal_report = loaded
7744 .as_ref()
7745 .map(|frontier| signals::analyze(frontier, &diagnostics))
7746 .unwrap_or_else(empty_signal_report);
7747 let errors =
7748 report.errors.len() + method_errors + graph_errors + event_errors + state_integrity_errors;
7749 let warnings = method_warnings + graph_warnings + signal_report.proof_readiness.warnings;
7750 let infos = method_infos + graph_infos;
7751 let strict_blockers = signal_report
7752 .signals
7753 .iter()
7754 .filter(|signal| signal.blocks.iter().any(|block| block == "strict_check"))
7755 .count();
7756 let fixable = diagnostics
7757 .iter()
7758 .filter(|d| d.get("fixable").and_then(Value::as_bool).unwrap_or(false))
7759 .count();
7760 let ok = errors == 0 && (!strict || (warnings == 0 && strict_blockers == 0));
7761
7762 json!({
7763 "ok": ok,
7764 "command": "check",
7765 "schema_version": project::VELA_SCHEMA_VERSION,
7766 "source": {
7767 "path": src.display().to_string(),
7768 "hash": format!("sha256:{source_hash}"),
7769 },
7770 "summary": {
7771 "status": if ok { "pass" } else { "fail" },
7772 "checked_findings": report.total_files,
7773 "valid_findings": report.valid,
7774 "invalid_findings": report.invalid,
7775 "errors": errors,
7776 "warnings": warnings,
7777 "info": infos,
7778 "fixable": fixable,
7779 "strict": strict,
7780 "schema_only": schema_only,
7781 },
7782 "checks": [
7783 {
7784 "id": "schema",
7785 "status": if report.invalid == 0 { "pass" } else { "fail" },
7786 "checked": report.total_files,
7787 "failed": report.invalid,
7788 "errors": report.errors.iter().map(|e| json!({
7789 "file": e.file,
7790 "message": e.error,
7791 })).collect::<Vec<_>>(),
7792 },
7793 {
7794 "id": "methodology",
7795 "status": if method_errors == 0 { "pass" } else { "fail" },
7796 "checked": method_report.as_ref().map_or(0, |r| r.findings_checked),
7797 "failed": method_errors,
7798 "warnings": method_warnings,
7799 "info": method_infos,
7800 "skipped": schema_only,
7801 },
7802 {
7803 "id": "frontier_graph",
7804 "status": if graph_errors == 0 { "pass" } else { "fail" },
7805 "checked": graph_report.as_ref().map_or(0, |r| r.findings_checked),
7806 "failed": graph_errors,
7807 "warnings": graph_warnings,
7808 "info": graph_infos,
7809 "skipped": schema_only,
7810 },
7811 {
7812 "id": "signals",
7813 "status": if strict_blockers == 0 { "pass" } else { "fail" },
7814 "checked": signal_report.signals.len(),
7815 "failed": strict_blockers,
7816 "warnings": signal_report.proof_readiness.warnings,
7817 "skipped": loaded.is_none(),
7818 "blockers": signal_report.signals.iter()
7819 .filter(|s| s.blocks.iter().any(|b| b == "strict_check"))
7820 .map(|s| json!({
7821 "id": s.id,
7822 "kind": s.kind,
7823 "severity": s.severity,
7824 "reason": s.reason,
7825 }))
7826 .collect::<Vec<_>>(),
7827 },
7828 {
7829 "id": "events",
7830 "status": if replay_report.as_ref().is_none_or(|replay| replay.ok) { "pass" } else { "fail" },
7831 "checked": replay_report.as_ref().map_or(0, |replay| replay.event_log.count),
7832 "failed": event_errors,
7833 "skipped": schema_only || loaded.is_none(),
7834 },
7835 {
7836 "id": "state_integrity",
7837 "status": if state_integrity_report.as_ref().is_none_or(|report| report.status != "fail") { "pass" } else { "fail" },
7838 "checked": state_integrity_report.as_ref().map_or(0, |report| report.summary.get("events").copied().unwrap_or_default()),
7839 "failed": state_integrity_errors,
7840 "skipped": schema_only || loaded.is_none(),
7841 }
7842 ],
7843 "event_log": replay_report.as_ref().map(|replay| &replay.event_log),
7844 "replay": replay_report,
7845 "state_integrity": state_integrity_report,
7846 "source_registry": source_registry,
7847 "evidence_atoms": evidence_atoms,
7848 "conditions": conditions,
7849 "proposals": proposal_summary,
7850 "proof_state": proof_state,
7851 "signatures": signature_report,
7852 "diagnostics": diagnostics,
7853 "signals": signal_report.signals,
7854 "review_queue": signal_report.review_queue,
7855 "proof_readiness": signal_report.proof_readiness,
7856 "repair_plan": build_repair_plan(&diagnostics),
7857 })
7858}
7859
/// Normalize a frontier: canonicalize finding fields, recompute confidence,
/// optionally rewrite finding IDs to their content addresses, and materialize
/// derived source / evidence-atom / condition records.
///
/// Output modes are mutually exclusive: `--write` saves back to `source`,
/// `--out <path>` saves a copy, and neither performs a dry run that only
/// reports the plan. `--dry-run` cannot be combined with either write mode,
/// and `--id-map` requires `--rewrite-ids`. Flag violations (and all other
/// errors) terminate the process via `fail`.
#[allow(clippy::too_many_arguments)]
fn cmd_normalize(
    source: &Path,
    out: Option<&Path>,
    write: bool,
    dry_run: bool,
    rewrite_ids: bool,
    id_map: Option<&Path>,
    resync_provenance: bool,
    json_output: bool,
) {
    // Flag validation; `fail` does not return.
    if write && out.is_some() {
        fail("Use either --write or --out, not both.");
    }
    if dry_run && (write || out.is_some()) {
        fail("--dry-run cannot be combined with --write or --out.");
    }
    if id_map.is_some() && !rewrite_ids {
        fail("--id-map requires --rewrite-ids.");
    }

    let detected = repo::detect(source).unwrap_or_else(|e| {
        eprintln!("{e}");
        std::process::exit(1);
    });
    // Proof packet directories are export artifacts, never normalized in place.
    if matches!(detected, repo::VelaSource::PacketDir(_)) {
        fail(
            "Cannot normalize a proof packet directory. Export a new packet from frontier state instead.",
        );
    }
    let mut frontier = repo::load(&detected).unwrap_or_else(|e| fail_return(&e));
    // A frontier whose event log contains anything beyond the initial
    // "frontier.created" event is considered canonical: writing a normalized
    // version over it would bypass the reviewed-transition workflow.
    let has_substantive_events = frontier
        .events
        .iter()
        .any(|event| event.kind != "frontier.created");
    if has_substantive_events && (write || out.is_some()) {
        fail(
            "Refusing to normalize a frontier with canonical events. Normalize before proposal-backed writes, or create a new reviewed transition for the intended change.",
        );
    }
    let source_hash = hash_path(source).unwrap_or_else(|_| "unavailable".to_string());
    // Snapshot stats before mutation so `stats_changed` can be reported below.
    let before_stats = serde_json::to_value(&frontier.stats).unwrap_or(Value::Null);
    let (entity_type_fixes, entity_name_fixes) =
        normalize::normalize_findings(&mut frontier.findings);
    let confidence_updates =
        bundle::recompute_all_confidence(&mut frontier.findings, &frontier.replications);
    let provenance_resync_count = if resync_provenance {
        sources::resync_provenance_from_sources(&mut frontier)
    } else {
        0
    };
    // Pre-materialization counts; the deltas after `materialize_project` below
    // are what gets reported as "materialized".
    let before_source_count = frontier.sources.len();
    let before_evidence_atom_count = frontier.evidence_atoms.len();
    let before_condition_record_count = frontier.condition_records.len();

    let mut id_rewrites = Vec::new();
    if rewrite_ids {
        // Map old finding id -> expected content address, for every finding
        // whose stored id no longer matches its content address.
        let mut id_map_values = std::collections::BTreeMap::<String, String>::new();
        for finding in &frontier.findings {
            let expected =
                bundle::FindingBundle::content_address(&finding.assertion, &finding.provenance);
            if expected != finding.id {
                id_map_values.insert(finding.id.clone(), expected);
            }
        }
        // Abort on content-address collisions: two findings mapping to one new
        // id would silently merge them.
        let new_ids = id_map_values
            .values()
            .map(String::as_str)
            .collect::<std::collections::HashSet<_>>();
        if new_ids.len() != id_map_values.len() {
            fail("Refusing to rewrite IDs because two findings map to the same content address.");
        }
        // Apply the rewrites, preserving the old id in `previous_version`.
        for finding in &mut frontier.findings {
            if let Some(new_id) = id_map_values.get(&finding.id) {
                id_rewrites.push(json!({"old": finding.id, "new": new_id}));
                finding.previous_version = Some(finding.id.clone());
                finding.id = new_id.clone();
            }
        }
        // Retarget intra-frontier links that pointed at rewritten ids.
        for finding in &mut frontier.findings {
            for link in &mut finding.links {
                if let Some(new_target) = id_map_values.get(&link.target) {
                    link.target = new_target.clone();
                }
            }
        }
        // Optionally persist the old->new id map for external consumers.
        if let Some(path) = id_map {
            std::fs::write(
                path,
                serde_json::to_string_pretty(&id_map_values)
                    .expect("failed to serialize normalize id map"),
            )
            .unwrap_or_else(|e| fail(&format!("Failed to write {}: {e}", path.display())));
        }
    }

    sources::materialize_project(&mut frontier);
    let source_records_materialized = frontier.sources.len().saturating_sub(before_source_count);
    let evidence_atoms_materialized = frontier
        .evidence_atoms
        .len()
        .saturating_sub(before_evidence_atom_count);
    let condition_records_materialized = frontier
        .condition_records
        .len()
        .saturating_sub(before_condition_record_count);
    let after_stats = serde_json::to_value(&frontier.stats).unwrap_or(Value::Null);
    let id_rewrite_count = id_rewrites.len();
    // Persist according to the chosen output mode; `None` means dry run.
    let wrote_to = if write {
        repo::save(&detected, &frontier).unwrap_or_else(|e| fail(&e));
        Some(source.display().to_string())
    } else if let Some(out_path) = out {
        repo::save_to_path(out_path, &frontier).unwrap_or_else(|e| fail(&e));
        Some(out_path.display().to_string())
    } else {
        None
    };
    let wrote = wrote_to.is_some();
    // Total change count across every normalization category, reported both
    // as "planned" and (when written) as "applied".
    let planned_changes = entity_type_fixes
        + entity_name_fixes
        + confidence_updates
        + id_rewrite_count
        + source_records_materialized
        + evidence_atoms_materialized
        + condition_records_materialized
        + provenance_resync_count;
    let payload = json!({
        "ok": true,
        "command": "normalize",
        "schema_version": project::VELA_SCHEMA_VERSION,
        "source": {
            "path": source.display().to_string(),
            "hash": format!("sha256:{source_hash}"),
        },
        "dry_run": wrote_to.is_none(),
        "wrote_to": wrote_to,
        "summary": {
            "planned": planned_changes,
            "safe": planned_changes,
            "unsafe": 0,
            "applied": if wrote { planned_changes } else { 0 },
        },
        "changes": {
            "entity_type_fixes": entity_type_fixes,
            "entity_name_fixes": entity_name_fixes,
            "confidence_updates": confidence_updates,
            "id_rewrites": id_rewrite_count,
            "source_records_materialized": source_records_materialized,
            "evidence_atoms_materialized": evidence_atoms_materialized,
            "condition_records_materialized": condition_records_materialized,
            "provenance_resyncs": provenance_resync_count,
            "stats_changed": before_stats != after_stats,
        },
        "id_rewrites": id_rewrites,
        "repair_plan": if wrote { Vec::<Value>::new() } else {
            vec![json!({
                "action": "apply_normalization",
                "command": "vela normalize <frontier> --out frontier.normalized.json"
            })]
        },
    });
    if json_output {
        println!(
            "{}",
            serde_json::to_string_pretty(&payload).expect("failed to serialize normalize report")
        );
    } else if let Some(path) = payload.get("wrote_to").and_then(Value::as_str) {
        println!("{} normalized frontier written to {path}", style::ok("ok"));
        println!(
            " entity type fixes: {}, entity name fixes: {}, confidence updates: {}, id rewrites: {}",
            entity_type_fixes, entity_name_fixes, confidence_updates, id_rewrite_count
        );
    } else {
        println!("normalize dry run for {}", source.display());
        println!(
            " would apply entity type fixes: {}, entity name fixes: {}, confidence updates: {}, id rewrites: {}",
            entity_type_fixes, entity_name_fixes, confidence_updates, id_rewrite_count
        );
    }
}
8047
/// Export a proof packet for `frontier` into the `out` directory.
///
/// Pipeline, in order: validate the template name (only "bbb-alzheimer" is
/// supported), export the packet, optionally run the gold-standard benchmark
/// suite (failing the command if the suite fails), validate the resulting
/// packet, record the export in the frontier's proof state, and — when
/// `record_proof_state` is set — save the updated frontier back to disk.
/// Every failure path terminates via `fail`.
fn cmd_proof(
    frontier: &Path,
    out: &Path,
    template: &str,
    gold: Option<&Path>,
    record_proof_state: bool,
    json_output: bool,
) {
    if template != "bbb-alzheimer" {
        fail(&format!(
            "Unsupported proof template '{template}'. Supported: bbb-alzheimer"
        ));
    }
    let mut loaded = load_frontier_or_fail(frontier);
    let source_hash = hash_path_or_fail(frontier);
    let export_record = export::export_packet_with_source(&loaded, Some(frontier), out)
        .unwrap_or_else(|e| fail(&e));
    // Optional benchmark gate: run the suite against the gold file, attach the
    // summary to the packet, and refuse to proceed unless it reports ok=true.
    let benchmark_summary = gold.map(|gold_path| {
        let summary = benchmark::run_suite(gold_path).unwrap_or_else(|e| {
            fail(&format!(
                "Failed to run proof benchmark '{}': {e}",
                gold_path.display()
            ))
        });
        append_packet_json_file(out, "benchmark-summary.json", &summary).unwrap_or_else(|e| {
            fail(&format!("Failed to write benchmark summary: {e}"));
        });
        if summary.get("ok").and_then(Value::as_bool) != Some(true) {
            fail(&format!(
                "Proof benchmark failed for {}",
                gold_path.display()
            ));
        }
        summary
    });
    // Validate the packet that was just written before recording anything.
    let validation_summary = packet::validate(out).unwrap_or_else(|e| {
        fail(&format!("Proof packet validation failed: {e}"));
    });
    // Record the export hashes in the in-memory frontier's proof state.
    proposals::record_proof_export(
        &mut loaded,
        proposals::ProofPacketRecord {
            generated_at: export_record.generated_at.clone(),
            snapshot_hash: export_record.snapshot_hash.clone(),
            event_log_hash: export_record.event_log_hash.clone(),
            packet_manifest_hash: export_record.packet_manifest_hash.clone(),
        },
    );
    project::recompute_stats(&mut loaded);
    // Only persist the proof-state update when explicitly requested.
    if record_proof_state {
        repo::save_to_path(frontier, &loaded).unwrap_or_else(|e| fail(&e));
    }
    let signal_report = signals::analyze(&loaded, &[]);
    if json_output {
        let payload = json!({
            "ok": true,
            "command": "proof",
            "schema_version": project::VELA_SCHEMA_VERSION,
            "recorded_proof_state": record_proof_state,
            "frontier": {
                "name": &loaded.project.name,
                "source": frontier.display().to_string(),
                "hash": format!("sha256:{source_hash}"),
            },
            "template": template,
            "gold": gold.map(|p| p.display().to_string()),
            "benchmark": benchmark_summary,
            "output": out.display().to_string(),
            "packet": {
                "manifest_path": out.join("manifest.json").display().to_string(),
            },
            "validation": {
                "status": "ok",
                "summary": validation_summary,
            },
            "proposals": proposals::summary(&loaded),
            "proof_state": loaded.proof_state,
            "signals": signal_report.signals,
            "review_queue": signal_report.review_queue,
            "proof_readiness": signal_report.proof_readiness,
            "trace_path": out.join("proof-trace.json").display().to_string(),
        });
        println!(
            "{}",
            serde_json::to_string_pretty(&payload).expect("failed to serialize proof response")
        );
    } else {
        println!("vela proof");
        println!(" source: {}", frontier.display());
        println!(" template: {template}");
        println!(" output: {}", out.display());
        println!(" trace: {}", out.join("proof-trace.json").display());
        println!(
            " proof state: {}",
            if record_proof_state {
                "recorded"
            } else {
                "not recorded"
            }
        );
        println!();
        println!("{validation_summary}");
    }
}
8151
/// Print a one-screen status summary for the frontier at `path`: proposal
/// inbox, causal-audit verdict counts, replication outcomes, and federation
/// sync/conflict state. With `json`, emits the same data as one JSON object.
fn cmd_status(path: &Path, json: bool) {
    let project = repo::load_from_path(path).unwrap_or_else(|e| fail_return(&e));

    // Tally pending proposals, both in total and grouped by kind.
    let mut pending_total = 0usize;
    let mut pending_by_kind: std::collections::BTreeMap<String, usize> =
        std::collections::BTreeMap::new();
    for p in &project.proposals {
        if p.status == "pending_review" {
            pending_total += 1;
            *pending_by_kind.entry(p.kind.clone()).or_insert(0) += 1;
        }
    }

    let audit = crate::causal_reasoning::audit_frontier(&project);
    let audit_summary = crate::causal_reasoning::summarize_audit(&audit);

    // Scan the event log for the most recent peer sync / conflict events and
    // the total conflict count. "Most recent" is decided by comparing the
    // timestamp strings directly.
    let mut last_sync: Option<&crate::events::StateEvent> = None;
    let mut last_conflict: Option<&crate::events::StateEvent> = None;
    let mut total_conflicts = 0usize;
    for e in &project.events {
        match e.kind.as_str() {
            "frontier.synced_with_peer" => {
                if last_sync
                    .map(|prev| e.timestamp > prev.timestamp)
                    .unwrap_or(true)
                {
                    last_sync = Some(e);
                }
            }
            "frontier.conflict_detected" => {
                total_conflicts += 1;
                if last_conflict
                    .map(|prev| e.timestamp > prev.timestamp)
                    .unwrap_or(true)
                {
                    last_conflict = Some(e);
                }
            }
            _ => {}
        }
    }

    // Replication roll-up: distinct findings with at least one successful
    // replication, plus the count of failed replication records.
    let mut targets_with_success = std::collections::HashSet::new();
    let mut failed_replications = 0usize;
    for r in &project.replications {
        if r.outcome == "replicated" {
            targets_with_success.insert(r.target_finding.clone());
        } else if r.outcome == "failed" {
            failed_replications += 1;
        }
    }

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "status",
                "frontier": frontier_label(&project),
                "vfr_id": project.frontier_id(),
                "findings": project.findings.len(),
                "events": project.events.len(),
                "actors": project.actors.len(),
                "peers": project.peers.len(),
                "inbox": {
                    "pending_total": pending_total,
                    "pending_by_kind": pending_by_kind,
                },
                "causal_audit": {
                    "identified": audit_summary.identified,
                    "conditional": audit_summary.conditional,
                    "underidentified": audit_summary.underidentified,
                    "underdetermined": audit_summary.underdetermined,
                },
                "replications": {
                    "total": project.replications.len(),
                    "findings_with_success": targets_with_success.len(),
                    "failed": failed_replications,
                },
                "federation": {
                    "peers": project.peers.len(),
                    "last_sync": last_sync.map(|e| e.timestamp.clone()),
                    "last_conflict": last_conflict.map(|e| e.timestamp.clone()),
                    "total_conflicts": total_conflicts,
                },
            }))
            .expect("serialize status")
        );
        return;
    }

    // Human-readable rendering below mirrors the JSON payload above.
    println!();
    println!(
        " {}",
        format!("VELA · STATUS · {}", path.display())
            .to_uppercase()
            .dimmed()
    );
    println!(" {}", style::tick_row(60));
    println!();
    println!(" frontier: {}", frontier_label(&project));
    println!(" vfr_id: {}", project.frontier_id());
    println!(
        " findings: {} events: {} peers: {} actors: {}",
        project.findings.len(),
        project.events.len(),
        project.peers.len(),
        project.actors.len(),
    );
    println!();
    if pending_total > 0 {
        println!(
            " {} {pending_total} pending proposals",
            style::warn("inbox")
        );
        for (k, n) in &pending_by_kind {
            println!(" · {n:>3} {k}");
        }
    } else {
        println!(" {} inbox clean", style::ok("ok"));
    }
    println!();
    // Audit chip severity: underidentified findings are worst ("lost"),
    // conditional-only is a warning, everything identified is ok, and any
    // remaining ungraded findings get a warning chip.
    if audit_summary.underidentified > 0 || audit_summary.conditional > 0 {
        let chip = if audit_summary.underidentified > 0 {
            style::lost("audit")
        } else {
            style::warn("audit")
        };
        println!(
            " {} identified {} · conditional {} · underidentified {} · underdetermined {}",
            chip,
            audit_summary.identified,
            audit_summary.conditional,
            audit_summary.underidentified,
            audit_summary.underdetermined,
        );
        if audit_summary.underidentified > 0 {
            println!(
                " next: vela causal audit {} --problems-only",
                path.display()
            );
        }
    } else if audit_summary.underdetermined == 0 {
        println!(
            " {} causal audit: all {} identified",
            style::ok("ok"),
            audit_summary.identified
        );
    } else {
        println!(
            " {} causal audit: {} identified, {} ungraded",
            style::warn("audit"),
            audit_summary.identified,
            audit_summary.underdetermined,
        );
    }
    println!();
    if !project.replications.is_empty() {
        println!(
            " {} {} records · {} findings replicated · {} failed",
            style::ok("replications"),
            project.replications.len(),
            targets_with_success.len(),
            failed_replications,
        );
    }
    if project.peers.is_empty() {
        println!(
            " {} no federation peers registered",
            style::warn("federation")
        );
    } else {
        let last = last_sync
            .map(|e| fmt_timestamp(&e.timestamp))
            .unwrap_or_else(|| "never".to_string());
        let chip = if total_conflicts > 0 {
            style::warn("federation")
        } else {
            style::ok("federation")
        };
        println!(
            " {} {} peer(s) · last sync {} · {} conflict events",
            chip,
            project.peers.len(),
            last,
            total_conflicts,
        );
    }
    println!();
}
8349
8350fn cmd_log(path: &Path, limit: usize, kind_filter: Option<&str>, json: bool) {
8352 let project = repo::load_from_path(path).unwrap_or_else(|e| fail_return(&e));
8353 let mut events: Vec<&crate::events::StateEvent> = project
8354 .events
8355 .iter()
8356 .filter(|e| match kind_filter {
8357 Some(k) => e.kind.contains(k),
8358 None => true,
8359 })
8360 .collect();
8361 events.sort_by(|a, b| b.timestamp.cmp(&a.timestamp));
8362 events.truncate(limit);
8363
8364 if json {
8365 let payload: Vec<_> = events
8366 .iter()
8367 .map(|e| {
8368 json!({
8369 "id": e.id,
8370 "kind": e.kind,
8371 "actor": e.actor.id,
8372 "target": &e.target.id,
8373 "target_type": &e.target.r#type,
8374 "timestamp": e.timestamp,
8375 "reason": e.reason,
8376 })
8377 })
8378 .collect();
8379 println!(
8380 "{}",
8381 serde_json::to_string_pretty(&json!({
8382 "ok": true,
8383 "command": "log",
8384 "events": payload,
8385 }))
8386 .expect("serialize log")
8387 );
8388 return;
8389 }
8390
8391 println!();
8392 println!(
8393 " {}",
8394 format!("VELA · LOG · {} (latest {})", path.display(), events.len())
8395 .to_uppercase()
8396 .dimmed()
8397 );
8398 println!(" {}", style::tick_row(60));
8399 if events.is_empty() {
8400 println!(" (no events)");
8401 return;
8402 }
8403 for e in &events {
8404 let when = fmt_timestamp(&e.timestamp);
8405 let target_short = if e.target.id.len() > 22 {
8406 format!("{}…", &e.target.id[..21])
8407 } else {
8408 e.target.id.clone()
8409 };
8410 let reason: String = e.reason.chars().take(70).collect();
8411 println!(
8412 " {:<19} {:<32} {:<24} {}",
8413 when, e.kind, target_short, reason
8414 );
8415 }
8416 println!();
8417}
8418
8419fn cmd_inbox(path: &Path, kind_filter: Option<&str>, limit: usize, json: bool) {
8421 let project = repo::load_from_path(path).unwrap_or_else(|e| fail_return(&e));
8422
8423 let mut score_map: std::collections::HashMap<String, (f64, f64, f64, f64)> =
8426 std::collections::HashMap::new();
8427 for p in &project.proposals {
8428 if p.kind != "finding.note" {
8429 continue;
8430 }
8431 if p.actor.id != "agent:reviewer-agent" {
8432 continue;
8433 }
8434 let reason = &p.reason;
8435 let Some(target) = reason.split_whitespace().find(|s| s.starts_with("vpr_")) else {
8436 continue;
8437 };
8438 let text = p.payload.get("text").and_then(|v| v.as_str()).unwrap_or("");
8439 let extract = |k: &str| -> f64 {
8440 let pat = format!("{k} ");
8441 text.find(&pat)
8442 .and_then(|idx| text[idx + pat.len()..].split_whitespace().next())
8443 .and_then(|t| t.parse::<f64>().ok())
8444 .unwrap_or(0.0)
8445 };
8446 score_map.insert(
8447 target.to_string(),
8448 (
8449 extract("plausibility"),
8450 extract("evidence"),
8451 extract("scope"),
8452 extract("duplicate-risk"),
8453 ),
8454 );
8455 }
8456
8457 let mut pending: Vec<&crate::proposals::StateProposal> = project
8458 .proposals
8459 .iter()
8460 .filter(|p| {
8461 p.status == "pending_review"
8462 && match kind_filter {
8463 Some(k) => p.kind.contains(k),
8464 None => true,
8465 }
8466 })
8467 .collect();
8468 pending.sort_by(|a, b| {
8470 let sa = score_map
8471 .get(&a.id)
8472 .map(|(p, e, s, d)| 0.4 * p + 0.3 * e + 0.2 * s - 0.3 * d);
8473 let sb = score_map
8474 .get(&b.id)
8475 .map(|(p, e, s, d)| 0.4 * p + 0.3 * e + 0.2 * s - 0.3 * d);
8476 sb.partial_cmp(&sa).unwrap_or(std::cmp::Ordering::Equal)
8477 });
8478 pending.truncate(limit);
8479
8480 if json {
8481 let payload: Vec<_> = pending
8482 .iter()
8483 .map(|p| {
8484 let assertion_text = p
8485 .payload
8486 .get("finding")
8487 .and_then(|f| f.get("assertion"))
8488 .and_then(|a| a.get("text"))
8489 .and_then(|t| t.as_str());
8490 let assertion_type = p
8491 .payload
8492 .get("finding")
8493 .and_then(|f| f.get("assertion"))
8494 .and_then(|a| a.get("type"))
8495 .and_then(|t| t.as_str());
8496 let composite = score_map
8497 .get(&p.id)
8498 .map(|(pl, e, s, d)| 0.4 * pl + 0.3 * e + 0.2 * s - 0.3 * d);
8499 json!({
8500 "proposal_id": p.id,
8501 "kind": p.kind,
8502 "actor": p.actor,
8503 "reason": p.reason,
8504 "assertion_text": assertion_text,
8505 "assertion_type": assertion_type,
8506 "reviewer_composite": composite,
8507 })
8508 })
8509 .collect();
8510 println!(
8511 "{}",
8512 serde_json::to_string_pretty(&json!({
8513 "ok": true,
8514 "command": "inbox",
8515 "shown": pending.len(),
8516 "proposals": payload,
8517 }))
8518 .expect("serialize inbox")
8519 );
8520 return;
8521 }
8522
8523 println!();
8524 println!(
8525 " {}",
8526 format!(
8527 "VELA · INBOX · {} ({} pending shown)",
8528 path.display(),
8529 pending.len()
8530 )
8531 .to_uppercase()
8532 .dimmed()
8533 );
8534 println!(" {}", style::tick_row(60));
8535 if pending.is_empty() {
8536 println!(" (inbox clean)");
8537 return;
8538 }
8539 for p in &pending {
8540 let assertion_text = p
8541 .payload
8542 .get("finding")
8543 .and_then(|f| f.get("assertion"))
8544 .and_then(|a| a.get("text"))
8545 .and_then(|t| t.as_str())
8546 .unwrap_or("");
8547 let assertion_type = p
8548 .payload
8549 .get("finding")
8550 .and_then(|f| f.get("assertion"))
8551 .and_then(|a| a.get("type"))
8552 .and_then(|t| t.as_str())
8553 .unwrap_or("");
8554 let composite = score_map
8555 .get(&p.id)
8556 .map(|(pl, e, s, d)| 0.4 * pl + 0.3 * e + 0.2 * s - 0.3 * d);
8557 let score_str = composite
8558 .map(|c| format!("[{:.2}]", c))
8559 .unwrap_or_else(|| "[—] ".to_string());
8560 let kind_short = if p.kind.len() > 12 {
8561 format!("{}…", &p.kind[..11])
8562 } else {
8563 p.kind.clone()
8564 };
8565 let summary: String = if !assertion_text.is_empty() {
8566 assertion_text.chars().take(80).collect()
8567 } else {
8568 p.reason.chars().take(80).collect()
8569 };
8570 println!(
8571 " {} {} {:<13} {:<18} {}",
8572 score_str, p.id, kind_short, assertion_type, summary
8573 );
8574 }
8575 println!();
8576}
8577
8578fn cmd_ask(path: &Path, question: &str, json: bool) {
8583 let project = repo::load_from_path(path).unwrap_or_else(|e| fail_return(&e));
8584
8585 if question.trim().is_empty() {
8586 use std::io::{BufRead, Write};
8588 println!();
8589 println!(
8590 " {}",
8591 format!("VELA · ASK · {}", path.display())
8592 .to_uppercase()
8593 .dimmed()
8594 );
8595 println!(" {}", style::tick_row(60));
8596 println!(" Ask a question. Type `exit` to quit.");
8597 println!(" Examples:");
8598 println!(" · what's pending?");
8599 println!(" · what's underidentified?");
8600 println!(" · how many findings?");
8601 println!(" · what changed recently?");
8602 println!(" · who has what calibration?");
8603 println!();
8604 let stdin = std::io::stdin();
8605 let mut stdout = std::io::stdout();
8606 loop {
8607 print!(" ask> ");
8608 stdout.flush().ok();
8609 let mut line = String::new();
8610 if stdin.lock().read_line(&mut line).is_err() {
8611 break;
8612 }
8613 let q = line.trim();
8614 if q.is_empty() {
8615 continue;
8616 }
8617 if matches!(q, "exit" | "quit" | "q") {
8618 break;
8619 }
8620 answer(&project, q, false);
8621 }
8622 return;
8623 }
8624
8625 answer(&project, question, json);
8626}
8627
/// Route a free-text question about `project` to one of several canned
/// answers, matched by keyword on the lowercased question. Each branch prints
/// either a JSON object (when `json`) or plain text, then returns; questions
/// matching no branch get a hint listing the supported topics.
fn answer(project: &crate::project::Project, q: &str, json: bool) {
    let lower = q.to_lowercase();

    // Branch: pending-proposal inbox summary.
    if lower.contains("pending")
        || lower.contains("inbox")
        || lower.contains("queue")
        || lower.contains("to review")
    {
        let pending: Vec<&crate::proposals::StateProposal> = project
            .proposals
            .iter()
            .filter(|p| p.status == "pending_review")
            .collect();
        let mut by_kind: std::collections::BTreeMap<String, usize> = Default::default();
        for p in &pending {
            *by_kind.entry(p.kind.clone()).or_insert(0) += 1;
        }
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "answer": "pending",
                    "total": pending.len(),
                    "by_kind": by_kind,
                }))
                .unwrap()
            );
        } else {
            println!(" {} pending proposals.", pending.len());
            for (k, n) in &by_kind {
                println!(" · {n:>3} {k}");
            }
            if pending.is_empty() {
                println!(" Inbox is clean.");
            } else {
                println!(" Run `vela inbox <frontier>` to triage.");
            }
        }
        return;
    }

    // Branch: causal-audit summary, listing up to 8 underidentified findings.
    if lower.contains("underident")
        || lower.contains("audit")
        || lower.contains("identif")
        || lower.contains("causal")
    {
        let entries = crate::causal_reasoning::audit_frontier(project);
        let summary = crate::causal_reasoning::summarize_audit(&entries);
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "answer": "audit",
                    "summary": {
                        "identified": summary.identified,
                        "conditional": summary.conditional,
                        "underidentified": summary.underidentified,
                        "underdetermined": summary.underdetermined,
                    },
                }))
                .unwrap()
            );
        } else {
            println!(
                " Causal audit: {} identified · {} conditional · {} underidentified · {} underdetermined.",
                summary.identified,
                summary.conditional,
                summary.underidentified,
                summary.underdetermined,
            );
            if summary.underidentified > 0 {
                println!(
                    " The {} underidentified findings are concrete review items:",
                    summary.underidentified
                );
                for e in entries
                    .iter()
                    .filter(|e| {
                        matches!(
                            e.verdict,
                            crate::causal_reasoning::Identifiability::Underidentified
                        )
                    })
                    .take(8)
                {
                    // Truncate the assertion text to 70 chars for display.
                    let txt: String = e.assertion_text.chars().take(70).collect();
                    println!(" · {} {}", e.finding_id, txt);
                }
            }
        }
        return;
    }

    // Branch: the 8 most recent events, newest first by timestamp string.
    if lower.contains("recent")
        || lower.contains("changed")
        || lower.contains("latest")
        || lower.contains("happen")
    {
        let mut events: Vec<&crate::events::StateEvent> = project.events.iter().collect();
        events.sort_by(|a, b| b.timestamp.cmp(&a.timestamp));
        events.truncate(8);
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "answer": "recent_events",
                    "events": events.iter().map(|e| json!({
                        "id": e.id, "kind": e.kind, "timestamp": e.timestamp,
                        "actor": e.actor.id, "target": e.target.id,
                    })).collect::<Vec<_>>(),
                }))
                .unwrap()
            );
        } else {
            println!(" Most recent {} events:", events.len());
            for e in &events {
                let when = fmt_timestamp(&e.timestamp);
                println!(" · {when} {:<28} {}", e.kind, e.target.id);
            }
        }
        return;
    }

    // Branch: entity counts across the whole frontier.
    if lower.starts_with("how many") || lower.contains("count") || lower.contains("total") {
        let n = project.findings.len();
        let evs = project.events.len();
        let peers = project.peers.len();
        let actors = project.actors.len();
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "answer": "counts",
                    "findings": n,
                    "events": evs,
                    "peers": peers,
                    "actors": actors,
                    "replications": project.replications.len(),
                    "predictions": project.predictions.len(),
                }))
                .unwrap()
            );
        } else {
            println!(" {n} findings · {evs} events · {actors} actors · {peers} peers.");
            println!(
                " {} replications · {} predictions · {} datasets · {} code artifacts.",
                project.replications.len(),
                project.predictions.len(),
                project.datasets.len(),
                project.code_artifacts.len(),
            );
        }
        return;
    }

    // Branch: per-actor calibration ledger (predictions vs. resolutions).
    if lower.contains("calibration") || lower.contains("brier") || lower.contains("predict") {
        let records =
            crate::calibration::calibration_records(&project.predictions, &project.resolutions);
        if json {
            println!("{}", serde_json::to_string_pretty(&records).unwrap());
        } else if records.is_empty() {
            println!(" No predictions yet. The calibration ledger is empty.");
        } else {
            println!(" Calibration over {} actor(s):", records.len());
            for r in &records {
                // Brier score may be absent when nothing has resolved yet.
                let brier = r
                    .brier_score
                    .map(|b| format!("{:.3}", b))
                    .unwrap_or_else(|| "—".into());
                println!(
                    " · {:<28} predictions {} · resolved {} · expired {} · Brier {}",
                    r.actor, r.n_predictions, r.n_resolved, r.n_expired, brier
                );
            }
        }
        return;
    }

    // Branch: federation peers and conflict-event count.
    if lower.contains("peer")
        || lower.contains("federat")
        || lower.contains("sync")
        || lower.contains("conflict")
    {
        let mut total_conflicts = 0usize;
        for e in &project.events {
            if e.kind == "frontier.conflict_detected" {
                total_conflicts += 1;
            }
        }
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "answer": "federation",
                    "peers": project.peers.iter().map(|p| &p.id).collect::<Vec<_>>(),
                    "total_conflicts": total_conflicts,
                }))
                .unwrap()
            );
        } else {
            println!(" {} peer(s) registered:", project.peers.len());
            for p in &project.peers {
                println!(" · {:<24} {}", p.id, p.url);
            }
            println!(" {total_conflicts} conflict events on the canonical log.");
        }
        return;
    }

    // Fallback: no keyword matched; list the supported topics.
    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "answer": "unknown_question",
                "question": q,
                "hint": "Try: pending, audit, recent, how many, calibration, peers."
            }))
            .unwrap()
        );
    } else {
        println!(" Don't know how to route that question yet.");
        println!(" Try: pending · audit · recent · how many · calibration · peers");
    }
}
8859
8860fn frontier_label(p: &crate::project::Project) -> String {
8861 if p.project.name.trim().is_empty() {
8862 "(unnamed)".to_string()
8863 } else {
8864 p.project.name.clone()
8865 }
8866}
8867
8868fn fmt_timestamp(ts: &str) -> String {
8869 chrono::DateTime::parse_from_rfc3339(ts)
8872 .map(|dt| dt.format("%m-%d %H:%M").to_string())
8873 .unwrap_or_else(|_| ts.chars().take(16).collect())
8874}
8875
/// `vela stats`: print a human-readable summary card for the frontier at `path`.
///
/// Loads the frontier (the `*_or_fail` helper aborts the process on error),
/// then prints identity and compilation metadata, aggregate counters, the
/// recorded proof-packet status, and a per-category breakdown.
fn cmd_stats(path: &Path) {
    let frontier = load_frontier_or_fail(path);
    let s = &frontier.stats;
    println!();
    // NOTE(review): banner version is hard-coded; confirm it is kept in sync
    // with the crate version that clap's `version` flag reports.
    println!(" {}", "FRONTIER · V0.36.0".dimmed());
    println!(" {}", frontier.project.name.bold());
    println!(" {}", style::tick_row(60));
    println!(" id: {}", frontier.frontier_id());
    println!(" compiled: {}", frontier.project.compiled_at);
    println!(" papers: {}", frontier.project.papers_processed);
    println!(" findings: {}", s.findings);
    println!(" links: {}", s.links);
    println!(" replicated: {}", s.replicated);
    println!(" avg confidence: {}", s.avg_confidence);
    println!(" gaps: {}", s.gaps);
    println!(" contested: {}", s.contested);
    println!(" reviewed: {}", s.human_reviewed);
    println!(" proposals: {}", s.proposal_count);
    println!(
        " recorded proof: {}",
        frontier.proof_state.latest_packet.status
    );
    // The recorded proof is frontier metadata only; the packet files themselves
    // are verified by `vela packet validate`, so point the reader there.
    if frontier.proof_state.latest_packet.status != "never_exported" {
        println!(
            " proof note: recorded frontier metadata; packet files are checked by `vela packet validate`"
        );
    }
    if !s.categories.is_empty() {
        println!();
        println!(" {}", "categories".dimmed());
        // Show categories largest-count first.
        let mut categories = s.categories.iter().collect::<Vec<_>>();
        categories.sort_by(|a, b| b.1.cmp(a.1));
        for (category, count) in categories {
            println!(" {category}: {}", count);
        }
    }
    println!();
    println!(" {}", style::tick_row(60));
    println!();
}
8916
/// Dispatch for `vela proposals <subcommand>`.
///
/// Every arm builds a JSON payload and either prints it verbatim (`--json`)
/// or renders a short human-readable summary. Errors from the repo/proposals
/// layers abort the process via `fail_return` (which diverges).
fn cmd_proposals(action: ProposalAction) {
    match action {
        // List proposals, optionally filtered by status.
        ProposalAction::List {
            frontier,
            status,
            json,
        } => {
            let frontier_state =
                repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let proposals_list = proposals::list(&frontier_state, status.as_deref());
            let payload = json!({
                "ok": true,
                "command": "proposals.list",
                "frontier": frontier_state.project.name,
                "status_filter": status,
                "summary": proposals::summary(&frontier_state),
                "proposals": proposals_list,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposals list")
                );
            } else {
                println!("vela proposals list");
                println!(" frontier: {}", frontier_state.project.name);
                println!(
                    " proposals: {}",
                    // Count is read back out of the payload because
                    // `proposals_list` was moved into it above.
                    payload["proposals"].as_array().map_or(0, Vec::len)
                );
            }
        }
        // Show a single proposal by id.
        ProposalAction::Show {
            frontier,
            proposal_id,
            json,
        } => {
            let frontier_state =
                repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let proposal =
                proposals::show(&frontier_state, &proposal_id).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "proposals.show",
                "frontier": frontier_state.project.name,
                "proposal": proposal,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal show")
                );
            } else {
                println!("vela proposals show");
                println!(" frontier: {}", frontier_state.project.name);
                println!(" proposal: {}", proposal_id);
                println!(" kind: {}", proposal.kind);
                println!(" status: {}", proposal.status);
            }
        }
        // Dry-run a proposal: report before/after counts without persisting.
        ProposalAction::Preview {
            frontier,
            proposal_id,
            reviewer,
            json,
        } => {
            let preview = proposals::preview_at_path(&frontier, &proposal_id, &reviewer)
                .unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "proposals.preview",
                "frontier": frontier.display().to_string(),
                "preview": preview,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal preview")
                );
            } else {
                println!("vela proposals preview");
                println!(" proposal: {}", proposal_id);
                println!(" kind: {}", preview.kind);
                println!(
                    " findings: {} -> {}",
                    preview.findings_before, preview.findings_after
                );
                println!(
                    " artifacts: {} -> {}",
                    preview.artifacts_before, preview.artifacts_after
                );
                println!(
                    " events: {} -> {}",
                    preview.events_before, preview.events_after
                );
                // Detail lines only when the preview reports actual changes.
                if !preview.changed_findings.is_empty() {
                    println!(
                        " findings changed: {}",
                        preview.changed_findings.join(", ")
                    );
                }
                if !preview.changed_artifacts.is_empty() {
                    println!(
                        " artifacts changed: {}",
                        preview.changed_artifacts.join(", ")
                    );
                }
                if !preview.event_kinds.is_empty() {
                    println!(" event kinds: {}", preview.event_kinds.join(", "));
                }
                println!(" event: {}", preview.applied_event_id);
            }
        }
        // Import proposals from an external file/directory into the frontier.
        ProposalAction::Import {
            frontier,
            source,
            json,
        } => {
            let report =
                proposals::import_from_path(&frontier, &source).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "proposals.import",
                "frontier": frontier.display().to_string(),
                "source": source.display().to_string(),
                "summary": {
                    "imported": report.imported,
                    "applied": report.applied,
                    "rejected": report.rejected,
                    "duplicates": report.duplicates,
                },
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal import")
                );
            } else {
                println!(
                    "Imported {} proposals into {}",
                    report.imported, report.wrote_to
                );
            }
        }
        // Validate a proposal source without importing; exits 1 on failure.
        ProposalAction::Validate { source, json } => {
            let report = proposals::validate_source(&source).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": report.ok,
                "command": "proposals.validate",
                "source": source.display().to_string(),
                "summary": {
                    "checked": report.checked,
                    "valid": report.valid,
                    "invalid": report.invalid,
                },
                "proposal_ids": report.proposal_ids,
                "errors": report.errors,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal validation")
                );
            } else if report.ok {
                println!("{} validated {} proposals", style::ok("ok"), report.valid);
            } else {
                println!(
                    "{} validated {} proposals, {} invalid",
                    style::lost("lost"),
                    report.valid,
                    report.invalid
                );
                for error in &report.errors {
                    println!(" · {error}");
                }
                // Non-zero exit so CI pipelines can gate on validation.
                std::process::exit(1);
            }
        }
        // Export proposals (optionally filtered by status) to an output path.
        ProposalAction::Export {
            frontier,
            output,
            status,
            json,
        } => {
            let count = proposals::export_to_path(&frontier, &output, status.as_deref())
                .unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "proposals.export",
                "frontier": frontier.display().to_string(),
                "output": output.display().to_string(),
                "status": status,
                "exported": count,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal export")
                );
            } else {
                println!("sealed · {count} proposals · {}", output.display());
            }
        }
        // Accept a proposal: applies it and records the applied event id.
        ProposalAction::Accept {
            frontier,
            proposal_id,
            reviewer,
            reason,
            json,
        } => {
            let event_id = proposals::accept_at_path(&frontier, &proposal_id, &reviewer, &reason)
                .unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "proposals.accept",
                "frontier": frontier.display().to_string(),
                "proposal_id": proposal_id,
                "reviewer": reviewer,
                "applied_event_id": event_id,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal accept")
                );
            } else {
                println!(
                    "{} accepted and applied proposal {}",
                    style::ok("ok"),
                    proposal_id
                );
                println!(" event: {}", event_id);
            }
        }
        // Reject a proposal; nothing is applied, only the status changes.
        ProposalAction::Reject {
            frontier,
            proposal_id,
            reviewer,
            reason,
            json,
        } => {
            proposals::reject_at_path(&frontier, &proposal_id, &reviewer, &reason)
                .unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "proposals.reject",
                "frontier": frontier.display().to_string(),
                "proposal_id": proposal_id,
                "reviewer": reviewer,
                "status": "rejected",
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal reject")
                );
            } else {
                println!(
                    "{} rejected proposal {}",
                    style::warn("rejected"),
                    proposal_id
                );
            }
        }
    }
}
9191
/// `vela artifact-to-state`: import a packet into a frontier as proposals.
///
/// Delegates to `artifact_to_state::import_packet_at_path`; when
/// `apply_artifacts` is set, artifact events are applied immediately while
/// truth-affecting changes stay pending for human review. Prints the report
/// as pretty JSON (`--json`) or as a labelled summary.
fn cmd_artifact_to_state(
    frontier: &Path,
    packet: &Path,
    actor: &str,
    apply_artifacts: bool,
    json: bool,
) {
    // `fail_return` aborts the process on import errors.
    let report =
        crate::artifact_to_state::import_packet_at_path(frontier, packet, actor, apply_artifacts)
            .unwrap_or_else(|e| fail_return(&e));
    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&report)
                .expect("failed to serialize artifact-to-state report")
        );
    } else {
        println!("vela artifact-to-state");
        println!(" packet: {}", report.packet_id);
        println!(" frontier: {}", report.frontier);
        println!(" artifact proposals: {}", report.artifact_proposals);
        println!(" finding proposals: {}", report.finding_proposals);
        println!(" gap proposals: {}", report.gap_proposals);
        println!(
            " applied artifact events: {}",
            report.applied_artifact_events
        );
        println!(
            " pending truth proposals: {}",
            report.pending_truth_proposals
        );
    }
}
9225
/// Dispatch for `vela bridge-kit <subcommand>`.
///
/// `Validate` checks every packet in `source`, prints a per-packet verdict,
/// and exits with status 1 when any packet (or the kit as a whole) is invalid.
fn cmd_bridge_kit(action: BridgeKitAction) {
    match action {
        BridgeKitAction::Validate { source, json } => {
            let report = crate::artifact_to_state::validate_bridge_kit_path(&source);
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&report)
                        .expect("failed to serialize bridge-kit validation report")
                );
            } else {
                println!("vela bridge-kit validate");
                println!(" source: {}", report.source);
                println!(" packets: {}", report.packet_count);
                println!(" valid: {}", report.valid_packet_count);
                println!(" invalid: {}", report.invalid_packet_count);
                // One line per packet: id + counts when valid, path + errors when not.
                for packet in &report.packets {
                    if packet.ok {
                        println!(
                            " ok: {} · {} artifacts · {} claims · {} needs",
                            packet
                                .packet_id
                                .as_deref()
                                .unwrap_or("packet id unavailable"),
                            packet.artifact_count,
                            packet.candidate_claim_count,
                            packet.open_need_count
                        );
                    } else {
                        println!(" invalid: {} · {}", packet.path, packet.errors.join("; "));
                    }
                }
                // Kit-level errors (e.g. unreadable source) are reported separately.
                for error in &report.errors {
                    println!(" error: {error}");
                }
            }
            // Exit non-zero even in --json mode so scripts can gate on validity.
            if !report.ok {
                std::process::exit(1);
            }
        }
    }
}
9268
/// Dispatch for `vela source-adapter <subcommand>` (async: adapters may fetch
/// over the network).
///
/// `Run` forwards every CLI flag into `SourceAdapterRunOptions`, awaits the
/// adapter run, and prints the resulting report as JSON or a labelled summary.
async fn cmd_source_adapter(action: SourceAdapterAction) {
    match action {
        SourceAdapterAction::Run {
            frontier,
            adapter,
            actor,
            entries,
            priority,
            include_excluded,
            allow_partial,
            dry_run,
            input_dir,
            apply_artifacts,
            json,
        } => {
            // All flags map 1:1 onto the options struct; errors abort the process.
            let report = crate::source_adapters::run(
                &frontier,
                crate::source_adapters::SourceAdapterRunOptions {
                    adapter,
                    actor,
                    entries,
                    priority,
                    include_excluded,
                    allow_partial,
                    dry_run,
                    input_dir,
                    apply_artifacts,
                },
            )
            .await
            .unwrap_or_else(|e| fail_return(&e));
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&report)
                        .expect("failed to serialize source adapter report")
                );
            } else {
                println!("vela source-adapter run");
                println!(" adapter: {}", report.adapter);
                println!(" run: {}", report.run_id);
                println!(" frontier: {}", report.frontier);
                println!(" selected entries: {}", report.selected_entries);
                println!(" fetched records: {}", report.fetched_records);
                println!(" changed records: {}", report.changed_records);
                println!(" unchanged records: {}", report.unchanged_records);
                println!(" failed records: {}", report.failed_records.len());
                // A packet is only produced when the run yielded material.
                if let Some(packet_id) = report.packet_id {
                    println!(" packet: {packet_id}");
                }
                println!(" artifact proposals: {}", report.artifact_proposals);
                println!(" review note proposals: {}", report.review_note_proposals);
                println!(" applied events: {}", report.applied_event_ids.len());
            }
        }
    }
}
9326
/// Dispatch for `vela runtime-adapter <subcommand>`.
///
/// `Run` executes a runtime adapter over `input` and prints the resulting
/// report as JSON or a labelled summary. Mirrors `cmd_source_adapter` but is
/// synchronous — runtime adapters read local input only.
fn cmd_runtime_adapter(action: RuntimeAdapterAction) {
    match action {
        RuntimeAdapterAction::Run {
            frontier,
            adapter,
            input,
            actor,
            dry_run,
            apply_artifacts,
            json,
        } => {
            let report = crate::runtime_adapters::run(
                &frontier,
                crate::runtime_adapters::RuntimeAdapterRunOptions {
                    adapter,
                    input,
                    actor,
                    dry_run,
                    apply_artifacts,
                },
            )
            .unwrap_or_else(|e| fail_return(&e));
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&report)
                        .expect("failed to serialize runtime adapter report")
                );
            } else {
                println!("vela runtime-adapter run");
                println!(" adapter: {}", report.adapter);
                println!(" run: {}", report.run_id);
                println!(" frontier: {}", report.frontier);
                if let Some(packet_id) = report.packet_id {
                    println!(" packet: {packet_id}");
                }
                println!(" artifact proposals: {}", report.artifact_proposals);
                println!(" finding proposals: {}", report.finding_proposals);
                println!(" gap proposals: {}", report.gap_proposals);
                println!(" review note proposals: {}", report.review_note_proposals);
                // Artifact events may be auto-applied; truth changes stay pending.
                println!(
                    " applied artifact events: {}",
                    report.applied_artifact_events
                );
                println!(
                    " pending truth proposals: {}",
                    report.pending_truth_proposals
                );
            }
        }
    }
}
9379
/// Dispatch for `vela sign <subcommand>`: Ed25519 keypair generation,
/// signing a frontier's findings, verifying signatures, and setting a
/// per-finding multi-signature threshold.
fn cmd_sign(action: SignAction) {
    match action {
        // Generate a keypair on disk; only the public key is echoed back.
        SignAction::GenerateKeypair { out, json } => {
            let public_key = sign::generate_keypair(&out).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "sign.generate-keypair",
                "output_dir": out.display().to_string(),
                "public_key": public_key,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize sign.generate-keypair")
                );
            } else {
                println!("{} keypair · {}", style::ok("generated"), out.display());
                println!(" public key: {public_key}");
            }
        }
        // Sign all findings in the frontier with the given private key.
        SignAction::Apply {
            frontier,
            private_key,
            json,
        } => {
            let count =
                sign::sign_frontier(&frontier, &private_key).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "sign.apply",
                "frontier": frontier.display().to_string(),
                "private_key": private_key.display().to_string(),
                "signed": count,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload).expect("failed to serialize sign.apply")
                );
            } else {
                println!(
                    "{} {count} findings in {}",
                    style::ok("signed"),
                    frontier.display()
                );
            }
        }
        // Verify signatures; a public key is optional (verify-all mode otherwise).
        SignAction::Verify {
            frontier,
            public_key,
            json,
        } => {
            let report = sign::verify_frontier(&frontier, public_key.as_deref())
                .unwrap_or_else(|e| fail_return(&e));
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&report).expect("failed to serialize sign.verify")
                );
            } else {
                println!();
                println!(
                    " {}",
                    format!("VELA · SIGN · VERIFY · {}", frontier.display())
                        .to_uppercase()
                        .dimmed()
                );
                println!(" {}", style::tick_row(60));
                println!(" total findings: {}", report.total_findings);
                println!(" signed: {}", report.signed);
                println!(" unsigned: {}", report.unsigned);
                println!(" valid: {}", report.valid);
                println!(" invalid: {}", report.invalid);
                // Threshold lines only when multi-signature thresholds are in use.
                if report.findings_with_threshold > 0 {
                    println!(" with threshold: {}", report.findings_with_threshold);
                    println!(" jointly accepted: {}", report.jointly_accepted);
                }
            }
        }
        // Set the number of signatures required for a finding to be
        // "jointly accepted", then persist the recomputed flags.
        SignAction::ThresholdSet {
            frontier,
            finding_id,
            to,
            json,
        } => {
            // A threshold of zero would mark everything accepted; reject it.
            if to == 0 {
                fail("--to must be >= 1");
            }
            let mut project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            // `fail` diverges, so the `let else` is sound.
            let Some(idx) = project.findings.iter().position(|f| f.id == finding_id) else {
                fail(&format!("finding '{finding_id}' not present in frontier"));
            };
            project.findings[idx].flags.signature_threshold = Some(to);
            // Recompute `jointly_accepted` across the frontier before saving
            // so the persisted state is internally consistent.
            sign::refresh_jointly_accepted(&mut project);
            let met = project.findings[idx].flags.jointly_accepted;
            repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));

            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&json!({
                        "ok": true,
                        "command": "sign.threshold-set",
                        "finding_id": finding_id,
                        "threshold": to,
                        "jointly_accepted": met,
                        "frontier": frontier.display().to_string(),
                    }))
                    .expect("failed to serialize sign.threshold-set")
                );
            } else {
                println!(
                    "{} signature_threshold={to} on {finding_id} ({})",
                    style::ok("set"),
                    if met {
                        "jointly accepted"
                    } else {
                        "awaiting signatures"
                    }
                );
            }
        }
    }
}
9508
9509fn cmd_actor(action: ActorAction) {
9510 match action {
9511 ActorAction::Add {
9512 frontier,
9513 id,
9514 pubkey,
9515 tier,
9516 orcid,
9517 clearance,
9518 json,
9519 } => {
9520 let trimmed = pubkey.trim();
9522 if trimmed.len() != 64 || hex::decode(trimmed).is_err() {
9523 fail("Public key must be 64 hex characters (32-byte Ed25519 pubkey).");
9524 }
9525 let orcid_normalized = orcid
9527 .as_deref()
9528 .map(|s| sign::validate_orcid(s).unwrap_or_else(|e| fail_return(&e)));
9529 let clearance: Option<crate::access_tier::AccessTier> = clearance.as_deref().map(|s| {
9532 crate::access_tier::AccessTier::parse(s).unwrap_or_else(|e| fail_return(&e))
9533 });
9534
9535 let mut project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
9536 if project.actors.iter().any(|actor| actor.id == id) {
9537 fail(&format!(
9538 "Actor '{id}' already registered in this frontier."
9539 ));
9540 }
9541 project.actors.push(sign::ActorRecord {
9542 id: id.clone(),
9543 public_key: trimmed.to_string(),
9544 algorithm: "ed25519".to_string(),
9545 created_at: chrono::Utc::now().to_rfc3339(),
9546 tier: tier.clone(),
9547 orcid: orcid_normalized.clone(),
9548 access_clearance: clearance,
9549 });
9550 repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));
9551 let payload = json!({
9552 "ok": true,
9553 "command": "actor.add",
9554 "frontier": frontier.display().to_string(),
9555 "actor_id": id,
9556 "public_key": trimmed,
9557 "tier": tier,
9558 "orcid": orcid_normalized,
9559 "registered_count": project.actors.len(),
9560 });
9561 if json {
9562 println!(
9563 "{}",
9564 serde_json::to_string_pretty(&payload).expect("failed to serialize actor.add")
9565 );
9566 } else {
9567 let tier_suffix = tier
9568 .as_deref()
9569 .map_or_else(String::new, |t| format!(" tier={t}"));
9570 println!(
9571 "{} actor {} (pubkey {}{tier_suffix})",
9572 style::ok("registered"),
9573 id,
9574 &trimmed[..16]
9575 );
9576 }
9577 }
9578 ActorAction::List { frontier, json } => {
9579 let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
9580 if json {
9581 let payload = json!({
9582 "ok": true,
9583 "command": "actor.list",
9584 "frontier": frontier.display().to_string(),
9585 "actors": project.actors,
9586 });
9587 println!(
9588 "{}",
9589 serde_json::to_string_pretty(&payload).expect("failed to serialize actor.list")
9590 );
9591 } else {
9592 println!();
9593 println!(
9594 " {}",
9595 format!("VELA · ACTOR · LIST · {}", frontier.display())
9596 .to_uppercase()
9597 .dimmed()
9598 );
9599 println!(" {}", style::tick_row(60));
9600 if project.actors.is_empty() {
9601 println!(" (no actors registered)");
9602 } else {
9603 for actor in &project.actors {
9604 println!(
9605 " {:<28} {}… registered {}",
9606 actor.id,
9607 &actor.public_key[..16],
9608 actor.created_at
9609 );
9610 }
9611 }
9612 }
9613 }
9614 }
9615}
9616
/// Dispatch for `vela causal <subcommand>`: audit causal claims across the
/// frontier, identify a source→target effect, dump the causal graph, and
/// answer counterfactual queries.
fn cmd_causal(action: CausalAction) {
    use crate::causal_reasoning;

    match action {
        // Audit every finding's causal claim for identifiability.
        CausalAction::Audit {
            frontier,
            problems_only,
            json,
        } => {
            let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let mut entries = causal_reasoning::audit_frontier(&project);
            // --problems-only keeps just the entries a reviewer must look at.
            if problems_only {
                entries.retain(|e| e.verdict.needs_reviewer_attention());
            }
            // Summary is computed over the (possibly filtered) entry set.
            let summary = causal_reasoning::summarize_audit(&entries);

            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&json!({
                        "ok": true,
                        "command": "causal.audit",
                        "frontier": frontier.display().to_string(),
                        "summary": summary,
                        "entries": entries,
                    }))
                    .expect("serialize causal.audit")
                );
                return;
            }

            println!();
            println!(
                " {}",
                format!("VELA · CAUSAL · AUDIT · {}", frontier.display())
                    .to_uppercase()
                    .dimmed()
            );
            println!(" {}", style::tick_row(60));
            println!(
                " total: {} identified: {} conditional: {} underidentified: {} underdetermined: {}",
                summary.total,
                summary.identified,
                summary.conditional,
                summary.underidentified,
                summary.underdetermined,
            );
            if entries.is_empty() {
                println!(" (no entries to report)");
                return;
            }
            for e in &entries {
                // Colored status chip per identifiability verdict.
                let chip = match e.verdict {
                    crate::causal_reasoning::Identifiability::Identified => style::ok("identified"),
                    crate::causal_reasoning::Identifiability::Conditional => {
                        style::warn("conditional")
                    }
                    crate::causal_reasoning::Identifiability::Underidentified => {
                        style::lost("underidentified")
                    }
                    crate::causal_reasoning::Identifiability::Underdetermined => {
                        style::warn("underdetermined")
                    }
                };
                // Debug-format the optional claim/grade enums as lowercase labels.
                let claim = e
                    .causal_claim
                    .map_or("none".to_string(), |c| format!("{c:?}").to_lowercase());
                let grade = e
                    .causal_evidence_grade
                    .map_or("none".to_string(), |g| format!("{g:?}").to_lowercase());
                println!();
                println!(" {chip} {} ({}/{})", e.finding_id, claim, grade);
                // Truncate the assertion to keep the card one line wide.
                let assertion_short: String = e.assertion_text.chars().take(78).collect();
                println!(" {assertion_short}");
                println!(" {} {}", style::ok("why:"), e.rationale);
                // Show the fix line for attention-needing or underdetermined verdicts.
                if e.verdict.needs_reviewer_attention()
                    || matches!(
                        e.verdict,
                        crate::causal_reasoning::Identifiability::Underdetermined
                    )
                {
                    println!(" {} {}", style::ok("fix:"), e.remediation);
                }
            }
        }
        // Identify the causal effect of `source` on `target` (back-door /
        // front-door criteria).
        CausalAction::Effect {
            frontier,
            source,
            on: target,
            json,
        } => {
            use crate::causal_graph::{CausalEffectVerdict, identify_effect};

            let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let verdict = identify_effect(&project, &source, &target);

            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&json!({
                        "ok": true,
                        "command": "causal.effect",
                        "frontier": frontier.display().to_string(),
                        "source": source,
                        "target": target,
                        "verdict": verdict,
                    }))
                    .expect("serialize causal.effect")
                );
                return;
            }

            println!();
            println!(
                " {}",
                format!("VELA · CAUSAL · EFFECT · {} → {}", source, target)
                    .to_uppercase()
                    .dimmed()
            );
            println!(" {}", style::tick_row(60));
            match verdict {
                CausalEffectVerdict::Identified {
                    adjustment_set,
                    back_door_paths_considered,
                } => {
                    if adjustment_set.is_empty() {
                        println!(
                            " {} no back-door adjustment needed",
                            style::ok("identified")
                        );
                    } else {
                        println!(" {} identified by adjusting on:", style::ok("identified"));
                        for z in &adjustment_set {
                            println!(" · {z}");
                        }
                    }
                    println!(
                        " back-door paths considered: {}",
                        back_door_paths_considered
                    );
                }
                CausalEffectVerdict::IdentifiedByFrontDoor { mediator_set } => {
                    println!(
                        " {} identified via front-door criterion (Pearl 1995 §3.3)",
                        style::ok("identified")
                    );
                    println!(" mediators that intercept all directed paths:");
                    for m in &mediator_set {
                        println!(" · {m}");
                    }
                    println!(
                        " applies when source-target confounders are unobserved but the mediator chain is."
                    );
                }
                CausalEffectVerdict::NoCausalPath { reason } => {
                    println!(" {} no causal path: {reason}", style::warn("no_path"));
                }
                CausalEffectVerdict::Underidentified {
                    unblocked_back_door_paths,
                    candidates_tried,
                } => {
                    println!(
                        " {} no observational adjustment set found ({} candidates tried)",
                        style::lost("underidentified"),
                        candidates_tried
                    );
                    println!(" open back-door paths:");
                    // Cap the path listing to keep the output readable.
                    for path in unblocked_back_door_paths.iter().take(5) {
                        println!(" · {}", path.join(" — "));
                    }
                    println!(
                        " remediation: either intervene experimentally on {source}, or extend the link graph to make a confounder observable."
                    );
                }
                CausalEffectVerdict::UnknownNode { which } => {
                    fail(&which);
                }
            }
            println!();
        }
        // Dump the causal graph, either one node's neighborhood or all findings.
        CausalAction::Graph {
            frontier,
            node,
            json,
        } => {
            use crate::causal_graph::CausalGraph;
            let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let graph = CausalGraph::from_project(&project);

            // With --node, restrict to that node (after existence check);
            // otherwise list every finding id.
            let nodes: Vec<&str> = if let Some(n) = node.as_deref() {
                if !graph.contains(n) {
                    fail(&format!("node not in frontier: {n}"));
                }
                vec![n]
            } else {
                project.findings.iter().map(|f| f.id.as_str()).collect()
            };

            if json {
                let payload: Vec<_> = nodes
                    .iter()
                    .map(|n| {
                        let parents: Vec<&str> = graph.parents_of(n).collect();
                        let children: Vec<&str> = graph.children_of(n).collect();
                        json!({
                            "node": n,
                            "parents": parents,
                            "children": children,
                        })
                    })
                    .collect();
                println!(
                    "{}",
                    serde_json::to_string_pretty(&json!({
                        "ok": true,
                        "command": "causal.graph",
                        "node_count": graph.node_count(),
                        "edge_count": graph.edge_count(),
                        "nodes": payload,
                    }))
                    .expect("serialize causal.graph")
                );
                return;
            }

            println!();
            println!(
                " {}",
                format!("VELA · CAUSAL · GRAPH · {}", frontier.display())
                    .to_uppercase()
                    .dimmed()
            );
            println!(" {}", style::tick_row(60));
            println!(
                " {} nodes · {} edges",
                graph.node_count(),
                graph.edge_count()
            );
            println!();
            for n in &nodes {
                let parents: Vec<&str> = graph.parents_of(n).collect();
                let children: Vec<&str> = graph.children_of(n).collect();
                // In the all-nodes listing, skip isolated nodes; when a single
                // node was requested explicitly, always show it.
                if parents.is_empty() && children.is_empty() && nodes.len() > 1 {
                    continue;
                }
                println!(" {n}");
                if !parents.is_empty() {
                    println!(" parents: {}", parents.join(", "));
                }
                if !children.is_empty() {
                    println!(" children: {}", children.join(", "));
                }
            }
        }
        // Answer "what if X's confidence had been v?" for a target finding.
        CausalAction::Counterfactual {
            frontier,
            intervene_on,
            set_to,
            target,
            json,
        } => {
            use crate::counterfactual::{
                CounterfactualQuery, CounterfactualVerdict, answer_counterfactual,
            };

            let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let query = CounterfactualQuery {
                intervene_on: intervene_on.clone(),
                set_to,
                target: target.clone(),
            };
            let verdict = answer_counterfactual(&project, &query);

            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&json!({
                        "ok": true,
                        "command": "causal.counterfactual",
                        "frontier": frontier.display().to_string(),
                        "query": query,
                        "verdict": verdict,
                    }))
                    .expect("serialize causal.counterfactual")
                );
                return;
            }

            println!();
            println!(
                " {}",
                format!(
                    "VELA · CAUSAL · COUNTERFACTUAL · do({intervene_on} := {set_to:.3}) → {target}"
                )
                .to_uppercase()
                .dimmed()
            );
            println!(" {}", style::tick_row(72));
            match verdict {
                CounterfactualVerdict::Resolved {
                    factual,
                    counterfactual,
                    delta,
                    paths_used,
                } => {
                    println!(
                        " {} factual: {factual:.3} counterfactual: {counterfactual:.3} delta: {delta:+.3}",
                        style::ok("resolved")
                    );
                    println!(
                        " twin-network propagation through {} causal path(s):",
                        paths_used.len()
                    );
                    for p in paths_used.iter().take(5) {
                        println!(" · {}", p.join(" → "));
                    }
                    // Plain-language reading of the numeric result.
                    println!(
                        " reading: \"if {intervene_on}'s confidence had been {set_to:.3} \
                        instead of factual, {target}'s confidence would shift by {delta:+.3}.\""
                    );
                }
                CounterfactualVerdict::MechanismUnspecified { unspecified_edges } => {
                    println!(
                        " {} causal path exists but {} edge(s) lack a mechanism annotation",
                        style::warn("mechanism_unspecified"),
                        unspecified_edges.len()
                    );
                    for (parent, child) in unspecified_edges.iter().take(8) {
                        println!(" · {parent} → {child}");
                    }
                    println!(
                        " remediation: annotate one of the link mechanisms (linear / monotonic / threshold / saturating)."
                    );
                }
                CounterfactualVerdict::NoCausalPath { factual } => {
                    println!(
                        " {} no directed path from {intervene_on} to {target}; counterfactual = factual = {factual:.3}",
                        style::warn("no_path")
                    );
                }
                CounterfactualVerdict::UnknownNode { which } => {
                    fail(&format!("node not in frontier: {which}"));
                }
                CounterfactualVerdict::InvalidIntervention { reason } => {
                    fail(&reason);
                }
            }
            println!();
        }
    }
}
9971
/// CLI handler for `vela bridges …`: derive, list, show, confirm, and refute
/// cross-frontier bridges. Bridges are persisted as one pretty-printed JSON
/// file per bridge under `<frontier>/.vela/bridges/`.
fn cmd_bridges(action: BridgesAction) {
    use crate::bridge::{Bridge, BridgeStatus, derive_bridges};
    use std::collections::HashMap;

    // Directory that holds one `<bridge-id>.json` file per bridge.
    fn bridges_dir(frontier: &Path) -> PathBuf {
        frontier.join(".vela/bridges")
    }

    // Load a single bridge by id; errors if the file is missing or unparsable.
    fn load_bridge(frontier: &Path, id: &str) -> Result<Bridge, String> {
        let path = bridges_dir(frontier).join(format!("{id}.json"));
        if !path.is_file() {
            return Err(format!("bridge not found: {id}"));
        }
        let data = std::fs::read_to_string(&path).map_err(|e| format!("read {id}: {e}"))?;
        serde_json::from_str(&data).map_err(|e| format!("parse {id}: {e}"))
    }

    // Persist a bridge as pretty-printed JSON (with a trailing newline),
    // creating the bridges directory on first use.
    fn save_bridge(frontier: &Path, b: &Bridge) -> Result<(), String> {
        let dir = bridges_dir(frontier);
        std::fs::create_dir_all(&dir).map_err(|e| format!("mkdir bridges/: {e}"))?;
        let path = dir.join(format!("{}.json", b.id));
        let data = serde_json::to_string_pretty(b).map_err(|e| format!("serialize bridge: {e}"))?;
        std::fs::write(&path, format!("{data}\n")).map_err(|e| format!("write bridge: {e}"))
    }

    // Reviewer identity used when `--reviewer` is not passed; overridable via
    // the VELA_REVIEWER_ID environment variable.
    fn default_reviewer_id() -> String {
        std::env::var("VELA_REVIEWER_ID").unwrap_or_else(|_| "reviewer:will-blair".to_string())
    }

    // Append a `bridge.reviewed` event as a JSON file under `.vela/events/`.
    // The payload is validated against the set of currently known bridge ids
    // before the event is written; an empty/whitespace-only note is omitted.
    fn emit_bridge_reviewed_event(
        frontier: &Path,
        bridge_id: &str,
        status: &str,
        reviewer_id: &str,
        note: Option<&str>,
    ) -> Result<(), String> {
        let mut payload = serde_json::json!({
            "bridge_id": bridge_id,
            "status": status,
        });
        if let Some(n) = note
            && !n.trim().is_empty()
        {
            payload["note"] = serde_json::Value::String(n.to_string());
        }
        let known_ids: Vec<String> = list_bridges(frontier)
            .unwrap_or_default()
            .into_iter()
            .map(|b| b.id)
            .collect();
        crate::events::validate_bridge_reviewed_against_state(&payload, &known_ids)?;
        let event = crate::events::new_bridge_reviewed_event(
            bridge_id,
            reviewer_id,
            "human",
            &format!("Bridge {status} by {reviewer_id}"),
            payload,
            Vec::new(),
        );
        let events_dir = frontier.join(".vela/events");
        std::fs::create_dir_all(&events_dir).map_err(|e| format!("mkdir .vela/events: {e}"))?;
        let event_path = events_dir.join(format!("{}.json", event.id));
        let data =
            serde_json::to_string_pretty(&event).map_err(|e| format!("serialize event: {e}"))?;
        std::fs::write(&event_path, format!("{data}\n")).map_err(|e| format!("write event: {e}"))
    }

    // Read every `*.json` bridge file in the bridges directory. A missing
    // directory is treated as "no bridges". Results are sorted by number of
    // finding refs (descending), then entity name (ascending).
    fn list_bridges(frontier: &Path) -> Result<Vec<Bridge>, String> {
        let dir = bridges_dir(frontier);
        if !dir.is_dir() {
            return Ok(Vec::new());
        }
        let mut out = Vec::new();
        for entry in std::fs::read_dir(&dir).map_err(|e| format!("read bridges/: {e}"))? {
            let entry = entry.map_err(|e| format!("read entry: {e}"))?;
            let path = entry.path();
            if path.extension().and_then(|s| s.to_str()) != Some("json") {
                continue;
            }
            let data = std::fs::read_to_string(&path).map_err(|e| format!("read {path:?}: {e}"))?;
            let b: Bridge =
                serde_json::from_str(&data).map_err(|e| format!("parse {path:?}: {e}"))?;
            out.push(b);
        }
        out.sort_by(|a, b| {
            b.finding_refs
                .len()
                .cmp(&a.finding_refs.len())
                .then(a.entity_name.cmp(&b.entity_name))
        });
        Ok(out)
    }

    match action {
        // Derive bridges from two frontiers and materialize them into
        // frontier A's bridges directory.
        BridgesAction::Derive {
            frontier_a,
            label_a,
            frontier_b,
            label_b,
            json,
        } => {
            let a = repo::load_from_path(&frontier_a).unwrap_or_else(|e| fail_return(&e));
            let b = repo::load_from_path(&frontier_b).unwrap_or_else(|e| fail_return(&e));
            let now = chrono::Utc::now().to_rfc3339();
            let new_bridges =
                derive_bridges(&[(label_a.as_str(), &a), (label_b.as_str(), &b)], &now);

            // Re-deriving must not clobber human review: if a previously
            // stored bridge was confirmed/refuted, carry its status and
            // original derived_at timestamp over to the fresh derivation.
            let existing = list_bridges(&frontier_a).unwrap_or_default();
            let existing_by_id: HashMap<String, Bridge> =
                existing.iter().map(|b| (b.id.clone(), b.clone())).collect();
            let mut written = 0;
            let mut preserved = 0;
            let mut new_ids = Vec::new();
            for mut bridge in new_bridges {
                if let Some(prev) = existing_by_id.get(&bridge.id)
                    && prev.status != BridgeStatus::Derived
                {
                    bridge.status = prev.status;
                    bridge.derived_at = prev.derived_at.clone();
                    preserved += 1;
                }
                save_bridge(&frontier_a, &bridge).unwrap_or_else(|e| fail_return(&e));
                new_ids.push(bridge.id.clone());
                written += 1;
            }

            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&json!({
                        "ok": true,
                        "command": "bridges.derive",
                        "frontier_a": frontier_a.display().to_string(),
                        "frontier_b": frontier_b.display().to_string(),
                        "bridges_written": written,
                        "reviewer_judgments_preserved": preserved,
                        "ids": new_ids,
                    }))
                    .expect("serialize bridges.derive")
                );
                return;
            }

            println!();
            println!(
                " {}",
                format!("VELA · BRIDGES · DERIVE · {} ↔ {}", label_a, label_b)
                    .to_uppercase()
                    .dimmed()
            );
            println!(" {}", style::tick_row(60));
            println!(" {} {} bridge(s) materialized", style::ok("ok"), written);
            if preserved > 0 {
                println!(
                    " {} {} reviewer judgment(s) preserved",
                    style::ok("kept"),
                    preserved
                );
            }
            // Only show the first 10 ids to keep terminal output compact.
            for id in new_ids.iter().take(10) {
                println!("   · {id}");
            }
            if new_ids.len() > 10 {
                println!("   … and {} more", new_ids.len() - 10);
            }
            println!();
        }
        // List stored bridges, optionally filtered by review status.
        BridgesAction::List {
            frontier,
            status,
            json,
        } => {
            let mut bridges = list_bridges(&frontier).unwrap_or_else(|e| fail_return(&e));
            if let Some(s) = status.as_deref() {
                let want = match s.to_lowercase().as_str() {
                    "derived" => BridgeStatus::Derived,
                    "confirmed" => BridgeStatus::Confirmed,
                    "refuted" => BridgeStatus::Refuted,
                    other => fail_return(&format!(
                        "unknown bridge status '{other}' (try derived|confirmed|refuted)"
                    )),
                };
                bridges.retain(|b| b.status == want);
            }
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&json!({
                        "ok": true,
                        "command": "bridges.list",
                        "frontier": frontier.display().to_string(),
                        "count": bridges.len(),
                        "bridges": bridges,
                    }))
                    .expect("serialize bridges.list")
                );
                return;
            }
            println!();
            println!(
                " {}",
                format!("VELA · BRIDGES · LIST · {}", frontier.display())
                    .to_uppercase()
                    .dimmed()
            );
            println!(" {}", style::tick_row(60));
            println!(" {} bridge(s)", bridges.len());
            for b in &bridges {
                // Color-code the status chip by review outcome.
                let chip = match b.status {
                    BridgeStatus::Derived => style::warn("derived"),
                    BridgeStatus::Confirmed => style::ok("confirmed"),
                    BridgeStatus::Refuted => style::lost("refuted"),
                };
                println!();
                println!(
                    " {chip} {} {} ↔ findings:{}",
                    b.id,
                    b.entity_name,
                    b.finding_refs.len()
                );
                println!("   frontiers: {}", b.frontiers.join(", "));
                if let Some(t) = &b.tension {
                    println!("   tension: {t}");
                }
            }
            println!();
        }
        // Dump a single bridge in detail (or raw JSON with --json).
        BridgesAction::Show {
            frontier,
            bridge_id,
            json,
        } => {
            let b = load_bridge(&frontier, &bridge_id).unwrap_or_else(|e| fail_return(&e));
            if json {
                println!("{}", serde_json::to_string_pretty(&b).expect("serialize"));
                return;
            }
            println!();
            println!(
                " {}",
                format!("VELA · BRIDGES · SHOW · {}", b.id)
                    .to_uppercase()
                    .dimmed()
            );
            println!(" {}", style::tick_row(60));
            println!(" entity:    {}", b.entity_name);
            println!(" status:    {:?}", b.status);
            println!(" frontiers: {}", b.frontiers.join(", "));
            if !b.frontier_ids.is_empty() {
                println!(" frontier_ids: {}", b.frontier_ids.join(", "));
            }
            if let Some(t) = &b.tension {
                println!(" tension:   {t}");
            }
            println!(" derived_at: {}", b.derived_at);
            println!(" finding refs ({}):", b.finding_refs.len());
            for r in &b.finding_refs {
                let dir = r.direction.as_deref().unwrap_or("—");
                // Assertion text is truncated to 72 chars for display.
                let truncated: String = r.assertion_text.chars().take(72).collect();
                println!(
                    "   · [{}] {} (conf={:.2}, dir={})",
                    r.frontier, r.finding_id, r.confidence, dir
                );
                println!("     {truncated}");
            }
            println!();
        }
        // Mark a bridge as human-confirmed and record a review event.
        BridgesAction::Confirm {
            frontier,
            bridge_id,
            reviewer,
            note,
            json,
        } => {
            let mut b = load_bridge(&frontier, &bridge_id).unwrap_or_else(|e| fail_return(&e));
            let reviewer_id = reviewer.unwrap_or_else(default_reviewer_id);
            b.status = BridgeStatus::Confirmed;
            save_bridge(&frontier, &b).unwrap_or_else(|e| fail_return(&e));
            // NOTE(review): event-emission failures are silently discarded —
            // the status change itself already persisted above. Presumably
            // the audit event is best-effort by design; confirm.
            let _ = emit_bridge_reviewed_event(
                &frontier,
                &bridge_id,
                "confirmed",
                &reviewer_id,
                note.as_deref(),
            );
            if json {
                println!("{}", serde_json::to_string_pretty(&b).expect("serialize"));
                return;
            }
            println!();
            println!(" {} {} now confirmed", style::ok("confirmed"), b.id);
            println!();
        }
        // Mark a bridge as human-refuted and record a review event.
        BridgesAction::Refute {
            frontier,
            bridge_id,
            reviewer,
            note,
            json,
        } => {
            let mut b = load_bridge(&frontier, &bridge_id).unwrap_or_else(|e| fail_return(&e));
            let reviewer_id = reviewer.unwrap_or_else(default_reviewer_id);
            b.status = BridgeStatus::Refuted;
            save_bridge(&frontier, &b).unwrap_or_else(|e| fail_return(&e));
            // NOTE(review): same best-effort event emission as Confirm above.
            let _ = emit_bridge_reviewed_event(
                &frontier,
                &bridge_id,
                "refuted",
                &reviewer_id,
                note.as_deref(),
            );
            if json {
                println!("{}", serde_json::to_string_pretty(&b).expect("serialize"));
                return;
            }
            println!();
            println!(" {} {} now refuted", style::lost("refuted"), b.id);
            println!();
        }
    }
}
10315
/// CLI handler for `vela federation …`: manage the peer-hub registry and run
/// a (possibly dry-run) sync against a registered peer's published frontier.
fn cmd_federation(action: FederationAction) {
    use crate::federation::PeerHub;

    match action {
        // Register a new peer hub (id, URL, public key) in the project's
        // peer registry, rejecting duplicate ids.
        FederationAction::PeerAdd {
            frontier,
            id,
            url,
            pubkey,
            note,
            json,
        } => {
            let peer = PeerHub {
                id: id.clone(),
                url: url.clone(),
                public_key: pubkey.trim().to_string(),
                added_at: chrono::Utc::now().to_rfc3339(),
                note: note.clone(),
            };
            // Validate the peer record before touching the registry.
            peer.validate().unwrap_or_else(|e| fail_return(&e));

            let mut project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            if project.peers.iter().any(|p| p.id == id) {
                fail(&format!("peer '{id}' already in registry"));
            }
            project.peers.push(peer.clone());
            repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));

            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&json!({
                        "ok": true,
                        "command": "federation.peer-add",
                        "frontier": frontier.display().to_string(),
                        "peer": peer,
                        "registered_count": project.peers.len(),
                    }))
                    .expect("serialize federation.peer-add")
                );
            } else {
                println!(
                    "{} peer {} (pubkey {}…) at {}",
                    style::ok("registered"),
                    id,
                    // NOTE(review): byte-slice assumes public_key is ≥ 16
                    // bytes — presumably peer.validate() above enforces key
                    // length; confirm, otherwise this can panic.
                    &peer.public_key[..16],
                    peer.url
                );
            }
        }
        // Print the registered peers, either as JSON or as a table.
        FederationAction::PeerList { frontier, json } => {
            let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&json!({
                        "ok": true,
                        "command": "federation.peer-list",
                        "frontier": frontier.display().to_string(),
                        "peers": project.peers,
                    }))
                    .expect("serialize federation.peer-list")
                );
            } else {
                println!();
                println!(
                    " {}",
                    format!("VELA · FEDERATION · PEERS · {}", frontier.display())
                        .to_uppercase()
                        .dimmed()
                );
                println!(" {}", style::tick_row(60));
                if project.peers.is_empty() {
                    println!(" (no peers registered)");
                } else {
                    for p in &project.peers {
                        let note_suffix = if p.note.is_empty() {
                            String::new()
                        } else {
                            format!(" · {}", p.note)
                        };
                        println!(
                            " {:<24} {} {}…{note_suffix}",
                            p.id,
                            p.url,
                            // NOTE(review): same ≥16-byte assumption as in
                            // PeerAdd above.
                            &p.public_key[..16]
                        );
                    }
                }
            }
        }
        // Sync local frontier state against a peer, either by hub discovery
        // (--via-hub) or by fetching a manifest URL directly.
        FederationAction::Sync {
            frontier,
            peer_id,
            url,
            via_hub,
            vfr_id,
            allow_cross_vfr,
            dry_run,
            json,
        } => {
            use crate::federation::{self, DiscoveryResult};

            let mut project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let Some(peer) = project.peers.iter().find(|p| p.id == peer_id).cloned() else {
                fail(&format!(
                    "peer '{peer_id}' not in registry; run `vela federation peer add` first"
                ));
            };
            let local_frontier_id = project.frontier_id();

            // Guard: syncing against a different VFR id than our own is a
            // foot-gun (every peer finding becomes a conflict), so require an
            // explicit --allow-cross-vfr opt-in.
            if via_hub
                && let Some(target) = vfr_id.as_deref()
                && target != local_frontier_id
                && !allow_cross_vfr
            {
                fail(&format!(
                    "cross-vfr sync refused: --vfr-id {target} differs from local frontier_id {local_frontier_id}. \
                    Pass --allow-cross-vfr to opt in (every peer-side finding will be recorded as a \
                    missing_locally conflict). Or omit --vfr-id to default to the local frontier id."
                ));
            }

            // Local summary of how discovery/fetch went; failure variants
            // carry what is needed to record a conflict or report the error.
            #[derive(Debug)]
            enum SyncOutcome {
                Resolved(crate::project::Project, String), BrokenLocator(String, String, u16), UnverifiedEntry(String, String), EntryNotFound(String, u16),
            }

            let outcome = if via_hub {
                // Hub path: resolve the VFR entry (defaulting to our own id)
                // and verify its signature against the peer's public key.
                let target_vfr = vfr_id.clone().unwrap_or_else(|| local_frontier_id.clone());
                match federation::discover_peer_frontier(
                    &peer.url,
                    &target_vfr,
                    Some(&peer.public_key),
                ) {
                    DiscoveryResult::Resolved(p) => {
                        let src =
                            format!("{}/entries/{}", peer.url.trim_end_matches('/'), target_vfr);
                        SyncOutcome::Resolved(p, src)
                    }
                    DiscoveryResult::BrokenLocator {
                        vfr_id,
                        locator,
                        status,
                    } => SyncOutcome::BrokenLocator(vfr_id, locator, status),
                    DiscoveryResult::UnverifiedEntry { vfr_id, reason } => {
                        SyncOutcome::UnverifiedEntry(vfr_id, reason)
                    }
                    DiscoveryResult::EntryNotFound { vfr_id, status } => {
                        SyncOutcome::EntryNotFound(vfr_id, status)
                    }
                    DiscoveryResult::Unreachable { url, error } => {
                        fail(&format!("peer hub unreachable ({url}): {error}"));
                    }
                }
            } else {
                // Direct path: fetch the manifest from --url, or from the
                // peer's conventional manifest location.
                let resolved_url = url.unwrap_or_else(|| {
                    let base = peer.url.trim_end_matches('/');
                    format!("{base}/manifest/{local_frontier_id}.json")
                });
                match federation::fetch_peer_frontier(&resolved_url) {
                    Ok(p) => SyncOutcome::Resolved(p, resolved_url),
                    Err(e) => fail(&format!("direct fetch failed: {e}")),
                }
            };

            // `peer_source` is only assigned on the Resolved arm; every other
            // arm returns out of the function, so it is never read
            // uninitialized.
            let peer_source: String;
            let peer_state = match outcome {
                SyncOutcome::Resolved(p, src) => {
                    if !json {
                        println!(" · resolved via {src}");
                    }
                    peer_source = src;
                    p
                }
                // Entry resolved but the locator it points at is dead:
                // record a conflict unless this is a dry run.
                SyncOutcome::BrokenLocator(vfr, locator, status) => {
                    if dry_run {
                        if json {
                            println!(
                                "{}",
                                serde_json::to_string_pretty(&json!({
                                    "ok": true,
                                    "command": "federation.sync",
                                    "dry_run": true,
                                    "outcome": "broken_locator",
                                    "vfr_id": vfr,
                                    "locator": locator,
                                    "http_status": status,
                                }))
                                .expect("serialize")
                            );
                        } else {
                            println!(
                                "{} dry-run: peer entry resolved but locator dead",
                                style::warn("broken_locator")
                            );
                            println!("  vfr_id:  {vfr}");
                            println!("  locator: {locator} (HTTP {status})");
                        }
                        return;
                    }
                    let report = federation::record_locator_failure(
                        &mut project,
                        &peer_id,
                        &vfr,
                        &locator,
                        status,
                    );
                    repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));
                    if json {
                        println!(
                            "{}",
                            serde_json::to_string_pretty(&json!({
                                "ok": true,
                                "command": "federation.sync",
                                "outcome": "broken_locator",
                                "report": report,
                            }))
                            .expect("serialize")
                        );
                    } else {
                        println!(
                            "{} sync recorded broken-locator conflict against {peer_id}",
                            style::warn("broken_locator")
                        );
                        println!("  vfr_id:  {vfr}");
                        println!("  locator: {locator} (HTTP {status})");
                        println!("  events appended: {}", report.events_appended);
                    }
                    return;
                }
                // Entry signature failed to verify: halt the sync and (unless
                // dry-run) record the failure in the event log.
                SyncOutcome::UnverifiedEntry(vfr, reason) => {
                    if dry_run {
                        if json {
                            println!(
                                "{}",
                                serde_json::to_string_pretty(&json!({
                                    "ok": true,
                                    "command": "federation.sync",
                                    "dry_run": true,
                                    "outcome": "unverified_peer_entry",
                                    "vfr_id": vfr,
                                    "reason": reason,
                                }))
                                .expect("serialize")
                            );
                        } else {
                            println!(
                                "{} dry-run: peer entry signature did not verify",
                                style::lost("unverified_peer_entry")
                            );
                            println!("  vfr_id: {vfr}");
                            println!("  reason: {reason}");
                        }
                        return;
                    }
                    let report =
                        federation::record_unverified_entry(&mut project, &peer_id, &vfr, &reason);
                    repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));
                    if json {
                        println!(
                            "{}",
                            serde_json::to_string_pretty(&json!({
                                "ok": true,
                                "command": "federation.sync",
                                "outcome": "unverified_peer_entry",
                                "report": report,
                            }))
                            .expect("serialize")
                        );
                    } else {
                        println!(
                            "{} sync halted; peer's registry entry signature did not verify",
                            style::lost("unverified_peer_entry")
                        );
                        println!("  vfr_id: {vfr}");
                        println!("  reason: {reason}");
                    }
                    return;
                }
                // Hub reachable but does not publish the requested vfr_id:
                // nothing to record; report and exit.
                SyncOutcome::EntryNotFound(vfr, status) => {
                    if json {
                        println!(
                            "{}",
                            serde_json::to_string_pretty(&json!({
                                "ok": false,
                                "command": "federation.sync",
                                "outcome": "entry_not_found",
                                "vfr_id": vfr,
                                "http_status": status,
                            }))
                            .expect("serialize")
                        );
                    } else {
                        println!(
                            "{} peer's hub does not publish vfr_id {vfr} (HTTP {status})",
                            style::warn("entry_not_found")
                        );
                    }
                    return;
                }
            };

            // Dry run: diff only, never mutate local state.
            if dry_run {
                let conflicts = federation::diff_frontiers(&project, &peer_state);
                if json {
                    println!(
                        "{}",
                        serde_json::to_string_pretty(&json!({
                            "ok": true,
                            "command": "federation.sync",
                            "dry_run": true,
                            "peer_id": peer_id,
                            "peer_source": peer_source,
                            "conflicts": conflicts,
                        }))
                        .expect("serialize federation.sync (dry-run)")
                    );
                } else {
                    println!(
                        "{} dry-run vs {peer_id} ({}): {} conflict(s)",
                        style::ok("ok"),
                        peer_source,
                        conflicts.len()
                    );
                    for c in &conflicts {
                        println!("   · {} {} {}", c.kind.as_str(), c.finding_id, c.detail);
                    }
                }
                return;
            }

            // Real sync: record conflicts/events into the project, persist.
            let report = federation::sync_with_peer(&mut project, &peer_id, &peer_state);
            repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));

            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&json!({
                        "ok": true,
                        "command": "federation.sync",
                        "peer_id": peer_id,
                        "peer_source": peer_source,
                        "report": report,
                    }))
                    .expect("serialize federation.sync")
                );
            } else {
                println!(
                    "{} synced with {} ({})",
                    style::ok("ok"),
                    peer_id,
                    peer_source
                );
                // Snapshot hashes are clamped to 16 chars for display; the
                // `.min(len)` guard makes short hashes safe to slice.
                println!(
                    "  our:  {}",
                    &report.our_snapshot_hash[..16.min(report.our_snapshot_hash.len())]
                );
                println!(
                    "  peer: {}",
                    &report.peer_snapshot_hash[..16.min(report.peer_snapshot_hash.len())]
                );
                println!(
                    "  conflicts: {}  events appended: {}",
                    report.conflicts.len(),
                    report.events_appended
                );
                for c in &report.conflicts {
                    println!("   · {} {} {}", c.kind.as_str(), c.finding_id, c.detail);
                }
            }
        }
        // Push a signed conflict-resolution event to a peer hub (delegated).
        FederationAction::PushResolution {
            frontier,
            conflict_event_id,
            to,
            key,
            vfr_id,
            json,
        } => {
            cmd_federation_push_resolution(frontier, conflict_event_id, to, key, vfr_id, json);
        }
        // Remove a peer from the registry by id.
        FederationAction::PeerRemove { frontier, id, json } => {
            let mut project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let before = project.peers.len();
            project.peers.retain(|p| p.id != id);
            // retain() left the length unchanged → nothing matched the id.
            if project.peers.len() == before {
                fail(&format!("peer '{id}' not found in registry"));
            }
            repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));

            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&json!({
                        "ok": true,
                        "command": "federation.peer-remove",
                        "frontier": frontier.display().to_string(),
                        "removed": id,
                        "remaining": project.peers.len(),
                    }))
                    .expect("serialize federation.peer-remove")
                );
            } else {
                println!(
                    "{} peer {} ({} remaining)",
                    style::ok("removed"),
                    id,
                    project.peers.len()
                );
            }
        }
    }
}
10743
10744fn cmd_federation_push_resolution(
10756 frontier: PathBuf,
10757 conflict_event_id: String,
10758 to: String,
10759 key: Option<PathBuf>,
10760 vfr_id: Option<String>,
10761 json: bool,
10762) {
10763 use crate::canonical;
10764 use crate::sign;
10765
10766 let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
10767
10768 let Some(peer) = project.peers.iter().find(|p| p.id == to).cloned() else {
10769 fail(&format!(
10770 "peer '{to}' not in registry; run `vela federation peer-add` first"
10771 ));
10772 };
10773
10774 let Some(resolution) = project
10776 .events
10777 .iter()
10778 .find(|e| {
10779 e.kind == "frontier.conflict_resolved"
10780 && e.payload.get("conflict_event_id").and_then(|v| v.as_str())
10781 == Some(conflict_event_id.as_str())
10782 })
10783 .cloned()
10784 else {
10785 fail(&format!(
10786 "no frontier.conflict_resolved event paired with conflict {conflict_event_id} in {}",
10787 frontier.display()
10788 ));
10789 };
10790
10791 let actor_id = resolution.actor.id.clone();
10794 let Some(actor) = project.actors.iter().find(|a| a.id == actor_id) else {
10795 fail(&format!(
10796 "resolution event's actor.id ({actor_id}) is not in the frontier's actor registry; \
10797 register the reviewer with `vela actor add` before pushing"
10798 ));
10799 };
10800
10801 let key_path = key.unwrap_or_else(|| {
10804 let home = std::env::var("HOME").unwrap_or_default();
10805 let base = PathBuf::from(home)
10806 .join(".config")
10807 .join("vela")
10808 .join("keys");
10809 let safe_id = actor.id.replace([':', '/'], "_");
10810 let by_actor = base.join(format!("{safe_id}.key"));
10811 if by_actor.exists() {
10812 by_actor
10813 } else {
10814 base.join("private.key")
10815 }
10816 });
10817
10818 let signing_key = sign::load_signing_key_from_path(&key_path).unwrap_or_else(|e| {
10819 fail_return(&format!(
10820 "load private key from {}: {e}",
10821 key_path.display()
10822 ))
10823 });
10824 let pubkey_hex = sign::pubkey_hex(&signing_key);
10825 if !pubkey_hex.eq_ignore_ascii_case(&actor.public_key) {
10826 fail(&format!(
10827 "private key at {} does not match actor {}'s registered public key. \
10828 Loaded pubkey {}, expected {}.",
10829 key_path.display(),
10830 actor.id,
10831 &pubkey_hex[..16],
10832 &actor.public_key[..16]
10833 ));
10834 }
10835
10836 let signature_hex = sign::sign_event(&resolution, &signing_key)
10839 .unwrap_or_else(|e| fail_return(&format!("sign event: {e}")));
10840
10841 let mut body = resolution.clone();
10846 body.signature = None;
10847 let body_value =
10848 serde_json::to_value(&body).unwrap_or_else(|e| fail_return(&format!("serialize: {e}")));
10849 let _canonical_check = canonical::to_canonical_bytes(&body_value)
10850 .unwrap_or_else(|e| fail_return(&format!("canonicalize: {e}")));
10851
10852 let target_vfr = vfr_id.unwrap_or_else(|| project.frontier_id());
10853 let url = format!(
10854 "{}/entries/{}/events",
10855 peer.url.trim_end_matches('/'),
10856 target_vfr
10857 );
10858
10859 let url_owned = url.clone();
10861 let pubkey_owned = pubkey_hex.clone();
10862 let signature_owned = signature_hex.clone();
10863 let body_owned = body_value.clone();
10864 let response: Result<(u16, String), String> = std::thread::spawn(move || {
10865 let client = reqwest::blocking::Client::new();
10866 let resp = client
10867 .post(&url_owned)
10868 .header("X-Vela-Signer-Pubkey", &pubkey_owned)
10869 .header("X-Vela-Signature", &signature_owned)
10870 .json(&body_owned)
10871 .send()
10872 .map_err(|e| format!("HTTP POST {url_owned}: {e}"))?;
10873 let status = resp.status().as_u16();
10874 let text = resp.text().unwrap_or_default();
10875 Ok((status, text))
10876 })
10877 .join()
10878 .map_err(|_| "push thread panicked".to_string())
10879 .unwrap_or_else(|e| fail_return(&e));
10880
10881 let (status, text) = response.unwrap_or_else(|e| fail_return(&e));
10882 let parsed: serde_json::Value =
10883 serde_json::from_str(&text).unwrap_or_else(|_| json!({ "raw": text }));
10884
10885 let accepted = matches!(status, 200..=202);
10886 if json {
10887 println!(
10888 "{}",
10889 serde_json::to_string_pretty(&json!({
10890 "ok": accepted,
10891 "command": "federation.push-resolution",
10892 "frontier": frontier.display().to_string(),
10893 "peer_id": to,
10894 "url": url,
10895 "conflict_event_id": conflict_event_id,
10896 "event_id": resolution.id,
10897 "actor_id": actor.id,
10898 "http_status": status,
10899 "response": parsed,
10900 }))
10901 .expect("serialize federation.push-resolution")
10902 );
10903 } else if accepted {
10904 println!(
10905 "{} resolution {} pushed to {} (HTTP {})",
10906 style::ok("ok"),
10907 &resolution.id[..16.min(resolution.id.len())],
10908 to,
10909 status
10910 );
10911 println!(" url: {url}");
10912 println!(" signer: {} (actor {})", &pubkey_hex[..16], actor.id);
10913 } else {
10914 println!("{} push refused (HTTP {})", style::lost("rejected"), status);
10915 println!(" url: {url}");
10916 println!(" response: {text}");
10917 std::process::exit(1);
10918 }
10919}
10920
/// CLI handler for `vela queue …`: list, clear, or sign-and-apply the queued
/// actions stored in the queue file (default path from `queue::default_queue_path`).
fn cmd_queue(action: QueueAction) {
    use crate::queue;
    match action {
        // Print the queued actions, as JSON or as an indexed table.
        QueueAction::List { queue_file, json } => {
            let path = queue_file.unwrap_or_else(queue::default_queue_path);
            let q = queue::load(&path).unwrap_or_else(|e| fail_return(&e));
            if json {
                let payload = json!({
                    "ok": true,
                    "command": "queue.list",
                    "queue_file": path.display().to_string(),
                    "schema": q.schema,
                    "actions": q.actions,
                });
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload).expect("failed to serialize queue.list")
                );
            } else {
                println!();
                println!(
                    " {}",
                    format!("VELA · QUEUE · LIST · {}", path.display())
                        .to_uppercase()
                        .dimmed()
                );
                println!(" {}", style::tick_row(60));
                if q.actions.is_empty() {
                    println!(" (queue is empty)");
                } else {
                    for (idx, action) in q.actions.iter().enumerate() {
                        println!(
                            " [{idx}] {} → {}  queued {}",
                            action.kind,
                            action.frontier.display(),
                            action.queued_at
                        );
                    }
                }
            }
        }
        // Drop every queued action; reports how many were removed.
        QueueAction::Clear { queue_file, json } => {
            let path = queue_file.unwrap_or_else(queue::default_queue_path);
            let dropped = queue::clear(&path).unwrap_or_else(|e| fail_return(&e));
            if json {
                let payload = json!({
                    "ok": true,
                    "command": "queue.clear",
                    "queue_file": path.display().to_string(),
                    "dropped": dropped,
                });
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize queue.clear")
                );
            } else {
                println!("{} dropped {dropped} queued action(s)", style::ok("ok"));
            }
        }
        // Sign and apply queued actions with the given actor/key; actions
        // that are declined interactively or fail to apply stay in the queue.
        QueueAction::Sign {
            actor,
            key,
            queue_file,
            yes_to_all,
            json,
        } => {
            let path = queue_file.unwrap_or_else(queue::default_queue_path);
            let q = queue::load(&path).unwrap_or_else(|e| fail_return(&e));
            if q.actions.is_empty() {
                if json {
                    // NOTE(review): this prints compact JSON, unlike the
                    // pretty-printed payloads everywhere else — presumably
                    // unintended; confirm before changing output format.
                    println!("{}", json!({"ok": true, "signed": 0, "remaining": 0}));
                } else {
                    println!("{} queue is empty", style::ok("ok"));
                }
                return;
            }
            // Private key is read as trimmed hex from the --key file.
            let key_hex = std::fs::read_to_string(&key)
                .map(|s| s.trim().to_string())
                .unwrap_or_else(|e| fail_return(&format!("read key {}: {e}", key.display())));
            let signing_key = parse_signing_key(&key_hex);
            let mut signed_count = 0usize;
            let mut remaining = Vec::new();
            for action in q.actions.iter() {
                // Without --yes-to-all each action needs an interactive yes;
                // declined actions are kept for a later run.
                if !yes_to_all && !confirm_action(action) {
                    remaining.push(action.clone());
                    continue;
                }
                match sign_and_apply(&signing_key, &actor, action) {
                    Ok(report) => {
                        signed_count += 1;
                        if !json {
                            println!(
                                "{} {} on {} → {}",
                                style::ok("signed"),
                                action.kind,
                                action.frontier.display(),
                                report
                            );
                        }
                    }
                    Err(error) => {
                        // Failures are non-fatal: keep the action queued and
                        // report the error on stderr.
                        remaining.push(action.clone());
                        if !json {
                            eprintln!(
                                "{} {} on {}: {error}",
                                style::warn("failed"),
                                action.kind,
                                action.frontier.display()
                            );
                        }
                    }
                }
            }
            // Persist whatever was not signed back into the queue file.
            queue::replace_actions(&path, remaining.clone()).unwrap_or_else(|e| fail_return(&e));
            if json {
                let payload = json!({
                    "ok": true,
                    "command": "queue.sign",
                    "signed": signed_count,
                    "remaining": remaining.len(),
                });
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload).expect("failed to serialize queue.sign")
                );
            } else {
                println!(
                    "{} signed {signed_count} action(s); {} remaining in queue",
                    style::ok("ok"),
                    remaining.len()
                );
            }
        }
    }
}
11062
11063fn parse_signing_key(hex_str: &str) -> ed25519_dalek::SigningKey {
11064 let bytes = hex::decode(hex_str)
11065 .unwrap_or_else(|e| fail_return(&format!("invalid private-key hex: {e}")));
11066 let key_bytes: [u8; 32] = bytes
11067 .try_into()
11068 .unwrap_or_else(|_| fail_return("private key must be 32 bytes"));
11069 ed25519_dalek::SigningKey::from_bytes(&key_bytes)
11070}
11071
11072fn confirm_action(action: &crate::queue::QueuedAction) -> bool {
11073 use std::io::{self, BufRead, Write};
11074 let mut stdout = io::stdout().lock();
11075 let _ = writeln!(
11076 stdout,
11077 " sign {} on {}? [y/N] ",
11078 action.kind,
11079 action.frontier.display()
11080 );
11081 let _ = stdout.flush();
11082 drop(stdout);
11083 let stdin = io::stdin();
11084 let mut line = String::new();
11085 if stdin.lock().read_line(&mut line).is_err() {
11086 return false;
11087 }
11088 matches!(line.trim().to_lowercase().as_str(), "y" | "yes")
11089}
11090
/// Signs a queued action with `signing_key` and applies it to the frontier
/// file referenced by `action.frontier`.
///
/// Two families of action kinds are handled:
/// - the four `propose_*` kinds build a proposal, sign it, and
///   create-or-apply it against the frontier;
/// - `accept_proposal` / `reject_proposal` sign a review decision and record
///   it against an existing proposal.
///
/// Returns a short human-readable summary on success, or an error string
/// when a required argument is missing or a downstream call fails.
fn sign_and_apply(
    signing_key: &ed25519_dalek::SigningKey,
    actor: &str,
    action: &crate::queue::QueuedAction,
) -> Result<String, String> {
    use crate::events::StateTarget;
    use crate::proposals;
    let args = &action.args;
    match action.kind.as_str() {
        "propose_review" | "propose_note" | "propose_revise_confidence" | "propose_retract" => {
            // Map the queued-action kind onto the proposal event kind.
            let kind = match action.kind.as_str() {
                "propose_review" => "finding.review",
                "propose_note" => "finding.note",
                "propose_revise_confidence" => "finding.confidence_revise",
                "propose_retract" => "finding.retract",
                _ => unreachable!(),
            };
            let target_id = args
                .get("target_finding_id")
                .and_then(Value::as_str)
                .ok_or("target_finding_id missing")?;
            let reason = args
                .get("reason")
                .and_then(Value::as_str)
                .ok_or("reason missing")?;
            // Kind-specific payload; each variant validates its own required arg.
            let payload = match action.kind.as_str() {
                "propose_review" => {
                    let status = args
                        .get("status")
                        .and_then(Value::as_str)
                        .ok_or("status missing")?;
                    json!({"status": status})
                }
                "propose_note" => {
                    let text = args
                        .get("text")
                        .and_then(Value::as_str)
                        .ok_or("text missing")?;
                    json!({"text": text})
                }
                "propose_revise_confidence" => {
                    let new_score = args
                        .get("new_score")
                        .and_then(Value::as_f64)
                        .ok_or("new_score missing")?;
                    json!({"new_score": new_score})
                }
                "propose_retract" => json!({}),
                _ => unreachable!(),
            };
            // Honor a queued timestamp when present; otherwise stamp with "now".
            let created_at = args
                .get("created_at")
                .and_then(Value::as_str)
                .map(String::from)
                .unwrap_or_else(|| chrono::Utc::now().to_rfc3339());
            let mut proposal = proposals::new_proposal(
                kind,
                StateTarget {
                    r#type: "finding".to_string(),
                    id: target_id.to_string(),
                },
                actor,
                "human",
                reason,
                payload,
                Vec::new(),
                Vec::new(),
            );
            proposal.created_at = created_at;
            // Recompute the id after mutating created_at (proposal_id derives from content).
            proposal.id = proposals::proposal_id(&proposal);
            // NOTE(review): the signature is computed but not attached to the
            // proposal here — confirm create_or_apply records it elsewhere.
            let _signature = crate::sign::sign_proposal(&proposal, signing_key)?;
            let result = proposals::create_or_apply(&action.frontier, proposal, false)
                .map_err(|e| format!("create_or_apply: {e}"))?;
            Ok(format!("proposal {}", result.proposal_id))
        }
        "accept_proposal" | "reject_proposal" => {
            let proposal_id = args
                .get("proposal_id")
                .and_then(Value::as_str)
                .ok_or("proposal_id missing")?;
            let reason = args
                .get("reason")
                .and_then(Value::as_str)
                .ok_or("reason missing")?;
            let timestamp = args
                .get("timestamp")
                .and_then(Value::as_str)
                .map(String::from)
                .unwrap_or_else(|| chrono::Utc::now().to_rfc3339());
            // Canonical preimage of the review decision that gets signed.
            let preimage = json!({
                "action": if action.kind == "accept_proposal" { "accept" } else { "reject" },
                "proposal_id": proposal_id,
                "reviewer_id": actor,
                "reason": reason,
                "timestamp": timestamp,
            });
            let bytes = crate::canonical::to_canonical_bytes(&preimage)?;
            use ed25519_dalek::Signer;
            // NOTE(review): this signature is also discarded — verify whether
            // the accept/reject paths are expected to persist it.
            let _signature = hex::encode(signing_key.sign(&bytes).to_bytes());
            if action.kind == "accept_proposal" {
                let event_id =
                    crate::proposals::accept_at_path(&action.frontier, proposal_id, actor, reason)
                        .map_err(|e| format!("accept_at_path: {e}"))?;
                Ok(format!("event {event_id}"))
            } else {
                crate::proposals::reject_at_path(&action.frontier, proposal_id, actor, reason)
                    .map_err(|e| format!("reject_at_path: {e}"))?;
                Ok(format!("rejected {proposal_id}"))
            }
        }
        other => Err(format!("unsupported queued action kind '{other}'")),
    }
}
11212
11213fn cmd_entity(action: EntityAction) {
11225 use crate::entity_resolve;
11226 match action {
11227 EntityAction::Resolve {
11228 frontier,
11229 force,
11230 json,
11231 } => {
11232 let mut p = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
11233 let report = entity_resolve::resolve_frontier(&mut p, force);
11234 repo::save_to_path(&frontier, &p).unwrap_or_else(|e| fail_return(&e));
11235 if json {
11236 println!(
11237 "{}",
11238 serde_json::to_string_pretty(&serde_json::json!({
11239 "ok": true,
11240 "command": "entity.resolve",
11241 "frontier_path": frontier.display().to_string(),
11242 "report": report,
11243 }))
11244 .expect("serialize")
11245 );
11246 } else {
11247 println!(
11248 "{} resolved {} of {} entities ({} already, {} unresolved) across {} findings",
11249 style::ok("entity"),
11250 report.resolved,
11251 report.total_entities,
11252 report.already_resolved,
11253 report.unresolved_count,
11254 report.findings_touched,
11255 );
11256 let unresolved_summary: std::collections::BTreeSet<&str> = report
11257 .per_finding
11258 .iter()
11259 .flat_map(|f| f.unresolved.iter().map(String::as_str))
11260 .collect();
11261 if !unresolved_summary.is_empty() {
11262 let take = unresolved_summary.iter().take(8).collect::<Vec<_>>();
11263 println!(
11264 " unresolved (first {}): {}",
11265 take.len(),
11266 take.iter().copied().cloned().collect::<Vec<_>>().join(", ")
11267 );
11268 }
11269 }
11270 }
11271 EntityAction::List { json } => {
11272 let entries: Vec<serde_json::Value> = entity_resolve::iter_bundled()
11273 .map(|(name, etype, source, id)| {
11274 serde_json::json!({
11275 "canonical_name": name,
11276 "entity_type": etype,
11277 "source": source,
11278 "id": id,
11279 })
11280 })
11281 .collect();
11282 if json {
11283 println!(
11284 "{}",
11285 serde_json::to_string_pretty(&serde_json::json!({
11286 "ok": true,
11287 "command": "entity.list",
11288 "count": entries.len(),
11289 "entries": entries,
11290 }))
11291 .expect("serialize")
11292 );
11293 } else {
11294 println!("{} {} bundled entries", style::ok("entity"), entries.len());
11295 for e in &entries {
11296 println!(
11297 " {:32} {:18} {} {}",
11298 e["canonical_name"].as_str().unwrap_or("?"),
11299 e["entity_type"].as_str().unwrap_or("?"),
11300 e["source"].as_str().unwrap_or("?"),
11301 e["id"].as_str().unwrap_or("?"),
11302 );
11303 }
11304 }
11305 }
11306 }
11307}
11308
/// Handles `vela link` subcommands. Currently only `LinkAction::Add`, which
/// attaches a typed link from a local finding to a local or cross-frontier
/// target, optionally probing cross-frontier targets over HTTP.
fn cmd_link(action: LinkAction) {
    use crate::bundle::{Link, LinkRef};
    match action {
        LinkAction::Add {
            frontier,
            from,
            to,
            r#type,
            note,
            inferred_by,
            no_check_target,
            json,
        } => {
            // Validate flag values before touching the frontier file.
            validate_enum_arg("--type", &r#type, bundle::VALID_LINK_TYPES);
            if !["compiler", "reviewer", "author"].contains(&inferred_by.as_str()) {
                fail(&format!(
                    "invalid --inferred-by '{inferred_by}'. Valid: compiler, reviewer, author"
                ));
            }
            let parsed = LinkRef::parse(&to).unwrap_or_else(|e| {
                fail(&format!(
                    "invalid --to '{to}': {e}. Expected vf_<hex> or vf_<hex>@vfr_<hex>"
                ))
            });
            let mut p = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let source_idx = p
                .findings
                .iter()
                .position(|f| f.id == from)
                .unwrap_or_else(|| {
                    fail_return(&format!("--from finding '{from}' not in frontier"))
                });
            // A local target must already exist in this frontier.
            if let LinkRef::Local { vf_id } = &parsed
                && !p.findings.iter().any(|f| &f.id == vf_id)
            {
                fail(&format!(
                    "local --to target '{vf_id}' not in frontier; add the target finding first"
                ));
            }
            // A cross-frontier target requires a declared dep for its vfr_id.
            if let LinkRef::Cross { vfr_id, .. } = &parsed
                && p.dep_for_vfr(vfr_id).is_none()
            {
                fail(&format!(
                    "cross-frontier --to references vfr_id '{vfr_id}' but no matching dep is declared. Run `vela frontier add-dep {vfr_id} --locator <url> --snapshot <hash>` first."
                ));
            }

            // Best-effort liveness check of a cross-frontier target: fetch the
            // dep's frontier from its http(s) locator and warn (never fail)
            // when the target is superseded or missing. Skipped with
            // --no-check-target, for non-http locators, or on any network /
            // parse failure (the let-chain simply falls through).
            let mut target_warning: Option<String> = None;
            if let LinkRef::Cross {
                vfr_id: target_vfr,
                vf_id: target_vf,
            } = &parsed
                && !no_check_target
                && let Some(dep) = p.dep_for_vfr(target_vfr)
                && let Some(locator) = dep.locator.as_deref()
                && (locator.starts_with("http://") || locator.starts_with("https://"))
            {
                let client = reqwest::blocking::Client::builder()
                    .timeout(std::time::Duration::from_secs(15))
                    .build()
                    .ok();
                if let Some(client) = client
                    && let Ok(resp) = client.get(locator).send()
                    && resp.status().is_success()
                    && let Ok(dep_project) = resp.json::<crate::project::Project>()
                {
                    if let Some(target_finding) =
                        dep_project.findings.iter().find(|f| &f.id == target_vf)
                    {
                        if target_finding.flags.superseded {
                            target_warning = Some(format!(
                                "warn · cross-frontier target '{target_vf}' in '{target_vfr}' has flags.superseded = true. \
You may be linking to outdated wording. Pull --transitive and inspect the supersedes chain to find the current finding. \
Use --no-check-target to skip this check."
                            ));
                        }
                    } else {
                        target_warning = Some(format!(
                            "warn · cross-frontier target '{target_vf}' not found in dep '{target_vfr}' (fetched from {locator}). \
The target may have been removed or never existed in the pinned snapshot."
                        ));
                    }
                }
            }

            // Append the link, refresh derived stats, and persist.
            let now = chrono::Utc::now().to_rfc3339_opts(chrono::SecondsFormat::Secs, true);
            let link = Link {
                target: to.clone(),
                link_type: r#type.clone(),
                note: note.clone(),
                inferred_by: inferred_by.clone(),
                created_at: now,
                mechanism: None,
            };
            p.findings[source_idx].links.push(link);
            project::recompute_stats(&mut p);
            repo::save_to_path(&frontier, &p).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "link.add",
                "frontier": frontier.display().to_string(),
                "from": from,
                "to": to,
                "type": r#type,
                "cross_frontier": parsed.is_cross_frontier(),
            });
            if json {
                // Attach the warning (if any) to the JSON payload.
                let mut p2 = payload.clone();
                if let Some(w) = &target_warning
                    && let serde_json::Value::Object(m) = &mut p2
                {
                    m.insert(
                        "target_warning".to_string(),
                        serde_json::Value::String(w.clone()),
                    );
                }
                println!(
                    "{}",
                    serde_json::to_string_pretty(&p2).expect("failed to serialize link.add")
                );
            } else {
                println!(
                    "{} {} --[{}]--> {}{}",
                    style::ok("link"),
                    from,
                    r#type,
                    to,
                    if parsed.is_cross_frontier() {
                        " (cross-frontier)"
                    } else {
                        ""
                    }
                );
                if let Some(w) = target_warning {
                    println!(" {w}");
                }
            }
        }
    }
}
11454
/// Handles `vela frontier` subcommands: scaffolding a new frontier file
/// (`New`), materializing its repo layout (`Materialize`), managing
/// cross-frontier dependencies (`AddDep`/`ListDeps`/`RemoveDep`/
/// `RefreshDeps`), and the windowed diff (`Diff`).
fn cmd_frontier(action: FrontierAction) {
    use crate::project::ProjectDependency;
    use crate::repo;
    match action {
        // `frontier new` — write an empty, fully-initialized frontier file.
        FrontierAction::New {
            path,
            name,
            description,
            force,
            json,
        } => {
            // Refuse to clobber an existing file unless --force was given.
            if path.exists() && !force {
                fail(&format!(
                    "{} already exists; pass --force to overwrite",
                    path.display()
                ));
            }
            let now = chrono::Utc::now().to_rfc3339_opts(chrono::SecondsFormat::Secs, true);
            // Every collection starts empty; schema/version fields come from
            // the project module's constants.
            let project = project::Project {
                vela_version: project::VELA_SCHEMA_VERSION.to_string(),
                schema: project::VELA_SCHEMA_URL.to_string(),
                frontier_id: None,
                project: project::ProjectMeta {
                    name: name.clone(),
                    description: description.clone(),
                    compiled_at: now,
                    compiler: project::VELA_COMPILER_VERSION.to_string(),
                    papers_processed: 0,
                    errors: 0,
                    dependencies: Vec::new(),
                },
                stats: project::ProjectStats::default(),
                findings: Vec::new(),
                sources: Vec::new(),
                evidence_atoms: Vec::new(),
                condition_records: Vec::new(),
                review_events: Vec::new(),
                confidence_updates: Vec::new(),
                events: Vec::new(),
                proposals: Vec::new(),
                proof_state: proposals::ProofState::default(),
                signatures: Vec::new(),
                actors: Vec::new(),
                replications: Vec::new(),
                datasets: Vec::new(),
                code_artifacts: Vec::new(),
                artifacts: Vec::new(),
                predictions: Vec::new(),
                resolutions: Vec::new(),
                peers: Vec::new(),
                negative_results: Vec::new(),
                trajectories: Vec::new(),
            };
            repo::save_to_path(&path, &project).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "frontier.new",
                "path": path.display().to_string(),
                "name": name,
                "schema": project::VELA_SCHEMA_URL,
                "vela_version": env!("CARGO_PKG_VERSION"),
                "next_steps": [
                    "vela finding add <path> --assertion '...' --author 'reviewer:you' --apply",
                    "vela sign generate-keypair --out keys",
                    "vela actor add <path> reviewer:you --pubkey \"$(cat keys/public.key)\"",
                    "vela registry publish <path> --owner reviewer:you --key keys/private.key --locator <url> --to https://vela-hub.fly.dev",
                ],
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize frontier.new")
                );
            } else {
                println!(
                    "{} scaffolded frontier '{name}' at {}",
                    style::ok("frontier"),
                    path.display()
                );
                // Human-readable version of the "next_steps" list above.
                println!(" next steps:");
                println!(
                    " 1. vela finding add {} --assertion '...' --author 'reviewer:you' --apply",
                    path.display()
                );
                println!(" 2. vela sign generate-keypair --out keys");
                println!(
                    " 3. vela actor add {} reviewer:you --pubkey \"$(cat keys/public.key)\"",
                    path.display()
                );
                println!(
                    " 4. vela registry publish {} --owner reviewer:you --key keys/private.key --locator <url> --to https://vela-hub.fly.dev",
                    path.display()
                );
            }
        }
        // `frontier materialize` — delegate to frontier_repo and report.
        FrontierAction::Materialize { frontier, json } => {
            let payload = frontier_repo::materialize(&frontier).unwrap_or_else(|e| fail_return(&e));
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize frontier materialize")
                );
            } else {
                println!(
                    "{} materialized frontier repo at {}",
                    style::ok("frontier"),
                    frontier.display()
                );
            }
        }
        // `frontier add-dep` — declare a cross-frontier dependency, pinned to
        // a snapshot hash. Duplicate vfr_ids are rejected.
        FrontierAction::AddDep {
            frontier,
            vfr_id,
            locator,
            snapshot,
            name,
            json,
        } => {
            let mut p = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            if p.project
                .dependencies
                .iter()
                .any(|d| d.vfr_id.as_deref() == Some(&vfr_id))
            {
                fail(&format!(
                    "cross-frontier dependency '{vfr_id}' already declared; remove it first via `vela frontier remove-dep`"
                ));
            }
            // --name defaults to the vfr_id itself.
            let dep = ProjectDependency {
                name: name.unwrap_or_else(|| vfr_id.clone()),
                source: "vela.hub".into(),
                version: None,
                pinned_hash: None,
                vfr_id: Some(vfr_id.clone()),
                locator: Some(locator.clone()),
                pinned_snapshot_hash: Some(snapshot.clone()),
            };
            p.project.dependencies.push(dep);
            repo::save_to_path(&frontier, &p).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "frontier.add-dep",
                "frontier": frontier.display().to_string(),
                "vfr_id": vfr_id,
                "locator": locator,
                "pinned_snapshot_hash": snapshot,
                "declared_count": p.project.dependencies.len(),
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize frontier.add-dep")
                );
            } else {
                println!(
                    "{} declared cross-frontier dep {vfr_id}",
                    style::ok("frontier")
                );
                println!(" locator: {locator}");
                println!(" snapshot: {snapshot}");
            }
        }
        // `frontier list-deps` — print all declared dependencies (both
        // compile-time and cross-frontier).
        FrontierAction::ListDeps { frontier, json } => {
            let p = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let deps: Vec<&ProjectDependency> = p.project.dependencies.iter().collect();
            if json {
                let payload = json!({
                    "ok": true,
                    "command": "frontier.list-deps",
                    "frontier": frontier.display().to_string(),
                    "count": deps.len(),
                    "dependencies": deps,
                });
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize frontier.list-deps")
                );
            } else {
                println!();
                println!(
                    " {}",
                    format!("VELA · FRONTIER · LIST-DEPS · {}", frontier.display())
                        .to_uppercase()
                        .dimmed()
                );
                println!(" {}", style::tick_row(60));
                if deps.is_empty() {
                    println!(" (no dependencies declared)");
                } else {
                    for d in &deps {
                        let kind = if d.is_cross_frontier() {
                            "cross-frontier"
                        } else {
                            "compile-time"
                        };
                        println!(" · {} [{kind}]", d.name);
                        // Optional fields are printed only when present.
                        if let Some(v) = &d.vfr_id {
                            println!(" vfr_id: {v}");
                        }
                        if let Some(l) = &d.locator {
                            println!(" locator: {l}");
                        }
                        if let Some(s) = &d.pinned_snapshot_hash {
                            println!(" snapshot: {s}");
                        }
                    }
                }
            }
        }
        // `frontier remove-dep` — drop a cross-frontier dependency, but only
        // if no finding still links into it.
        FrontierAction::RemoveDep {
            frontier,
            vfr_id,
            json,
        } => {
            let mut p = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            // Refuse removal while any link still targets this vfr_id.
            for f in &p.findings {
                for l in &f.links {
                    if let Ok(crate::bundle::LinkRef::Cross { vfr_id: ref v, .. }) =
                        crate::bundle::LinkRef::parse(&l.target)
                        && v == &vfr_id
                    {
                        fail(&format!(
                            "cannot remove dep '{vfr_id}': finding {} still links to it via {}",
                            f.id, l.target
                        ));
                    }
                }
            }
            let before = p.project.dependencies.len();
            p.project
                .dependencies
                .retain(|d| d.vfr_id.as_deref() != Some(&vfr_id));
            let removed = before - p.project.dependencies.len();
            if removed == 0 {
                fail(&format!("no cross-frontier dependency '{vfr_id}' found"));
            }
            repo::save_to_path(&frontier, &p).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "frontier.remove-dep",
                "frontier": frontier.display().to_string(),
                "vfr_id": vfr_id,
                "removed": removed,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize frontier.remove-dep")
                );
            } else {
                println!(
                    "{} removed cross-frontier dep {vfr_id}",
                    style::ok("frontier")
                );
            }
        }
        // `frontier refresh-deps` — query the hub for each cross-frontier
        // dep's latest snapshot hash and re-pin local deps to it. With
        // --dry-run nothing is written; deps are still counted/reported as
        // "refreshed" to show what would change.
        FrontierAction::RefreshDeps {
            frontier,
            from,
            dry_run,
            json,
        } => {
            let mut p = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let cross_deps: Vec<String> = p
                .project
                .dependencies
                .iter()
                .filter_map(|d| d.vfr_id.clone())
                .collect();
            // No cross-frontier deps: emit an empty summary and stop.
            if cross_deps.is_empty() {
                if json {
                    println!(
                        "{}",
                        serde_json::to_string_pretty(&json!({
                            "ok": true,
                            "command": "frontier.refresh-deps",
                            "frontier": frontier.display().to_string(),
                            "from": from,
                            "dry_run": dry_run,
                            "deps": [],
                            "summary": { "total": 0, "refreshed": 0, "unchanged": 0, "missing": 0, "unreachable": 0 },
                        })).expect("serialize")
                    );
                } else {
                    println!(
                        "{} no cross-frontier deps declared in {}",
                        style::ok("frontier"),
                        frontier.display()
                    );
                }
                return;
            }
            let client = reqwest::blocking::Client::builder()
                .timeout(std::time::Duration::from_secs(20))
                .build()
                .unwrap_or_else(|e| fail_return(&format!("http client init failed: {e}")));
            let base = from.trim_end_matches('/');
            // Minimal view of the hub's entry response; only the latest
            // snapshot hash is needed here.
            #[derive(serde::Deserialize)]
            struct HubEntry {
                latest_snapshot_hash: String,
            }
            let mut per_dep: Vec<serde_json::Value> = Vec::new();
            let (mut refreshed, mut unchanged, mut missing, mut unreachable) =
                (0u32, 0u32, 0u32, 0u32);
            for vfr in &cross_deps {
                let url = format!("{base}/entries/{vfr}");
                let resp = client.get(&url).send();
                // Classify each dep as missing / unreachable / unchanged /
                // refreshed, recording a JSON outcome row for it.
                let outcome = match resp {
                    Ok(r) if r.status().as_u16() == 404 => {
                        missing += 1;
                        json!({ "vfr_id": vfr, "status": "missing", "url": url })
                    }
                    Ok(r) if !r.status().is_success() => {
                        unreachable += 1;
                        json!({ "vfr_id": vfr, "status": "unreachable", "http_status": r.status().as_u16() })
                    }
                    Err(e) => {
                        unreachable += 1;
                        json!({ "vfr_id": vfr, "status": "unreachable", "error": e.to_string() })
                    }
                    Ok(r) => match r.json::<HubEntry>() {
                        Err(e) => {
                            unreachable += 1;
                            json!({ "vfr_id": vfr, "status": "unreachable", "error": format!("invalid hub response: {e}") })
                        }
                        Ok(entry) => {
                            // Re-locate the dep by vfr_id each iteration since
                            // `p` may be mutated below.
                            match p
                                .project
                                .dependencies
                                .iter()
                                .position(|d| d.vfr_id.as_deref() == Some(vfr.as_str()))
                            {
                                None => {
                                    unreachable += 1;
                                    json!({ "vfr_id": vfr, "status": "unreachable", "error": "dep disappeared mid-scan" })
                                }
                                Some(idx) => {
                                    let local_pin =
                                        p.project.dependencies[idx].pinned_snapshot_hash.clone();
                                    let new_pin = entry.latest_snapshot_hash;
                                    if local_pin.as_deref() == Some(new_pin.as_str()) {
                                        unchanged += 1;
                                        json!({ "vfr_id": vfr, "status": "unchanged", "snapshot": new_pin })
                                    } else {
                                        // Only mutate the pin outside dry-run;
                                        // the counter still reflects "would refresh".
                                        if !dry_run {
                                            p.project.dependencies[idx].pinned_snapshot_hash =
                                                Some(new_pin.clone());
                                        }
                                        refreshed += 1;
                                        json!({
                                            "vfr_id": vfr,
                                            "status": "refreshed",
                                            "old_snapshot": local_pin,
                                            "new_snapshot": new_pin,
                                        })
                                    }
                                }
                            }
                        }
                    },
                };
                per_dep.push(outcome);
            }
            // Save only when something actually changed and we're not dry-running.
            if !dry_run && refreshed > 0 {
                repo::save_to_path(&frontier, &p).unwrap_or_else(|e| fail_return(&e));
            }
            let payload = json!({
                "ok": true,
                "command": "frontier.refresh-deps",
                "frontier": frontier.display().to_string(),
                "from": from,
                "dry_run": dry_run,
                "deps": per_dep,
                "summary": {
                    "total": cross_deps.len(),
                    "refreshed": refreshed,
                    "unchanged": unchanged,
                    "missing": missing,
                    "unreachable": unreachable,
                },
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize frontier.refresh-deps")
                );
            } else {
                let mode = if dry_run { " (dry-run)" } else { "" };
                println!(
                    "{} refresh-deps{mode} · {} total · {refreshed} refreshed · {unchanged} unchanged · {missing} missing · {unreachable} unreachable",
                    style::ok("frontier"),
                    cross_deps.len()
                );
                for d in &per_dep {
                    let vfr = d["vfr_id"].as_str().unwrap_or("?");
                    let status = d["status"].as_str().unwrap_or("?");
                    match status {
                        // Snapshot hashes are abbreviated to 16 chars for display.
                        "refreshed" => println!(
                            " {vfr} refreshed {} → {}",
                            d["old_snapshot"]
                                .as_str()
                                .unwrap_or("(none)")
                                .chars()
                                .take(16)
                                .collect::<String>(),
                            d["new_snapshot"]
                                .as_str()
                                .unwrap_or("?")
                                .chars()
                                .take(16)
                                .collect::<String>(),
                        ),
                        "unchanged" => println!(" {vfr} unchanged"),
                        "missing" => println!(" {vfr} missing on hub"),
                        _ => println!(" {vfr} unreachable"),
                    }
                }
            }
        }
        // `frontier diff` — delegated to cmd_frontier_diff.
        FrontierAction::Diff {
            frontier,
            since,
            week,
            json,
        } => cmd_frontier_diff(&frontier, since.as_deref(), week.as_deref(), json),
    }
}
11890
11891fn cmd_repo(action: RepoAction) {
11892 match action {
11893 RepoAction::Status { frontier, json } => {
11894 let payload = frontier_repo::repo_status(&frontier).unwrap_or_else(|e| fail_return(&e));
11895 if json {
11896 println!(
11897 "{}",
11898 serde_json::to_string_pretty(&payload)
11899 .expect("failed to serialize repo status")
11900 );
11901 } else {
11902 let summary = payload.get("summary").unwrap_or(&Value::Null);
11903 let freshness = payload.get("freshness").unwrap_or(&Value::Null);
11904 println!("vela repo status");
11905 println!(" frontier: {}", frontier.display());
11906 println!(
11907 " events: {}",
11908 summary
11909 .get("accepted_events")
11910 .and_then(Value::as_u64)
11911 .unwrap_or_default()
11912 );
11913 println!(
11914 " open proposals: {}",
11915 summary
11916 .get("open_proposals")
11917 .and_then(Value::as_u64)
11918 .unwrap_or_default()
11919 );
11920 println!(
11921 " state: {}",
11922 freshness
11923 .get("materialized_state")
11924 .and_then(Value::as_str)
11925 .unwrap_or("unknown")
11926 );
11927 println!(
11928 " proof: {}",
11929 freshness
11930 .get("proof")
11931 .and_then(Value::as_str)
11932 .unwrap_or("unknown")
11933 );
11934 }
11935 }
11936 RepoAction::Doctor { frontier, json } => {
11937 let payload = frontier_repo::repo_doctor(&frontier).unwrap_or_else(|e| fail_return(&e));
11938 if json {
11939 println!(
11940 "{}",
11941 serde_json::to_string_pretty(&payload)
11942 .expect("failed to serialize repo doctor")
11943 );
11944 } else {
11945 let ok = payload.get("ok").and_then(Value::as_bool) == Some(true);
11946 let issues = payload
11947 .get("issues")
11948 .and_then(Value::as_array)
11949 .map_or(0, Vec::len);
11950 println!("vela repo doctor");
11951 println!(" frontier: {}", frontier.display());
11952 println!(" status: {}", if ok { "ok" } else { "needs attention" });
11953 println!(" issues: {issues}");
11954 }
11955 }
11956 }
11957}
11958
11959fn cmd_proof_verify(frontier: &Path, json_output: bool) {
11960 let payload = frontier_repo::proof_verify(frontier).unwrap_or_else(|e| fail_return(&e));
11961 if json_output {
11962 println!(
11963 "{}",
11964 serde_json::to_string_pretty(&payload).expect("failed to serialize proof verify")
11965 );
11966 if payload.get("ok").and_then(Value::as_bool) != Some(true) {
11967 std::process::exit(1);
11968 }
11969 } else {
11970 let ok = payload.get("ok").and_then(Value::as_bool) == Some(true);
11971 println!("vela proof verify");
11972 println!(" frontier: {}", frontier.display());
11973 println!(" status: {}", if ok { "ok" } else { "failed" });
11974 if let Some(issues) = payload.get("issues").and_then(Value::as_array) {
11975 for issue in issues {
11976 if let Some(message) = issue.get("message").and_then(Value::as_str) {
11977 println!(" issue: {message}");
11978 }
11979 }
11980 }
11981 if !ok {
11982 std::process::exit(1);
11983 }
11984 }
11985}
11986
11987fn cmd_proof_explain(frontier: &Path) {
11988 let text = frontier_repo::proof_explain(frontier).unwrap_or_else(|e| fail_return(&e));
11989 print!("{text}");
11990}
11991
/// Implements `vela frontier diff`: summarizes findings added/updated inside
/// a time window — either `--since <rfc3339>`..now, or an ISO week from
/// `--week` (defaulting to the current week) — along with new contradictions
/// and a cumulative claim count up to the window's end.
fn cmd_frontier_diff(frontier: &Path, since: Option<&str>, week: Option<&str>, json: bool) {
    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));

    let now = chrono::Utc::now();
    // Window is half-open [start, end); --since takes precedence over --week.
    let (window_start, window_end, week_label): (
        chrono::DateTime<chrono::Utc>,
        chrono::DateTime<chrono::Utc>,
        Option<String>,
    ) = if let Some(s) = since {
        let parsed = chrono::DateTime::parse_from_rfc3339(s)
            .map(|d| d.with_timezone(&chrono::Utc))
            .unwrap_or_else(|e| fail_return(&format!("invalid --since timestamp '{s}': {e}")));
        (parsed, now, None)
    } else {
        let key = week
            .map(str::to_owned)
            .unwrap_or_else(|| iso_week_key_for(now.date_naive()));
        let (start, end) = iso_week_bounds(&key)
            .unwrap_or_else(|e| fail_return(&format!("invalid --week '{key}': {e}")));
        (start, end, Some(key))
    };

    let mut added: Vec<&crate::bundle::FindingBundle> = Vec::new();
    let mut updated: Vec<&crate::bundle::FindingBundle> = Vec::new();
    let mut new_contradictions: Vec<&crate::bundle::FindingBundle> = Vec::new();
    // All findings created before the window's end, regardless of start.
    let mut cumulative: usize = 0;

    for f in &project.findings {
        // Unparseable timestamps are treated as absent rather than failing.
        let created = chrono::DateTime::parse_from_rfc3339(&f.created)
            .map(|d| d.with_timezone(&chrono::Utc))
            .ok();
        let updated_ts = f
            .updated
            .as_deref()
            .and_then(|u| chrono::DateTime::parse_from_rfc3339(u).ok())
            .map(|d| d.with_timezone(&chrono::Utc));

        if let Some(c) = created
            && c < window_end
        {
            cumulative += 1;
        }

        if let Some(c) = created
            && c >= window_start
            && c < window_end
        {
            added.push(f);
            // Contested findings and tension-type assertions both count as
            // contradictions introduced in this window.
            let is_tension = f.flags.contested || f.assertion.assertion_type == "tension";
            if is_tension {
                new_contradictions.push(f);
            }
            // A finding added in-window is never also reported as updated.
            continue;
        }
        if let Some(u) = updated_ts
            && u >= window_start
            && u < window_end
        {
            updated.push(f);
        }
    }

    // Compact JSON row per finding for the payload lists below.
    let summary_for = |list: &[&crate::bundle::FindingBundle]| -> Vec<serde_json::Value> {
        list.iter()
            .map(|f| {
                json!({
                    "id": f.id,
                    "assertion": f.assertion.text,
                    "evidence_type": f.evidence.evidence_type,
                    "confidence": f.confidence.score,
                    "doi": f.provenance.doi,
                    "pmid": f.provenance.pmid,
                })
            })
            .collect()
    };

    let payload = json!({
        "ok": true,
        "command": "frontier.diff",
        "frontier": frontier.display().to_string(),
        "frontier_id": project.frontier_id,
        "window": {
            "start": window_start.to_rfc3339_opts(chrono::SecondsFormat::Secs, true),
            "end": window_end.to_rfc3339_opts(chrono::SecondsFormat::Secs, true),
            "iso_week": week_label,
        },
        "totals": {
            "added": added.len(),
            "updated": updated.len(),
            "new_contradictions": new_contradictions.len(),
            "cumulative_claims": cumulative,
        },
        "added": summary_for(&added),
        "updated": summary_for(&updated),
        "new_contradictions": summary_for(&new_contradictions),
    });

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&payload).expect("failed to serialize frontier.diff")
        );
        return;
    }

    // Plain-text report: header, totals, then per-finding lists.
    let label = week_label
        .clone()
        .unwrap_or_else(|| format!("since {}", window_start.format("%Y-%m-%d %H:%M UTC")));
    println!();
    println!(
        " {}",
        format!("VELA · FRONTIER · DIFF · {label}")
            .to_uppercase()
            .dimmed()
    );
    println!(" {}", style::tick_row(60));
    println!(
        " range: {} → {}",
        window_start.format("%Y-%m-%d %H:%M"),
        window_end.format("%Y-%m-%d %H:%M")
    );
    println!(" added: {}", added.len());
    println!(" updated: {}", updated.len());
    println!(" contradictions: {}", new_contradictions.len());
    println!(" cumulative: {cumulative}");
    if added.is_empty() && updated.is_empty() {
        println!();
        println!(" (quiet window — no findings added or updated)");
    } else {
        println!();
        println!(" added:");
        for f in &added {
            println!(
                " · {} {}",
                f.id.dimmed(),
                truncate(&f.assertion.text, 88)
            );
        }
        if !updated.is_empty() {
            println!();
            println!(" updated:");
            for f in &updated {
                println!(
                    " · {} {}",
                    f.id.dimmed(),
                    truncate(&f.assertion.text, 88)
                );
            }
        }
    }
}
12155
/// Truncates `s` to at most `n` visible characters (by `char` count, not
/// bytes), appending `…` when anything was cut; the ellipsis counts toward
/// the budget.
fn truncate(s: &str, n: usize) -> String {
    let char_count = s.chars().count();
    if char_count <= n {
        return s.to_string();
    }
    // Reserve one slot for the ellipsis (saturating so n == 0 stays safe).
    let keep = n.saturating_sub(1);
    let mut shortened: String = s.chars().take(keep).collect();
    shortened.push('…');
    shortened
}
12165
12166fn iso_week_key_for(d: chrono::NaiveDate) -> String {
12168 use chrono::Datelike;
12169 let iso = d.iso_week();
12170 format!("{:04}-W{:02}", iso.year(), iso.week())
12171}
12172
12173fn iso_week_bounds(
12176 key: &str,
12177) -> Result<(chrono::DateTime<chrono::Utc>, chrono::DateTime<chrono::Utc>), String> {
12178 let (year_str, week_str) = key
12179 .split_once("-W")
12180 .ok_or_else(|| format!("expected YYYY-Www, got '{key}'"))?;
12181 let year: i32 = year_str
12182 .parse()
12183 .map_err(|e| format!("bad year in '{key}': {e}"))?;
12184 let week: u32 = week_str
12185 .parse()
12186 .map_err(|e| format!("bad week in '{key}': {e}"))?;
12187 let monday = chrono::NaiveDate::from_isoywd_opt(year, week, chrono::Weekday::Mon)
12188 .ok_or_else(|| format!("invalid ISO week: {key}"))?;
12189 let next_monday = monday + chrono::Duration::days(7);
12190 let start = monday.and_hms_opt(0, 0, 0).expect("00:00 valid").and_utc();
12191 let end = next_monday
12192 .and_hms_opt(0, 0, 0)
12193 .expect("00:00 valid")
12194 .and_utc();
12195 Ok((start, end))
12196}
12197
12198fn cmd_registry(action: RegistryAction) {
12203 use crate::registry;
12204 let default_registry = || -> PathBuf {
12205 let home = std::env::var("HOME").unwrap_or_else(|_| ".".to_string());
12206 PathBuf::from(home)
12207 .join(".vela")
12208 .join("registry")
12209 .join("entries.json")
12210 };
12211 match action {
12212 RegistryAction::DependsOn { vfr_id, from, json } => {
12213 let base = from.trim_end_matches('/');
12214 let url = format!("{base}/entries/{vfr_id}/depends-on");
12215 let client = reqwest::blocking::Client::builder()
12216 .timeout(std::time::Duration::from_secs(30))
12217 .build()
12218 .unwrap_or_else(|e| fail_return(&format!("http client init: {e}")));
12219 let resp = client
12220 .get(&url)
12221 .send()
12222 .unwrap_or_else(|e| fail_return(&format!("GET {url}: {e}")));
12223 if !resp.status().is_success() {
12224 fail(&format!("GET {url}: HTTP {}", resp.status()));
12225 }
12226 let body: serde_json::Value = resp
12227 .json()
12228 .unwrap_or_else(|e| fail_return(&format!("parse response: {e}")));
12229 if json {
12230 println!(
12231 "{}",
12232 serde_json::to_string_pretty(&body).expect("serialize")
12233 );
12234 } else {
12235 let dependents = body
12236 .get("dependents")
12237 .and_then(|v| v.as_array())
12238 .cloned()
12239 .unwrap_or_default();
12240 let count = dependents.len();
12241 println!(
12242 "{} {count} {} on {vfr_id}",
12243 style::ok("registry"),
12244 if count == 1 {
12245 "frontier depends"
12246 } else {
12247 "frontiers depend"
12248 },
12249 );
12250 for e in &dependents {
12251 let v = e.get("vfr_id").and_then(|v| v.as_str()).unwrap_or("?");
12252 let n = e.get("name").and_then(|v| v.as_str()).unwrap_or("?");
12253 let o = e
12254 .get("owner_actor_id")
12255 .and_then(|v| v.as_str())
12256 .unwrap_or("?");
12257 println!(" {v} {n} ({o})");
12258 }
12259 }
12260 }
12261 RegistryAction::Mirror {
12262 vfr_id,
12263 from,
12264 to,
12265 json,
12266 } => {
12267 let src_base = from.trim_end_matches('/');
12268 let dst_base = to.trim_end_matches('/');
12269 let src_url = format!("{src_base}/entries/{vfr_id}");
12270 let dst_url = format!("{dst_base}/entries");
12271 let client = reqwest::blocking::Client::builder()
12272 .timeout(std::time::Duration::from_secs(30))
12273 .build()
12274 .unwrap_or_else(|e| fail_return(&format!("http client init: {e}")));
12275
12276 let entry: serde_json::Value = client
12277 .get(&src_url)
12278 .send()
12279 .unwrap_or_else(|e| fail_return(&format!("GET {src_url}: {e}")))
12280 .error_for_status()
12281 .unwrap_or_else(|e| fail_return(&format!("GET {src_url}: {e}")))
12282 .json()
12283 .unwrap_or_else(|e| fail_return(&format!("parse {src_url}: {e}")));
12284
12285 let resp = client
12286 .post(&dst_url)
12287 .header("content-type", "application/json")
12288 .body(
12289 serde_json::to_vec(&entry)
12290 .unwrap_or_else(|e| fail_return(&format!("serialize: {e}"))),
12291 )
12292 .send()
12293 .unwrap_or_else(|e| fail_return(&format!("POST {dst_url}: {e}")));
12294 let status = resp.status();
12295 if !status.is_success() {
12296 let body = resp.text().unwrap_or_default();
12297 fail(&format!(
12298 "POST {dst_url}: HTTP {status}: {}",
12299 body.chars().take(300).collect::<String>()
12300 ));
12301 }
12302 let body: serde_json::Value = resp
12303 .json()
12304 .unwrap_or_else(|e| fail_return(&format!("parse POST response: {e}")));
12305 let duplicate = body
12306 .get("duplicate")
12307 .and_then(serde_json::Value::as_bool)
12308 .unwrap_or(false);
12309 let payload = json!({
12310 "ok": true,
12311 "command": "registry.mirror",
12312 "vfr_id": vfr_id,
12313 "from": src_base,
12314 "to": dst_base,
12315 "duplicate_on_destination": duplicate,
12316 "destination_response": body,
12317 });
12318 if json {
12319 println!(
12320 "{}",
12321 serde_json::to_string_pretty(&payload).expect("serialize")
12322 );
12323 } else {
12324 println!(
12325 "{} mirrored {vfr_id} from {src_base} → {dst_base}{}",
12326 style::ok("registry"),
12327 if duplicate {
12328 " (duplicate; signature already known)"
12329 } else {
12330 " (fresh insert)"
12331 }
12332 );
12333 }
12334 }
12335 RegistryAction::List { from, json } => {
12336 let (label, registry_data) = match &from {
12339 Some(loc) if loc.starts_with("http") => (
12340 loc.clone(),
12341 registry::load_any(loc).unwrap_or_else(|e| fail_return(&e)),
12342 ),
12343 Some(loc) => {
12344 let p = registry::resolve_local(loc).unwrap_or_else(|e| fail_return(&e));
12345 (
12346 p.display().to_string(),
12347 registry::load_local(&p).unwrap_or_else(|e| fail_return(&e)),
12348 )
12349 }
12350 None => {
12351 let p = default_registry();
12352 (
12353 p.display().to_string(),
12354 registry::load_local(&p).unwrap_or_else(|e| fail_return(&e)),
12355 )
12356 }
12357 };
12358 let r = registry_data;
12359 let path_label = label;
12360 if json {
12361 let payload = json!({
12362 "ok": true,
12363 "command": "registry.list",
12364 "registry": path_label,
12365 "entry_count": r.entries.len(),
12366 "entries": r.entries,
12367 });
12368 println!(
12369 "{}",
12370 serde_json::to_string_pretty(&payload)
12371 .expect("failed to serialize registry.list")
12372 );
12373 } else {
12374 println!();
12375 println!(
12376 " {}",
12377 format!("VELA · REGISTRY · LIST · {}", path_label)
12378 .to_uppercase()
12379 .dimmed()
12380 );
12381 println!(" {}", style::tick_row(60));
12382 if r.entries.is_empty() {
12383 println!(" (registry is empty)");
12384 } else {
12385 for entry in &r.entries {
12386 println!(
12387 " {} {} ({}) by {} published {}",
12388 entry.vfr_id,
12389 entry.name,
12390 entry.network_locator,
12391 entry.owner_actor_id,
12392 entry.signed_publish_at
12393 );
12394 }
12395 }
12396 }
12397 }
12398 RegistryAction::Publish {
12399 frontier,
12400 owner,
12401 key,
12402 locator,
12403 to,
12404 json,
12405 } => {
12406 let key_hex = std::fs::read_to_string(&key)
12409 .map(|s| s.trim().to_string())
12410 .unwrap_or_else(|e| fail_return(&format!("read key {}: {e}", key.display())));
12411 let signing_key = parse_signing_key(&key_hex);
12412 let derived = hex::encode(signing_key.verifying_key().to_bytes());
12413
12414 let mut frontier_data =
12416 repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
12417
12418 let pubkey = match frontier_data.actors.iter().find(|actor| actor.id == owner) {
12419 Some(actor) => actor.public_key.clone(),
12420 None => {
12421 eprintln!(
12429 " vela registry publish · auto-registering actor {owner} (derived pubkey {})",
12430 &derived[..16]
12431 );
12432 frontier_data.actors.push(sign::ActorRecord {
12433 id: owner.clone(),
12434 public_key: derived.clone(),
12435 algorithm: "ed25519".to_string(),
12436 created_at: chrono::Utc::now().to_rfc3339(),
12437 tier: None,
12438 orcid: None,
12439 access_clearance: None,
12440 });
12441 repo::save_to_path(&frontier, &frontier_data)
12442 .unwrap_or_else(|e| fail_return(&format!("save actor: {e}")));
12443 derived.clone()
12444 }
12445 };
12446
12447 let snapshot_hash = events::snapshot_hash(&frontier_data);
12451 let event_log_hash = events::event_log_hash(&frontier_data.events);
12452 let vfr_id = frontier_data.frontier_id();
12453 let name = frontier_data.project.name.clone();
12454
12455 if derived != pubkey {
12457 fail(&format!(
12458 "private key does not match registered pubkey for owner '{owner}'"
12459 ));
12460 }
12461
12462 let to_is_remote = matches!(
12470 to.as_deref(),
12471 Some(loc) if loc.starts_with("http://") || loc.starts_with("https://")
12472 );
12473 let resolved_locator = match locator {
12474 Some(l) => l,
12475 None => {
12476 if to_is_remote {
12477 let hub = to.as_deref().unwrap().trim_end_matches('/');
12478 let hub_root = hub.trim_end_matches("/entries");
12479 format!("{hub_root}/entries/{vfr_id}/snapshot")
12480 } else {
12481 fail_return(
12482 "--locator is required for local publishes; pass e.g. \
12483 --locator file:///path/to/frontier.json or an HTTPS URL.",
12484 )
12485 }
12486 }
12487 };
12488
12489 let mut entry = registry::RegistryEntry {
12490 schema: registry::ENTRY_SCHEMA.to_string(),
12491 vfr_id: vfr_id.clone(),
12492 name: name.clone(),
12493 owner_actor_id: owner.clone(),
12494 owner_pubkey: pubkey,
12495 latest_snapshot_hash: snapshot_hash,
12496 latest_event_log_hash: event_log_hash,
12497 network_locator: resolved_locator,
12498 signed_publish_at: chrono::Utc::now().to_rfc3339(),
12499 signature: String::new(),
12500 };
12501 entry.signature =
12502 registry::sign_entry(&entry, &signing_key).unwrap_or_else(|e| fail_return(&e));
12503
12504 let (registry_label, duplicate) = if to_is_remote {
12505 let hub_url = to.clone().unwrap();
12506 let resp = registry::publish_remote(&entry, &hub_url, Some(&frontier_data))
12510 .unwrap_or_else(|e| fail_return(&e));
12511 (hub_url, resp.duplicate)
12512 } else {
12513 let registry_path = match &to {
12514 Some(loc) => registry::resolve_local(loc).unwrap_or_else(|e| fail_return(&e)),
12515 None => default_registry(),
12516 };
12517 registry::publish_entry(®istry_path, entry.clone())
12518 .unwrap_or_else(|e| fail_return(&e));
12519 (registry_path.display().to_string(), false)
12520 };
12521
12522 let payload = json!({
12523 "ok": true,
12524 "command": "registry.publish",
12525 "registry": registry_label,
12526 "vfr_id": vfr_id,
12527 "name": name,
12528 "owner": owner,
12529 "snapshot_hash": entry.latest_snapshot_hash,
12530 "event_log_hash": entry.latest_event_log_hash,
12531 "signed_publish_at": entry.signed_publish_at,
12532 "signature": entry.signature,
12533 "duplicate": duplicate,
12534 });
12535 if json {
12536 println!(
12537 "{}",
12538 serde_json::to_string_pretty(&payload)
12539 .expect("failed to serialize registry.publish")
12540 );
12541 } else {
12542 let dup_suffix = if duplicate { " (duplicate, no-op)" } else { "" };
12543 println!(
12544 "{} published {vfr_id} → {}{}",
12545 style::ok("registry"),
12546 registry_label,
12547 dup_suffix
12548 );
12549 println!(" snapshot: {}", entry.latest_snapshot_hash);
12550 println!(" event_log: {}", entry.latest_event_log_hash);
12551 println!(" signature: {}…", &entry.signature[..16]);
12552 }
12553 }
12554 RegistryAction::Pull {
12555 vfr_id,
12556 from,
12557 out,
12558 transitive,
12559 depth,
12560 json,
12561 } => {
12562 let (registry_label, registry_data) = match &from {
12566 Some(loc) if loc.starts_with("http") => (
12567 loc.clone(),
12568 registry::load_any(loc).unwrap_or_else(|e| fail_return(&e)),
12569 ),
12570 Some(loc) => {
12571 let p = registry::resolve_local(loc).unwrap_or_else(|e| fail_return(&e));
12572 (
12573 p.display().to_string(),
12574 registry::load_local(&p).unwrap_or_else(|e| fail_return(&e)),
12575 )
12576 }
12577 None => {
12578 let p = default_registry();
12579 (
12580 p.display().to_string(),
12581 registry::load_local(&p).unwrap_or_else(|e| fail_return(&e)),
12582 )
12583 }
12584 };
12585 let entry = registry::find_latest(®istry_data, &vfr_id)
12586 .unwrap_or_else(|| fail_return(&format!("{vfr_id} not found in registry")));
12587
12588 if transitive {
12589 let result = registry::pull_transitive(®istry_data, &vfr_id, &out, depth)
12593 .unwrap_or_else(|e| fail_return(&format!("transitive pull failed: {e}")));
12594
12595 let dep_paths_json: serde_json::Value = serde_json::Value::Object(
12596 result
12597 .deps
12598 .iter()
12599 .map(|(k, v)| (k.clone(), serde_json::json!(v.display().to_string())))
12600 .collect(),
12601 );
12602 let payload = json!({
12603 "ok": true,
12604 "command": "registry.pull",
12605 "registry": registry_label,
12606 "vfr_id": vfr_id,
12607 "transitive": true,
12608 "depth": depth,
12609 "out_dir": out.display().to_string(),
12610 "primary": result.primary_path.display().to_string(),
12611 "verified": result.verified,
12612 "deps": dep_paths_json,
12613 });
12614 if json {
12615 println!(
12616 "{}",
12617 serde_json::to_string_pretty(&payload)
12618 .expect("failed to serialize registry.pull")
12619 );
12620 } else {
12621 println!(
12622 "{} pulled {vfr_id} (transitive) → {}",
12623 style::ok("registry"),
12624 out.display()
12625 );
12626 println!(" verified {} frontier(s):", result.verified.len());
12627 for v in &result.verified {
12628 println!(" · {v}");
12629 }
12630 println!(" every cross-frontier dependency's pinned snapshot hash matched");
12631 }
12632 return;
12633 }
12634
12635 registry::fetch_frontier_to_prefer_event_hub(&entry, from.as_deref(), &out)
12638 .unwrap_or_else(|e| fail_return(&format!("fetch frontier: {e}")));
12639 registry::verify_pull(&entry, &out).unwrap_or_else(|e| {
12640 let _ = std::fs::remove_file(&out);
12641 fail_return(&format!("pull verification failed: {e}"))
12642 });
12643
12644 let payload = json!({
12645 "ok": true,
12646 "command": "registry.pull",
12647 "registry": registry_label,
12648 "vfr_id": vfr_id,
12649 "out": out.display().to_string(),
12650 "snapshot_hash": entry.latest_snapshot_hash,
12651 "event_log_hash": entry.latest_event_log_hash,
12652 "verified": true,
12653 });
12654 if json {
12655 println!(
12656 "{}",
12657 serde_json::to_string_pretty(&payload)
12658 .expect("failed to serialize registry.pull")
12659 );
12660 } else {
12661 println!(
12662 "{} pulled {vfr_id} → {}",
12663 style::ok("registry"),
12664 out.display()
12665 );
12666 println!(" verified snapshot+event_log hashes match registry; signature ok");
12667 }
12668 }
12669 }
12670}
12671
12672fn print_stats_json(path: &Path) {
12673 let frontier = load_frontier_or_fail(path);
12674 let source_hash = hash_path_or_fail(path);
12675 let payload = json!({
12676 "ok": true,
12677 "command": "stats",
12678 "schema_version": project::VELA_SCHEMA_VERSION,
12679 "frontier": {
12680 "name": &frontier.project.name,
12681 "description": &frontier.project.description,
12682 "source": path.display().to_string(),
12683 "hash": format!("sha256:{source_hash}"),
12684 "compiled_at": &frontier.project.compiled_at,
12685 "compiler": &frontier.project.compiler,
12686 "papers_processed": frontier.project.papers_processed,
12687 "errors": frontier.project.errors,
12688 },
12689 "stats": frontier.stats,
12690 "proposals": proposals::summary(&frontier),
12691 "proof_state": frontier.proof_state,
12692 });
12693 println!(
12694 "{}",
12695 serde_json::to_string_pretty(&payload).expect("failed to serialize stats")
12696 );
12697}
12698
12699fn cmd_search(
12700 source: Option<&Path>,
12701 query: &str,
12702 entity: Option<&str>,
12703 assertion_type: Option<&str>,
12704 all: Option<&Path>,
12705 limit: usize,
12706 json_output: bool,
12707) {
12708 if let Some(dir) = all {
12709 search::run_all(dir, query, entity, assertion_type, limit);
12710 return;
12711 }
12712 let Some(src) = source else {
12713 fail("Provide --source <frontier> or --all <directory>.");
12714 };
12715 if json_output {
12716 let results = search::search(src, query, entity, assertion_type, limit);
12717 let loaded = load_frontier_or_fail(src);
12718 let source_hash = hash_path_or_fail(src);
12719 let payload = json!({
12720 "ok": true,
12721 "command": "search",
12722 "schema_version": project::VELA_SCHEMA_VERSION,
12723 "query": query,
12724 "frontier": {
12725 "name": &loaded.project.name,
12726 "source": src.display().to_string(),
12727 "hash": format!("sha256:{source_hash}"),
12728 },
12729 "filters": {
12730 "entity": entity,
12731 "assertion_type": assertion_type,
12732 "limit": limit,
12733 },
12734 "count": results.len(),
12735 "results": results.iter().map(|result| json!({
12736 "id": &result.id,
12737 "score": result.score,
12738 "assertion": &result.assertion,
12739 "assertion_type": &result.assertion_type,
12740 "confidence": result.confidence,
12741 "entities": &result.entities,
12742 "doi": &result.doi,
12743 })).collect::<Vec<_>>()
12744 });
12745 println!(
12746 "{}",
12747 serde_json::to_string_pretty(&payload).expect("failed to serialize search results")
12748 );
12749 } else {
12750 search::run(src, query, entity, assertion_type, limit);
12751 }
12752}
12753
12754fn cmd_tensions(source: &Path, both_high: bool, cross_domain: bool, top: usize, json_output: bool) {
12755 let frontier = load_frontier_or_fail(source);
12756 let result = tensions::analyze(&frontier, both_high, cross_domain, top);
12757 if json_output {
12758 let source_hash = hash_path_or_fail(source);
12759 let payload = json!({
12760 "ok": true,
12761 "command": "tensions",
12762 "schema_version": project::VELA_SCHEMA_VERSION,
12763 "frontier": {
12764 "name": &frontier.project.name,
12765 "source": source.display().to_string(),
12766 "hash": format!("sha256:{source_hash}"),
12767 },
12768 "filters": {
12769 "both_high": both_high,
12770 "cross_domain": cross_domain,
12771 "top": top,
12772 },
12773 "count": result.len(),
12774 "tensions": result.iter().map(|t| json!({
12775 "score": t.score,
12776 "resolved": t.resolved,
12777 "superseding_id": &t.superseding_id,
12778 "finding_a": {
12779 "id": &t.finding_a.id,
12780 "assertion": &t.finding_a.assertion,
12781 "confidence": t.finding_a.confidence,
12782 "assertion_type": &t.finding_a.assertion_type,
12783 "citation_count": t.finding_a.citation_count,
12784 "contradicts_count": t.finding_a.contradicts_count,
12785 },
12786 "finding_b": {
12787 "id": &t.finding_b.id,
12788 "assertion": &t.finding_b.assertion,
12789 "confidence": t.finding_b.confidence,
12790 "assertion_type": &t.finding_b.assertion_type,
12791 "citation_count": t.finding_b.citation_count,
12792 "contradicts_count": t.finding_b.contradicts_count,
12793 }
12794 })).collect::<Vec<_>>()
12795 });
12796 println!(
12797 "{}",
12798 serde_json::to_string_pretty(&payload).expect("failed to serialize tensions")
12799 );
12800 } else {
12801 tensions::print_tensions(&result);
12802 }
12803}
12804
12805fn cmd_gaps(action: GapsAction) {
12806 match action {
12807 GapsAction::Rank {
12808 frontier,
12809 top,
12810 domain,
12811 json,
12812 } => cmd_gap_rank(&frontier, top, domain.as_deref(), json),
12813 }
12814}
12815
12816fn cmd_gap_rank(frontier_path: &Path, top: usize, domain: Option<&str>, json_output: bool) {
12817 let frontier = load_frontier_or_fail(frontier_path);
12818 let mut ranked = frontier
12819 .findings
12820 .iter()
12821 .filter(|finding| finding.flags.gap || finding.flags.negative_space)
12822 .filter(|finding| {
12823 domain.is_none_or(|domain| {
12824 finding
12825 .assertion
12826 .text
12827 .to_lowercase()
12828 .contains(&domain.to_lowercase())
12829 || finding
12830 .assertion
12831 .entities
12832 .iter()
12833 .any(|entity| entity.name.to_lowercase().contains(&domain.to_lowercase()))
12834 })
12835 })
12836 .map(|finding| {
12837 let dependency_count = frontier
12838 .findings
12839 .iter()
12840 .flat_map(|candidate| candidate.links.iter())
12841 .filter(|link| link.target == finding.id)
12842 .count();
12843 let score = dependency_count as f64 + finding.confidence.score;
12844 json!({
12845 "id": &finding.id,
12846 "kind": "candidate_gap_review_lead",
12847 "assertion": &finding.assertion.text,
12848 "score": score,
12849 "dependency_count": dependency_count,
12850 "confidence": finding.confidence.score,
12851 "evidence_type": &finding.evidence.evidence_type,
12852 "entities": finding.assertion.entities.iter().map(|e| &e.name).collect::<Vec<_>>(),
12853 "recommended_action": "Review source scope and missing evidence before treating this as an experiment target.",
12854 "caveats": ["Candidate gap rankings are review leads, not guaranteed underexplored areas or experiment targets."],
12855 })
12856 })
12857 .collect::<Vec<_>>();
12858 ranked.sort_by(|a, b| {
12859 b.get("score")
12860 .and_then(Value::as_f64)
12861 .partial_cmp(&a.get("score").and_then(Value::as_f64))
12862 .unwrap_or(std::cmp::Ordering::Equal)
12863 });
12864 ranked.truncate(top);
12865 if json_output {
12866 let source_hash = hash_path_or_fail(frontier_path);
12867 let payload = json!({
12868 "ok": true,
12869 "command": "gaps rank",
12870 "schema_version": project::VELA_SCHEMA_VERSION,
12871 "frontier": {
12872 "name": &frontier.project.name,
12873 "source": frontier_path.display().to_string(),
12874 "hash": format!("sha256:{source_hash}"),
12875 },
12876 "filters": {
12877 "top": top,
12878 "domain": domain,
12879 },
12880 "count": ranked.len(),
12881 "ranking_label": "candidate gap review leads",
12882 "caveats": ["These rankings are navigation signals over flagged findings, not scientific conclusions."],
12883 "review_leads": ranked.clone(),
12884 "gaps": ranked,
12885 });
12886 println!(
12887 "{}",
12888 serde_json::to_string_pretty(&payload).expect("failed to serialize gap ranking")
12889 );
12890 } else {
12891 println!();
12892 println!(" {}", "CANDIDATE GAP REVIEW LEADS".dimmed());
12893 println!(" {}", style::tick_row(60));
12894 println!(" review source scope; these are not guaranteed experiment targets.");
12895 println!();
12896 for (idx, gap) in ranked.iter().enumerate() {
12897 println!(
12898 " {}. [{}] score={} {}",
12899 idx + 1,
12900 gap["id"].as_str().unwrap_or("?"),
12901 gap["score"].as_f64().unwrap_or(0.0),
12902 gap["assertion"].as_str().unwrap_or("")
12903 );
12904 }
12905 }
12906}
12907
12908async fn cmd_bridge(inputs: &[PathBuf], check_novelty: bool, top_n: usize) {
12909 if inputs.len() < 2 {
12910 fail("need at least 2 frontier files for bridge detection.");
12911 }
12912 println!();
12913 println!(" {}", "VELA · BRIDGE · V0.36.0".dimmed());
12914 println!(" {}", style::tick_row(60));
12915 println!(" loading {} frontiers...", inputs.len());
12916 let mut named_projects = Vec::<(String, project::Project)>::new();
12917 let mut total_findings = 0;
12918 for path in inputs {
12919 let frontier = load_frontier_or_fail(path);
12920 let name = path
12921 .file_stem()
12922 .unwrap_or_default()
12923 .to_string_lossy()
12924 .to_string();
12925 println!(" {} · {} findings", name, frontier.stats.findings);
12926 total_findings += frontier.stats.findings;
12927 named_projects.push((name, frontier));
12928 }
12929 let refs = named_projects
12930 .iter()
12931 .map(|(name, frontier)| (name.as_str(), frontier))
12932 .collect::<Vec<_>>();
12933 let mut bridges = bridge::detect_bridges(&refs);
12934 if check_novelty && !bridges.is_empty() {
12935 let client = Client::new();
12936 let check_count = bridges.len().min(top_n);
12937 println!(" running rough PubMed prior-art checks for top {check_count} bridges...");
12938 for bridge_item in bridges.iter_mut().take(check_count) {
12939 let query = bridge::novelty_query(&bridge_item.entity_name, bridge_item);
12940 match bridge::check_novelty(&client, &query).await {
12941 Ok(count) => bridge_item.pubmed_count = Some(count),
12942 Err(e) => eprintln!(
12943 " {} prior-art check failed for {}: {e}",
12944 style::err_prefix(),
12945 bridge_item.entity_name
12946 ),
12947 }
12948 tokio::time::sleep(std::time::Duration::from_millis(350)).await;
12949 }
12950 }
12951 print!("{}", bridge::format_report(&bridges, total_findings));
12952}
12953
/// Argument bundle for `cmd_bench`, mirroring the CLI flags of `vela bench`.
/// Exactly one of `suite`/`gold`/`entity_gold`/`link_gold` drives which
/// benchmark mode runs (see `cmd_bench` for the precedence order).
struct BenchArgs {
    // Frontier under evaluation; `cmd_bench` falls back to a default path when None.
    frontier: Option<PathBuf>,
    // Gold findings file for finding-level benchmarking.
    gold: Option<PathBuf>,
    // Gold file for entity-level benchmarking.
    entity_gold: Option<PathBuf>,
    // Gold file for link-level benchmarking.
    link_gold: Option<PathBuf>,
    // Benchmark suite definition; also used by the `suite_ready` probe.
    suite: Option<PathBuf>,
    // When true, only validate suite readiness and exit.
    suite_ready: bool,
    // Optional score gates; `no_thresholds` disables all of them.
    min_f1: Option<f64>,
    min_precision: Option<f64>,
    min_recall: Option<f64>,
    no_thresholds: bool,
    // Emit machine-readable JSON instead of the pretty report.
    json: bool,
}
12967
12968fn cmd_agent_bench(
12973 gold: &Path,
12974 candidate: &Path,
12975 sources: Option<&Path>,
12976 threshold: Option<f64>,
12977 report_path: Option<&Path>,
12978 json_out: bool,
12979) {
12980 let input = crate::agent_bench::BenchInput {
12981 gold_path: gold.to_path_buf(),
12982 candidate_path: candidate.to_path_buf(),
12983 sources: sources.map(Path::to_path_buf),
12984 threshold: threshold.unwrap_or(0.0),
12985 };
12986 let report = match crate::agent_bench::run(input) {
12987 Ok(r) => r,
12988 Err(e) => {
12989 eprintln!("{} bench failed: {e}", style::err_prefix());
12990 std::process::exit(1);
12991 }
12992 };
12993
12994 let json = serde_json::to_string_pretty(&report).unwrap_or_default();
12995 if let Some(path) = report_path
12996 && let Err(e) = std::fs::write(path, &json)
12997 {
12998 eprintln!(
12999 "{} failed to write report to {}: {e}",
13000 style::err_prefix(),
13001 path.display()
13002 );
13003 }
13004
13005 if json_out {
13006 println!("{json}");
13007 } else {
13008 println!();
13009 println!(" {}", "VELA · BENCH · AGENT STATE-UPDATE".dimmed());
13010 println!(" {}", style::tick_row(60));
13011 print!("{}", crate::agent_bench::render_pretty(&report));
13012 println!();
13013 }
13014
13015 if !report.pass {
13016 std::process::exit(1);
13017 }
13018}
13019
13020fn cmd_bench(args: BenchArgs) {
13021 if args.suite_ready {
13022 let suite_path = args
13023 .suite
13024 .unwrap_or_else(|| PathBuf::from("benchmarks/suites/bbb-core.json"));
13025 let payload =
13026 benchmark::suite_ready_report(&suite_path).unwrap_or_else(|e| fail_return(&e));
13027 println!(
13028 "{}",
13029 serde_json::to_string_pretty(&payload).expect("failed to serialize suite-ready report")
13030 );
13031 if payload.get("ok").and_then(Value::as_bool) != Some(true) {
13032 std::process::exit(1);
13033 }
13034 return;
13035 }
13036 if let Some(suite_path) = args.suite {
13037 let payload = benchmark::run_suite(&suite_path).unwrap_or_else(|e| fail_return(&e));
13038 if args.json {
13039 println!(
13040 "{}",
13041 serde_json::to_string_pretty(&payload)
13042 .expect("failed to serialize benchmark suite")
13043 );
13044 } else {
13045 let ok = payload.get("ok").and_then(Value::as_bool) == Some(true);
13046 let metrics = payload.get("metrics").unwrap_or(&Value::Null);
13047 println!();
13048 println!(" {}", "VELA · BENCH · SUITE".dimmed());
13049 println!(" {}", style::tick_row(60));
13050 println!(" suite: {}", suite_path.display());
13051 println!(
13052 " status: {}",
13053 if ok {
13054 style::ok("pass")
13055 } else {
13056 style::lost("fail")
13057 }
13058 );
13059 println!(
13060 " tasks: {}/{} passed",
13061 metrics
13062 .get("tasks_passed")
13063 .and_then(Value::as_u64)
13064 .unwrap_or(0),
13065 metrics
13066 .get("tasks_total")
13067 .and_then(Value::as_u64)
13068 .unwrap_or(0)
13069 );
13070 }
13071 if payload.get("ok").and_then(Value::as_bool) != Some(true) {
13072 std::process::exit(1);
13073 }
13074 return;
13075 }
13076
13077 let frontier = args
13078 .frontier
13079 .unwrap_or_else(|| PathBuf::from("frontiers/bbb-alzheimer.json"));
13080 let thresholds = benchmark::BenchmarkThresholds {
13081 min_f1: if args.no_thresholds {
13082 None
13083 } else {
13084 args.min_f1.or(Some(0.05))
13085 },
13086 min_precision: if args.no_thresholds {
13087 None
13088 } else {
13089 args.min_precision
13090 },
13091 min_recall: if args.no_thresholds {
13092 None
13093 } else {
13094 args.min_recall
13095 },
13096 ..Default::default()
13097 };
13098 if let Some(path) = args.link_gold {
13099 print_benchmark_or_exit(benchmark::task_envelope(
13100 &frontier,
13101 None,
13102 benchmark::BenchmarkMode::Link,
13103 Some(&path),
13104 &thresholds,
13105 None,
13106 ));
13107 } else if let Some(path) = args.entity_gold {
13108 print_benchmark_or_exit(benchmark::task_envelope(
13109 &frontier,
13110 None,
13111 benchmark::BenchmarkMode::Entity,
13112 Some(&path),
13113 &thresholds,
13114 None,
13115 ));
13116 } else if let Some(path) = args.gold {
13117 if args.json {
13118 print_benchmark_or_exit(benchmark::task_envelope(
13119 &frontier,
13120 None,
13121 benchmark::BenchmarkMode::Finding,
13122 Some(&path),
13123 &thresholds,
13124 None,
13125 ));
13126 } else {
13127 benchmark::run(&frontier, &path, false);
13128 }
13129 } else {
13130 fail("Provide --suite, --gold, --entity-gold, or --link-gold.");
13131 }
13132}
13133
13134fn print_benchmark_or_exit(result: Result<Value, String>) {
13135 let payload = result.unwrap_or_else(|e| fail_return(&e));
13136 println!(
13137 "{}",
13138 serde_json::to_string_pretty(&payload).expect("failed to serialize benchmark report")
13139 );
13140 if payload.get("ok").and_then(Value::as_bool) != Some(true) {
13141 std::process::exit(1);
13142 }
13143}
13144
13145fn cmd_packet(action: PacketAction) {
13146 let (result, json_output) = match action {
13147 PacketAction::Inspect { path, json } => (packet::inspect(&path), json),
13148 PacketAction::Validate { path, json } => (packet::validate(&path), json),
13149 };
13150 match result {
13151 Ok(output) if json_output => {
13152 println!(
13153 "{}",
13154 serde_json::to_string_pretty(&json!({
13155 "ok": true,
13156 "command": "packet",
13157 "result": output,
13158 }))
13159 .expect("failed to serialize packet response")
13160 );
13161 }
13162 Ok(output) => println!("{output}"),
13163 Err(e) => fail(&e),
13164 }
13165}
13166
13167fn cmd_verify(path: &Path, json_output: bool) {
13172 let result = packet::validate(path);
13173 match result {
13174 Ok(output) if json_output => {
13175 println!(
13176 "{}",
13177 serde_json::to_string_pretty(&json!({
13178 "ok": true,
13179 "command": "verify",
13180 "result": output,
13181 }))
13182 .expect("failed to serialize verify response")
13183 );
13184 }
13185 Ok(output) => {
13186 println!("{output}");
13187 println!(
13188 "\nverify: ok\n every file in the manifest matched its claimed sha256.\n pull this packet on another machine, run the same command, see the same line."
13189 );
13190 }
13191 Err(e) => fail(&e),
13192 }
13193}
13194
13195fn cmd_init(path: &Path, name: &str, template: &str, initialize_git: bool, json_output: bool) {
13196 if path.join(".vela").exists() {
13197 fail(&format!(
13198 "already initialized: {} exists",
13199 path.join(".vela").display()
13200 ));
13201 }
13202 let payload = frontier_repo::initialize(
13203 path,
13204 frontier_repo::InitOptions {
13205 name,
13206 template,
13207 initialize_git,
13208 },
13209 )
13210 .unwrap_or_else(|e| fail_return(&e));
13211 if json_output {
13212 println!(
13213 "{}",
13214 serde_json::to_string_pretty(&payload).expect("failed to serialize init report")
13215 );
13216 } else {
13217 println!(
13218 "{} initialized frontier repository in {}",
13219 style::ok("ok"),
13220 path.display()
13221 );
13222 }
13223}
13224
/// One-shot onboarding for `vela quickstart`:
/// init repo → generate keypair → register reviewer actor → add a first finding.
///
/// Each step re-invokes the current `vela` binary as a subprocess with
/// `--json`, so this function reuses the exact behavior of the individual
/// subcommands. Any failing step aborts the whole quickstart via
/// `fail`/`fail_return`.
fn cmd_quickstart(
    path: &Path,
    name: &str,
    reviewer: &str,
    assertion: Option<&str>,
    keys_out: Option<&Path>,
    json_output: bool,
) {
    use std::process::Command;

    // Refuse to clobber an existing repository.
    if path.join(".vela").exists() {
        fail(&format!(
            "already initialized: {} exists",
            path.join(".vela").display()
        ));
    }

    let exe = std::env::current_exe()
        .unwrap_or_else(|e| fail_return(&format!("cannot locate current executable: {e}")));
    // Keys default to <repo>/keys unless an explicit output dir was given.
    let keys_dir = keys_out
        .map(Path::to_path_buf)
        .unwrap_or_else(|| path.join("keys"));
    let assertion_text =
        assertion.unwrap_or("Quickstart placeholder claim. Replace with your real assertion.");

    // Runs one subcommand; aborts (with its stderr) if it exits non-zero.
    let run_step = |label: &str, args: &[&str]| -> std::process::Output {
        let out = Command::new(&exe)
            .args(args)
            .output()
            .unwrap_or_else(|e| fail_return(&format!("{label}: failed to spawn: {e}")));
        if !out.status.success() {
            let stderr = String::from_utf8_lossy(&out.stderr);
            fail(&format!("{label} failed:\n{stderr}"));
        }
        out
    };

    // Step 1: initialize the repository (git deliberately skipped here).
    run_step(
        "init",
        &[
            "init",
            path.to_string_lossy().as_ref(),
            "--name",
            name,
            "--no-git",
            "--json",
        ],
    );

    // Step 2: generate a signing keypair and read the pubkey from its JSON output.
    let keys_out_str = keys_dir.to_string_lossy().into_owned();
    let keypair_out = run_step(
        "sign.generate-keypair",
        &[
            "sign",
            "generate-keypair",
            "--out",
            keys_out_str.as_ref(),
            "--json",
        ],
    );
    let keypair_json: serde_json::Value = serde_json::from_slice(&keypair_out.stdout)
        .unwrap_or_else(|e| fail_return(&format!("sign.generate-keypair: bad json: {e}")));
    let public_key = keypair_json
        .get("public_key")
        .and_then(|v| v.as_str())
        .unwrap_or_else(|| fail_return("sign.generate-keypair: missing public_key in output"))
        .to_string();

    // Step 3: register the reviewer actor with the freshly generated pubkey.
    run_step(
        "actor.add",
        &[
            "actor",
            "add",
            path.to_string_lossy().as_ref(),
            reviewer,
            "--pubkey",
            public_key.as_str(),
            "--json",
        ],
    );

    // Step 4: add a first (placeholder) finding and apply it immediately.
    let finding_out = run_step(
        "finding.add",
        &[
            "finding",
            "add",
            path.to_string_lossy().as_ref(),
            "--assertion",
            assertion_text,
            "--author",
            reviewer,
            "--apply",
            "--json",
        ],
    );
    let finding_json: serde_json::Value = serde_json::from_slice(&finding_out.stdout)
        .unwrap_or_else(|e| fail_return(&format!("finding.add: bad json: {e}")));
    // finding_id is treated as optional in the subcommand's output.
    let finding_id = finding_json
        .get("finding_id")
        .and_then(|v| v.as_str())
        .map(str::to_string);

    if json_output {
        let payload = json!({
            "ok": true,
            "command": "quickstart",
            "frontier": path.display().to_string(),
            "name": name,
            "reviewer": reviewer,
            "public_key": public_key,
            "keys_dir": keys_dir.display().to_string(),
            "finding_id": finding_id,
            "next_steps": [
                format!("vela serve {}", path.display()),
                format!(
                    "vela ingest <paper.pdf|doi:...> --frontier {}",
                    path.display()
                ),
                format!("vela log {}", path.display()),
            ],
        });
        println!(
            "{}",
            serde_json::to_string_pretty(&payload).expect("failed to serialize quickstart")
        );
        return;
    }

    // Human-readable summary plus suggested follow-up commands.
    println!();
    println!(
        " {}",
        format!("VELA · QUICKSTART · {}", path.display())
            .to_uppercase()
            .dimmed()
    );
    println!(" {}", style::tick_row(60));
    println!(" frontier: {}", path.display());
    println!(" name: {name}");
    println!(" reviewer: {reviewer}");
    println!(" keys: {}", keys_dir.display());
    println!(" pubkey: {}…", &public_key[..16]);
    if let Some(id) = finding_id.as_deref() {
        println!(" finding: {id}");
    }
    println!();
    println!(" {}", style::ok("done"));
    println!(" next:");
    println!(" vela serve {}", path.display());
    println!(
        " vela ingest <paper.pdf|doi:10.xxx|pmid:xxx> --frontier {}",
        path.display()
    );
    println!(" vela log {}", path.display());
    println!();
}
13390
13391fn cmd_import(frontier_path: &Path, into: Option<&Path>) {
13392 let frontier = repo::load_from_path(frontier_path).unwrap_or_else(|e| fail_return(&e));
13393 let target = into
13394 .map(Path::to_path_buf)
13395 .unwrap_or_else(|| PathBuf::from(frontier.project.name.replace(' ', "-").to_lowercase()));
13396 repo::init_repo(&target, &frontier).unwrap_or_else(|e| fail(&e));
13397 println!(
13398 "{} {} findings · {}",
13399 style::ok("imported"),
13400 frontier.findings.len(),
13401 target.display()
13402 );
13403}
13404
13405fn cmd_locator_repair(
13406 path: &Path,
13407 atom_id: &str,
13408 locator_override: Option<&str>,
13409 reviewer: &str,
13410 reason: &str,
13411 apply: bool,
13412 json_output: bool,
13413) {
13414 let report = state::repair_evidence_atom_locator(
13415 path,
13416 atom_id,
13417 locator_override,
13418 reviewer,
13419 reason,
13420 apply,
13421 )
13422 .unwrap_or_else(|e| fail_return(&e));
13423 print_state_report(&report, json_output);
13424}
13425
13426async fn cmd_source_fetch(
13431 identifier: &str,
13432 cache_root: Option<&Path>,
13433 out_path: Option<&Path>,
13434 refresh: bool,
13435 _json_output: bool,
13436) {
13437 use sha2::{Digest, Sha256};
13438
13439 let normalized = normalize_source_identifier(identifier);
13440 let cache_path = cache_root.map(|root| {
13441 let hash = format!("{:x}", Sha256::digest(normalized.as_bytes()));
13442 root.join("sources")
13443 .join("cache")
13444 .join(format!("{hash}.json"))
13445 });
13446
13447 if !refresh
13448 && let Some(p) = cache_path.as_ref()
13449 && p.is_file()
13450 {
13451 let body = std::fs::read_to_string(p)
13452 .unwrap_or_else(|e| fail_return(&format!("read cache {}: {e}", p.display())));
13453 emit_source_fetch_result(&body, out_path);
13454 return;
13455 }
13456
13457 let result = fetch_source_metadata(&normalized).await;
13458 let json = match result {
13459 Ok(value) => serde_json::to_string_pretty(&value)
13460 .unwrap_or_else(|e| fail_return(&format!("serialize fetched record: {e}"))),
13461 Err(e) => fail_return(&format!("source-fetch '{identifier}': {e}")),
13462 };
13463
13464 if let Some(p) = cache_path.as_ref() {
13465 if let Some(parent) = p.parent() {
13466 std::fs::create_dir_all(parent)
13467 .unwrap_or_else(|e| fail_return(&format!("mkdir {}: {e}", parent.display())));
13468 }
13469 std::fs::write(p, &json)
13470 .unwrap_or_else(|e| fail_return(&format!("write cache {}: {e}", p.display())));
13471 }
13472 emit_source_fetch_result(&json, out_path);
13473}
13474
13475fn emit_source_fetch_result(body: &str, out_path: Option<&Path>) {
13476 if let Some(p) = out_path {
13477 if let Some(parent) = p.parent() {
13478 let _ = std::fs::create_dir_all(parent);
13479 }
13480 std::fs::write(p, body)
13481 .unwrap_or_else(|e| fail_return(&format!("write {}: {e}", p.display())));
13482 } else {
13483 println!("{body}");
13484 }
13485}
13486
/// Normalize a raw source identifier into prefixed canonical form.
///
/// Recognized inputs:
/// - already-prefixed `doi:` / `pmid:` / `nct:` / `pmc:` strings pass through;
/// - bare DOIs (starting with `10.`) become `doi:<value>`;
/// - NCT registry ids (with or without the `NCT` prefix, any case) become
///   `nct:<number>`;
/// - all-digit strings are treated as PubMed ids (`pmid:<digits>`);
/// - anything else is returned trimmed and unchanged.
fn normalize_source_identifier(raw: &str) -> String {
    let trimmed = raw.trim();
    if trimmed.starts_with("doi:")
        || trimmed.starts_with("pmid:")
        || trimmed.starts_with("nct:")
        || trimmed.starts_with("pmc:")
    {
        return trimmed.to_string();
    }
    if trimmed.starts_with("10.") {
        return format!("doi:{trimmed}");
    }
    if trimmed.starts_with("NCT") || trimmed.starts_with("nct") {
        // Bug fix: the previous `.split_at(0).0` always produced an empty
        // string, so every NCT input normalized to a bare "nct:" with the
        // registry number dropped. Keep the digits after the prefix.
        return format!("nct:{}", trimmed.to_uppercase().trim_start_matches("NCT"));
    }
    if trimmed.chars().all(|c| c.is_ascii_digit()) {
        return format!("pmid:{trimmed}");
    }
    trimmed.to_string()
}
13515
/// Dispatch a normalized identifier (`doi:` / `pmid:` / `nct:`) to the
/// matching upstream metadata service and return one source-record JSON value.
///
/// For DOIs, Crossref is queried first; when Crossref has no abstract, the
/// DOI is resolved to a PMID via NCBI esearch and the PubMed abstract is
/// spliced into the Crossref record, with provenance recorded under
/// `abstract_source`.
///
/// # Errors
/// Returns a human-readable message when the HTTP client cannot be built,
/// an upstream request fails, or the identifier prefix is unsupported.
async fn fetch_source_metadata(normalized: &str) -> Result<Value, String> {
    let client = Client::builder()
        .user_agent("vela/0.66.0 (vela-source-fetch; +https://vela-science.fly.dev)")
        .timeout(std::time::Duration::from_secs(30))
        .build()
        .map_err(|e| format!("client build: {e}"))?;
    if let Some(rest) = normalized.strip_prefix("doi:") {
        let mut record = fetch_via_crossref(&client, rest).await?;
        let crossref_abstract = record
            .get("abstract")
            .and_then(|v| v.as_str())
            .unwrap_or("");
        // Abstract backfill is best-effort: any failure along the
        // esearch -> efetch chain silently keeps the Crossref record as-is.
        if crossref_abstract.is_empty()
            && let Some(pmid) = resolve_doi_to_pmid(&client, rest).await
            && let Ok(pubmed_record) = fetch_via_pubmed(&client, &pmid).await
        {
            let pubmed_abstract = pubmed_record
                .get("abstract")
                .and_then(|v| v.as_str())
                .unwrap_or("")
                .to_string();
            // Only overwrite when PubMed actually supplied text and the
            // Crossref record is a JSON object we can mutate.
            if !pubmed_abstract.is_empty()
                && let Some(obj) = record.as_object_mut()
            {
                obj.insert("abstract".to_string(), Value::String(pubmed_abstract));
                obj.insert(
                    "abstract_source".to_string(),
                    Value::String(format!("pubmed:{pmid}")),
                );
            }
        }
        return Ok(record);
    }
    if let Some(rest) = normalized.strip_prefix("pmid:") {
        return fetch_via_pubmed(&client, rest).await;
    }
    if let Some(rest) = normalized.strip_prefix("nct:") {
        return fetch_via_ctgov(&client, rest).await;
    }
    Err(format!(
        "unsupported source identifier '{normalized}'. Use doi:, pmid:, or nct: prefix."
    ))
}
13565
13566async fn resolve_doi_to_pmid(client: &Client, doi: &str) -> Option<String> {
13570 let url = format!(
13571 "https://eutils.ncbi.nlm.nih.gov/entrez/eutils/esearch.fcgi?db=pubmed&term={}[doi]&retmode=json",
13572 urlencoding::encode(doi)
13573 );
13574 let resp = client.get(&url).send().await.ok()?;
13575 if !resp.status().is_success() {
13576 return None;
13577 }
13578 let body: Value = resp.json().await.ok()?;
13579 let id_list = body.pointer("/esearchresult/idlist")?.as_array()?;
13580 if id_list.len() != 1 {
13581 return None;
13584 }
13585 id_list.first()?.as_str().map(|s| s.to_string())
13586}
13587
13588async fn fetch_via_crossref(client: &Client, doi: &str) -> Result<Value, String> {
13589 let url = format!("https://api.crossref.org/works/{doi}");
13590 let resp = client
13591 .get(&url)
13592 .send()
13593 .await
13594 .map_err(|e| format!("crossref get: {e}"))?;
13595 if !resp.status().is_success() {
13596 return Err(format!("crossref returned {}", resp.status()));
13597 }
13598 let body: Value = resp
13599 .json()
13600 .await
13601 .map_err(|e| format!("crossref json: {e}"))?;
13602 let work = body.get("message").cloned().unwrap_or(Value::Null);
13603 let title = work
13604 .get("title")
13605 .and_then(|v| v.as_array())
13606 .and_then(|a| a.first())
13607 .and_then(|v| v.as_str())
13608 .unwrap_or("")
13609 .to_string();
13610 let abstract_html = work
13611 .get("abstract")
13612 .and_then(|v| v.as_str())
13613 .unwrap_or("")
13614 .to_string();
13615 let abstract_text = strip_jats_tags(&abstract_html);
13616 let year = work
13617 .get("issued")
13618 .and_then(|v| v.get("date-parts"))
13619 .and_then(|v| v.as_array())
13620 .and_then(|a| a.first())
13621 .and_then(|v| v.as_array())
13622 .and_then(|a| a.first())
13623 .and_then(|v| v.as_i64());
13624 let journal = work
13625 .get("container-title")
13626 .and_then(|v| v.as_array())
13627 .and_then(|a| a.first())
13628 .and_then(|v| v.as_str())
13629 .unwrap_or("")
13630 .to_string();
13631 let authors = work
13632 .get("author")
13633 .and_then(|v| v.as_array())
13634 .map(|arr| {
13635 arr.iter()
13636 .filter_map(|a| {
13637 let given = a.get("given").and_then(|v| v.as_str()).unwrap_or("");
13638 let family = a.get("family").and_then(|v| v.as_str()).unwrap_or("");
13639 let combined = format!("{given} {family}").trim().to_string();
13640 if combined.is_empty() {
13641 None
13642 } else {
13643 Some(combined)
13644 }
13645 })
13646 .collect::<Vec<_>>()
13647 })
13648 .unwrap_or_default();
13649 Ok(json!({
13650 "schema": "vela.source_fetch.v0.1",
13651 "identifier": format!("doi:{doi}"),
13652 "source": "crossref",
13653 "title": title,
13654 "abstract": abstract_text,
13655 "year": year,
13656 "journal": journal,
13657 "authors": authors,
13658 "retrieved_at": chrono::Utc::now().to_rfc3339(),
13659 }))
13660}
13661
13662async fn fetch_via_pubmed(client: &Client, pmid: &str) -> Result<Value, String> {
13663 let url = format!(
13664 "https://eutils.ncbi.nlm.nih.gov/entrez/eutils/efetch.fcgi?db=pubmed&id={pmid}&retmode=xml"
13665 );
13666 let resp = client
13667 .get(&url)
13668 .send()
13669 .await
13670 .map_err(|e| format!("pubmed get: {e}"))?;
13671 if !resp.status().is_success() {
13672 return Err(format!("pubmed returned {}", resp.status()));
13673 }
13674 let xml = resp.text().await.map_err(|e| format!("pubmed text: {e}"))?;
13675 let title = extract_xml_text(&xml, "<ArticleTitle>", "</ArticleTitle>");
13676 let abstract_text = extract_xml_text(&xml, "<AbstractText>", "</AbstractText>");
13677 let year = extract_xml_text(&xml, "<Year>", "</Year>")
13678 .parse::<i64>()
13679 .ok();
13680 let journal = extract_xml_text(&xml, "<Title>", "</Title>");
13681 Ok(json!({
13682 "schema": "vela.source_fetch.v0.1",
13683 "identifier": format!("pmid:{pmid}"),
13684 "source": "pubmed",
13685 "title": title,
13686 "abstract": abstract_text,
13687 "year": year,
13688 "journal": journal,
13689 "authors": Vec::<String>::new(),
13690 "retrieved_at": chrono::Utc::now().to_rfc3339(),
13691 }))
13692}
13693
13694async fn fetch_via_ctgov(client: &Client, nct: &str) -> Result<Value, String> {
13695 let nct_clean = nct.trim();
13696 let nct_id = if nct_clean.starts_with("NCT") || nct_clean.starts_with("nct") {
13697 nct_clean.to_uppercase()
13698 } else {
13699 format!("NCT{nct_clean}")
13700 };
13701 let url = format!("https://clinicaltrials.gov/api/v2/studies/{nct_id}");
13702 let resp = client
13703 .get(&url)
13704 .send()
13705 .await
13706 .map_err(|e| format!("ctgov get: {e}"))?;
13707 if !resp.status().is_success() {
13708 return Err(format!("ctgov returned {}", resp.status()));
13709 }
13710 let body: Value = resp.json().await.map_err(|e| format!("ctgov json: {e}"))?;
13711 let title = body
13712 .pointer("/protocolSection/identificationModule/briefTitle")
13713 .and_then(|v| v.as_str())
13714 .unwrap_or("")
13715 .to_string();
13716 let abstract_text = body
13717 .pointer("/protocolSection/descriptionModule/briefSummary")
13718 .and_then(|v| v.as_str())
13719 .unwrap_or("")
13720 .to_string();
13721 let phase = body
13722 .pointer("/protocolSection/designModule/phases")
13723 .and_then(|v| v.as_array())
13724 .and_then(|a| a.first())
13725 .and_then(|v| v.as_str())
13726 .unwrap_or("")
13727 .to_string();
13728 Ok(json!({
13729 "schema": "vela.source_fetch.v0.1",
13730 "identifier": format!("nct:{nct_id}"),
13731 "source": "clinicaltrials.gov",
13732 "title": title,
13733 "abstract": abstract_text,
13734 "year": Value::Null,
13735 "journal": phase,
13736 "authors": Vec::<String>::new(),
13737 "retrieved_at": chrono::Utc::now().to_rfc3339(),
13738 }))
13739}
13740
/// Return the trimmed text between the first `open` marker and the next
/// `close` marker, or an empty string when either marker is absent.
fn extract_xml_text(xml: &str, open: &str, close: &str) -> String {
    xml.split_once(open)
        .and_then(|(_, rest)| rest.split_once(close))
        .map(|(inner, _)| inner.trim().to_string())
        .unwrap_or_default()
}
13750
/// Strip angle-bracket markup (JATS/HTML tags) from an abstract and collapse
/// every run of whitespace into a single space.
///
/// Bare `<` and `>` characters are always dropped, even outside any tag.
fn strip_jats_tags(html: &str) -> String {
    let mut inside_tag = false;
    let visible: String = html
        .chars()
        .filter(|&c| match c {
            '<' => {
                inside_tag = true;
                false
            }
            '>' => {
                inside_tag = false;
                false
            }
            _ => !inside_tag,
        })
        .collect();
    visible.split_whitespace().collect::<Vec<_>>().join(" ")
}
13764
13765fn cmd_span_repair(
13766 path: &Path,
13767 finding_id: &str,
13768 section: &str,
13769 text: &str,
13770 reviewer: &str,
13771 reason: &str,
13772 apply: bool,
13773 json_output: bool,
13774) {
13775 let report =
13776 state::repair_finding_span(path, finding_id, section, text, reviewer, reason, apply)
13777 .unwrap_or_else(|e| fail_return(&e));
13778 print_state_report(&report, json_output);
13779}
13780
13781#[allow(clippy::too_many_arguments)]
13782fn cmd_entity_resolve(
13783 path: &Path,
13784 finding_id: &str,
13785 entity_name: &str,
13786 source: &str,
13787 id: &str,
13788 confidence: f64,
13789 matched_name: Option<&str>,
13790 resolution_method: &str,
13791 reviewer: &str,
13792 reason: &str,
13793 apply: bool,
13794 json_output: bool,
13795) {
13796 let report = state::resolve_finding_entity(
13797 path,
13798 finding_id,
13799 entity_name,
13800 source,
13801 id,
13802 confidence,
13803 matched_name,
13804 resolution_method,
13805 reviewer,
13806 reason,
13807 apply,
13808 )
13809 .unwrap_or_else(|e| fail_return(&e));
13810 print_state_report(&report, json_output);
13811}
13812
/// Propagate a correction (retraction or confidence reduction) through the
/// frontier and persist the updated state.
///
/// Exactly one of `retract` / `reduce_confidence` is expected; the latter
/// also requires `--to` with a score in [0.0, 1.0]. The result is written
/// to `output` when provided, otherwise back to `path` in place.
fn cmd_propagate(
    path: &Path,
    retract: Option<String>,
    reduce_confidence: Option<String>,
    to: Option<f64>,
    output: Option<&Path>,
) {
    let mut frontier = load_frontier_or_fail(path);
    // Decide which propagation action was requested; `fail` aborts on
    // missing or invalid flag combinations.
    let (finding_id, action, label) = if let Some(id) = retract {
        (id, propagate::PropagationAction::Retracted, "retraction")
    } else if let Some(id) = reduce_confidence {
        let score = to.unwrap_or_else(|| fail_return("--reduce-confidence requires --to <score>"));
        if !(0.0..=1.0).contains(&score) {
            fail("--to must be between 0.0 and 1.0");
        }
        (
            id,
            propagate::PropagationAction::ConfidenceReduced { new_score: score },
            "confidence reduction",
        )
    } else {
        fail("specify --retract <id> or --reduce-confidence <id> --to <score>");
    };
    // Validate the target exists before mutating anything.
    if !frontier.findings.iter().any(|f| f.id == finding_id) {
        fail(&format!("finding not found: {finding_id}"));
    }
    let result = propagate::propagate_correction(&mut frontier, &finding_id, action);
    // Record the propagation events (cloned because `result` is still
    // printed below), then refresh derived stats before saving.
    frontier.review_events.extend(result.events.clone());
    project::recompute_stats(&mut frontier);
    propagate::print_result(&result, label, &finding_id);
    let out = output.unwrap_or(path);
    repo::save_to_path(out, &frontier).expect("Failed to save frontier");
    println!(" output: {}", out.display());
}
13851
/// Print a ready-to-paste MCP server configuration snippet for this binary.
///
/// The emitted `args` array mirrors how `vela serve` should be launched:
/// a single frontier path, a `--frontiers` directory, or the default
/// `frontier.json` when neither is given.
fn cmd_mcp_setup(source: Option<&Path>, frontiers: Option<&Path>) {
    // Human-readable label for where the served state comes from.
    let source_desc = source
        .map(|p| p.display().to_string())
        .or_else(|| frontiers.map(|p| p.display().to_string()))
        .unwrap_or_else(|| "frontier.json".to_string());
    // `source` wins over `frontiers` when both are present.
    let args = if let Some(path) = source {
        format!(r#""serve", "{}""#, path.display())
    } else if let Some(path) = frontiers {
        format!(r#""serve", "--frontiers", "{}""#, path.display())
    } else {
        r#""serve", "frontier.json""#.to_string()
    };
    println!(
        r#"Add this MCP server configuration to your client:

{{
  "mcpServers": {{
    "vela": {{
      "command": "vela",
      "args": [{args}]
    }}
  }}
}}

Source: {source_desc}"#
    );
}
13879
13880fn parse_entities(input: &str) -> Vec<(String, String)> {
13881 if input.trim().is_empty() {
13882 return Vec::new();
13883 }
13884 input
13885 .split(',')
13886 .filter_map(|pair| {
13887 let parts = pair.trim().splitn(2, ':').collect::<Vec<_>>();
13888 if parts.len() == 2 {
13889 Some((parts[0].trim().to_string(), parts[1].trim().to_string()))
13890 } else {
13891 eprintln!(
13892 "{} skipping malformed entity '{}'",
13893 style::warn("warn"),
13894 pair.trim()
13895 );
13896 None
13897 }
13898 })
13899 .collect()
13900}
13901
13902fn parse_evidence_spans(inputs: &[String]) -> Vec<Value> {
13903 inputs
13904 .iter()
13905 .filter_map(|input| {
13906 let trimmed = input.trim();
13907 if trimmed.is_empty() {
13908 return None;
13909 }
13910 if trimmed.starts_with('{') {
13911 match serde_json::from_str::<Value>(trimmed) {
13912 Ok(value @ Value::Object(_)) => return Some(value),
13913 Ok(_) | Err(_) => {
13914 eprintln!(
13915 "{} evidence span JSON should be an object; storing as text",
13916 style::warn("warn")
13917 );
13918 }
13919 }
13920 }
13921 Some(json!({
13922 "section": "curator_source",
13923 "text": trimmed,
13924 }))
13925 })
13926 .collect()
13927}
13928
13929fn hash_path(path: &Path) -> Result<String, String> {
13930 let mut hasher = Sha256::new();
13931 if path.is_file() {
13932 let bytes = std::fs::read(path)
13933 .map_err(|e| format!("Failed to read {} for hashing: {e}", path.display()))?;
13934 hasher.update(&bytes);
13935 } else if path.is_dir() {
13936 let mut files = Vec::new();
13937 collect_hash_files(path, path, &mut files)?;
13938 files.sort();
13939 for rel in files {
13940 hasher.update(rel.to_string_lossy().as_bytes());
13941 let bytes = std::fs::read(path.join(&rel))
13942 .map_err(|e| format!("Failed to read {} for hashing: {e}", rel.display()))?;
13943 hasher.update(bytes);
13944 }
13945 } else {
13946 return Err(format!("Cannot hash missing path {}", path.display()));
13947 }
13948 Ok(format!("{:x}", hasher.finalize()))
13949}
13950
13951fn load_frontier_or_fail(path: &Path) -> project::Project {
13952 repo::load_from_path(path).unwrap_or_else(|e| {
13953 fail_return(&format!(
13954 "Failed to load frontier '{}': {e}",
13955 path.display()
13956 ))
13957 })
13958}
13959
13960fn hash_path_or_fail(path: &Path) -> String {
13961 hash_path(path).unwrap_or_else(|e| {
13962 fail_return(&format!(
13963 "Failed to hash frontier '{}': {e}",
13964 path.display()
13965 ))
13966 })
13967}
13968
/// Recursively gather every regular file under `dir`, pushing each one's
/// path relative to `root` into `files`. Symlinks and other non-file,
/// non-directory entries are skipped.
///
/// # Errors
/// Returns a message when a directory cannot be read or a path cannot be
/// made relative to `root`.
fn collect_hash_files(root: &Path, dir: &Path, files: &mut Vec<PathBuf>) -> Result<(), String> {
    let entries = std::fs::read_dir(dir)
        .map_err(|e| format!("Failed to read {}: {e}", dir.display()))?;
    for entry in entries {
        let path = entry
            .map_err(|e| format!("Failed to read directory entry: {e}"))?
            .path();
        if path.is_dir() {
            collect_hash_files(root, &path, files)?;
        } else if path.is_file() {
            let rel = path.strip_prefix(root).map_err(|e| e.to_string())?;
            files.push(rel.to_path_buf());
        }
    }
    Ok(())
}
13987
13988fn schema_error_suggestion(error: &str) -> &'static str {
13989 if schema_error_action(error).is_some() {
13990 "Run `vela normalize` to repair deterministic frontier state."
13991 } else {
13992 "Inspect and correct the referenced frontier field."
13993 }
13994}
13995
13996fn schema_error_fix(error: &str) -> bool {
13997 schema_error_action(error).is_some()
13998}
13999
/// Map a schema validation error message to the `vela normalize` action that
/// repairs it, or `None` when no automatic fix applies.
fn schema_error_action(error: &str) -> Option<&'static str> {
    // Substrings that indicate repairable metadata/statistics drift.
    const METADATA_MARKERS: [&str; 5] = [
        "stats.findings",
        "stats.links",
        "Invalid compiler",
        "Invalid vela_version",
        "Invalid schema",
    ];
    if METADATA_MARKERS.iter().any(|marker| error.contains(marker)) {
        return Some("normalize_metadata_and_stats");
    }
    if error.contains("does not match content-address") {
        return Some("rewrite_ids");
    }
    None
}
14014
14015fn build_repair_plan(diagnostics: &[Value]) -> Vec<Value> {
14016 let mut actions = std::collections::BTreeMap::<String, usize>::new();
14017 for diagnostic in diagnostics {
14018 if let Some(action) = diagnostic.get("normalize_action").and_then(Value::as_str) {
14019 *actions.entry(action.to_string()).or_default() += 1;
14020 }
14021 }
14022 actions
14023 .into_iter()
14024 .map(|(action, count)| {
14025 let command = if action == "rewrite_ids" {
14026 "vela normalize <frontier> --write --rewrite-ids --id-map id-map.json"
14027 } else {
14028 "vela normalize <frontier> --write"
14029 };
14030 json!({
14031 "action": action,
14032 "count": count,
14033 "command": command,
14034 })
14035 })
14036 .collect()
14037}
14038
14039fn cmd_integrity(frontier: &Path, json: bool) {
14040 let report = state_integrity::analyze_path(frontier).unwrap_or_else(|e| fail_return(&e));
14041 if json {
14042 println!(
14043 "{}",
14044 serde_json::to_string_pretty(&report).expect("failed to serialize integrity report")
14045 );
14046 } else {
14047 println!("vela integrity");
14048 println!(" frontier: {}", frontier.display());
14049 println!(" status: {}", report.status);
14050 println!(" proof freshness: {}", report.proof_freshness);
14051 println!(" structural errors: {}", report.structural_errors.len());
14052 for error in report.structural_errors.iter().take(8) {
14053 println!(" - {}: {}", error.rule_id, error.message);
14054 }
14055 }
14056}
14057
14058fn cmd_impact(frontier: &Path, finding_id: &str, depth: Option<usize>, json: bool) {
14059 let report =
14060 impact::analyze_path(frontier, finding_id, depth).unwrap_or_else(|e| fail_return(&e));
14061 if json {
14062 println!(
14063 "{}",
14064 serde_json::to_string_pretty(&report).expect("failed to serialize impact report")
14065 );
14066 } else {
14067 println!("vela impact");
14068 println!(" finding: {}", report.target.id);
14069 println!(" frontier: {}", report.frontier.vfr_id);
14070 println!(" direct dependents: {}", report.summary.direct_dependents);
14071 println!(" downstream: {}", report.summary.total_downstream);
14072 println!(" open proposals: {}", report.summary.open_proposals);
14073 println!(" accepted events: {}", report.summary.accepted_events);
14074 println!(" proof: {}", report.summary.proof_status);
14075 }
14076}
14077
/// CLI handler: report per-finding "discord" kinds for a frontier,
/// optionally filtered to a single kind, as JSON or text.
fn cmd_discord(frontier: &Path, json: bool, kind_filter: Option<&str>) {
    use crate::discord::DiscordKind;
    use crate::discord_compute::compute_discord_assignment;

    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
    let assignment = compute_discord_assignment(&project);
    // Contexts (finding ids) that carry at least one discord kind.
    let support = assignment.frontier_support();

    // (finding id, discord kind names) rows, honoring the optional filter.
    let mut rows: Vec<(String, Vec<String>)> = Vec::new();
    for context in support.iter() {
        let set = assignment.get(context);
        let kinds: Vec<String> = set.iter().map(|k| k.as_str().to_string()).collect();
        if let Some(filter) = kind_filter
            && !kinds.iter().any(|k| k == filter)
        {
            continue;
        }
        rows.push((context.clone(), kinds));
    }

    // Count findings per discord kind across the UNFILTERED assignment;
    // zero-count kinds are omitted.
    let mut histogram: std::collections::BTreeMap<&'static str, usize> =
        std::collections::BTreeMap::new();
    for kind in DiscordKind::ALL {
        let count = assignment
            .iter()
            .filter(|(_, set)| set.contains(*kind))
            .count();
        if count > 0 {
            histogram.insert(kind.as_str(), count);
        }
    }

    let total_findings = project.findings.len();
    let frontier_id = project
        .frontier_id
        .clone()
        .unwrap_or_else(|| String::from("<unknown>"));

    if json {
        let row_value = |row: &(String, Vec<String>)| {
            serde_json::json!({
                "finding_id": row.0,
                "discord_kinds": row.1,
            })
        };
        let report = serde_json::json!({
            "frontier_id": frontier_id,
            "total_findings": total_findings,
            "frontier_support_size": support.len(),
            "filtered_row_count": rows.len(),
            "filter_kind": kind_filter,
            "histogram": histogram,
            "rows": rows.iter().map(row_value).collect::<Vec<_>>(),
        });
        println!(
            "{}",
            serde_json::to_string_pretty(&report).expect("serialize discord report")
        );
        return;
    }

    println!("vela discord");
    println!(" frontier: {frontier_id}");
    println!(" total findings: {total_findings}");
    println!(
        " frontier support (any discord): {} of {}",
        support.len(),
        total_findings
    );
    if let Some(k) = kind_filter {
        println!(" filter: kind = {k}");
    }
    println!();
    if histogram.is_empty() {
        println!(" no discord detected.");
    } else {
        println!(" discord histogram:");
        for (k, n) in &histogram {
            println!(" {n:>4} {k}");
        }
    }
    // Cap the detailed listing at 50 rows to keep terminal output bounded.
    if !rows.is_empty() {
        println!();
        println!(" findings with discord (showing up to 50):");
        for (fid, kinds) in rows.iter().take(50) {
            println!(" {fid} · {}", kinds.join(", "));
        }
        if rows.len() > 50 {
            println!(" ... and {} more", rows.len() - 50);
        }
    }
}
14175
14176fn empty_signal_report() -> signals::SignalReport {
14177 signals::SignalReport {
14178 schema: "vela.signals.v0".to_string(),
14179 frontier: "unavailable".to_string(),
14180 signals: Vec::new(),
14181 review_queue: Vec::new(),
14182 proof_readiness: signals::ProofReadiness {
14183 status: "unavailable".to_string(),
14184 blockers: 0,
14185 warnings: 0,
14186 caveats: vec!["Frontier could not be loaded for signal analysis.".to_string()],
14187 },
14188 }
14189}
14190
14191fn print_signal_summary(report: &signals::SignalReport, strict: bool) {
14192 println!();
14193 println!(" {}", "SIGNALS".dimmed());
14194 println!(" {}", style::tick_row(60));
14195 println!(" total signals: {}", report.signals.len());
14196 println!(" proof readiness: {}", report.proof_readiness.status);
14197 if !report.review_queue.is_empty() {
14198 println!(" review queue: {} items", report.review_queue.len());
14199 }
14200 if strict && report.proof_readiness.status != "ready" {
14201 println!(
14202 " {} proof readiness has blocking signals.",
14203 style::lost("strict check failed")
14204 );
14205 }
14206}
14207
/// Write a JSON file into an existing packet directory and keep the packet's
/// manifests consistent with it.
///
/// The file lands at `packet_dir/relative_path`; its path/sha256/byte-length
/// entry then replaces any previous entry for that path in BOTH
/// `manifest.json` (`included_files` array) and `packet.lock.json` (`files`
/// array). Because rewriting the lock file changes the lock file's own
/// bytes, its recorded entry inside `manifest.json` goes stale — a final
/// pass recomputes and upserts it.
///
/// # Errors
/// Returns a message when serialization or any read/write of the packet
/// files fails.
fn append_packet_json_file(
    packet_dir: &Path,
    relative_path: &str,
    value: &Value,
) -> Result<(), String> {
    let content = serde_json::to_vec_pretty(value)
        .map_err(|e| format!("Failed to serialize packet JSON file: {e}"))?;
    let path = packet_dir.join(relative_path);
    if let Some(parent) = path.parent() {
        std::fs::create_dir_all(parent)
            .map_err(|e| format!("Failed to create {}: {e}", parent.display()))?;
    }
    std::fs::write(&path, &content)
        .map_err(|e| format!("Failed to write {}: {e}", path.display()))?;
    // Manifest entry describing the file just written.
    let entry = json!({
        "path": relative_path,
        "sha256": hex::encode(Sha256::digest(&content)),
        "bytes": content.len(),
    });

    // Upsert the entry into both manifests; their file arrays use
    // different keys.
    for manifest_name in ["manifest.json", "packet.lock.json"] {
        let manifest_path = packet_dir.join(manifest_name);
        let data = std::fs::read_to_string(&manifest_path)
            .map_err(|e| format!("Failed to read {}: {e}", manifest_path.display()))?;
        let mut manifest: Value = serde_json::from_str(&data)
            .map_err(|e| format!("Failed to parse {}: {e}", manifest_path.display()))?;
        let array_key = if manifest_name == "manifest.json" {
            "included_files"
        } else {
            "files"
        };
        let files = manifest
            .get_mut(array_key)
            .and_then(Value::as_array_mut)
            .ok_or_else(|| format!("{} missing {array_key} array", manifest_path.display()))?;
        // Drop any stale entry for this path before appending the new one.
        files.retain(|file| {
            file.get("path")
                .and_then(Value::as_str)
                .is_none_or(|path| path != relative_path)
        });
        files.push(entry.clone());
        std::fs::write(
            &manifest_path,
            serde_json::to_vec_pretty(&manifest)
                .map_err(|e| format!("Failed to serialize {}: {e}", manifest_path.display()))?,
        )
        .map_err(|e| format!("Failed to write {}: {e}", manifest_path.display()))?;
    }

    // The lock file changed on disk above, so its hash recorded inside
    // manifest.json is stale; recompute from the bytes now on disk.
    let lock_path = packet_dir.join("packet.lock.json");
    let lock_content = std::fs::read(&lock_path)
        .map_err(|e| format!("Failed to read {}: {e}", lock_path.display()))?;
    let lock_entry = json!({
        "path": "packet.lock.json",
        "sha256": hex::encode(Sha256::digest(&lock_content)),
        "bytes": lock_content.len(),
    });
    let manifest_path = packet_dir.join("manifest.json");
    let data = std::fs::read_to_string(&manifest_path)
        .map_err(|e| format!("Failed to read {}: {e}", manifest_path.display()))?;
    let mut manifest: Value = serde_json::from_str(&data)
        .map_err(|e| format!("Failed to parse {}: {e}", manifest_path.display()))?;
    let files = manifest
        .get_mut("included_files")
        .and_then(Value::as_array_mut)
        .ok_or_else(|| format!("{} missing included_files array", manifest_path.display()))?;
    files.retain(|file| {
        file.get("path")
            .and_then(Value::as_str)
            .is_none_or(|path| path != "packet.lock.json")
    });
    files.push(lock_entry);
    std::fs::write(
        &manifest_path,
        serde_json::to_vec_pretty(&manifest)
            .map_err(|e| format!("Failed to serialize {}: {e}", manifest_path.display()))?,
    )
    .map_err(|e| format!("Failed to write {}: {e}", manifest_path.display()))?;
    Ok(())
}
14288
14289fn print_tool_check_report(report: &Value) {
14290 let summary = report.get("summary").unwrap_or(&Value::Null);
14291 let frontier = report.get("frontier").unwrap_or(&Value::Null);
14292 println!();
14293 println!(" {}", "VELA · SERVE · CHECK-TOOLS".dimmed());
14294 println!(" {}", style::tick_row(60));
14295 println!(
14296 "frontier: {}",
14297 frontier
14298 .get("name")
14299 .and_then(Value::as_str)
14300 .unwrap_or("unknown")
14301 );
14302 println!(
14303 "findings: {}",
14304 frontier
14305 .get("findings")
14306 .and_then(Value::as_u64)
14307 .unwrap_or_default()
14308 );
14309 println!(
14310 "checks: {} passed, {} failed",
14311 summary
14312 .get("passed")
14313 .and_then(Value::as_u64)
14314 .unwrap_or_default(),
14315 summary
14316 .get("failed")
14317 .and_then(Value::as_u64)
14318 .unwrap_or_default()
14319 );
14320 if let Some(tools) = report.get("tools").and_then(Value::as_array) {
14321 let names = tools
14322 .iter()
14323 .filter_map(Value::as_str)
14324 .collect::<Vec<_>>()
14325 .join(", ");
14326 println!("tools: {names}");
14327 }
14328 if let Some(checks) = report.get("checks").and_then(Value::as_array) {
14329 for check in checks {
14330 let status = if check.get("ok").and_then(Value::as_bool) == Some(true) {
14331 style::ok("ok")
14332 } else {
14333 style::lost("lost")
14334 };
14335 println!(
14336 " {} {}",
14337 status,
14338 check
14339 .get("tool")
14340 .and_then(Value::as_str)
14341 .unwrap_or("unknown")
14342 );
14343 }
14344 }
14345}
14346
14347fn print_state_report(report: &state::StateCommandReport, json_output: bool) {
14348 if json_output {
14349 println!(
14350 "{}",
14351 serde_json::to_string_pretty(report).expect("failed to serialize state command report")
14352 );
14353 } else {
14354 println!("{}", report.message);
14355 println!(" frontier: {}", report.frontier);
14356 println!(" finding: {}", report.finding_id);
14357 println!(" proposal: {}", report.proposal_id);
14358 println!(" status: {}", report.proposal_status);
14359 if let Some(event_id) = &report.applied_event_id {
14360 println!(" event: {}", event_id);
14361 }
14362 println!(" wrote: {}", report.wrote_to);
14363 }
14364}
14365
14366fn print_history(payload: &Value) {
14367 let finding = payload.get("finding").unwrap_or(&Value::Null);
14368 println!("vela history");
14369 println!(
14370 " finding: {}",
14371 finding
14372 .get("id")
14373 .and_then(Value::as_str)
14374 .unwrap_or("unknown")
14375 );
14376 println!(
14377 " assertion: {}",
14378 finding
14379 .get("assertion")
14380 .and_then(Value::as_str)
14381 .unwrap_or("")
14382 );
14383 println!(
14384 " confidence: {:.3}",
14385 finding
14386 .get("confidence")
14387 .and_then(Value::as_f64)
14388 .unwrap_or_default()
14389 );
14390 let reviews = payload
14391 .get("review_events")
14392 .and_then(Value::as_array)
14393 .map_or(0, Vec::len);
14394 let updates = payload
14395 .get("confidence_updates")
14396 .and_then(Value::as_array)
14397 .map_or(0, Vec::len);
14398 let annotations = finding
14399 .get("annotations")
14400 .and_then(Value::as_array)
14401 .map_or(0, Vec::len);
14402 let sources = payload
14403 .get("sources")
14404 .and_then(Value::as_array)
14405 .map_or(0, Vec::len);
14406 let atoms = payload
14407 .get("evidence_atoms")
14408 .and_then(Value::as_array)
14409 .map_or(0, Vec::len);
14410 let conditions = payload
14411 .get("condition_records")
14412 .and_then(Value::as_array)
14413 .map_or(0, Vec::len);
14414 let proposals = payload
14415 .get("proposals")
14416 .and_then(Value::as_array)
14417 .map_or(0, Vec::len);
14418 let events = payload
14419 .get("events")
14420 .and_then(Value::as_array)
14421 .map_or(0, Vec::len);
14422 println!(" review events: {reviews}");
14423 println!(" confidence updates: {updates}");
14424 println!(" annotations: {annotations}");
14425 println!(" sources: {sources}");
14426 println!(" evidence atoms: {atoms}");
14427 println!(" condition records: {conditions}");
14428 println!(" proposals: {proposals}");
14429 println!(" canonical events: {events}");
14430 if let Some(status) = payload
14431 .get("proof_state")
14432 .and_then(|value| value.get("latest_packet"))
14433 .and_then(|value| value.get("status"))
14434 .and_then(Value::as_str)
14435 {
14436 println!(" proof state: {status}");
14437 }
14438 if let Some(events) = payload.get("review_events").and_then(Value::as_array) {
14439 for event in events.iter().take(8) {
14440 println!(
14441 " - {} {} {}",
14442 event
14443 .get("reviewed_at")
14444 .and_then(Value::as_str)
14445 .unwrap_or(""),
14446 event.get("id").and_then(Value::as_str).unwrap_or(""),
14447 event.get("reason").and_then(Value::as_str).unwrap_or("")
14448 );
14449 }
14450 }
14451}
14452
/// Serializable trace emitted by the proof tooling.
///
/// NOTE(review): field semantics below are inferred from their names only —
/// confirm against the code that populates `ProofTrace` before relying on them.
#[derive(Debug, Serialize)]
pub struct ProofTrace {
    /// Version tag of the trace format itself.
    pub trace_version: String,
    /// CLI invocation (argv-style) that produced this trace.
    pub command: Vec<String>,
    /// Identifier of the traced source input.
    pub source: String,
    /// Hash of the source input.
    pub source_hash: String,
    /// Schema version of the traced content.
    pub schema_version: String,
    /// Artifacts that were checked during the run.
    pub checked_artifacts: Vec<String>,
    /// Optional benchmark results, when a benchmark was run.
    pub benchmark: Option<Value>,
    /// Packet manifest reference.
    pub packet_manifest: String,
    /// Packet validation outcome.
    pub packet_validation: String,
    /// Caveats attached to the run.
    pub caveats: Vec<String>,
    /// Overall status of the proof run.
    pub status: String,
    /// Where the trace file was written.
    pub trace_path: String,
}
14468
/// Subcommand names that belong to the released CLI surface.
/// Anything not in this list is rejected by `run_from_args` before clap
/// parsing is attempted.
const SCIENCE_SUBCOMMANDS: &[&str] = &[
    "compile-notes",
    "compile-code",
    "compile-data",
    "review-pending",
    "find-tensions",
    "plan-experiments",
    "scout",
    "check",
    "normalize",
    "integrity",
    "impact",
    "discord",
    "quickstart",
    "proof",
    "repo",
    "serve",
    "stats",
    "search",
    "tensions",
    "gaps",
    "bridge",
    "export",
    "packet",
    "bench",
    "conformance",
    "version",
    "sign",
    "actor",
    "frontier",
    "queue",
    "registry",
    "init",
    "import",
    "diff",
    "proposals",
    "finding",
    "link",
    "entity",
    "review",
    "note",
    "caveat",
    "revise",
    "reject",
    "history",
    "import-events",
    "retract",
    "propagate",
    "replicate",
    "replications",
    "dataset-add",
    "datasets",
    "code-add",
    "code-artifacts",
    "artifact-add",
    "artifact-to-state",
    "bridge-kit",
    "source-adapter",
    "runtime-adapter",
    "artifacts",
    "artifact-audit",
    "decision-brief",
    "trial-summary",
    "source-verification",
    "source-ingest-plan",
    "clinical-trial-import",
    "negative-result-add",
    "negative-results",
    "trajectory-create",
    "trajectory-step",
    "trajectories",
    "tier-set",
    "locator-repair",
    "span-repair",
    "entity-resolve",
    "entity-add",
    "source-fetch",
    "predict",
    "resolve",
    "predictions",
    "predictions-expire",
    "calibration",
    "consensus",
    "federation",
    "causal",
    "status",
    "log",
    "inbox",
    "ask",
    "bridges",
    "workbench",
    "verify",
    "ingest",
    "propose",
    "accept",
    "attest",
    "lineage",
    "carina",
    "atlas",
    "constellation",
];

/// Returns true when `name` is one of the released subcommand names.
pub fn is_science_subcommand(name: &str) -> bool {
    SCIENCE_SUBCOMMANDS
        .iter()
        .any(|&candidate| candidate == name)
}
14606
/// Print the full ("advanced") help text: every released subcommand with a
/// one-line description, plus quick-start, substrate-health, and publishing
/// walkthroughs. The `{}` placeholder is filled with the crate version.
fn print_strict_help() {
    println!(
        r#"Vela {}
Version control for scientific state.

Usage:
 vela <COMMAND>

Core flow (v0.74):
 init Initialize a split frontier repo
 ingest Ingest a paper, dataset, or Carina packet (dispatches by file type)
 propose Create a finding.review proposal
 diff Preview a `vpr_*` proposal, or compare two frontier files
 accept Apply a proposal under reviewer authority
 attest Sign findings under your private key
 log Recent canonical state events
 lineage State-transition replay for one finding
 serve Local Workbench (findings, evidence, diff, lineage)

Read-only inspection:
 check Validate a frontier, repo, or proof packet
 integrity Check accepted frontier state integrity
 impact Report downstream finding impact
 normalize Apply deterministic frontier-state repairs
 proof Export and validate a proof packet
 repo Inspect split frontier repository status and shape
 stats Show frontier statistics
 search Search findings
 tensions List candidate contradictions and tensions
 gaps Inspect and rank candidate gap review leads
 bridge Find candidate cross-domain connections

Advanced (proposal-creation, agent inboxes, federation):
 scout Run Literature Scout against a folder of PDFs (writes proposals)
 compile-notes Run Notes Compiler against a Markdown vault (writes proposals)
 compile-code Run Code & Notebook Analyst against a research repo (writes proposals)
 compile-data Run Datasets agent against a folder of CSV/TSV data (writes proposals)
 review-pending Run Reviewer Agent: score every pending proposal (writes notes)
 find-tensions Run Contradiction Finder: surface real contradictions among findings
 plan-experiments Run Experiment Planner: propose experiments for open questions / hypotheses
 export Export frontier artifacts
 packet Inspect or validate proof packets
 bench Run deterministic benchmark gates
 conformance Run protocol conformance vectors
 sign Optional signing and signature verification
 runtime-adapter
 Normalize external runtime exports into reviewable proposals
 version Show version information
 import Import frontier.json into a .vela repo
 proposals Inspect, validate, export, import, accept, or reject write proposals
 artifact-to-state
 Import a Carina artifact packet as reviewable proposals
 bridge-kit
 Validate Carina artifact packets before importing runtime output
 source-adapter
 Run reviewed source adapters into artifact-to-state proposals
 finding Add or manage finding bundles as frontier state
 link Add typed links between findings (incl. cross-frontier vf_at-vfr targets)
 entity Resolve unresolved entities against a bundled common-entity table (v0.19)
 frontier Scaffold (`new`), materialize, and manage frontier metadata + deps
 actor Register Ed25519 publisher identities in a frontier
 registry Publish, list, or pull frontiers (open hub at https://vela-hub.fly.dev)
 review Create a review proposal or review interactively
 note Add a lightweight note to a finding
 caveat Create an explicit caveat proposal
 revise Create a confidence revision proposal
 reject Create a rejection proposal
 history Show state-transition history for one finding (v0.74 alias: `lineage`)
 import-events Import review/state events from a packet or JSON file
 retract Create a retraction proposal
 propagate Simulate impact over declared dependency links
 artifact-add Register a content-addressed artifact
 artifacts List content-addressed artifacts
 artifact-audit Audit artifact locators, hashes, references, and profiles
 decision-brief Show the validated decision brief projection
 trial-summary Show the validated trial outcome projection
 source-verification Show the validated source verification projection
 source-ingest-plan Show the validated source ingest plan
 clinical-trial-import Import a ClinicalTrials.gov record as an artifact
 locator-repair Mechanically repair an evidence atom's missing source locator
 span-repair Mechanically repair a finding's missing evidence span
 entity-resolve Resolve a finding entity to a canonical id
 source-fetch Fetch metadata + abstract for a doi:/pmid:/nct: source
 atlas Compose multiple frontiers into a domain-level Atlas (vat_*) (v0.78+)
 constellation Compose multiple Atlases into a cross-domain Constellation (vco_*) (v0.82+)

Quick start (the demo):
 vela init demo --name "Your bounded question"
 vela ingest paper.pdf --frontier demo
 vela propose demo <vf_id> --status accepted --reason "..." --reviewer reviewer:you --apply
 vela diff <vpr_id> --frontier demo
 vela accept demo <vpr_id> --reviewer reviewer:you --reason "applied"
 vela serve --path demo

Substrate health:
 vela frontier materialize my-frontier --json
 vela repo status my-frontier --json
 vela proof verify my-frontier --json
 vela check my-frontier --strict --json

Monolithic frontier file:
 vela frontier new frontier.json --name "Your bounded question"
 vela finding add frontier.json --assertion "..." --author "reviewer:demo" --apply
 vela check frontier.json --json
 FINDING_ID=$(jq -r '.findings[0].id' frontier.json)
 vela review frontier.json "$FINDING_ID" --status contested --reason "Mouse-only evidence" --reviewer reviewer:demo --apply

Publish your own frontier (see docs/PUBLISHING.md):
 vela frontier new ./frontier.json --name "Your bounded question"
 vela finding add ./frontier.json --assertion "..." --author "reviewer:you" --apply
 vela sign generate-keypair --out keys
 vela actor add ./frontier.json reviewer:you --pubkey "$(cat keys/public.key)"
 vela registry publish ./frontier.json --owner reviewer:you --key keys/private.key \
 --to https://vela-hub.fly.dev
"#,
        env!("CARGO_PKG_VERSION")
    );
}
14725
/// Signature of the async `scout` handler. The concrete implementation is
/// supplied externally via `register_scout_handler`.
pub type ScoutHandler = fn(
    folder: PathBuf,
    frontier: PathBuf,
    backend: Option<String>,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

/// Registered `scout` implementation, if any (set at most once).
static SCOUT_HANDLER: OnceLock<ScoutHandler> = OnceLock::new();

/// Install the `scout` implementation. Later registrations are silently
/// ignored because `OnceLock::set` only succeeds on the first call.
pub fn register_scout_handler(handler: ScoutHandler) {
    let _ = SCOUT_HANDLER.set(handler);
}
14750
/// Signature of the async `atlas init` handler.
pub type AtlasInitHandler = fn(
    atlases_root: PathBuf,
    name: String,
    domain: String,
    scope_note: Option<String>,
    frontiers: Vec<PathBuf>,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

/// Registered `atlas init` implementation, if any (set at most once).
static ATLAS_INIT_HANDLER: OnceLock<AtlasInitHandler> = OnceLock::new();

/// Install the `atlas init` implementation; first registration wins.
pub fn register_atlas_init_handler(handler: AtlasInitHandler) {
    let _ = ATLAS_INIT_HANDLER.set(handler);
}

/// Signature of the async `atlas materialize` handler.
pub type AtlasMaterializeHandler =
    fn(atlases_root: PathBuf, name: String, json: bool) -> Pin<Box<dyn Future<Output = ()> + Send>>;

/// Registered `atlas materialize` implementation, if any.
static ATLAS_MATERIALIZE_HANDLER: OnceLock<AtlasMaterializeHandler> = OnceLock::new();

/// Install the `atlas materialize` implementation; first registration wins.
pub fn register_atlas_materialize_handler(handler: AtlasMaterializeHandler) {
    let _ = ATLAS_MATERIALIZE_HANDLER.set(handler);
}

/// Signature of the async `atlas serve` handler.
pub type AtlasServeHandler = fn(
    atlases_root: PathBuf,
    name: String,
    port: u16,
    open_browser: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

/// Registered `atlas serve` implementation, if any.
static ATLAS_SERVE_HANDLER: OnceLock<AtlasServeHandler> = OnceLock::new();

/// Install the `atlas serve` implementation; first registration wins.
pub fn register_atlas_serve_handler(handler: AtlasServeHandler) {
    let _ = ATLAS_SERVE_HANDLER.set(handler);
}

/// Signature of the async `atlas update` handler.
pub type AtlasUpdateHandler = fn(
    atlases_root: PathBuf,
    name: String,
    add_frontier: Vec<PathBuf>,
    remove_vfr_id: Vec<String>,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

/// Registered `atlas update` implementation, if any.
static ATLAS_UPDATE_HANDLER: OnceLock<AtlasUpdateHandler> = OnceLock::new();

/// Install the `atlas update` implementation; first registration wins.
pub fn register_atlas_update_handler(handler: AtlasUpdateHandler) {
    let _ = ATLAS_UPDATE_HANDLER.set(handler);
}
14813
/// Signature of the async `constellation init` handler.
pub type ConstellationInitHandler = fn(
    constellations_root: PathBuf,
    name: String,
    scope_note: Option<String>,
    atlases: Vec<PathBuf>,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

/// Registered `constellation init` implementation, if any (set at most once).
static CONSTELLATION_INIT_HANDLER: OnceLock<ConstellationInitHandler> = OnceLock::new();

/// Install the `constellation init` implementation; first registration wins.
pub fn register_constellation_init_handler(handler: ConstellationInitHandler) {
    let _ = CONSTELLATION_INIT_HANDLER.set(handler);
}

/// Signature of the async `constellation materialize` handler.
pub type ConstellationMaterializeHandler = fn(
    constellations_root: PathBuf,
    name: String,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

/// Registered `constellation materialize` implementation, if any.
static CONSTELLATION_MATERIALIZE_HANDLER: OnceLock<ConstellationMaterializeHandler> =
    OnceLock::new();

/// Install the `constellation materialize` implementation; first registration wins.
pub fn register_constellation_materialize_handler(handler: ConstellationMaterializeHandler) {
    let _ = CONSTELLATION_MATERIALIZE_HANDLER.set(handler);
}

/// Signature of the async `constellation serve` handler.
pub type ConstellationServeHandler = fn(
    constellations_root: PathBuf,
    name: String,
    port: u16,
    open_browser: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

/// Registered `constellation serve` implementation, if any.
static CONSTELLATION_SERVE_HANDLER: OnceLock<ConstellationServeHandler> = OnceLock::new();

/// Install the `constellation serve` implementation; first registration wins.
pub fn register_constellation_serve_handler(handler: ConstellationServeHandler) {
    let _ = CONSTELLATION_SERVE_HANDLER.set(handler);
}
14856
/// Signature of the async `compile-notes` handler.
pub type NotesHandler = fn(
    vault: PathBuf,
    frontier: PathBuf,
    backend: Option<String>,
    max_files: Option<usize>,
    max_items_per_category: Option<usize>,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

/// Registered `compile-notes` implementation, if any (set at most once).
static NOTES_HANDLER: OnceLock<NotesHandler> = OnceLock::new();

/// Install the `compile-notes` implementation; first registration wins.
pub fn register_notes_handler(handler: NotesHandler) {
    let _ = NOTES_HANDLER.set(handler);
}

/// Signature of the async `compile-code` handler.
pub type CodeHandler = fn(
    root: PathBuf,
    frontier: PathBuf,
    backend: Option<String>,
    max_files: Option<usize>,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

/// Registered `compile-code` implementation, if any.
static CODE_HANDLER: OnceLock<CodeHandler> = OnceLock::new();

/// Install the `compile-code` implementation; first registration wins.
pub fn register_code_handler(handler: CodeHandler) {
    let _ = CODE_HANDLER.set(handler);
}

/// Signature of the async `compile-data` (datasets) handler.
pub type DatasetsHandler = fn(
    root: PathBuf,
    frontier: PathBuf,
    backend: Option<String>,
    sample_rows: Option<usize>,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

/// Registered datasets implementation, if any.
static DATASETS_HANDLER: OnceLock<DatasetsHandler> = OnceLock::new();

/// Install the datasets implementation; first registration wins.
pub fn register_datasets_handler(handler: DatasetsHandler) {
    let _ = DATASETS_HANDLER.set(handler);
}

/// Signature of the async `review-pending` (reviewer agent) handler.
pub type ReviewerHandler = fn(
    frontier: PathBuf,
    backend: Option<String>,
    max_proposals: Option<usize>,
    batch_size: usize,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

/// Registered reviewer implementation, if any.
static REVIEWER_HANDLER: OnceLock<ReviewerHandler> = OnceLock::new();

/// Install the reviewer implementation; first registration wins.
pub fn register_reviewer_handler(handler: ReviewerHandler) {
    let _ = REVIEWER_HANDLER.set(handler);
}

/// Signature of the async `find-tensions` handler.
pub type TensionsHandler = fn(
    frontier: PathBuf,
    backend: Option<String>,
    max_findings: Option<usize>,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

/// Registered tensions implementation, if any.
static TENSIONS_HANDLER: OnceLock<TensionsHandler> = OnceLock::new();

/// Install the tensions implementation; first registration wins.
pub fn register_tensions_handler(handler: TensionsHandler) {
    let _ = TENSIONS_HANDLER.set(handler);
}

/// Signature of the async `plan-experiments` handler.
pub type ExperimentsHandler = fn(
    frontier: PathBuf,
    backend: Option<String>,
    max_findings: Option<usize>,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

/// Registered experiments implementation, if any.
static EXPERIMENTS_HANDLER: OnceLock<ExperimentsHandler> = OnceLock::new();

/// Install the experiments implementation; first registration wins.
pub fn register_experiments_handler(handler: ExperimentsHandler) {
    let _ = EXPERIMENTS_HANDLER.set(handler);
}
14959
/// Walk upward from the current working directory and return the first
/// ancestor (including the cwd itself) that contains a `.vela/` directory.
/// Returns `None` when no such ancestor exists or the cwd is unavailable.
fn find_vela_repo() -> Option<PathBuf> {
    let cwd = std::env::current_dir().ok()?;
    cwd.ancestors()
        .find(|dir| dir.join(".vela").is_dir())
        .map(Path::to_path_buf)
}
14986
/// Print the short, curated help screen used by the interactive session and
/// by bare `vela help` (the full surface lives in `print_strict_help`,
/// reachable via `vela help advanced`).
fn print_session_help() {
    println!();
    println!(
        " Vela {} · Version control for scientific state.",
        env!("CARGO_PKG_VERSION")
    );
    println!();
    println!(" USAGE");
    println!(" vela Open a session against the nearest .vela/ repo");
    println!(" vela <command> Run a specific subcommand");
    println!(" vela help advanced Full subcommand list (30+ commands)");
    println!();
    println!(" CORE FLOW (v0.74)");
    println!(" init Initialize a split frontier repo");
    println!(" ingest <path> Ingest a paper, dataset, or Carina packet");
    println!(" propose Create a finding.review proposal");
    println!(" diff <vpr_id> Preview a pending proposal vs current frontier");
    println!(" accept <vpr_id> Apply a proposal under reviewer authority");
    println!(" attest Sign findings under your private key");
    println!(" log Recent canonical state events");
    println!(" lineage <vf_id> State-transition replay for one finding");
    println!(" serve Local Workbench (find, evidence, diff, lineage)");
    println!();
    println!(" DAILY ALSO-RANS");
    println!(" status One-screen frontier health");
    println!(" inbox Pending review proposals");
    println!(" review Review a proposal interactively");
    println!(" ask <question> Plain-text query against the frontier");
    println!();
    println!(" REASONING (Pearl 1 → 2 → 3)");
    println!(" causal audit Per-finding identifiability");
    println!(" causal effect <src> --on <tgt> Pairwise back-door / front-door");
    println!(" causal counterfactual <src> --target <tgt> --set-to <0..1>");
    println!();
    println!(" COMPOSITION");
    println!(" bridge <a> <b> Cross-frontier hypotheses");
    println!(" consensus <vf> Field consensus over similar claims");
    println!();
    println!(" PUBLISH");
    println!(" registry publish Push a signed manifest to the hub");
    println!(" federation peer-add Federate with another hub");
    println!();
    println!(" In session, type a single letter for a quick verb, or any");
    println!(" question in plain text. `q` or `exit` quits.");
    println!();
}
15033
/// Render the interactive-session dashboard: frontier identity, proposal
/// inbox, causal-audit summary, bridge and replication roll-ups, and the
/// single-letter verb menu read by `run_session_verb`.
fn print_session_dashboard(project: &crate::project::Project, repo_path: &Path) {
    use crate::causal_reasoning::{audit_frontier, summarize_audit};

    let label = frontier_label(project);
    let vfr = project.frontier_id();
    // Only the first 16 chars of the frontier id are shown in the header.
    let vfr_short = vfr.chars().take(16).collect::<String>();

    // Tally pending proposals, grouped by proposal kind for the inbox line.
    let mut pending = 0usize;
    let mut by_kind: std::collections::BTreeMap<String, usize> = std::collections::BTreeMap::new();
    for p in &project.proposals {
        if p.status == "pending_review" {
            pending += 1;
            *by_kind.entry(p.kind.clone()).or_insert(0) += 1;
        }
    }

    let audit = audit_frontier(project);
    let audit_summary = summarize_audit(&audit);

    // Bridges are stored as one JSON file each under .vela/bridges; count
    // the files, and classify the ones that parse by status.
    let bridges_dir = repo_path.join(".vela/bridges");
    let mut bridge_total = 0usize;
    let mut bridge_confirmed = 0usize;
    let mut bridge_derived = 0usize;
    if bridges_dir.is_dir()
        && let Ok(entries) = std::fs::read_dir(&bridges_dir)
    {
        for entry in entries.flatten() {
            let path = entry.path();
            if path.extension().and_then(|s| s.to_str()) != Some("json") {
                continue;
            }
            // Every .json file counts toward the total, even if unparseable.
            bridge_total += 1;
            if let Ok(data) = std::fs::read_to_string(&path)
                && let Ok(b) = serde_json::from_str::<crate::bridge::Bridge>(&data)
            {
                match b.status {
                    crate::bridge::BridgeStatus::Confirmed => bridge_confirmed += 1,
                    crate::bridge::BridgeStatus::Derived => bridge_derived += 1,
                    _ => {}
                }
            }
        }
    }

    // Replication roll-up: distinct findings with at least one successful
    // replication, plus the count of failed attempts.
    let mut targets_with_success = std::collections::HashSet::new();
    let mut failed_replications = 0usize;
    for r in &project.replications {
        if r.outcome == "replicated" {
            targets_with_success.insert(r.target_finding.clone());
        } else if r.outcome == "failed" {
            failed_replications += 1;
        }
    }

    println!();
    // The header shows the compiler version without its "vela/" prefix.
    let version = crate::project::VELA_COMPILER_VERSION
        .strip_prefix("vela/")
        .unwrap_or(crate::project::VELA_COMPILER_VERSION);
    println!(
        " {}",
        format!("VELA · {version} · {label}")
            .to_uppercase()
            .dimmed()
    );
    println!(" {}", style::tick_row(60));
    println!(
        " vfr_id {}… repo {}",
        vfr_short,
        repo_path.display()
    );
    println!(
        " findings {:>4} events {} proposals pending {}",
        project.findings.len(),
        project.events.len(),
        pending
    );

    if pending > 0 {
        let parts: Vec<String> = by_kind.iter().map(|(k, n)| format!("{n} {k}")).collect();
        println!(" {} · {}", style::warn("inbox"), parts.join(" "));
    }
    if audit_summary.underidentified > 0 || audit_summary.conditional > 0 {
        println!(
            " {} · {} underidentified · {} conditional",
            // Escalate from warn to lost styling when anything is outright
            // underidentified (not merely conditional).
            if audit_summary.underidentified > 0 {
                style::lost("audit")
            } else {
                style::warn("audit")
            },
            audit_summary.underidentified,
            audit_summary.conditional,
        );
    }
    if bridge_total > 0 {
        println!(
            " {} · {} total · {} confirmed · {} awaiting review",
            style::ok("bridges"),
            bridge_total,
            bridge_confirmed,
            bridge_derived
        );
    }
    if !project.replications.is_empty() {
        println!(
            " {} · {} records · {} findings replicated · {} failed",
            style::ok("replications"),
            project.replications.len(),
            targets_with_success.len(),
            failed_replications,
        );
    }

    println!();
    println!(" type a verb or ask anything:");
    println!(" a audit problems i inbox (pending) b bridges");
    println!(" g causal graph l log (recent) c counterfactuals");
    println!(" s refresh status h help (more verbs) q quit");
    println!();
}
15153
/// Handle one verb typed at the session prompt.
///
/// Returns `true` when the verb was recognized and handled (including cases
/// where handling printed an error); `false` tells the caller to treat the
/// input as a free-text question instead.
fn run_session_verb(verb: &str, repo_path: &Path) -> bool {
    match verb {
        // Causal audit, restricted to problem findings only.
        "a" | "audit" => {
            let action = CausalAction::Audit {
                frontier: repo_path.to_path_buf(),
                problems_only: true,
                json: false,
            };
            cmd_causal(action);
            true
        }
        // Pending-review proposal inbox.
        "i" | "inbox" => {
            let action = ProposalAction::List {
                frontier: repo_path.to_path_buf(),
                status: Some("pending_review".into()),
                json: false,
            };
            cmd_proposals(action);
            true
        }
        // All bridges, regardless of status.
        "b" | "bridges" => {
            let action = BridgesAction::List {
                frontier: repo_path.to_path_buf(),
                status: None,
                json: false,
            };
            cmd_bridges(action);
            true
        }
        // Whole causal graph, no node focus.
        "g" | "graph" => {
            let action = CausalAction::Graph {
                frontier: repo_path.to_path_buf(),
                node: None,
                json: false,
            };
            cmd_causal(action);
            true
        }
        // Last 10 canonical events.
        "l" | "log" => {
            cmd_log(repo_path, 10, None, false);
            true
        }
        // List finding pairs eligible for counterfactual queries: links of
        // type depends/supports that carry a mechanism annotation.
        "c" | "counterfactual" | "counterfactuals" => {
            let project = match repo::load_from_path(repo_path) {
                Ok(p) => p,
                Err(e) => {
                    eprintln!("{} {e}", style::err_prefix());
                    // Load failure still counts as "handled".
                    return true;
                }
            };
            println!();
            println!(" {}", "VELA · COUNTERFACTUAL · LIVE PAIRS".dimmed());
            println!(" {}", style::tick_row(60));
            let mut pairs = 0usize;
            for child in &project.findings {
                for link in &child.links {
                    if !matches!(link.link_type.as_str(), "depends" | "supports") {
                        continue;
                    }
                    if link.mechanism.is_none() {
                        continue;
                    }
                    // Targets may be namespaced "prefix:id"; show the part
                    // after the first ':' (or the whole target if none).
                    let parent = link
                        .target
                        .split_once(':')
                        .map_or(link.target.as_str(), |(_, r)| r);
                    pairs += 1;
                    // Print at most the first 10 pairs but keep counting.
                    if pairs <= 10 {
                        println!(" · do({parent}) → {}", child.id);
                    }
                }
            }
            if pairs == 0 {
                println!(" no mechanism-annotated edges found.");
                println!(" add a mechanism via the link's `mechanism` field; see /counterfactual");
            } else {
                println!();
                println!(" {pairs} live pair(s). Run with:");
                println!(" vela causal counterfactual <repo> <src> --target <tgt> --set-to 0.5");
            }
            println!();
            true
        }
        // Reload the repo and redraw the dashboard.
        "s" | "status" | "refresh" => {
            match repo::load_from_path(repo_path) {
                Ok(p) => print_session_dashboard(&p, repo_path),
                Err(e) => eprintln!("{} {e}", style::err_prefix()),
            }
            true
        }
        "h" | "help" | "?" => {
            print_session_help();
            true
        }
        // Unrecognized: let the caller fall through to free-text handling.
        _ => false,
    }
}
15258
15259fn run_session() {
15260 let repo_path = match find_vela_repo() {
15261 Some(p) => p,
15262 None => {
15263 println!();
15264 println!(
15265 " {}",
15266 "VELA · NO FRONTIER FOUND IN CWD OR ANY PARENT".dimmed()
15267 );
15268 println!(" {}", style::tick_row(60));
15269 println!(" Run `vela init` here to create a frontier, or cd into one.");
15270 println!(" Or run `vela help` for the command list.");
15271 println!();
15272 return;
15273 }
15274 };
15275
15276 let project = match repo::load_from_path(&repo_path) {
15277 Ok(p) => p,
15278 Err(e) => {
15279 eprintln!("{} failed to load .vela/ repo: {e}", style::err_prefix());
15280 std::process::exit(1);
15281 }
15282 };
15283
15284 print_session_dashboard(&project, &repo_path);
15285
15286 use std::io::{BufRead, Write};
15287 let stdin = std::io::stdin();
15288 let mut stdout = std::io::stdout();
15289 loop {
15290 print!(" > ");
15291 stdout.flush().ok();
15292 let mut line = String::new();
15293 if stdin.lock().read_line(&mut line).is_err() {
15294 break;
15295 }
15296 let input = line.trim();
15297 if input.is_empty() {
15298 continue;
15299 }
15300 if matches!(input, "q" | "quit" | "exit") {
15301 break;
15302 }
15303 if run_session_verb(input, &repo_path) {
15304 continue;
15305 }
15306 let project = match repo::load_from_path(&repo_path) {
15308 Ok(p) => p,
15309 Err(e) => {
15310 eprintln!("{} {e}", style::err_prefix());
15311 continue;
15312 }
15313 };
15314 answer(&project, input, false);
15315 }
15316}
15317
15318pub fn run_from_args() {
15319 style::init();
15320 let args = std::env::args().collect::<Vec<_>>();
15321 match args.get(1).map(String::as_str) {
15322 None => {
15326 run_session();
15327 return;
15328 }
15329 Some("-h" | "--help" | "help") => {
15330 if args.get(2).map(String::as_str) == Some("advanced") {
15333 print_strict_help();
15334 } else {
15335 print_session_help();
15336 }
15337 return;
15338 }
15339 Some("-V" | "--version" | "version") => {
15340 println!("vela {}", env!("CARGO_PKG_VERSION"));
15341 return;
15342 }
15343 Some("proof") if args.get(2).map(String::as_str) == Some("verify") => {
15344 let json = args.iter().any(|arg| arg == "--json");
15345 let frontier = args
15346 .iter()
15347 .skip(3)
15348 .find(|arg| !arg.starts_with('-'))
15349 .map(PathBuf::from)
15350 .unwrap_or_else(|| {
15351 eprintln!(
15352 "{} proof verify requires a frontier repo",
15353 style::err_prefix()
15354 );
15355 std::process::exit(2);
15356 });
15357 cmd_proof_verify(&frontier, json);
15358 return;
15359 }
15360 Some("proof") if args.get(2).map(String::as_str) == Some("explain") => {
15361 let frontier = args
15362 .iter()
15363 .skip(3)
15364 .find(|arg| !arg.starts_with('-'))
15365 .map(PathBuf::from)
15366 .unwrap_or_else(|| {
15367 eprintln!(
15368 "{} proof explain requires a frontier repo",
15369 style::err_prefix()
15370 );
15371 std::process::exit(2);
15372 });
15373 cmd_proof_explain(&frontier);
15374 return;
15375 }
15376 Some(cmd) if !is_science_subcommand(cmd) => {
15377 eprintln!(
15378 "{} unknown or non-release command: {cmd}",
15379 style::err_prefix()
15380 );
15381 eprintln!("run `vela --help` for the strict v0 command surface.");
15382 std::process::exit(2);
15383 }
15384 Some(_) => {}
15385 }
15386 let runtime = tokio::runtime::Runtime::new().expect("failed to create tokio runtime");
15387 runtime.block_on(run_command());
15388}
15389
/// Print `message` to stderr with the standard error prefix, then abort the
/// whole process with exit code 1. Never returns.
fn fail(message: &str) -> ! {
    eprintln!("{} {message}", style::err_prefix());
    std::process::exit(1);
}
15394
15395fn validate_enum_arg(flag: &str, value: &str, valid: &[&str]) {
15400 if !valid.contains(&value) {
15401 fail(&format!(
15402 "invalid {flag} '{value}'. Valid: {}",
15403 valid.join(", ")
15404 ));
15405 }
15406}
15407
15408fn fail_return<T>(message: &str) -> T {
15409 fail(message)
15410}