1use crate::{
2 benchmark, bridge, bundle, carina_validate, conformance, decision, diff, events, export,
3 frontier_repo, impact, lint, normalize, packet, project, propagate, proposals, repo, review,
4 search, serve, sign, signals, sources, state, state_integrity, tensions, validate,
5};
6
7use std::collections::BTreeMap;
8use std::future::Future;
9use std::path::{Path, PathBuf};
10use std::pin::Pin;
11use std::sync::OnceLock;
12
13use clap::{Parser, Subcommand};
14use colored::Colorize;
15
16use crate::cli_style as style;
17use reqwest::Client;
18use serde::Serialize;
19use serde_json::{Value, json};
20use sha2::{Digest, Sha256};
21
// Root clap parser for the `vela` binary. All functionality lives in the
// `Commands` subcommand tree; this struct only carries the program metadata
// (name, version, about line) and dispatches to a subcommand.
#[derive(Parser)]
#[command(name = "vela", version)]
#[command(about = "Portable frontier state for science")]
struct Cli {
    #[command(subcommand)]
    command: Commands,
}
29
// All top-level subcommands of the `vela` CLI (clap derive).
//
// Conventions visible across variants:
//   - `frontier: PathBuf` names the frontier state being operated on
//     (positional in some variants, `--frontier` in others).
//   - `--json` switches a command to machine-readable output.
//   - `--dry-run` previews an operation without writing.
//   - `--apply` commits a review-style mutation (vs. staging a proposal).
//   - `--reviewer` / `--actor` / `--by` identify who performed an action.
//
// NOTE(review): the original `///` doc comments (which clap renders as help
// text) were stripped from this chunk; the `//` comments below are inferred
// from names and fields — confirm against the published CLI help.
#[derive(Subcommand)]
enum Commands {
    // Backend-assisted scan of a folder into a frontier.
    Scout {
        folder: PathBuf,
        #[arg(long)]
        frontier: PathBuf,
        // Model/backend selection shared by the compile-style commands.
        #[arg(short, long)]
        backend: Option<String>,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    // Compile a notes vault into frontier state.
    CompileNotes {
        vault: PathBuf,
        #[arg(long)]
        frontier: PathBuf,
        #[arg(short, long)]
        backend: Option<String>,
        #[arg(long)]
        max_files: Option<usize>,
        #[arg(long)]
        max_items_per_category: Option<usize>,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    // Compile a source-code tree into frontier state.
    CompileCode {
        root: PathBuf,
        #[arg(long)]
        frontier: PathBuf,
        #[arg(short, long)]
        backend: Option<String>,
        #[arg(long)]
        max_files: Option<usize>,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    // Backend-assisted review of pending proposals, in batches.
    ReviewPending {
        #[arg(long)]
        frontier: PathBuf,
        #[arg(short, long)]
        backend: Option<String>,
        #[arg(long)]
        max_proposals: Option<usize>,
        // Proposals reviewed per backend call; defaults to one at a time.
        #[arg(long, default_value = "1")]
        batch_size: usize,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    // Backend-assisted search for tensions (conflicting findings).
    FindTensions {
        #[arg(long)]
        frontier: PathBuf,
        #[arg(short, long)]
        backend: Option<String>,
        #[arg(long)]
        max_findings: Option<usize>,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    // Backend-assisted experiment planning over the frontier.
    PlanExperiments {
        #[arg(long)]
        frontier: PathBuf,
        #[arg(short, long)]
        backend: Option<String>,
        #[arg(long)]
        max_findings: Option<usize>,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    // Compile a data directory into frontier state, sampling rows.
    CompileData {
        root: PathBuf,
        #[arg(long)]
        frontier: PathBuf,
        #[arg(short, long)]
        backend: Option<String>,
        #[arg(long)]
        sample_rows: Option<usize>,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    // Validate a frontier: schema, stats, conformance suite, optional --fix.
    Check {
        source: Option<PathBuf>,
        #[arg(long)]
        schema: bool,
        #[arg(long)]
        stats: bool,
        #[arg(long)]
        conformance: bool,
        #[arg(long, default_value = "tests/conformance")]
        conformance_dir: PathBuf,
        #[arg(long)]
        all: bool,
        #[arg(long)]
        schema_only: bool,
        #[arg(long)]
        strict: bool,
        #[arg(long)]
        fix: bool,
        #[arg(long)]
        json: bool,
    },
    // Integrity check of a frontier's state (see `state_integrity` module).
    Integrity {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Traverse downstream impact of a finding, optionally depth-limited.
    Impact {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        depth: Option<usize>,
        #[arg(long)]
        json: bool,
    },
    // Report discordant items; name suggests disagreement detection —
    // TODO(review): confirm semantics of `kind` filter.
    Discord {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
        #[arg(long)]
        kind: Option<String>,
    },
    // Normalize a frontier file (optionally rewriting ids / provenance).
    Normalize {
        source: PathBuf,
        #[arg(short, long)]
        out: Option<PathBuf>,
        #[arg(long)]
        write: bool,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        rewrite_ids: bool,
        #[arg(long)]
        id_map: Option<PathBuf>,
        #[arg(long)]
        resync_provenance: bool,
        #[arg(long)]
        json: bool,
    },
    // Build a proof packet from a frontier using a named template.
    Proof {
        frontier: PathBuf,
        #[arg(long, short = 'o', default_value = "proof-packet")]
        out: PathBuf,
        #[arg(long, default_value = "bbb-alzheimer")]
        template: String,
        #[arg(long)]
        gold: Option<PathBuf>,
        #[arg(long)]
        record_proof_state: bool,
        #[arg(long)]
        json: bool,
    },
    // Repository maintenance (status / doctor).
    Repo {
        #[command(subcommand)]
        action: RepoAction,
    },
    // Serve one frontier (positional) or many (--frontiers); positional is
    // optional only when --frontiers or --setup is given.
    Serve {
        #[arg(required_unless_present_any = ["frontiers", "setup"])]
        frontier: Option<PathBuf>,
        #[arg(long)]
        frontiers: Option<PathBuf>,
        #[arg(short, long)]
        backend: Option<String>,
        // HTTP port; presumably serves MCP/stdio when absent — TODO confirm.
        #[arg(long)]
        http: Option<u16>,
        #[arg(long)]
        setup: bool,
        #[arg(long)]
        check_tools: bool,
        #[arg(long)]
        json: bool,
        #[arg(long)]
        workbench: bool,
    },
    // One-shot status summary of a frontier.
    Status {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Event log, newest-first, filterable by kind.
    Log {
        frontier: PathBuf,
        #[arg(long, default_value = "20")]
        limit: usize,
        #[arg(long)]
        kind: Option<String>,
        #[arg(long)]
        json: bool,
    },
    // Items awaiting attention, filterable by kind.
    Inbox {
        frontier: PathBuf,
        #[arg(long)]
        kind: Option<String>,
        #[arg(long, default_value = "30")]
        limit: usize,
        #[arg(long)]
        json: bool,
    },
    // Free-text question; trailing args are joined into the question.
    Ask {
        frontier: PathBuf,
        #[arg(trailing_var_arg = true)]
        question: Vec<String>,
        #[arg(long)]
        json: bool,
    },
    Stats {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Search one frontier (--source), all under a root (--all), with filters.
    Search {
        query: String,
        #[arg(long)]
        source: Option<PathBuf>,
        #[arg(long)]
        entity: Option<String>,
        #[arg(long)]
        r#type: Option<String>,
        #[arg(long)]
        all: Option<PathBuf>,
        #[arg(long, default_value = "20")]
        limit: usize,
        #[arg(long)]
        json: bool,
    },
    // Rank tensions; --both-high / --cross-domain narrow the set.
    Tensions {
        source: PathBuf,
        #[arg(long)]
        both_high: bool,
        #[arg(long)]
        cross_domain: bool,
        #[arg(long, default_value = "20")]
        top: usize,
        #[arg(long)]
        json: bool,
    },
    Gaps {
        #[command(subcommand)]
        action: GapsAction,
    },
    // Derive bridges across several inputs; --novelty takes an explicit
    // true/false value (ArgAction::Set) and defaults to true.
    Bridge {
        #[arg(required = true)]
        inputs: Vec<PathBuf>,
        #[arg(long, default_value = "true", action = clap::ArgAction::Set)]
        novelty: bool,
        #[arg(long, default_value = "30")]
        top: usize,
    },
    // Export a frontier (csv by default) to stdout or --output.
    Export {
        frontier: PathBuf,
        #[arg(short, long, default_value = "csv")]
        format: String,
        #[arg(short, long)]
        output: Option<PathBuf>,
    },
    Packet {
        #[command(subcommand)]
        action: PacketAction,
    },
    Verify {
        path: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Benchmark against gold data; several modes share this one variant
    // (entity/link gold, suite files, threshold gating).
    Bench {
        frontier: Option<PathBuf>,
        #[arg(long)]
        gold: Option<PathBuf>,
        #[arg(long)]
        candidate: Option<PathBuf>,
        #[arg(long)]
        sources: Option<PathBuf>,
        #[arg(long)]
        threshold: Option<f64>,
        #[arg(long)]
        report: Option<PathBuf>,
        #[arg(long)]
        entity_gold: Option<PathBuf>,
        #[arg(long)]
        link_gold: Option<PathBuf>,
        #[arg(long)]
        suite: Option<PathBuf>,
        #[arg(long)]
        suite_ready: bool,
        #[arg(long)]
        min_f1: Option<f64>,
        #[arg(long)]
        min_precision: Option<f64>,
        #[arg(long)]
        min_recall: Option<f64>,
        // Disable pass/fail gating even when minima are configured.
        #[arg(long)]
        no_thresholds: bool,
        #[arg(long)]
        json: bool,
    },
    // Run the conformance suite in `dir`.
    Conformance {
        #[arg(default_value = "tests/conformance")]
        dir: PathBuf,
    },
    Version,
    Sign {
        #[command(subcommand)]
        action: SignAction,
    },
    Actor {
        #[command(subcommand)]
        action: ActorAction,
    },
    Federation {
        #[command(subcommand)]
        action: FederationAction,
    },
    Causal {
        #[command(subcommand)]
        action: CausalAction,
    },
    Frontier {
        #[command(subcommand)]
        action: FrontierAction,
    },
    Queue {
        #[command(subcommand)]
        action: QueueAction,
    },
    Registry {
        #[command(subcommand)]
        action: RegistryAction,
    },
    // Scaffold a new project at `path` (git init unless --no-git).
    Init {
        #[arg(default_value = ".")]
        path: PathBuf,
        #[arg(long, default_value = "unnamed")]
        name: String,
        #[arg(long, default_value = "default")]
        template: String,
        #[arg(long)]
        no_git: bool,
        #[arg(long)]
        json: bool,
    },
    // Import a frontier file, optionally into an existing one.
    Import {
        frontier: PathBuf,
        #[arg(long)]
        into: Option<PathBuf>,
    },
    // Diff: `target` plus optional second frontier, or --frontier mode;
    // exact target grammar not visible here — TODO confirm.
    Diff {
        target: String,
        frontier_b: Option<PathBuf>,
        #[arg(long)]
        frontier: Option<PathBuf>,
        #[arg(long, default_value = "reviewer:preview")]
        reviewer: String,
        #[arg(long)]
        json: bool,
        #[arg(long)]
        quiet: bool,
    },
    Proposals {
        #[command(subcommand)]
        action: ProposalAction,
    },
    // Fold a proof/artifact packet back into frontier state.
    ArtifactToState {
        frontier: PathBuf,
        packet: PathBuf,
        #[arg(long)]
        actor: String,
        #[arg(long)]
        apply_artifacts: bool,
        #[arg(long)]
        json: bool,
    },
    BridgeKit {
        #[command(subcommand)]
        action: BridgeKitAction,
    },
    SourceAdapter {
        #[command(subcommand)]
        action: SourceAdapterAction,
    },
    RuntimeAdapter {
        #[command(subcommand)]
        action: RuntimeAdapterAction,
    },
    Finding {
        #[command(subcommand)]
        command: FindingCommands,
    },
    Link {
        #[command(subcommand)]
        action: LinkAction,
    },
    // Local workbench UI server (opens a browser unless --no-open).
    Workbench {
        #[arg(default_value = ".")]
        path: PathBuf,
        #[arg(long, default_value_t = 3850)]
        port: u16,
        #[arg(long)]
        no_open: bool,
    },
    Bridges {
        #[command(subcommand)]
        action: BridgesAction,
    },
    Entity {
        #[command(subcommand)]
        action: EntityAction,
    },
    // Record a review decision on a finding; --apply commits it.
    Review {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        status: Option<String>,
        #[arg(long)]
        reason: Option<String>,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    // Attach a free-text note to a finding.
    Note {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        text: String,
        #[arg(long)]
        author: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    // Attach a caveat (same shape as Note).
    Caveat {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        text: String,
        #[arg(long)]
        author: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    // Revise a finding's confidence value, with justification.
    Revise {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        confidence: f64,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    Reject {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    // Event history for one finding, optionally as of a past timestamp.
    History {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        json: bool,
        #[arg(long, value_name = "RFC3339_TIMESTAMP")]
        as_of: Option<String>,
    },
    // Import an event stream from `source` into another frontier.
    ImportEvents {
        source: PathBuf,
        #[arg(long)]
        into: PathBuf,
        #[arg(long)]
        json: bool,
    },
    Retract {
        source: PathBuf,
        finding_id: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    // Manually add an entity mention to a finding.
    EntityAdd {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        entity: String,
        #[arg(long)]
        entity_type: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    // Resolve an entity mention to an external identifier (source + id).
    EntityResolve {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        entity: String,
        #[arg(long)]
        source: String,
        #[arg(long)]
        id: String,
        #[arg(long)]
        confidence: f64,
        #[arg(long)]
        matched_name: Option<String>,
        #[arg(long, default_value = "manual")]
        resolution_method: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    // Fetch an external source by identifier into a cache / output path.
    SourceFetch {
        identifier: String,
        #[arg(long)]
        cache: Option<PathBuf>,
        #[arg(long)]
        out: Option<PathBuf>,
        #[arg(long)]
        refresh: bool,
        #[arg(long)]
        json: bool,
    },
    // Repair an evidence span (section + text) on a finding.
    SpanRepair {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        section: String,
        #[arg(long)]
        text: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    // Repair an atom's source locator.
    LocatorRepair {
        frontier: PathBuf,
        atom_id: String,
        #[arg(long)]
        locator: Option<String>,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },
    // Propagate a retraction or confidence reduction through the graph.
    Propagate {
        frontier: PathBuf,
        #[arg(long)]
        retract: Option<String>,
        #[arg(long)]
        reduce_confidence: Option<String>,
        // New confidence used with --reduce-confidence.
        #[arg(long)]
        to: Option<f64>,
        #[arg(short, long)]
        output: Option<PathBuf>,
    },
    // Record a replication attempt against a target finding; cascades
    // confidence effects unless --no-cascade.
    Replicate {
        frontier: PathBuf,
        target: String,
        #[arg(long)]
        outcome: String,
        #[arg(long)]
        by: String,
        #[arg(long)]
        conditions: String,
        #[arg(long)]
        source_title: String,
        #[arg(long)]
        doi: Option<String>,
        #[arg(long)]
        pmid: Option<String>,
        #[arg(long)]
        sample_size: Option<String>,
        #[arg(long, default_value = "")]
        note: String,
        #[arg(long)]
        previous_attempt: Option<String>,
        #[arg(long, default_value_t = false)]
        no_cascade: bool,
        #[arg(long)]
        json: bool,
    },
    // List replication attempts, optionally for one target.
    Replications {
        frontier: PathBuf,
        #[arg(long)]
        target: Option<String>,
        #[arg(long)]
        json: bool,
    },
    // Register a dataset record (name/version/content-hash provenance).
    DatasetAdd {
        frontier: PathBuf,
        #[arg(long)]
        name: String,
        #[arg(long)]
        version: Option<String>,
        #[arg(long)]
        content_hash: String,
        #[arg(long)]
        url: Option<String>,
        #[arg(long)]
        license: Option<String>,
        #[arg(long)]
        source_title: String,
        #[arg(long)]
        doi: Option<String>,
        #[arg(long)]
        row_count: Option<u64>,
        #[arg(long)]
        json: bool,
    },
    Datasets {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Register a code artifact pinned by path/commit/content-hash.
    CodeAdd {
        frontier: PathBuf,
        #[arg(long)]
        language: String,
        #[arg(long)]
        repo_url: Option<String>,
        #[arg(long)]
        commit: Option<String>,
        #[arg(long)]
        path: String,
        #[arg(long)]
        content_hash: String,
        #[arg(long)]
        line_start: Option<u32>,
        #[arg(long)]
        line_end: Option<u32>,
        #[arg(long)]
        entry_point: Option<String>,
        #[arg(long)]
        json: bool,
    },
    CodeArtifacts {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Deposit a generic artifact; repeatable --target / --metadata flags.
    ArtifactAdd {
        frontier: PathBuf,
        #[arg(long)]
        kind: String,
        #[arg(long)]
        name: String,
        #[arg(long)]
        file: Option<PathBuf>,
        #[arg(long)]
        url: Option<String>,
        #[arg(long)]
        content_hash: Option<String>,
        #[arg(long)]
        media_type: Option<String>,
        #[arg(long)]
        license: Option<String>,
        #[arg(long)]
        source_title: Option<String>,
        #[arg(long)]
        source_url: Option<String>,
        #[arg(long)]
        doi: Option<String>,
        #[arg(long)]
        target: Vec<String>,
        #[arg(long)]
        metadata: Vec<String>,
        #[arg(long, default_value = "public")]
        access_tier: String,
        #[arg(long, default_value = "reviewer:manual")]
        deposited_by: String,
        #[arg(long, default_value = "artifact deposit")]
        reason: String,
        #[arg(long)]
        json: bool,
    },
    Artifacts {
        frontier: PathBuf,
        #[arg(long)]
        target: Option<String>,
        #[arg(long)]
        json: bool,
    },
    ArtifactAudit {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    DecisionBrief {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    TrialSummary {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    SourceVerification {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    SourceIngestPlan {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Import a ClinicalTrials.gov record (live by NCT id, or --input-json).
    ClinicalTrialImport {
        frontier: PathBuf,
        nct_id: String,
        #[arg(long)]
        input_json: Option<PathBuf>,
        #[arg(long)]
        target: Vec<String>,
        #[arg(long, default_value = "reviewer:manual")]
        deposited_by: String,
        #[arg(long, default_value = "clinical trial record import")]
        reason: String,
        #[arg(long, default_value = "ClinicalTrials.gov public record")]
        license: String,
        #[arg(long)]
        json: bool,
    },
    // Deposit a negative result. Field groups: trial-style statistics
    // (endpoint..registry_id) vs bench-style (reagent/observation/attempts);
    // which are required per --kind is enforced elsewhere — TODO confirm.
    NegativeResultAdd {
        frontier: PathBuf,
        #[arg(long)]
        kind: String,
        #[arg(long)]
        deposited_by: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        conditions_text: String,
        #[arg(long, default_value = "")]
        notes: String,
        #[arg(long)]
        target: Vec<String>,
        #[arg(long)]
        endpoint: Option<String>,
        #[arg(long)]
        intervention: Option<String>,
        #[arg(long)]
        comparator: Option<String>,
        #[arg(long)]
        population: Option<String>,
        #[arg(long)]
        n_enrolled: Option<u32>,
        #[arg(long)]
        power: Option<f64>,
        #[arg(long)]
        ci_lower: Option<f64>,
        #[arg(long)]
        ci_upper: Option<f64>,
        #[arg(long)]
        effect_size_threshold: Option<f64>,
        #[arg(long)]
        registry_id: Option<String>,
        #[arg(long)]
        reagent: Option<String>,
        #[arg(long)]
        observation: Option<String>,
        #[arg(long)]
        attempts: Option<u32>,
        #[arg(long)]
        source_title: String,
        #[arg(long)]
        doi: Option<String>,
        #[arg(long)]
        url: Option<String>,
        #[arg(long)]
        year: Option<i32>,
        #[arg(long)]
        json: bool,
    },
    NegativeResults {
        frontier: PathBuf,
        #[arg(long)]
        target: Option<String>,
        #[arg(long)]
        json: bool,
    },
    // Create a research trajectory attached to target findings.
    TrajectoryCreate {
        frontier: PathBuf,
        #[arg(long)]
        deposited_by: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        target: Vec<String>,
        #[arg(long, default_value = "")]
        notes: String,
        #[arg(long)]
        json: bool,
    },
    // Append a step to an existing trajectory.
    TrajectoryStep {
        frontier: PathBuf,
        trajectory_id: String,
        #[arg(long)]
        kind: String,
        #[arg(long)]
        description: String,
        #[arg(long)]
        actor: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        reference: Vec<String>,
        #[arg(long)]
        json: bool,
    },
    Trajectories {
        frontier: PathBuf,
        #[arg(long)]
        target: Option<String>,
        #[arg(long)]
        json: bool,
    },
    // Set the access tier on any object (type + id addressed).
    TierSet {
        frontier: PathBuf,
        #[arg(long)]
        object_type: String,
        #[arg(long)]
        object_id: String,
        #[arg(long)]
        tier: String,
        #[arg(long)]
        actor: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        json: bool,
    },
    // Register a falsifiable prediction (claim + resolution criterion).
    Predict {
        frontier: PathBuf,
        #[arg(long)]
        by: String,
        #[arg(long)]
        claim: String,
        #[arg(long)]
        criterion: String,
        #[arg(long)]
        resolves_by: Option<String>,
        #[arg(long)]
        confidence: f64,
        #[arg(long, default_value = "")]
        target: String,
        #[arg(long, default_value = "affirmed")]
        outcome: String,
        #[arg(long, default_value = "")]
        conditions: String,
        #[arg(long)]
        json: bool,
    },
    // Resolve an open prediction with an observed outcome.
    Resolve {
        frontier: PathBuf,
        prediction: String,
        #[arg(long)]
        outcome: String,
        #[arg(long)]
        matched: bool,
        #[arg(long)]
        by: String,
        #[arg(long, default_value = "1.0")]
        confidence: f64,
        #[arg(long, default_value = "")]
        source_title: String,
        #[arg(long)]
        doi: Option<String>,
        #[arg(long)]
        json: bool,
    },
    // List predictions, filterable by predictor and open-only.
    Predictions {
        frontier: PathBuf,
        #[arg(long)]
        by: Option<String>,
        #[arg(long)]
        open: bool,
        #[arg(long)]
        json: bool,
    },
    // Calibration report for a predictor (or all actors).
    Calibration {
        frontier: PathBuf,
        #[arg(long)]
        actor: Option<String>,
        #[arg(long)]
        json: bool,
    },
    // Expire predictions past their resolves-by date; --now overrides clock.
    PredictionsExpire {
        frontier: PathBuf,
        #[arg(long)]
        now: Option<String>,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    // Consensus view on a target, with selectable weighting scheme and
    // optional causal-claim gating.
    Consensus {
        frontier: PathBuf,
        target: String,
        #[arg(long, default_value = "composite")]
        weighting: String,
        #[arg(long)]
        causal_claim: Option<String>,
        #[arg(long)]
        causal_grade_min: Option<String>,
        #[arg(long)]
        json: bool,
    },

    // General-purpose ingest of a path/identifier into a frontier.
    Ingest {
        path: String,
        #[arg(long)]
        frontier: PathBuf,
        #[arg(short, long)]
        backend: Option<String>,
        #[arg(long)]
        actor: Option<String>,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },

    // Propose a status change on a finding (like Review, but status is
    // required here).
    Propose {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        status: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        apply: bool,
        #[arg(long)]
        json: bool,
    },

    // Accept a pending proposal by id.
    Accept {
        frontier: PathBuf,
        proposal_id: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        json: bool,
    },

    // Attach an attestation; either a raw --signature or a --key to sign
    // with — TODO(review): confirm which combinations are accepted.
    Attest {
        frontier: PathBuf,
        #[arg(long)]
        event: Option<String>,
        #[arg(long)]
        attester: Option<String>,
        #[arg(long)]
        scope_note: Option<String>,
        #[arg(long)]
        proof_id: Option<String>,
        #[arg(long)]
        signature: Option<String>,
        #[arg(long)]
        key: Option<PathBuf>,
        #[arg(long)]
        json: bool,
    },

    // Provenance lineage of a finding, optionally as of a timestamp.
    Lineage {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long, value_name = "RFC3339_TIMESTAMP")]
        as_of: Option<String>,
        #[arg(long)]
        json: bool,
    },

    Carina {
        #[command(subcommand)]
        action: CarinaAction,
    },

    Atlas {
        #[command(subcommand)]
        action: AtlasAction,
    },

    Constellation {
        #[command(subcommand)]
        action: ConstellationAction,
    },
}
1728
// `vela atlas …`: manage an atlas (a named aggregation of frontiers) rooted
// under --atlases-root (default "atlases").
#[derive(Subcommand)]
enum AtlasAction {
    // Create an atlas from a comma-separated list of frontier paths.
    Init {
        name: String,
        #[arg(long, value_delimiter = ',', num_args = 1..)]
        frontiers: Vec<PathBuf>,
        #[arg(long, default_value = "general")]
        domain: String,
        #[arg(long)]
        scope_note: Option<String>,
        #[arg(long, default_value = "atlases")]
        atlases_root: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Rebuild the atlas's materialized view from its member frontiers.
    Materialize {
        name: String,
        #[arg(long, default_value = "atlases")]
        atlases_root: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Serve the atlas UI locally (opens browser unless --no-open).
    Serve {
        name: String,
        #[arg(long, default_value = "atlases")]
        atlases_root: PathBuf,
        #[arg(long, default_value_t = 3848)]
        port: u16,
        #[arg(long)]
        no_open: bool,
    },
    // Add frontiers and/or remove members (by VFR id); both flags repeatable
    // via comma-separated values.
    Update {
        name: String,
        #[arg(long, value_delimiter = ',')]
        add_frontier: Vec<PathBuf>,
        #[arg(long, value_delimiter = ',')]
        remove_vfr_id: Vec<String>,
        #[arg(long, default_value = "atlases")]
        atlases_root: PathBuf,
        #[arg(long)]
        json: bool,
    },
}
1805
// `vela constellation …`: one level above atlases — a named aggregation of
// atlases, rooted under --constellations-root (default "constellations").
#[derive(Subcommand)]
enum ConstellationAction {
    Init {
        name: String,
        #[arg(long, value_delimiter = ',', num_args = 1..)]
        atlases: Vec<PathBuf>,
        #[arg(long)]
        scope_note: Option<String>,
        #[arg(long, default_value = "constellations")]
        constellations_root: PathBuf,
        #[arg(long)]
        json: bool,
    },
    Materialize {
        name: String,
        #[arg(long, default_value = "constellations")]
        constellations_root: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Serve the constellation UI (port 3849 by default, vs 3848 for atlas).
    Serve {
        name: String,
        #[arg(long, default_value = "constellations")]
        constellations_root: PathBuf,
        #[arg(long, default_value_t = 3849)]
        port: u16,
        #[arg(long)]
        no_open: bool,
    },
}
1850
// `vela carina …`: validate documents against Carina primitives (see the
// `carina_validate` module) and inspect available primitive schemas.
#[derive(Subcommand)]
enum CarinaAction {
    // Validate a file, optionally against one named primitive.
    Validate {
        path: PathBuf,
        #[arg(long)]
        primitive: Option<String>,
        #[arg(long)]
        json: bool,
    },
    // List known primitives.
    List {
        #[arg(long)]
        json: bool,
    },
    // Print the schema for one primitive.
    Schema { primitive: String },
}
1880
// `vela packet …`: inspect or validate a proof packet on disk.
#[derive(Subcommand)]
enum PacketAction {
    Inspect {
        path: PathBuf,
        #[arg(long)]
        json: bool,
    },
    Validate {
        path: PathBuf,
        #[arg(long)]
        json: bool,
    },
}
1896
// `vela sign …`: key management and frontier signing (see `sign` module).
#[derive(Subcommand)]
enum SignAction {
    // Generate a keypair under --out (default ".vela/keys").
    GenerateKeypair {
        #[arg(long, default_value = ".vela/keys")]
        out: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Sign a frontier with a private key.
    Apply {
        frontier: PathBuf,
        #[arg(long)]
        private_key: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Verify signatures; public key optional (presumably discovered from the
    // frontier when omitted — TODO confirm).
    Verify {
        frontier: PathBuf,
        #[arg(long)]
        public_key: Option<PathBuf>,
        #[arg(long)]
        json: bool,
    },
    // Set the signature threshold required on a finding.
    ThresholdSet {
        frontier: PathBuf,
        finding_id: String,
        #[arg(long)]
        to: u32,
        #[arg(long)]
        json: bool,
    },
}
1937
// `vela actor …`: manage the actors (people/agents) registered in a frontier.
#[derive(Subcommand)]
enum ActorAction {
    // Register an actor with a public key and optional identity metadata.
    Add {
        frontier: PathBuf,
        id: String,
        #[arg(long)]
        pubkey: String,
        #[arg(long)]
        tier: Option<String>,
        #[arg(long)]
        orcid: Option<String>,
        #[arg(long)]
        clearance: Option<String>,
        #[arg(long)]
        json: bool,
    },
    List {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
}
1974
// `vela causal …`: query the causal graph derived from frontier findings.
#[derive(Subcommand)]
enum CausalAction {
    // Audit causal claims; --problems-only suppresses clean entries.
    Audit {
        frontier: PathBuf,
        #[arg(long)]
        problems_only: bool,
        #[arg(long)]
        json: bool,
    },
    // Estimated effect of `source` on the --on variable.
    Effect {
        frontier: PathBuf,
        source: String,
        #[arg(long)]
        on: String,
        #[arg(long)]
        json: bool,
    },
    // Dump the causal graph, optionally centered on one node.
    Graph {
        frontier: PathBuf,
        #[arg(long)]
        node: Option<String>,
        #[arg(long)]
        json: bool,
    },
    // Counterfactual query: set `intervene_on` to --set-to, read --target.
    Counterfactual {
        frontier: PathBuf,
        intervene_on: String,
        #[arg(long)]
        set_to: f64,
        #[arg(long)]
        target: String,
        #[arg(long)]
        json: bool,
    },
}
2041
// `vela bridges …`: derive and curate cross-frontier bridges (see `bridge`
// module). Confirm/Refute record reviewer verdicts on a derived bridge.
#[derive(Subcommand)]
enum BridgesAction {
    // Derive bridges between two frontiers; --label-a/--label-b name the
    // sides in output (defaults "a"/"b").
    Derive {
        frontier_a: PathBuf,
        #[arg(long, default_value = "a")]
        label_a: String,
        frontier_b: PathBuf,
        #[arg(long, default_value = "b")]
        label_b: String,
        #[arg(long)]
        json: bool,
    },
    List {
        frontier: PathBuf,
        #[arg(long)]
        status: Option<String>,
        #[arg(long)]
        json: bool,
    },
    Show {
        frontier: PathBuf,
        bridge_id: String,
        #[arg(long)]
        json: bool,
    },
    Confirm {
        frontier: PathBuf,
        bridge_id: String,
        #[arg(long)]
        reviewer: Option<String>,
        #[arg(long)]
        note: Option<String>,
        #[arg(long)]
        json: bool,
    },
    Refute {
        frontier: PathBuf,
        bridge_id: String,
        #[arg(long)]
        reviewer: Option<String>,
        #[arg(long)]
        note: Option<String>,
        #[arg(long)]
        json: bool,
    },
}
2109
// `vela federation …`: peer management and state sync between frontiers.
#[derive(Subcommand)]
enum FederationAction {
    // Register a peer (id, URL, public key).
    PeerAdd {
        frontier: PathBuf,
        id: String,
        #[arg(long)]
        url: String,
        #[arg(long)]
        pubkey: String,
        #[arg(long, default_value = "")]
        note: String,
        #[arg(long)]
        json: bool,
    },
    PeerList {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    PeerRemove {
        frontier: PathBuf,
        id: String,
        #[arg(long)]
        json: bool,
    },
    // Sync with a peer, directly (--url override) or relayed --via-hub.
    // --allow-cross-vfr relaxes the same-VFR guard — TODO(review): confirm
    // exact guard semantics.
    Sync {
        frontier: PathBuf,
        peer_id: String,
        #[arg(long)]
        url: Option<String>,
        #[arg(long)]
        via_hub: bool,
        #[arg(long)]
        vfr_id: Option<String>,
        #[arg(long)]
        allow_cross_vfr: bool,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    // Push a conflict resolution to a peer (--to), optionally signing with
    // --key.
    PushResolution {
        frontier: PathBuf,
        conflict_event_id: String,
        #[arg(long = "to")]
        to: String,
        #[arg(long)]
        key: Option<PathBuf>,
        #[arg(long)]
        vfr_id: Option<String>,
        #[arg(long)]
        json: bool,
    },
}
2226
// `vela frontier …`: create frontiers and manage their declared dependencies
// on other frontiers (addressed by VFR id).
#[derive(Subcommand)]
enum FrontierAction {
    // Create a new frontier at `path`; --force overwrites.
    New {
        path: PathBuf,
        #[arg(long)]
        name: String,
        #[arg(long, default_value = "")]
        description: String,
        #[arg(long)]
        force: bool,
        #[arg(long)]
        json: bool,
    },
    Materialize {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Declare a dependency pinned to a locator + snapshot.
    AddDep {
        frontier: PathBuf,
        vfr_id: String,
        #[arg(long)]
        locator: String,
        #[arg(long)]
        snapshot: String,
        #[arg(long)]
        name: Option<String>,
        #[arg(long)]
        json: bool,
    },
    ListDeps {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    RemoveDep {
        frontier: PathBuf,
        vfr_id: String,
        #[arg(long)]
        json: bool,
    },
    // Refresh pinned dependencies from a registry hub.
    RefreshDeps {
        frontier: PathBuf,
        #[arg(long, default_value = "https://vela-hub.fly.dev")]
        from: String,
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        json: bool,
    },
    // Changes in a frontier --since a timestamp or over a --week.
    Diff {
        frontier: PathBuf,
        #[arg(long)]
        since: Option<String>,
        #[arg(long)]
        week: Option<String>,
        #[arg(long)]
        json: bool,
    },
}
2337
// `vela repo …`: repository-level health commands (see `repo` module).
#[derive(Subcommand)]
enum RepoAction {
    Status {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Diagnose and report repository problems.
    Doctor {
        frontier: PathBuf,
        #[arg(long)]
        json: bool,
    },
}
2357
// `vela queue …`: manage a signing queue file (location defaults when
// --queue-file is omitted — resolved elsewhere, not visible here).
#[derive(Subcommand)]
enum QueueAction {
    List {
        #[arg(long)]
        queue_file: Option<PathBuf>,
        #[arg(long)]
        json: bool,
    },
    // Sign queued items as --actor with --key; --yes-to-all (alias --all)
    // skips per-item confirmation.
    Sign {
        #[arg(long)]
        actor: String,
        #[arg(long)]
        key: PathBuf,
        #[arg(long)]
        queue_file: Option<PathBuf>,
        #[arg(long, alias = "all")]
        yes_to_all: bool,
        #[arg(long)]
        json: bool,
    },
    Clear {
        #[arg(long)]
        queue_file: Option<PathBuf>,
        #[arg(long)]
        json: bool,
    },
}
2397
// `vela registry …`: publish, discover, and pull frontiers from a registry
// hub (default hub URL appears on DependsOn; other variants take it
// explicitly or optionally).
#[derive(Subcommand)]
enum RegistryAction {
    List {
        #[arg(long)]
        from: Option<String>,
        #[arg(long)]
        json: bool,
    },
    // Publish a signed frontier; --locator/--to override where it is
    // addressed/published.
    Publish {
        frontier: PathBuf,
        #[arg(long)]
        owner: String,
        #[arg(long)]
        key: PathBuf,
        #[arg(long)]
        locator: Option<String>,
        #[arg(long)]
        to: Option<String>,
        #[arg(long)]
        json: bool,
    },
    // Reverse-dependency query for a VFR id against a hub.
    DependsOn {
        vfr_id: String,
        #[arg(long, default_value = "https://vela-hub.fly.dev")]
        from: String,
        #[arg(long)]
        json: bool,
    },
    // Copy a published frontier from one hub to another.
    Mirror {
        vfr_id: String,
        #[arg(long)]
        from: String,
        #[arg(long)]
        to: String,
        #[arg(long)]
        json: bool,
    },
    // Download a frontier; --transitive also pulls dependencies up to
    // --depth (default 4).
    Pull {
        vfr_id: String,
        #[arg(long)]
        from: Option<String>,
        #[arg(long)]
        out: PathBuf,
        #[arg(long)]
        transitive: bool,
        #[arg(long, default_value = "4")]
        depth: usize,
        #[arg(long)]
        json: bool,
    },
}
2490
// `vela gaps …`: rank knowledge gaps in a frontier.
#[derive(Subcommand)]
enum GapsAction {
    Rank {
        frontier: PathBuf,
        #[arg(long, default_value = "10")]
        top: usize,
        #[arg(long)]
        domain: Option<String>,
        #[arg(long)]
        json: bool,
    },
}
2504
// `vela link …`: manually add a typed link between two findings.
#[derive(Subcommand)]
enum LinkAction {
    Add {
        frontier: PathBuf,
        #[arg(long)]
        from: String,
        #[arg(long)]
        to: String,
        // Link type; "supports" by default. (`r#type` because `type` is a
        // Rust keyword.)
        #[arg(long, default_value = "supports")]
        r#type: String,
        #[arg(long, default_value = "")]
        note: String,
        #[arg(long, default_value = "reviewer")]
        inferred_by: String,
        // Skip existence check on the --to id.
        #[arg(long)]
        no_check_target: bool,
        #[arg(long)]
        json: bool,
    },
}
2543
// `vela entity …`: batch entity resolution over a frontier, and listing of
// known entities (List takes no frontier — source resolved elsewhere).
#[derive(Subcommand)]
enum EntityAction {
    // Resolve entities frontier-wide; --force re-resolves existing ones.
    Resolve {
        frontier: PathBuf,
        #[arg(long)]
        force: bool,
        #[arg(long)]
        json: bool,
    },
    List {
        #[arg(long)]
        json: bool,
    },
}
2566
// Subcommands for `vela finding` (dispatched in run_command via `Commands::Finding`).
// The handlers validate the enum-like string flags against the allow-lists in
// `bundle` (VALID_ASSERTION_TYPES, VALID_EVIDENCE_TYPES,
// VALID_PROVENANCE_SOURCE_TYPES, VALID_ENTITY_TYPES, VALID_CAUSAL_CLAIMS,
// VALID_CAUSAL_EVIDENCE_GRADES) before touching state.
// `//` comments only: clap would surface `///` as help text.
#[derive(Subcommand)]
enum FindingCommands {
    // Create a new finding draft via state::add_finding.
    Add {
        // Path to the frontier (positional argument).
        frontier: PathBuf,
        // Assertion text of the finding (becomes FindingDraftOptions.text).
        #[arg(long)]
        assertion: String,
        // Assertion type; validated against bundle::VALID_ASSERTION_TYPES.
        // Raw identifier because `type` is a Rust keyword.
        #[arg(long, default_value = "mechanism")]
        r#type: String,
        // Human-readable source description.
        #[arg(long, default_value = "manual finding")]
        source: String,
        // Provenance source type; validated against bundle::VALID_PROVENANCE_SOURCE_TYPES.
        #[arg(long, default_value = "expert_assertion")]
        source_type: String,
        // Author of the finding (required — no default).
        #[arg(long)]
        author: String,
        // Initial confidence; manual additions default low (0.3).
        #[arg(long, default_value = "0.3")]
        confidence: f64,
        // Evidence type; validated against bundle::VALID_EVIDENCE_TYPES.
        #[arg(long, default_value = "theoretical")]
        evidence_type: String,
        // Entity list; handler parses this via parse_entities into (name, type)
        // pairs and checks each type against bundle::VALID_ENTITY_TYPES.
        #[arg(long, default_value = "")]
        entities: String,
        // Mark the entity list as human-reviewed.
        #[arg(long)]
        entities_reviewed: bool,
        // Evidence spans; repeatable flag, parsed by parse_evidence_spans.
        #[arg(long)]
        evidence_span: Vec<String>,
        // Flag the finding as a knowledge gap.
        #[arg(long)]
        gap: bool,
        // Flag the finding as negative space.
        #[arg(long)]
        negative_space: bool,
        // Optional bibliographic metadata for provenance.
        #[arg(long)]
        doi: Option<String>,
        #[arg(long)]
        pmid: Option<String>,
        #[arg(long)]
        year: Option<i32>,
        #[arg(long)]
        journal: Option<String>,
        #[arg(long)]
        url: Option<String>,
        // Semicolon-separated author list; handler splits on ';' and trims.
        #[arg(long)]
        source_authors: Option<String>,
        // Free-text experimental conditions.
        #[arg(long)]
        conditions_text: Option<String>,
        // Semicolon-separated species list; handler splits on ';' and trims.
        #[arg(long)]
        species: Option<String>,
        // Study-context flags forwarded verbatim into FindingDraftOptions.
        #[arg(long)]
        in_vivo: bool,
        #[arg(long)]
        in_vitro: bool,
        #[arg(long)]
        human_data: bool,
        #[arg(long)]
        clinical_trial: bool,
        // Emit machine-readable JSON output.
        #[arg(long)]
        json: bool,
        // Apply the change immediately instead of drafting it.
        #[arg(long)]
        apply: bool,
    },
    // Replace an existing finding with a new one via state::supersede_finding.
    // Unlike Add, the handler hard-codes entities_reviewed=false, no evidence
    // spans, gap=false and negative_space=false for the replacement draft.
    Supersede {
        // Path to the frontier (positional argument).
        frontier: PathBuf,
        // Id of the finding being superseded (positional argument).
        old_id: String,
        // Assertion text of the replacement finding.
        #[arg(long)]
        assertion: String,
        // Assertion type; validated against bundle::VALID_ASSERTION_TYPES.
        #[arg(long, default_value = "mechanism")]
        r#type: String,
        // Human-readable source description.
        #[arg(long, default_value = "manual finding")]
        source: String,
        // Provenance source type; validated against bundle::VALID_PROVENANCE_SOURCE_TYPES.
        #[arg(long, default_value = "expert_assertion")]
        source_type: String,
        // Author of the replacement (required).
        #[arg(long)]
        author: String,
        // Why the old finding is being superseded (required).
        #[arg(long)]
        reason: String,
        // Confidence default (0.5) is higher than Add's 0.3 — presumably
        // because a supersession implies stronger evidence; confirm intent.
        #[arg(long, default_value = "0.5")]
        confidence: f64,
        // Evidence type; note the default differs from Add ("experimental"
        // here vs "theoretical" there).
        #[arg(long, default_value = "experimental")]
        evidence_type: String,
        // Entity list; same parse/validation path as Add.
        #[arg(long, default_value = "")]
        entities: String,
        // Optional bibliographic metadata for provenance.
        #[arg(long)]
        doi: Option<String>,
        #[arg(long)]
        pmid: Option<String>,
        #[arg(long)]
        year: Option<i32>,
        #[arg(long)]
        journal: Option<String>,
        #[arg(long)]
        url: Option<String>,
        // Semicolon-separated author list; handler splits on ';' and trims.
        #[arg(long)]
        source_authors: Option<String>,
        // Free-text experimental conditions.
        #[arg(long)]
        conditions_text: Option<String>,
        // Semicolon-separated species list; handler splits on ';' and trims.
        #[arg(long)]
        species: Option<String>,
        // Study-context flags forwarded verbatim into FindingDraftOptions.
        #[arg(long)]
        in_vivo: bool,
        #[arg(long)]
        in_vitro: bool,
        #[arg(long)]
        human_data: bool,
        #[arg(long)]
        clinical_trial: bool,
        // Emit machine-readable JSON output.
        #[arg(long)]
        json: bool,
        // Apply the change immediately instead of drafting it.
        #[arg(long)]
        apply: bool,
    },
    // Set the causal claim/grade on a finding via state::set_causal.
    CausalSet {
        // Path to the frontier (positional argument).
        frontier: PathBuf,
        // Target finding id (positional argument).
        finding_id: String,
        // Causal claim; validated against bundle::VALID_CAUSAL_CLAIMS.
        #[arg(long)]
        claim: String,
        // Optional evidence grade; validated against bundle::VALID_CAUSAL_EVIDENCE_GRADES.
        #[arg(long)]
        grade: Option<String>,
        // Actor recording the change (required).
        #[arg(long)]
        actor: String,
        // Justification for the change (required).
        #[arg(long)]
        reason: String,
        // Emit machine-readable JSON output.
        #[arg(long)]
        json: bool,
    },
}
2757
// Subcommands for `vela proposals` (dispatched in run_command via
// `Commands::Proposals` → cmd_proposals; handler bodies not visible here).
// `//` comments only: clap would surface `///` as help text.
#[derive(Subcommand)]
enum ProposalAction {
    // List proposals in a frontier, optionally filtered by status.
    List {
        // Path to the frontier (positional argument).
        frontier: PathBuf,
        // Optional status filter; valid values live in the handler — confirm there.
        #[arg(long)]
        status: Option<String>,
        // Emit machine-readable JSON output.
        #[arg(long)]
        json: bool,
    },
    // Show a single proposal by id.
    Show {
        frontier: PathBuf,
        // Proposal id (positional; elsewhere in this file these are `vpr_*` ids).
        proposal_id: String,
        #[arg(long)]
        json: bool,
    },
    // Preview the effect of applying a proposal without committing it.
    Preview {
        frontier: PathBuf,
        proposal_id: String,
        // Reviewer identity recorded with the preview.
        #[arg(long, default_value = "reviewer:preview")]
        reviewer: String,
        #[arg(long)]
        json: bool,
    },
    // Import proposals from an external file/frontier into this frontier.
    Import {
        // Destination frontier (positional).
        frontier: PathBuf,
        // Source to import from (positional).
        source: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Validate a proposal source without importing it (no frontier needed).
    Validate {
        source: PathBuf,
        #[arg(long)]
        json: bool,
    },
    // Export proposals to a file, optionally filtered by status.
    Export {
        frontier: PathBuf,
        // Output path (positional).
        output: PathBuf,
        #[arg(long)]
        status: Option<String>,
        #[arg(long)]
        json: bool,
    },
    // Accept a proposal; mirrors the top-level `vela accept`, which applies
    // via proposals::accept_at_path — confirm this action shares that path.
    Accept {
        frontier: PathBuf,
        proposal_id: String,
        // Reviewer identity (required here, unlike Preview's default).
        #[arg(long)]
        reviewer: String,
        // Justification for the decision (required).
        #[arg(long)]
        reason: String,
        #[arg(long)]
        json: bool,
    },
    // Reject a proposal; same required reviewer/reason audit fields as Accept.
    Reject {
        frontier: PathBuf,
        proposal_id: String,
        #[arg(long)]
        reviewer: String,
        #[arg(long)]
        reason: String,
        #[arg(long)]
        json: bool,
    },
}
2829
// Subcommands for `vela source-adapter` (dispatched in run_command via
// `Commands::SourceAdapter` → cmd_source_adapter, which is async).
// `//` comments only: clap would surface `///` as help text.
#[derive(Subcommand)]
enum SourceAdapterAction {
    // Run a named source adapter against a frontier.
    Run {
        // Path to the frontier (positional argument).
        frontier: PathBuf,
        // Adapter name to run (positional argument).
        adapter: String,
        // Actor recorded for the run (required).
        #[arg(long)]
        actor: String,
        // Repeatable `--entry` flag selecting specific entries to process.
        #[arg(long = "entry")]
        entries: Vec<String>,
        // Optional priority filter; valid values live in the handler — confirm there.
        #[arg(long)]
        priority: Option<String>,
        // Also process entries that are normally excluded.
        #[arg(long)]
        include_excluded: bool,
        // Tolerate partial results instead of failing the whole run.
        #[arg(long)]
        allow_partial: bool,
        // Report what would happen without writing changes.
        #[arg(long)]
        dry_run: bool,
        // Optional directory of pre-fetched inputs for the adapter.
        #[arg(long)]
        input_dir: Option<PathBuf>,
        // Apply produced artifacts immediately rather than staging them.
        #[arg(long)]
        apply_artifacts: bool,
        // Emit machine-readable JSON output.
        #[arg(long)]
        json: bool,
    },
}
2867
// Subcommands for `vela runtime-adapter` (dispatched in run_command via
// `Commands::RuntimeAdapter` → cmd_runtime_adapter; synchronous, unlike
// the source-adapter path).
// `//` comments only: clap would surface `///` as help text.
#[derive(Subcommand)]
enum RuntimeAdapterAction {
    // Run a named runtime adapter on a single input file.
    Run {
        // Path to the frontier (positional argument).
        frontier: PathBuf,
        // Adapter name to run (positional argument).
        adapter: String,
        // Input file for the adapter (required flag, not positional).
        #[arg(long)]
        input: PathBuf,
        // Actor recorded for the run (required).
        #[arg(long)]
        actor: String,
        // Report what would happen without writing changes.
        #[arg(long)]
        dry_run: bool,
        // Apply produced artifacts immediately rather than staging them.
        #[arg(long)]
        apply_artifacts: bool,
        // Emit machine-readable JSON output.
        #[arg(long)]
        json: bool,
    },
}
2893
// Subcommands for `vela bridge-kit` (dispatched in run_command via
// `Commands::BridgeKit` → cmd_bridge_kit; handler not visible here).
// `//` comments only: clap would surface `///` as help text.
#[derive(Subcommand)]
enum BridgeKitAction {
    // Validate a bridge-kit source file; no frontier is involved.
    Validate {
        // Path to the source to validate (positional argument).
        source: PathBuf,
        // Emit machine-readable JSON output.
        #[arg(long)]
        json: bool,
    },
}
2905
2906pub async fn run_command() {
2907 dotenvy::dotenv().ok();
2908
2909 match Cli::parse().command {
2910 Commands::Scout {
2911 folder,
2912 frontier,
2913 backend,
2914 dry_run,
2915 json,
2916 } => {
2917 cmd_scout(&folder, &frontier, backend.as_deref(), dry_run, json).await;
2918 }
2919 Commands::CompileNotes {
2920 vault,
2921 frontier,
2922 backend,
2923 max_files,
2924 max_items_per_category,
2925 dry_run,
2926 json,
2927 } => {
2928 cmd_compile_notes(
2929 &vault,
2930 &frontier,
2931 backend.as_deref(),
2932 max_files,
2933 max_items_per_category,
2934 dry_run,
2935 json,
2936 )
2937 .await;
2938 }
2939 Commands::CompileCode {
2940 root,
2941 frontier,
2942 backend,
2943 max_files,
2944 dry_run,
2945 json,
2946 } => {
2947 cmd_compile_code(
2948 &root,
2949 &frontier,
2950 backend.as_deref(),
2951 max_files,
2952 dry_run,
2953 json,
2954 )
2955 .await;
2956 }
2957 Commands::CompileData {
2958 root,
2959 frontier,
2960 backend,
2961 sample_rows,
2962 dry_run,
2963 json,
2964 } => {
2965 cmd_compile_data(
2966 &root,
2967 &frontier,
2968 backend.as_deref(),
2969 sample_rows,
2970 dry_run,
2971 json,
2972 )
2973 .await;
2974 }
2975 Commands::ReviewPending {
2976 frontier,
2977 backend,
2978 max_proposals,
2979 batch_size,
2980 dry_run,
2981 json,
2982 } => {
2983 cmd_review_pending(
2984 &frontier,
2985 backend.as_deref(),
2986 max_proposals,
2987 batch_size,
2988 dry_run,
2989 json,
2990 )
2991 .await;
2992 }
2993 Commands::FindTensions {
2994 frontier,
2995 backend,
2996 max_findings,
2997 dry_run,
2998 json,
2999 } => {
3000 cmd_find_tensions(&frontier, backend.as_deref(), max_findings, dry_run, json).await;
3001 }
3002 Commands::PlanExperiments {
3003 frontier,
3004 backend,
3005 max_findings,
3006 dry_run,
3007 json,
3008 } => {
3009 cmd_plan_experiments(&frontier, backend.as_deref(), max_findings, dry_run, json).await;
3010 }
3011 Commands::Check {
3012 source,
3013 schema,
3014 stats,
3015 conformance,
3016 conformance_dir,
3017 all,
3018 schema_only,
3019 strict,
3020 fix,
3021 json,
3022 } => cmd_check(
3023 source.as_deref(),
3024 schema,
3025 stats,
3026 conformance,
3027 &conformance_dir,
3028 all,
3029 schema_only,
3030 strict,
3031 fix,
3032 json,
3033 ),
3034 Commands::Integrity { frontier, json } => cmd_integrity(&frontier, json),
3035 Commands::Impact {
3036 frontier,
3037 finding_id,
3038 depth,
3039 json,
3040 } => cmd_impact(&frontier, &finding_id, depth, json),
3041 Commands::Discord {
3042 frontier,
3043 json,
3044 kind,
3045 } => cmd_discord(&frontier, json, kind.as_deref()),
3046 Commands::Normalize {
3047 source,
3048 out,
3049 write,
3050 dry_run,
3051 rewrite_ids,
3052 id_map,
3053 resync_provenance,
3054 json,
3055 } => cmd_normalize(
3056 &source,
3057 out.as_deref(),
3058 write,
3059 dry_run,
3060 rewrite_ids,
3061 id_map.as_deref(),
3062 resync_provenance,
3063 json,
3064 ),
3065 Commands::Proof {
3066 frontier,
3067 out,
3068 template,
3069 gold,
3070 record_proof_state,
3071 json,
3072 } => cmd_proof(
3073 &frontier,
3074 &out,
3075 &template,
3076 gold.as_deref(),
3077 record_proof_state,
3078 json,
3079 ),
3080 Commands::Repo { action } => cmd_repo(action),
3081 Commands::Serve {
3082 frontier,
3083 frontiers,
3084 backend,
3085 http,
3086 setup,
3087 check_tools,
3088 json,
3089 workbench,
3090 } => {
3091 if setup {
3092 cmd_mcp_setup(frontier.as_deref(), frontiers.as_deref());
3093 } else if check_tools {
3094 let source =
3095 serve::ProjectSource::from_args(frontier.as_deref(), frontiers.as_deref());
3096 match serve::check_tools(source) {
3097 Ok(report) => {
3098 if json {
3099 println!(
3100 "{}",
3101 serde_json::to_string_pretty(&report)
3102 .expect("failed to serialize tool check report")
3103 );
3104 } else {
3105 print_tool_check_report(&report);
3106 }
3107 }
3108 Err(e) => fail(&format!("Tool check failed: {e}")),
3109 }
3110 } else {
3111 let source =
3112 serve::ProjectSource::from_args(frontier.as_deref(), frontiers.as_deref());
3113 let resolved_port = if workbench {
3115 Some(http.unwrap_or(3848))
3116 } else {
3117 http
3118 };
3119 if let Some(port) = resolved_port {
3120 serve::run_http(source, backend.as_deref(), port, workbench).await;
3121 } else {
3122 serve::run(source, backend.as_deref()).await;
3123 }
3124 }
3125 }
3126 Commands::Status { frontier, json } => cmd_status(&frontier, json),
3127 Commands::Log {
3128 frontier,
3129 limit,
3130 kind,
3131 json,
3132 } => cmd_log(&frontier, limit, kind.as_deref(), json),
3133 Commands::Inbox {
3134 frontier,
3135 kind,
3136 limit,
3137 json,
3138 } => cmd_inbox(&frontier, kind.as_deref(), limit, json),
3139 Commands::Ask {
3140 frontier,
3141 question,
3142 json,
3143 } => cmd_ask(&frontier, &question.join(" "), json),
3144 Commands::Stats { frontier, json } => {
3145 if json {
3146 print_stats_json(&frontier);
3147 } else {
3148 cmd_stats(&frontier);
3149 }
3150 }
3151 Commands::Search {
3152 source,
3153 query,
3154 entity,
3155 r#type,
3156 all,
3157 limit,
3158 json,
3159 } => cmd_search(
3160 source.as_deref(),
3161 &query,
3162 entity.as_deref(),
3163 r#type.as_deref(),
3164 all.as_deref(),
3165 limit,
3166 json,
3167 ),
3168 Commands::Tensions {
3169 source,
3170 both_high,
3171 cross_domain,
3172 top,
3173 json,
3174 } => cmd_tensions(&source, both_high, cross_domain, top, json),
3175 Commands::Gaps { action } => cmd_gaps(action),
3176 Commands::Bridge {
3177 inputs,
3178 novelty,
3179 top,
3180 } => cmd_bridge(&inputs, novelty, top).await,
3181 Commands::Export {
3182 frontier,
3183 format,
3184 output,
3185 } => export::run(&frontier, &format, output.as_deref()),
3186 Commands::Packet { action } => cmd_packet(action),
3187 Commands::Verify { path, json } => cmd_verify(&path, json),
3188 Commands::Bench {
3189 frontier,
3190 gold,
3191 candidate,
3192 sources,
3193 threshold,
3194 report,
3195 entity_gold,
3196 link_gold,
3197 suite,
3198 suite_ready,
3199 min_f1,
3200 min_precision,
3201 min_recall,
3202 no_thresholds,
3203 json,
3204 } => {
3205 if let Some(cand) = candidate.clone() {
3210 let Some(g) = gold.clone() else {
3211 eprintln!(
3212 "{} `vela bench --candidate <…>` requires `--gold <…>`",
3213 style::err_prefix()
3214 );
3215 std::process::exit(2);
3216 };
3217 cmd_agent_bench(
3218 &g,
3219 &cand,
3220 sources.as_deref(),
3221 threshold,
3222 report.as_deref(),
3223 json,
3224 );
3225 } else {
3226 cmd_bench(BenchArgs {
3227 frontier,
3228 gold,
3229 entity_gold,
3230 link_gold,
3231 suite,
3232 suite_ready,
3233 min_f1,
3234 min_precision,
3235 min_recall,
3236 no_thresholds,
3237 json,
3238 });
3239 }
3240 }
3241 Commands::Conformance { dir } => {
3242 let _ = conformance::run(&dir);
3243 }
3244 Commands::Version => println!("vela 0.36.0"),
3245 Commands::Sign { action } => cmd_sign(action),
3246 Commands::Actor { action } => cmd_actor(action),
3247 Commands::Federation { action } => cmd_federation(action),
3248 Commands::Causal { action } => cmd_causal(action),
3249 Commands::Frontier { action } => cmd_frontier(action),
3250 Commands::Queue { action } => cmd_queue(action),
3251 Commands::Registry { action } => cmd_registry(action),
3252 Commands::Init {
3253 path,
3254 name,
3255 template,
3256 no_git,
3257 json,
3258 } => cmd_init(&path, &name, &template, !no_git, json),
3259 Commands::Import { frontier, into } => cmd_import(&frontier, into.as_deref()),
3260 Commands::Diff {
3261 target,
3262 frontier_b,
3263 frontier,
3264 reviewer,
3265 json,
3266 quiet,
3267 } => {
3268 if target.starts_with("vpr_") {
3273 let frontier_root = frontier
3274 .clone()
3275 .or_else(|| frontier_b.clone())
3276 .unwrap_or_else(|| std::path::PathBuf::from("."));
3277 let preview = proposals::preview_at_path(&frontier_root, &target, &reviewer)
3278 .unwrap_or_else(|e| fail_return(&e));
3279 let payload = json!({
3280 "ok": true,
3281 "command": "diff.proposal",
3282 "frontier": frontier_root.display().to_string(),
3283 "proposal_id": target,
3284 "preview": preview,
3285 });
3286 if json {
3287 println!(
3288 "{}",
3289 serde_json::to_string_pretty(&payload)
3290 .expect("failed to serialize diff preview")
3291 );
3292 } else {
3293 println!("vela diff · proposal preview");
3294 println!(" proposal: {}", target);
3295 println!(" kind: {}", preview.kind);
3296 println!(
3297 " findings: {} -> {}",
3298 preview.findings_before, preview.findings_after
3299 );
3300 println!(
3301 " artifacts: {} -> {}",
3302 preview.artifacts_before, preview.artifacts_after
3303 );
3304 println!(
3305 " events: {} -> {}",
3306 preview.events_before, preview.events_after
3307 );
3308 if !preview.changed_findings.is_empty() {
3309 println!(
3310 " findings changed: {}",
3311 preview.changed_findings.join(", ")
3312 );
3313 }
3314 }
3315 } else {
3316 let frontier_a = std::path::PathBuf::from(&target);
3317 let b = frontier_b.unwrap_or_else(|| {
3318 fail_return(
3319 "diff: two-frontier mode needs a second positional path; for proposal preview pass a `vpr_*` id",
3320 )
3321 });
3322 diff::run(&frontier_a, &b, json, quiet);
3323 }
3324 }
3325 Commands::Proposals { action } => cmd_proposals(action),
3326 Commands::ArtifactToState {
3327 frontier,
3328 packet,
3329 actor,
3330 apply_artifacts,
3331 json,
3332 } => cmd_artifact_to_state(&frontier, &packet, &actor, apply_artifacts, json),
3333 Commands::BridgeKit { action } => cmd_bridge_kit(action),
3334 Commands::SourceAdapter { action } => cmd_source_adapter(action).await,
3335 Commands::RuntimeAdapter { action } => cmd_runtime_adapter(action),
3336 Commands::Link { action } => cmd_link(action),
3337 Commands::Workbench {
3338 path,
3339 port,
3340 no_open,
3341 } => {
3342 if let Err(e) = crate::workbench::run(path, port, !no_open).await {
3343 fail(&e);
3344 }
3345 }
3346 Commands::Bridges { action } => cmd_bridges(action),
3347 Commands::Entity { action } => cmd_entity(action),
3348 Commands::Finding { command } => match command {
3349 FindingCommands::Add {
3350 frontier,
3351 assertion,
3352 r#type,
3353 source,
3354 source_type,
3355 author,
3356 confidence,
3357 evidence_type,
3358 entities,
3359 entities_reviewed,
3360 evidence_span,
3361 gap,
3362 negative_space,
3363 doi,
3364 pmid,
3365 year,
3366 journal,
3367 url,
3368 source_authors,
3369 conditions_text,
3370 species,
3371 in_vivo,
3372 in_vitro,
3373 human_data,
3374 clinical_trial,
3375 json,
3376 apply,
3377 } => {
3378 validate_enum_arg("--type", &r#type, bundle::VALID_ASSERTION_TYPES);
3379 validate_enum_arg(
3380 "--evidence-type",
3381 &evidence_type,
3382 bundle::VALID_EVIDENCE_TYPES,
3383 );
3384 validate_enum_arg(
3385 "--source-type",
3386 &source_type,
3387 bundle::VALID_PROVENANCE_SOURCE_TYPES,
3388 );
3389 let parsed_entities = parse_entities(&entities);
3390 let parsed_evidence_spans = parse_evidence_spans(&evidence_span);
3391 for (name, etype) in &parsed_entities {
3392 if !bundle::VALID_ENTITY_TYPES.contains(&etype.as_str()) {
3393 fail(&format!(
3394 "invalid entity type '{}' for '{}'. Valid: {}",
3395 etype,
3396 name,
3397 bundle::VALID_ENTITY_TYPES.join(", "),
3398 ));
3399 }
3400 }
3401 let parsed_source_authors = source_authors
3402 .map(|s| {
3403 s.split(';')
3404 .map(|a| a.trim().to_string())
3405 .filter(|a| !a.is_empty())
3406 .collect()
3407 })
3408 .unwrap_or_default();
3409 let parsed_species = species
3410 .map(|s| {
3411 s.split(';')
3412 .map(|a| a.trim().to_string())
3413 .filter(|a| !a.is_empty())
3414 .collect()
3415 })
3416 .unwrap_or_default();
3417 let report = state::add_finding(
3418 &frontier,
3419 state::FindingDraftOptions {
3420 text: assertion,
3421 assertion_type: r#type,
3422 source,
3423 source_type,
3424 author,
3425 confidence,
3426 evidence_type,
3427 entities: parsed_entities,
3428 doi,
3429 pmid,
3430 year,
3431 journal,
3432 url,
3433 source_authors: parsed_source_authors,
3434 conditions_text,
3435 species: parsed_species,
3436 in_vivo,
3437 in_vitro,
3438 human_data,
3439 clinical_trial,
3440 entities_reviewed,
3441 evidence_spans: parsed_evidence_spans,
3442 gap,
3443 negative_space,
3444 },
3445 apply,
3446 )
3447 .unwrap_or_else(|e| fail_return(&e));
3448 print_state_report(&report, json);
3449 }
3450 FindingCommands::Supersede {
3451 frontier,
3452 old_id,
3453 assertion,
3454 r#type,
3455 source,
3456 source_type,
3457 author,
3458 reason,
3459 confidence,
3460 evidence_type,
3461 entities,
3462 doi,
3463 pmid,
3464 year,
3465 journal,
3466 url,
3467 source_authors,
3468 conditions_text,
3469 species,
3470 in_vivo,
3471 in_vitro,
3472 human_data,
3473 clinical_trial,
3474 json,
3475 apply,
3476 } => {
3477 validate_enum_arg("--type", &r#type, bundle::VALID_ASSERTION_TYPES);
3478 validate_enum_arg(
3479 "--evidence-type",
3480 &evidence_type,
3481 bundle::VALID_EVIDENCE_TYPES,
3482 );
3483 validate_enum_arg(
3484 "--source-type",
3485 &source_type,
3486 bundle::VALID_PROVENANCE_SOURCE_TYPES,
3487 );
3488 let parsed_entities = parse_entities(&entities);
3489 for (name, etype) in &parsed_entities {
3490 if !bundle::VALID_ENTITY_TYPES.contains(&etype.as_str()) {
3491 fail(&format!(
3492 "invalid entity type '{}' for '{}'. Valid: {}",
3493 etype,
3494 name,
3495 bundle::VALID_ENTITY_TYPES.join(", "),
3496 ));
3497 }
3498 }
3499 let parsed_source_authors = source_authors
3500 .map(|s| {
3501 s.split(';')
3502 .map(|a| a.trim().to_string())
3503 .filter(|a| !a.is_empty())
3504 .collect()
3505 })
3506 .unwrap_or_default();
3507 let parsed_species = species
3508 .map(|s| {
3509 s.split(';')
3510 .map(|a| a.trim().to_string())
3511 .filter(|a| !a.is_empty())
3512 .collect()
3513 })
3514 .unwrap_or_default();
3515 let report = state::supersede_finding(
3516 &frontier,
3517 &old_id,
3518 &reason,
3519 state::FindingDraftOptions {
3520 text: assertion,
3521 assertion_type: r#type,
3522 source,
3523 source_type,
3524 author,
3525 confidence,
3526 evidence_type,
3527 entities: parsed_entities,
3528 doi,
3529 pmid,
3530 year,
3531 journal,
3532 url,
3533 source_authors: parsed_source_authors,
3534 conditions_text,
3535 species: parsed_species,
3536 in_vivo,
3537 in_vitro,
3538 human_data,
3539 clinical_trial,
3540 entities_reviewed: false,
3541 evidence_spans: Vec::new(),
3542 gap: false,
3543 negative_space: false,
3544 },
3545 apply,
3546 )
3547 .unwrap_or_else(|e| fail_return(&e));
3548 print_state_report(&report, json);
3549 }
3550 FindingCommands::CausalSet {
3551 frontier,
3552 finding_id,
3553 claim,
3554 grade,
3555 actor,
3556 reason,
3557 json,
3558 } => {
3559 if !bundle::VALID_CAUSAL_CLAIMS.contains(&claim.as_str()) {
3560 fail(&format!(
3561 "invalid --claim '{claim}'; valid: {:?}",
3562 bundle::VALID_CAUSAL_CLAIMS
3563 ));
3564 }
3565 if let Some(g) = grade.as_deref()
3566 && !bundle::VALID_CAUSAL_EVIDENCE_GRADES.contains(&g)
3567 {
3568 fail(&format!(
3569 "invalid --grade '{g}'; valid: {:?}",
3570 bundle::VALID_CAUSAL_EVIDENCE_GRADES
3571 ));
3572 }
3573 let report = state::set_causal(
3574 &frontier,
3575 &finding_id,
3576 &claim,
3577 grade.as_deref(),
3578 &actor,
3579 &reason,
3580 )
3581 .unwrap_or_else(|e| fail_return(&e));
3582 print_state_report(&report, json);
3583 }
3584 },
3585 Commands::Review {
3586 frontier,
3587 finding_id,
3588 status,
3589 reason,
3590 reviewer,
3591 apply,
3592 json,
3593 } => {
3594 let status = status.unwrap_or_else(|| fail_return("--status is required for review"));
3595 let reason = reason.unwrap_or_else(|| fail_return("--reason is required for review"));
3596 let report = state::review_finding(
3597 &frontier,
3598 &finding_id,
3599 state::ReviewOptions {
3600 status,
3601 reason,
3602 reviewer,
3603 },
3604 apply,
3605 )
3606 .unwrap_or_else(|e| fail_return(&e));
3607 print_state_report(&report, json);
3608 }
3609 Commands::Note {
3610 frontier,
3611 finding_id,
3612 text,
3613 author,
3614 apply,
3615 json,
3616 } => {
3617 let report = state::add_note(&frontier, &finding_id, &text, &author, apply)
3618 .unwrap_or_else(|e| fail_return(&e));
3619 print_state_report(&report, json);
3620 }
3621 Commands::Caveat {
3622 frontier,
3623 finding_id,
3624 text,
3625 author,
3626 apply,
3627 json,
3628 } => {
3629 let report = state::caveat_finding(&frontier, &finding_id, &text, &author, apply)
3630 .unwrap_or_else(|e| fail_return(&e));
3631 print_state_report(&report, json);
3632 }
3633 Commands::Revise {
3634 frontier,
3635 finding_id,
3636 confidence,
3637 reason,
3638 reviewer,
3639 apply,
3640 json,
3641 } => {
3642 let report = state::revise_confidence(
3643 &frontier,
3644 &finding_id,
3645 state::ReviseOptions {
3646 confidence,
3647 reason,
3648 reviewer,
3649 },
3650 apply,
3651 )
3652 .unwrap_or_else(|e| fail_return(&e));
3653 print_state_report(&report, json);
3654 }
3655 Commands::Reject {
3656 frontier,
3657 finding_id,
3658 reason,
3659 reviewer,
3660 apply,
3661 json,
3662 } => {
3663 let report = state::reject_finding(&frontier, &finding_id, &reviewer, &reason, apply)
3664 .unwrap_or_else(|e| fail_return(&e));
3665 print_state_report(&report, json);
3666 }
3667 Commands::History {
3668 frontier,
3669 finding_id,
3670 json,
3671 as_of,
3672 } => {
3673 let payload = state::history_as_of(&frontier, &finding_id, as_of.as_deref())
3674 .unwrap_or_else(|e| fail_return(&e));
3675 if json {
3676 println!(
3677 "{}",
3678 serde_json::to_string_pretty(&payload)
3679 .expect("failed to serialize history response")
3680 );
3681 } else {
3682 print_history(&payload);
3683 }
3684 }
3685 Commands::ImportEvents { source, into, json } => {
3686 let report =
3687 review::import_review_events(&source, &into).unwrap_or_else(|e| fail_return(&e));
3688 if json {
3689 println!(
3690 "{}",
3691 serde_json::to_string_pretty(&json!({
3692 "ok": true,
3693 "command": "import-events",
3694 "source": report.source,
3695 "target": into.display().to_string(),
3696 "summary": {
3697 "imported": report.imported,
3698 "new": report.new,
3699 "duplicate": report.duplicate,
3700 "canonical_events_imported": report.events_imported,
3701 "canonical_events_new": report.events_new,
3702 "canonical_events_duplicate": report.events_duplicate,
3703 }
3704 }))
3705 .expect("failed to serialize import-events response")
3706 );
3707 } else {
3708 println!("{report}");
3709 }
3710 }
3711 Commands::Retract {
3712 source,
3713 finding_id,
3714 reason,
3715 reviewer,
3716 apply,
3717 json,
3718 } => {
3719 let report = state::retract_finding(&source, &finding_id, &reviewer, &reason, apply)
3720 .unwrap_or_else(|e| fail_return(&e));
3721 print_state_report(&report, json);
3722 }
3723 Commands::LocatorRepair {
3724 frontier,
3725 atom_id,
3726 locator,
3727 reviewer,
3728 reason,
3729 apply,
3730 json,
3731 } => {
3732 cmd_locator_repair(
3733 &frontier,
3734 &atom_id,
3735 locator.as_deref(),
3736 &reviewer,
3737 &reason,
3738 apply,
3739 json,
3740 );
3741 }
3742 Commands::SourceFetch {
3743 identifier,
3744 cache,
3745 out,
3746 refresh,
3747 json,
3748 } => {
3749 cmd_source_fetch(&identifier, cache.as_deref(), out.as_deref(), refresh, json).await;
3750 }
3751 Commands::SpanRepair {
3752 frontier,
3753 finding_id,
3754 section,
3755 text,
3756 reviewer,
3757 reason,
3758 apply,
3759 json,
3760 } => {
3761 cmd_span_repair(
3762 &frontier,
3763 &finding_id,
3764 §ion,
3765 &text,
3766 &reviewer,
3767 &reason,
3768 apply,
3769 json,
3770 );
3771 }
3772 Commands::EntityAdd {
3773 frontier,
3774 finding_id,
3775 entity,
3776 entity_type,
3777 reviewer,
3778 reason,
3779 apply,
3780 json,
3781 } => {
3782 let report = state::add_finding_entity(
3783 &frontier,
3784 &finding_id,
3785 &entity,
3786 &entity_type,
3787 &reviewer,
3788 &reason,
3789 apply,
3790 )
3791 .unwrap_or_else(|e| fail_return(&e));
3792 print_state_report(&report, json);
3793 }
3794 Commands::EntityResolve {
3795 frontier,
3796 finding_id,
3797 entity,
3798 source,
3799 id,
3800 confidence,
3801 matched_name,
3802 resolution_method,
3803 reviewer,
3804 reason,
3805 apply,
3806 json,
3807 } => {
3808 cmd_entity_resolve(
3809 &frontier,
3810 &finding_id,
3811 &entity,
3812 &source,
3813 &id,
3814 confidence,
3815 matched_name.as_deref(),
3816 &resolution_method,
3817 &reviewer,
3818 &reason,
3819 apply,
3820 json,
3821 );
3822 }
3823 Commands::Propagate {
3824 frontier,
3825 retract,
3826 reduce_confidence,
3827 to,
3828 output,
3829 } => cmd_propagate(&frontier, retract, reduce_confidence, to, output.as_deref()),
3830 Commands::Replicate {
3831 frontier,
3832 target,
3833 outcome,
3834 by,
3835 conditions,
3836 source_title,
3837 doi,
3838 pmid,
3839 sample_size,
3840 note,
3841 previous_attempt,
3842 no_cascade,
3843 json,
3844 } => cmd_replicate(
3845 &frontier,
3846 &target,
3847 &outcome,
3848 &by,
3849 &conditions,
3850 &source_title,
3851 doi.as_deref(),
3852 pmid.as_deref(),
3853 sample_size.as_deref(),
3854 ¬e,
3855 previous_attempt.as_deref(),
3856 no_cascade,
3857 json,
3858 ),
3859 Commands::Replications {
3860 frontier,
3861 target,
3862 json,
3863 } => cmd_replications(&frontier, target.as_deref(), json),
3864 Commands::DatasetAdd {
3865 frontier,
3866 name,
3867 version,
3868 content_hash,
3869 url,
3870 license,
3871 source_title,
3872 doi,
3873 row_count,
3874 json,
3875 } => cmd_dataset_add(
3876 &frontier,
3877 &name,
3878 version.as_deref(),
3879 &content_hash,
3880 url.as_deref(),
3881 license.as_deref(),
3882 &source_title,
3883 doi.as_deref(),
3884 row_count,
3885 json,
3886 ),
3887 Commands::Datasets { frontier, json } => cmd_datasets(&frontier, json),
3888 Commands::CodeAdd {
3889 frontier,
3890 language,
3891 repo_url,
3892 commit,
3893 path,
3894 content_hash,
3895 line_start,
3896 line_end,
3897 entry_point,
3898 json,
3899 } => cmd_code_add(
3900 &frontier,
3901 &language,
3902 repo_url.as_deref(),
3903 commit.as_deref(),
3904 &path,
3905 &content_hash,
3906 line_start,
3907 line_end,
3908 entry_point.as_deref(),
3909 json,
3910 ),
3911 Commands::CodeArtifacts { frontier, json } => cmd_code_artifacts(&frontier, json),
3912 Commands::ArtifactAdd {
3913 frontier,
3914 kind,
3915 name,
3916 file,
3917 url,
3918 content_hash,
3919 media_type,
3920 license,
3921 source_title,
3922 source_url,
3923 doi,
3924 target,
3925 metadata,
3926 access_tier,
3927 deposited_by,
3928 reason,
3929 json,
3930 } => cmd_artifact_add(
3931 &frontier,
3932 &kind,
3933 &name,
3934 file.as_deref(),
3935 url.as_deref(),
3936 content_hash.as_deref(),
3937 media_type.as_deref(),
3938 license.as_deref(),
3939 source_title.as_deref(),
3940 source_url.as_deref(),
3941 doi.as_deref(),
3942 target,
3943 metadata,
3944 &access_tier,
3945 &deposited_by,
3946 &reason,
3947 json,
3948 ),
3949 Commands::Artifacts {
3950 frontier,
3951 target,
3952 json,
3953 } => cmd_artifacts(&frontier, target.as_deref(), json),
3954 Commands::ArtifactAudit { frontier, json } => cmd_artifact_audit(&frontier, json),
3955 Commands::DecisionBrief { frontier, json } => cmd_decision_brief(&frontier, json),
3956 Commands::TrialSummary { frontier, json } => cmd_trial_summary(&frontier, json),
3957 Commands::SourceVerification { frontier, json } => cmd_source_verification(&frontier, json),
3958 Commands::SourceIngestPlan { frontier, json } => cmd_source_ingest_plan(&frontier, json),
3959 Commands::ClinicalTrialImport {
3960 frontier,
3961 nct_id,
3962 input_json,
3963 target,
3964 deposited_by,
3965 reason,
3966 license,
3967 json,
3968 } => {
3969 cmd_clinical_trial_import(
3970 &frontier,
3971 &nct_id,
3972 input_json.as_deref(),
3973 target,
3974 &deposited_by,
3975 &reason,
3976 &license,
3977 json,
3978 )
3979 .await
3980 }
3981 Commands::NegativeResultAdd {
3982 frontier,
3983 kind,
3984 deposited_by,
3985 reason,
3986 conditions_text,
3987 notes,
3988 target,
3989 endpoint,
3990 intervention,
3991 comparator,
3992 population,
3993 n_enrolled,
3994 power,
3995 ci_lower,
3996 ci_upper,
3997 effect_size_threshold,
3998 registry_id,
3999 reagent,
4000 observation,
4001 attempts,
4002 source_title,
4003 doi,
4004 url,
4005 year,
4006 json,
4007 } => cmd_negative_result_add(
4008 &frontier,
4009 &kind,
4010 &deposited_by,
4011 &reason,
4012 &conditions_text,
4013 ¬es,
4014 target,
4015 endpoint.as_deref(),
4016 intervention.as_deref(),
4017 comparator.as_deref(),
4018 population.as_deref(),
4019 n_enrolled,
4020 power,
4021 ci_lower,
4022 ci_upper,
4023 effect_size_threshold,
4024 registry_id.as_deref(),
4025 reagent.as_deref(),
4026 observation.as_deref(),
4027 attempts,
4028 &source_title,
4029 doi.as_deref(),
4030 url.as_deref(),
4031 year,
4032 json,
4033 ),
4034 Commands::NegativeResults {
4035 frontier,
4036 target,
4037 json,
4038 } => cmd_negative_results(&frontier, target.as_deref(), json),
4039 Commands::TrajectoryCreate {
4040 frontier,
4041 deposited_by,
4042 reason,
4043 target,
4044 notes,
4045 json,
4046 } => cmd_trajectory_create(&frontier, &deposited_by, &reason, target, ¬es, json),
4047 Commands::TrajectoryStep {
4048 frontier,
4049 trajectory_id,
4050 kind,
4051 description,
4052 actor,
4053 reason,
4054 reference,
4055 json,
4056 } => cmd_trajectory_step(
4057 &frontier,
4058 &trajectory_id,
4059 &kind,
4060 &description,
4061 &actor,
4062 &reason,
4063 reference,
4064 json,
4065 ),
4066 Commands::Trajectories {
4067 frontier,
4068 target,
4069 json,
4070 } => cmd_trajectories(&frontier, target.as_deref(), json),
4071 Commands::TierSet {
4072 frontier,
4073 object_type,
4074 object_id,
4075 tier,
4076 actor,
4077 reason,
4078 json,
4079 } => cmd_tier_set(
4080 &frontier,
4081 &object_type,
4082 &object_id,
4083 &tier,
4084 &actor,
4085 &reason,
4086 json,
4087 ),
4088 Commands::Predict {
4089 frontier,
4090 by,
4091 claim,
4092 criterion,
4093 resolves_by,
4094 confidence,
4095 target,
4096 outcome,
4097 conditions,
4098 json,
4099 } => cmd_predict(
4100 &frontier,
4101 &by,
4102 &claim,
4103 &criterion,
4104 resolves_by.as_deref(),
4105 confidence,
4106 &target,
4107 &outcome,
4108 &conditions,
4109 json,
4110 ),
4111 Commands::Resolve {
4112 frontier,
4113 prediction,
4114 outcome,
4115 matched,
4116 by,
4117 confidence,
4118 source_title,
4119 doi,
4120 json,
4121 } => cmd_resolve(
4122 &frontier,
4123 &prediction,
4124 &outcome,
4125 matched,
4126 &by,
4127 confidence,
4128 &source_title,
4129 doi.as_deref(),
4130 json,
4131 ),
4132 Commands::Predictions {
4133 frontier,
4134 by,
4135 open,
4136 json,
4137 } => cmd_predictions(&frontier, by.as_deref(), open, json),
4138 Commands::Calibration {
4139 frontier,
4140 actor,
4141 json,
4142 } => cmd_calibration(&frontier, actor.as_deref(), json),
4143 Commands::PredictionsExpire {
4144 frontier,
4145 now,
4146 dry_run,
4147 json,
4148 } => cmd_predictions_expire(&frontier, now.as_deref(), dry_run, json),
4149 Commands::Consensus {
4150 frontier,
4151 target,
4152 weighting,
4153 causal_claim,
4154 causal_grade_min,
4155 json,
4156 } => cmd_consensus(
4157 &frontier,
4158 &target,
4159 &weighting,
4160 causal_claim.as_deref(),
4161 causal_grade_min.as_deref(),
4162 json,
4163 ),
4164
4165 Commands::Ingest {
4168 path,
4169 frontier,
4170 backend,
4171 actor,
4172 dry_run,
4173 json,
4174 } => {
4175 cmd_ingest(
4176 &path,
4177 &frontier,
4178 backend.as_deref(),
4179 actor.as_deref(),
4180 dry_run,
4181 json,
4182 )
4183 .await
4184 }
4185
4186 Commands::Propose {
4187 frontier,
4188 finding_id,
4189 status,
4190 reason,
4191 reviewer,
4192 apply,
4193 json,
4194 } => {
4195 let options = state::ReviewOptions {
4198 status: status.clone(),
4199 reason: reason.clone(),
4200 reviewer: reviewer.clone(),
4201 };
4202 let report = state::review_finding(&frontier, &finding_id, options, apply)
4203 .unwrap_or_else(|e| fail_return(&e));
4204 print_state_report(&report, json);
4205 }
4206
4207 Commands::Accept {
4208 frontier,
4209 proposal_id,
4210 reviewer,
4211 reason,
4212 json,
4213 } => {
4214 let event_id = proposals::accept_at_path(&frontier, &proposal_id, &reviewer, &reason)
4216 .unwrap_or_else(|e| fail_return(&e));
4217 let payload = json!({
4218 "ok": true,
4219 "command": "accept",
4220 "frontier": frontier.display().to_string(),
4221 "proposal_id": proposal_id,
4222 "reviewer": reviewer,
4223 "applied_event_id": event_id,
4224 });
4225 if json {
4226 println!(
4227 "{}",
4228 serde_json::to_string_pretty(&payload)
4229 .expect("failed to serialize accept response")
4230 );
4231 } else {
4232 println!(
4233 "{} accepted and applied proposal {}",
4234 style::ok("ok"),
4235 proposal_id
4236 );
4237 println!(" event: {}", event_id);
4238 }
4239 }
4240
4241 Commands::Attest {
4242 frontier,
4243 event,
4244 attester,
4245 scope_note,
4246 proof_id,
4247 signature,
4248 key,
4249 json,
4250 } => {
4251 if let Some(target_event_id) = event {
4255 let attester_id = attester.unwrap_or_else(|| {
4256 fail_return("attest: --attester is required in per-event mode")
4257 });
4258 let scope = scope_note.unwrap_or_else(|| {
4259 fail_return("attest: --scope-note is required in per-event mode")
4260 });
4261 let attestation_event_id = state::record_attestation(
4262 &frontier,
4263 &target_event_id,
4264 &attester_id,
4265 &scope,
4266 proof_id.as_deref(),
4267 signature.as_deref(),
4268 )
4269 .unwrap_or_else(|e| fail_return(&e));
4270 if json {
4271 let payload = json!({
4272 "ok": true,
4273 "command": "attest.event",
4274 "frontier": frontier.display().to_string(),
4275 "target_event_id": target_event_id,
4276 "attestation_event_id": attestation_event_id,
4277 "attester_id": attester_id,
4278 });
4279 println!(
4280 "{}",
4281 serde_json::to_string_pretty(&payload)
4282 .expect("failed to serialize attest.event response")
4283 );
4284 } else {
4285 println!(
4286 "{} attested {} by {} ({})",
4287 style::ok("ok"),
4288 target_event_id,
4289 attester_id,
4290 attestation_event_id
4291 );
4292 }
4293 return;
4294 }
4295 let key_path = key.unwrap_or_else(|| {
4297 fail_return(
4298 "attest: --key is required in frontier-wide mode (or pass --event for per-event mode)",
4299 )
4300 });
4301 let count =
4302 sign::sign_frontier(&frontier, &key_path).unwrap_or_else(|e| fail_return(&e));
4303 let payload = json!({
4304 "ok": true,
4305 "command": "attest",
4306 "frontier": frontier.display().to_string(),
4307 "private_key": key_path.display().to_string(),
4308 "signed": count,
4309 });
4310 if json {
4311 println!(
4312 "{}",
4313 serde_json::to_string_pretty(&payload)
4314 .expect("failed to serialize attest response")
4315 );
4316 } else {
4317 println!(
4318 "{} {count} findings in {}",
4319 style::ok("attested"),
4320 frontier.display()
4321 );
4322 }
4323 }
4324
4325 Commands::Lineage {
4326 frontier,
4327 finding_id,
4328 as_of,
4329 json,
4330 } => {
4331 let payload = state::history_as_of(&frontier, &finding_id, as_of.as_deref())
4333 .unwrap_or_else(|e| fail_return(&e));
4334 if json {
4335 println!(
4336 "{}",
4337 serde_json::to_string_pretty(&payload)
4338 .expect("failed to serialize lineage response")
4339 );
4340 } else {
4341 print_history(&payload);
4342 }
4343 }
4344
4345 Commands::Carina { action } => cmd_carina(action),
4346
4347 Commands::Atlas { action } => cmd_atlas(action).await,
4348
4349 Commands::Constellation { action } => cmd_constellation(action).await,
4350 }
4351}
4352
4353async fn cmd_atlas(action: AtlasAction) {
4358 match action {
4359 AtlasAction::Init {
4360 name,
4361 frontiers,
4362 domain,
4363 scope_note,
4364 atlases_root,
4365 json,
4366 } => match ATLAS_INIT_HANDLER.get() {
4367 Some(handler) => {
4368 handler(atlases_root, name, domain, scope_note, frontiers, json).await;
4369 }
4370 None => fail("vela atlas init: handler not registered (built without vela-atlas)"),
4371 },
4372 AtlasAction::Materialize {
4373 name,
4374 atlases_root,
4375 json,
4376 } => match ATLAS_MATERIALIZE_HANDLER.get() {
4377 Some(handler) => handler(atlases_root, name, json).await,
4378 None => fail("vela atlas materialize: handler not registered"),
4379 },
4380 AtlasAction::Serve {
4381 name,
4382 atlases_root,
4383 port,
4384 no_open,
4385 } => {
4386 match ATLAS_SERVE_HANDLER.get() {
4390 Some(handler) => handler(atlases_root, name, port, !no_open).await,
4391 None => fail("vela atlas serve: handler not registered"),
4392 }
4393 }
4394 AtlasAction::Update {
4395 name,
4396 add_frontier,
4397 remove_vfr_id,
4398 atlases_root,
4399 json,
4400 } => match ATLAS_UPDATE_HANDLER.get() {
4401 Some(handler) => {
4402 handler(atlases_root, name, add_frontier, remove_vfr_id, json).await;
4403 }
4404 None => fail("vela atlas update: handler not registered"),
4405 },
4406 }
4407}
4408
4409async fn cmd_constellation(action: ConstellationAction) {
4413 match action {
4414 ConstellationAction::Init {
4415 name,
4416 atlases,
4417 scope_note,
4418 constellations_root,
4419 json,
4420 } => match CONSTELLATION_INIT_HANDLER.get() {
4421 Some(handler) => {
4422 handler(constellations_root, name, scope_note, atlases, json).await;
4423 }
4424 None => fail(
4425 "vela constellation init: handler not registered (built without vela-constellation)",
4426 ),
4427 },
4428 ConstellationAction::Materialize {
4429 name,
4430 constellations_root,
4431 json,
4432 } => match CONSTELLATION_MATERIALIZE_HANDLER.get() {
4433 Some(handler) => handler(constellations_root, name, json).await,
4434 None => fail("vela constellation materialize: handler not registered"),
4435 },
4436 ConstellationAction::Serve {
4437 name,
4438 constellations_root,
4439 port,
4440 no_open,
4441 } => match CONSTELLATION_SERVE_HANDLER.get() {
4442 Some(handler) => handler(constellations_root, name, port, !no_open).await,
4443 None => fail("vela constellation serve: handler not registered"),
4444 },
4445 }
4446}
4447
4448fn cmd_carina(action: CarinaAction) {
4451 match action {
4452 CarinaAction::List { json } => {
4453 if json {
4454 println!(
4455 "{}",
4456 serde_json::to_string_pretty(&json!({
4457 "ok": true,
4458 "command": "carina.list",
4459 "primitives": carina_validate::PRIMITIVE_NAMES,
4460 }))
4461 .expect("failed to serialize carina.list")
4462 );
4463 } else {
4464 println!("Carina primitives bundled with this build:");
4465 for name in carina_validate::PRIMITIVE_NAMES {
4466 println!(" · {name}");
4467 }
4468 }
4469 }
4470 CarinaAction::Schema { primitive } => match carina_validate::schema_text(&primitive) {
4471 Some(text) => print!("{text}"),
4472 None => fail(&format!("carina: unknown primitive '{primitive}'")),
4473 },
4474 CarinaAction::Validate {
4475 path,
4476 primitive,
4477 json,
4478 } => {
4479 let text = std::fs::read_to_string(&path)
4480 .unwrap_or_else(|e| fail_return(&format!("read {}: {e}", path.display())));
4481 let value: Value = serde_json::from_str(&text)
4482 .unwrap_or_else(|e| fail_return(&format!("parse {}: {e}", path.display())));
4483 type CarinaValidateOutcome = Result<Option<&'static str>, Vec<String>>;
4489 let mut report: Vec<(String, CarinaValidateOutcome)> = Vec::new();
4490 if value.get("primitives").and_then(Value::as_object).is_some() && primitive.is_none() {
4491 let primitives = value.get("primitives").and_then(Value::as_object).unwrap();
4492 for (key, child) in primitives {
4493 let outcome = carina_validate::validate(key, child)
4494 .map(|()| carina_validate::detect_primitive(child));
4495 report.push((key.clone(), outcome));
4496 }
4497 } else {
4498 let outcome = match primitive.as_deref() {
4499 Some(name) => carina_validate::validate(name, &value).map(|()| {
4500 carina_validate::PRIMITIVE_NAMES
4501 .iter()
4502 .copied()
4503 .find(|p| *p == name)
4504 }),
4505 None => carina_validate::validate_auto(&value).map(Some),
4506 };
4507 let label = primitive.clone().unwrap_or_else(|| "<auto>".to_string());
4508 report.push((label, outcome));
4509 }
4510
4511 let total = report.len();
4512 let pass = report.iter().filter(|(_, r)| r.is_ok()).count();
4513 let fail = total - pass;
4514
4515 if json {
4516 let entries: Vec<Value> = report
4517 .iter()
4518 .map(|(label, r)| match r {
4519 Ok(name) => json!({
4520 "key": label,
4521 "primitive": name,
4522 "ok": true,
4523 }),
4524 Err(errs) => json!({
4525 "key": label,
4526 "ok": false,
4527 "errors": errs,
4528 }),
4529 })
4530 .collect();
4531 println!(
4532 "{}",
4533 serde_json::to_string_pretty(&json!({
4534 "ok": fail == 0,
4535 "command": "carina.validate",
4536 "file": path.display().to_string(),
4537 "total": total,
4538 "passed": pass,
4539 "failed": fail,
4540 "entries": entries,
4541 }))
4542 .expect("failed to serialize carina.validate")
4543 );
4544 } else {
4545 for (label, r) in &report {
4546 match r {
4547 Ok(Some(name)) => println!(" {} {label} (as {name})", style::ok("ok")),
4548 Ok(None) => println!(" {} {label}", style::ok("ok")),
4549 Err(errs) => {
4550 println!(" {} {label}", style::lost("fail"));
4551 for e in errs {
4552 println!(" {e}");
4553 }
4554 }
4555 }
4556 }
4557 println!();
4558 if fail == 0 {
4559 println!("{} {pass}/{total} valid", style::ok("carina.validate"));
4560 } else {
4561 println!(
4562 "{} {pass}/{total} valid · {fail} failed",
4563 style::lost("carina.validate")
4564 );
4565 }
4566 }
4567
4568 if fail > 0 {
4569 std::process::exit(1);
4570 }
4571 }
4572 }
4573}
4574
4575fn cmd_consensus(
4578 frontier: &Path,
4579 target: &str,
4580 weighting_str: &str,
4581 causal_claim: Option<&str>,
4582 causal_grade_min: Option<&str>,
4583 json: bool,
4584) {
4585 use crate::bundle::{CausalClaim, CausalEvidenceGrade};
4586
4587 if !target.starts_with("vf_") {
4588 fail(&format!("target `{target}` is not a vf_ finding id"));
4589 }
4590 let scheme =
4591 crate::aggregate::WeightingScheme::parse(weighting_str).unwrap_or_else(|e| fail_return(&e));
4592
4593 let parsed_claim = match causal_claim {
4594 None => None,
4595 Some("correlation") => Some(CausalClaim::Correlation),
4596 Some("mediation") => Some(CausalClaim::Mediation),
4597 Some("intervention") => Some(CausalClaim::Intervention),
4598 Some(other) => fail_return(&format!(
4599 "invalid --causal-claim '{other}'; valid: correlation | mediation | intervention"
4600 )),
4601 };
4602 let parsed_grade = match causal_grade_min {
4603 None => None,
4604 Some("theoretical") => Some(CausalEvidenceGrade::Theoretical),
4605 Some("observational") => Some(CausalEvidenceGrade::Observational),
4606 Some("quasi_experimental") => Some(CausalEvidenceGrade::QuasiExperimental),
4607 Some("rct") => Some(CausalEvidenceGrade::Rct),
4608 Some(other) => fail_return(&format!(
4609 "invalid --causal-grade-min '{other}'; valid: theoretical | observational | quasi_experimental | rct"
4610 )),
4611 };
4612 let filter = crate::aggregate::AggregateFilter {
4613 causal_claim: parsed_claim,
4614 causal_grade_min: parsed_grade,
4615 };
4616 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
4617
4618 let result = crate::aggregate::consensus_for_with_filter(&project, target, scheme, &filter)
4619 .unwrap_or_else(|| fail_return(&format!("target `{target}` not in frontier")));
4620
4621 if json {
4622 println!(
4623 "{}",
4624 serde_json::to_string_pretty(&result).expect("serialize consensus")
4625 );
4626 return;
4627 }
4628
4629 println!();
4630 println!(
4631 " {}",
4632 format!(
4633 "VELA · CONSENSUS · {} ({})",
4634 result.target, result.weighting
4635 )
4636 .to_uppercase()
4637 .dimmed()
4638 );
4639 println!(" {}", style::tick_row(60));
4640 println!(
4641 " target: {}",
4642 truncate(&result.target_assertion, 80)
4643 );
4644 println!(" similar findings: {}", result.n_findings);
4645 println!(
4646 " consensus: {:.3} ({:.3} – {:.3} 95% credible)",
4647 result.consensus_confidence, result.credible_interval_lo, result.credible_interval_hi
4648 );
4649 println!();
4650 println!(" constituents (sorted by weight):");
4651 let mut sorted = result.constituents.clone();
4652 sorted.sort_by(|a, b| {
4653 b.weight
4654 .partial_cmp(&a.weight)
4655 .unwrap_or(std::cmp::Ordering::Equal)
4656 });
4657 for c in sorted.iter().take(10) {
4658 let repls = if c.n_replications > 0 {
4659 format!(" ({}r {}f)", c.n_replicated, c.n_failed_replications)
4660 } else {
4661 String::new()
4662 };
4663 println!(
4664 " · w={:.2} raw={:.2} adj={:.2}{}",
4665 c.weight, c.raw_score, c.adjusted_score, repls
4666 );
4667 println!(" {}", truncate(&c.assertion_text, 88));
4668 }
4669 if result.constituents.len() > 10 {
4670 println!(" ... ({} more)", result.constituents.len() - 10);
4671 }
4672}
4673
4674fn parse_expected_outcome(s: &str) -> Result<crate::bundle::ExpectedOutcome, String> {
4680 let trimmed = s.trim();
4681 if trimmed.eq_ignore_ascii_case("affirmed") {
4682 return Ok(crate::bundle::ExpectedOutcome::Affirmed);
4683 }
4684 if trimmed.eq_ignore_ascii_case("falsified") {
4685 return Ok(crate::bundle::ExpectedOutcome::Falsified);
4686 }
4687 if let Some(rest) = trimmed.strip_prefix("cat:") {
4688 return Ok(crate::bundle::ExpectedOutcome::Categorical {
4689 value: rest.to_string(),
4690 });
4691 }
4692 if let Some(rest) = trimmed.strip_prefix("quant:") {
4693 let (vt, units) = rest.split_once(' ').unwrap_or((rest, ""));
4694 let (val_s, tol_s) = vt
4695 .split_once('±')
4696 .or_else(|| vt.split_once("+/-"))
4697 .ok_or_else(|| format!("expected `quant:VALUE±TOL UNITS`, got `quant:{rest}`"))?;
4698 let value: f64 = val_s
4699 .parse()
4700 .map_err(|e| format!("bad quant value `{val_s}`: {e}"))?;
4701 let tolerance: f64 = tol_s
4702 .parse()
4703 .map_err(|e| format!("bad quant tolerance `{tol_s}`: {e}"))?;
4704 return Ok(crate::bundle::ExpectedOutcome::Quantitative {
4705 value,
4706 tolerance,
4707 units: units.to_string(),
4708 });
4709 }
4710 Err(format!(
4711 "unknown outcome `{s}`; expected one of: affirmed | falsified | quant:V±T units | cat:label"
4712 ))
4713}
4714
/// Record a new prediction against one or more `vf_` findings and persist it.
///
/// Validates the confidence range and the outcome spec, checks that every
/// comma-separated target id exists in the frontier, derives coarse
/// experimental-condition flags from keywords in `conditions_text`, then
/// appends the prediction unless one with the same id already exists
/// (the duplicate case is reported and skipped, keeping the command idempotent).
#[allow(clippy::too_many_arguments)]
fn cmd_predict(
    frontier: &Path,
    by: &str,
    claim: &str,
    criterion: &str,
    resolves_by: Option<&str>,
    confidence: f64,
    target_csv: &str,
    outcome: &str,
    conditions_text: &str,
    json: bool,
) {
    // Confidence is a probability; reject anything outside [0, 1].
    if !(0.0..=1.0).contains(&confidence) {
        fail(&format!("confidence must be in [0, 1]; got {confidence}"));
    }
    // Parse `outcome` early so a malformed spec aborts before any I/O.
    let expected = parse_expected_outcome(outcome).unwrap_or_else(|e| fail_return(&e));

    let mut project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));

    // Targets arrive as a comma-separated list; empty segments are dropped.
    let targets: Vec<String> = target_csv
        .split(',')
        .map(|s| s.trim().to_string())
        .filter(|s| !s.is_empty())
        .collect();
    // Every target must be a finding id that is actually in this frontier.
    for t in &targets {
        if !t.starts_with("vf_") {
            fail(&format!("target `{t}` is not a vf_ id"));
        }
        if !project.findings.iter().any(|f| f.id == *t) {
            fail(&format!("target `{t}` not present in frontier"));
        }
    }

    // Heuristic keyword scan of the free-text conditions; these flags are
    // best-effort hints, not a parse of the text.
    let lower = conditions_text.to_lowercase();
    let conditions = crate::bundle::Conditions {
        text: conditions_text.to_string(),
        species_verified: Vec::new(),
        species_unverified: Vec::new(),
        in_vitro: lower.contains("in vitro"),
        in_vivo: lower.contains("in vivo"),
        human_data: lower.contains("human") || lower.contains("clinical"),
        clinical_trial: lower.contains("clinical trial") || lower.contains("phase "),
        concentration_range: None,
        duration: None,
        age_group: None,
        cell_type: None,
    };

    let prediction = crate::bundle::Prediction::new(
        claim.to_string(),
        targets,
        None,
        resolves_by.map(|s| s.to_string()),
        criterion.to_string(),
        expected,
        by.to_string(),
        confidence,
        conditions,
    );

    // Duplicate guard: `Prediction::new` assigns the id (presumably derived
    // from the content — TODO confirm), so an identical submission maps to
    // an existing id and is skipped rather than double-recorded.
    if project.predictions.iter().any(|p| p.id == prediction.id) {
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "ok": false,
                    "command": "predict",
                    "reason": "prediction_already_exists",
                    "id": prediction.id,
                }))
                .expect("serialize")
            );
        } else {
            println!(
                "{} prediction {} already exists in {}; skipping.",
                style::warn("predict"),
                prediction.id,
                frontier.display()
            );
        }
        return;
    }

    // Persist: append and write the whole frontier back to disk.
    let new_id = prediction.id.clone();
    project.predictions.push(prediction);
    repo::save_to_path(frontier, &project).unwrap_or_else(|e| fail_return(&e));

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "predict",
                "id": new_id,
                "made_by": by,
                "confidence": confidence,
                "frontier": frontier.display().to_string(),
            }))
            .expect("serialize predict result")
        );
    } else {
        println!();
        println!(
            "  {}",
            format!("VELA · PREDICT · {}", new_id)
                .to_uppercase()
                .dimmed()
        );
        println!("  {}", style::tick_row(60));
        println!("  by:          {by}");
        println!("  confidence:  {confidence:.3}");
        if let Some(d) = resolves_by {
            println!("  resolves by: {d}");
        }
        println!("  outcome:     {outcome}");
        println!("  claim:       {}", truncate(claim, 88));
        println!();
        println!(
            "  {} prediction recorded in {}",
            style::ok("ok"),
            frontier.display()
        );
    }
}
4841
4842#[allow(clippy::too_many_arguments)]
4844fn cmd_resolve(
4845 frontier: &Path,
4846 prediction_id: &str,
4847 actual_outcome: &str,
4848 matched: bool,
4849 by: &str,
4850 confidence: f64,
4851 source_title: &str,
4852 doi: Option<&str>,
4853 json: bool,
4854) {
4855 if !prediction_id.starts_with("vpred_") {
4856 fail(&format!("prediction `{prediction_id}` is not a vpred_ id"));
4857 }
4858 if !(0.0..=1.0).contains(&confidence) {
4859 fail(&format!("confidence must be in [0, 1]; got {confidence}"));
4860 }
4861 let mut project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
4862 if !project.predictions.iter().any(|p| p.id == prediction_id) {
4863 fail(&format!(
4864 "prediction `{prediction_id}` not present in frontier"
4865 ));
4866 }
4867
4868 let evidence = crate::bundle::Evidence {
4869 evidence_type: "experimental".to_string(),
4870 model_system: String::new(),
4871 species: None,
4872 method: "prediction_resolution".to_string(),
4873 sample_size: None,
4874 effect_size: None,
4875 p_value: None,
4876 replicated: false,
4877 replication_count: None,
4878 evidence_spans: if source_title.is_empty() {
4879 Vec::new()
4880 } else {
4881 vec![serde_json::json!({"text": source_title})]
4882 },
4883 };
4884
4885 let _ = doi; let resolution = crate::bundle::Resolution::new(
4892 prediction_id.to_string(),
4893 actual_outcome.to_string(),
4894 matched,
4895 by.to_string(),
4896 evidence,
4897 confidence,
4898 );
4899
4900 if project.resolutions.iter().any(|r| r.id == resolution.id) {
4901 if json {
4902 println!(
4903 "{}",
4904 serde_json::to_string_pretty(&json!({
4905 "ok": false,
4906 "command": "resolve",
4907 "reason": "resolution_already_exists",
4908 "id": resolution.id,
4909 }))
4910 .expect("serialize")
4911 );
4912 } else {
4913 println!(
4914 "{} resolution {} already exists in {}; skipping.",
4915 style::warn("resolve"),
4916 resolution.id,
4917 frontier.display()
4918 );
4919 }
4920 return;
4921 }
4922
4923 let new_id = resolution.id.clone();
4924 project.resolutions.push(resolution);
4925 repo::save_to_path(frontier, &project).unwrap_or_else(|e| fail_return(&e));
4926
4927 if json {
4928 println!(
4929 "{}",
4930 serde_json::to_string_pretty(&json!({
4931 "ok": true,
4932 "command": "resolve",
4933 "id": new_id,
4934 "prediction": prediction_id,
4935 "matched": matched,
4936 "frontier": frontier.display().to_string(),
4937 }))
4938 .expect("serialize resolve result")
4939 );
4940 } else {
4941 println!();
4942 println!(
4943 " {}",
4944 format!("VELA · RESOLVE · {}", new_id)
4945 .to_uppercase()
4946 .dimmed()
4947 );
4948 println!(" {}", style::tick_row(60));
4949 println!(" prediction: {prediction_id}");
4950 println!(
4951 " matched: {}",
4952 if matched {
4953 style::ok("yes")
4954 } else {
4955 style::lost("no")
4956 }
4957 );
4958 println!(" by: {by}");
4959 println!(" outcome: {}", truncate(actual_outcome, 80));
4960 println!();
4961 println!(
4962 " {} resolution recorded in {}",
4963 style::ok("ok"),
4964 frontier.display()
4965 );
4966 }
4967}
4968
4969fn cmd_predictions(frontier: &Path, by: Option<&str>, open: bool, json: bool) {
4971 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
4972
4973 let resolved_ids: std::collections::HashSet<&str> = project
4974 .resolutions
4975 .iter()
4976 .map(|r| r.prediction_id.as_str())
4977 .collect();
4978
4979 let mut filtered: Vec<&crate::bundle::Prediction> = project
4980 .predictions
4981 .iter()
4982 .filter(|p| by.is_none_or(|b| p.made_by == b))
4983 .filter(|p| !open || !resolved_ids.contains(p.id.as_str()))
4984 .collect();
4985 filtered.sort_by(|a, b| {
4986 a.resolves_by
4987 .as_deref()
4988 .unwrap_or("9999")
4989 .cmp(b.resolves_by.as_deref().unwrap_or("9999"))
4990 });
4991
4992 if json {
4993 let payload: Vec<serde_json::Value> = filtered
4994 .iter()
4995 .map(|p| {
4996 json!({
4997 "id": p.id,
4998 "claim_text": p.claim_text,
4999 "made_by": p.made_by,
5000 "confidence": p.confidence,
5001 "predicted_at": p.predicted_at,
5002 "resolves_by": p.resolves_by,
5003 "expected_outcome": p.expected_outcome,
5004 "resolved": resolved_ids.contains(p.id.as_str()),
5005 })
5006 })
5007 .collect();
5008 println!(
5009 "{}",
5010 serde_json::to_string_pretty(&json!({
5011 "ok": true,
5012 "command": "predictions",
5013 "frontier": frontier.display().to_string(),
5014 "count": payload.len(),
5015 "predictions": payload,
5016 }))
5017 .expect("serialize predictions")
5018 );
5019 return;
5020 }
5021
5022 println!();
5023 println!(
5024 " {}",
5025 format!("VELA · PREDICTIONS · {}", frontier.display())
5026 .to_uppercase()
5027 .dimmed()
5028 );
5029 println!(" {}", style::tick_row(60));
5030 if filtered.is_empty() {
5031 println!(" (no predictions matching filters)");
5032 return;
5033 }
5034 for p in &filtered {
5035 let resolved = resolved_ids.contains(p.id.as_str());
5036 let chip = if resolved {
5037 style::ok("resolved")
5038 } else {
5039 style::warn("open")
5040 };
5041 let deadline = p.resolves_by.as_deref().unwrap_or("(no deadline)");
5042 println!(
5043 " · {} {} by {} → {}",
5044 p.id.dimmed(),
5045 chip,
5046 p.made_by,
5047 deadline,
5048 );
5049 println!(" claim: {}", truncate(&p.claim_text, 90));
5050 println!(" confidence: {:.2}", p.confidence);
5051 }
5052}
5053
5054fn cmd_predictions_expire(frontier: &Path, now_override: Option<&str>, dry_run: bool, json: bool) {
5059 use chrono::DateTime;
5060
5061 let now_dt = match now_override {
5062 Some(s) => DateTime::parse_from_rfc3339(s)
5063 .map(|dt| dt.with_timezone(&chrono::Utc))
5064 .unwrap_or_else(|e| fail_return(&format!("invalid --now '{s}': {e}"))),
5065 None => chrono::Utc::now(),
5066 };
5067
5068 let mut project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
5069 if dry_run {
5070 let mut probe = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
5072 let report = crate::calibration::expire_overdue_predictions(&mut probe, now_dt);
5073 if json {
5074 println!(
5075 "{}",
5076 serde_json::to_string_pretty(&json!({
5077 "ok": true,
5078 "command": "predictions.expire",
5079 "dry_run": true,
5080 "report": report,
5081 }))
5082 .expect("serialize predictions.expire (dry-run)")
5083 );
5084 } else {
5085 println!(
5086 "{} dry-run @ {}: {} would expire, {} already expired, {} resolved, {} still open",
5087 style::ok("ok"),
5088 report.now,
5089 report.newly_expired.len(),
5090 report.already_expired.len(),
5091 report.already_resolved.len(),
5092 report.still_open.len(),
5093 );
5094 for id in &report.newly_expired {
5095 println!(" · {id}");
5096 }
5097 }
5098 return;
5099 }
5100
5101 let report = crate::calibration::expire_overdue_predictions(&mut project, now_dt);
5102 repo::save_to_path(frontier, &project).unwrap_or_else(|e| fail_return(&e));
5103
5104 if json {
5105 println!(
5106 "{}",
5107 serde_json::to_string_pretty(&json!({
5108 "ok": true,
5109 "command": "predictions.expire",
5110 "report": report,
5111 }))
5112 .expect("serialize predictions.expire")
5113 );
5114 } else {
5115 println!(
5116 "{} @ {}: {} newly expired, {} already expired, {} resolved, {} still open",
5117 style::ok("expired"),
5118 report.now,
5119 report.newly_expired.len(),
5120 report.already_expired.len(),
5121 report.already_resolved.len(),
5122 report.still_open.len(),
5123 );
5124 for id in &report.newly_expired {
5125 println!(" · {id}");
5126 }
5127 }
5128}
5129
5130fn cmd_calibration(frontier: &Path, actor: Option<&str>, json: bool) {
5131 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
5132 let records = match actor {
5133 Some(a) => {
5134 crate::calibration::calibration_for_actor(a, &project.predictions, &project.resolutions)
5135 .map(|r| vec![r])
5136 .unwrap_or_default()
5137 }
5138 None => crate::calibration::calibration_records(&project.predictions, &project.resolutions),
5139 };
5140
5141 if json {
5142 println!(
5143 "{}",
5144 serde_json::to_string_pretty(&json!({
5145 "ok": true,
5146 "command": "calibration",
5147 "frontier": frontier.display().to_string(),
5148 "filter_actor": actor,
5149 "records": records,
5150 }))
5151 .expect("serialize calibration")
5152 );
5153 return;
5154 }
5155
5156 println!();
5157 println!(
5158 " {}",
5159 format!("VELA · CALIBRATION · {}", frontier.display())
5160 .to_uppercase()
5161 .dimmed()
5162 );
5163 println!(" {}", style::tick_row(60));
5164 if records.is_empty() {
5165 println!(" (no calibration records)");
5166 return;
5167 }
5168 for r in &records {
5169 println!(" · {}", r.actor);
5170 println!(
5171 " predictions: {} resolved: {} hits: {}",
5172 r.n_predictions, r.n_resolved, r.n_hit
5173 );
5174 match r.hit_rate {
5175 Some(h) => println!(" hit rate: {:.1}%", h * 100.0),
5176 None => println!(" hit rate: n/a"),
5177 }
5178 match r.brier_score {
5179 Some(b) => println!(
5180 " brier: {:.4} (lower is better; 0.25 = chance)",
5181 b
5182 ),
5183 None => println!(" brier: n/a"),
5184 }
5185 match r.log_score {
5186 Some(l) => println!(
5187 " log score: {:.4} (higher is better; 0 = perfect)",
5188 l
5189 ),
5190 None => println!(" log score: n/a"),
5191 }
5192 }
5193}
5194
5195#[allow(clippy::too_many_arguments)]
5197fn cmd_dataset_add(
5198 frontier: &Path,
5199 name: &str,
5200 version: Option<&str>,
5201 content_hash: &str,
5202 url: Option<&str>,
5203 license: Option<&str>,
5204 source_title: &str,
5205 doi: Option<&str>,
5206 row_count: Option<u64>,
5207 json: bool,
5208) {
5209 let mut project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
5210
5211 let provenance = crate::bundle::Provenance {
5212 source_type: "data_release".to_string(),
5213 doi: doi.map(|s| s.to_string()),
5214 pmid: None,
5215 pmc: None,
5216 openalex_id: None,
5217 url: url.map(|s| s.to_string()),
5218 title: source_title.to_string(),
5219 authors: Vec::new(),
5220 year: None,
5221 journal: None,
5222 license: license.map(|s| s.to_string()),
5223 publisher: None,
5224 funders: Vec::new(),
5225 extraction: crate::bundle::Extraction {
5226 method: "manual_curation".to_string(),
5227 model: None,
5228 model_version: None,
5229 extracted_at: chrono::Utc::now().to_rfc3339(),
5230 extractor_version: env!("CARGO_PKG_VERSION").to_string(),
5231 },
5232 review: None,
5233 citation_count: None,
5234 };
5235
5236 let mut dataset = crate::bundle::Dataset::new(
5237 name.to_string(),
5238 version.map(|s| s.to_string()),
5239 content_hash.to_string(),
5240 url.map(|s| s.to_string()),
5241 license.map(|s| s.to_string()),
5242 provenance,
5243 );
5244 dataset.row_count = row_count;
5245
5246 if project.datasets.iter().any(|d| d.id == dataset.id) {
5247 if json {
5248 println!(
5249 "{}",
5250 serde_json::to_string_pretty(&json!({
5251 "ok": false,
5252 "command": "dataset.add",
5253 "reason": "dataset_already_exists",
5254 "id": dataset.id,
5255 }))
5256 .expect("serialize")
5257 );
5258 } else {
5259 println!(
5260 "{} dataset {} already exists in {}; skipping.",
5261 style::warn("dataset"),
5262 dataset.id,
5263 frontier.display()
5264 );
5265 }
5266 return;
5267 }
5268
5269 let new_id = dataset.id.clone();
5270 project.datasets.push(dataset);
5271 repo::save_to_path(frontier, &project).unwrap_or_else(|e| fail_return(&e));
5272
5273 if json {
5274 println!(
5275 "{}",
5276 serde_json::to_string_pretty(&json!({
5277 "ok": true,
5278 "command": "dataset.add",
5279 "id": new_id,
5280 "name": name,
5281 "version": version,
5282 "frontier": frontier.display().to_string(),
5283 }))
5284 .expect("failed to serialize dataset.add result")
5285 );
5286 } else {
5287 println!();
5288 println!(
5289 " {}",
5290 format!("VELA · DATASET · {}", new_id)
5291 .to_uppercase()
5292 .dimmed()
5293 );
5294 println!(" {}", style::tick_row(60));
5295 println!(" name: {name}");
5296 if let Some(v) = version {
5297 println!(" version: {v}");
5298 }
5299 println!(" content_hash: {content_hash}");
5300 if let Some(u) = url {
5301 println!(" url: {u}");
5302 }
5303 println!(" source: {source_title}");
5304 println!();
5305 println!(
5306 " {} dataset recorded in {}",
5307 style::ok("ok"),
5308 frontier.display()
5309 );
5310 }
5311}
5312
5313#[allow(clippy::too_many_arguments)]
5319fn cmd_negative_result_add(
5320 frontier: &Path,
5321 kind: &str,
5322 deposited_by: &str,
5323 reason: &str,
5324 conditions_text: &str,
5325 notes: &str,
5326 targets: Vec<String>,
5327 endpoint: Option<&str>,
5328 intervention: Option<&str>,
5329 comparator: Option<&str>,
5330 population: Option<&str>,
5331 n_enrolled: Option<u32>,
5332 power: Option<f64>,
5333 ci_lower: Option<f64>,
5334 ci_upper: Option<f64>,
5335 effect_size_threshold: Option<f64>,
5336 registry_id: Option<&str>,
5337 reagent: Option<&str>,
5338 observation: Option<&str>,
5339 attempts: Option<u32>,
5340 source_title: &str,
5341 doi: Option<&str>,
5342 url: Option<&str>,
5343 year: Option<i32>,
5344 json: bool,
5345) {
5346 let nr_kind = match kind {
5347 "registered_trial" => {
5348 let endpoint =
5349 endpoint.unwrap_or_else(|| fail_return("--endpoint required for registered_trial"));
5350 let intervention = intervention
5351 .unwrap_or_else(|| fail_return("--intervention required for registered_trial"));
5352 let comparator = comparator
5353 .unwrap_or_else(|| fail_return("--comparator required for registered_trial"));
5354 let population = population
5355 .unwrap_or_else(|| fail_return("--population required for registered_trial"));
5356 let n_enrolled = n_enrolled
5357 .unwrap_or_else(|| fail_return("--n-enrolled required for registered_trial"));
5358 let power =
5359 power.unwrap_or_else(|| fail_return("--power required for registered_trial"));
5360 let ci_lower =
5361 ci_lower.unwrap_or_else(|| fail_return("--ci-lower required for registered_trial"));
5362 let ci_upper =
5363 ci_upper.unwrap_or_else(|| fail_return("--ci-upper required for registered_trial"));
5364 crate::bundle::NegativeResultKind::RegisteredTrial {
5365 endpoint: endpoint.to_string(),
5366 intervention: intervention.to_string(),
5367 comparator: comparator.to_string(),
5368 population: population.to_string(),
5369 n_enrolled,
5370 power,
5371 effect_size_ci: (ci_lower, ci_upper),
5372 effect_size_threshold,
5373 registry_id: registry_id.map(|s| s.to_string()),
5374 }
5375 }
5376 "exploratory" => {
5377 let reagent =
5378 reagent.unwrap_or_else(|| fail_return("--reagent required for exploratory"));
5379 let observation = observation
5380 .unwrap_or_else(|| fail_return("--observation required for exploratory"));
5381 let attempts =
5382 attempts.unwrap_or_else(|| fail_return("--attempts required for exploratory"));
5383 crate::bundle::NegativeResultKind::Exploratory {
5384 reagent: reagent.to_string(),
5385 observation: observation.to_string(),
5386 attempts,
5387 }
5388 }
5389 other => fail_return(&format!(
5390 "--kind must be 'registered_trial' or 'exploratory', got '{other}'"
5391 )),
5392 };
5393
5394 let conditions = crate::bundle::Conditions {
5395 text: conditions_text.to_string(),
5396 species_verified: Vec::new(),
5397 species_unverified: Vec::new(),
5398 in_vitro: false,
5399 in_vivo: false,
5400 human_data: false,
5401 clinical_trial: matches!(kind, "registered_trial"),
5402 concentration_range: None,
5403 duration: None,
5404 age_group: None,
5405 cell_type: None,
5406 };
5407
5408 let provenance = crate::bundle::Provenance {
5409 source_type: if matches!(kind, "registered_trial") {
5410 "clinical_trial".to_string()
5411 } else {
5412 "lab_notebook".to_string()
5413 },
5414 doi: doi.map(|s| s.to_string()),
5415 pmid: None,
5416 pmc: None,
5417 openalex_id: None,
5418 url: url.map(|s| s.to_string()),
5419 title: source_title.to_string(),
5420 authors: Vec::new(),
5421 year,
5422 journal: None,
5423 license: None,
5424 publisher: None,
5425 funders: Vec::new(),
5426 extraction: crate::bundle::Extraction {
5427 method: "manual_curation".to_string(),
5428 model: None,
5429 model_version: None,
5430 extracted_at: chrono::Utc::now().to_rfc3339(),
5431 extractor_version: env!("CARGO_PKG_VERSION").to_string(),
5432 },
5433 review: None,
5434 citation_count: None,
5435 };
5436
5437 let report = state::add_negative_result(
5438 frontier,
5439 nr_kind,
5440 targets,
5441 deposited_by,
5442 conditions,
5443 provenance,
5444 notes,
5445 reason,
5446 )
5447 .unwrap_or_else(|e| fail_return(&e));
5448
5449 if json {
5450 println!(
5451 "{}",
5452 serde_json::to_string_pretty(&report).expect("serialize report")
5453 );
5454 } else {
5455 println!();
5456 println!(
5457 " {}",
5458 format!("VELA · NEGATIVE-RESULT · {}", report.finding_id)
5459 .to_uppercase()
5460 .dimmed()
5461 );
5462 println!(" {}", style::tick_row(60));
5463 println!(" kind: {kind}");
5464 println!(" deposited_by: {deposited_by}");
5465 if let Some(ev) = &report.applied_event_id {
5466 println!(" event: {ev}");
5467 }
5468 println!(
5469 " {} negative_result deposited in {}",
5470 style::ok("ok"),
5471 frontier.display()
5472 );
5473 }
5474}
5475
/// List the negative results stored in the frontier, optionally filtered to
/// those targeting a specific finding id.
///
/// With `json` set, prints a machine-readable envelope
/// (ok/command/frontier/count/negative_results) and returns. Otherwise prints
/// one human-readable entry per result, including `[retracted]` and review
/// state markers where present.
fn cmd_negative_results(frontier: &Path, target: Option<&str>, json: bool) {
    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
    // With no --target the closure returns true for everything (no filtering).
    let filtered: Vec<&crate::bundle::NegativeResult> = project
        .negative_results
        .iter()
        .filter(|nr| {
            target
                .map(|t| nr.target_findings.iter().any(|f| f == t))
                .unwrap_or(true)
        })
        .collect();

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "negative_results",
                "frontier": frontier.display().to_string(),
                "count": filtered.len(),
                "negative_results": filtered,
            }))
            .expect("serialize negative_results")
        );
        return;
    }

    if filtered.is_empty() {
        println!(" no negative_results in {}", frontier.display());
        return;
    }

    println!();
    println!(
        " {} ({})",
        "VELA · NEGATIVE RESULTS".dimmed(),
        filtered.len()
    );
    println!(" {}", style::tick_row(60));
    for nr in &filtered {
        // One-line label summarizing the variant-specific payload.
        let kind_label = match &nr.kind {
            crate::bundle::NegativeResultKind::RegisteredTrial {
                endpoint, power, ..
            } => format!("trial · {endpoint} · power {power:.2}"),
            crate::bundle::NegativeResultKind::Exploratory {
                reagent, attempts, ..
            } => format!("exploratory · {reagent} · {attempts} attempts"),
        };
        let retracted = if nr.retracted { " [retracted]" } else { "" };
        // Debug-formatted review state, or empty when unreviewed.
        let review = nr
            .review_state
            .as_ref()
            .map(|s| format!(" [{s:?}]"))
            .unwrap_or_default();
        println!(" {}{}{}", nr.id, retracted, review);
        println!(" {kind_label}");
        if !nr.target_findings.is_empty() {
            println!(" targets: {}", nr.target_findings.join(", "));
        }
    }
    println!();
}
5540
5541#[allow(clippy::too_many_arguments)]
5543fn cmd_tier_set(
5544 frontier: &Path,
5545 object_type: &str,
5546 object_id: &str,
5547 tier: &str,
5548 actor: &str,
5549 reason: &str,
5550 json: bool,
5551) {
5552 let parsed_tier =
5553 crate::access_tier::AccessTier::parse(tier).unwrap_or_else(|e| fail_return(&e));
5554 let report = state::set_tier(frontier, object_type, object_id, parsed_tier, actor, reason)
5555 .unwrap_or_else(|e| fail_return(&e));
5556
5557 if json {
5558 println!(
5559 "{}",
5560 serde_json::to_string_pretty(&report).expect("serialize tier-set report")
5561 );
5562 } else {
5563 println!();
5564 println!(
5565 " {}",
5566 format!("VELA · TIER · {}", object_id)
5567 .to_uppercase()
5568 .dimmed()
5569 );
5570 println!(" {}", style::tick_row(60));
5571 println!(" object_type: {object_type}");
5572 println!(" new_tier: {}", parsed_tier.canonical());
5573 println!(" actor: {actor}");
5574 if let Some(ev) = &report.applied_event_id {
5575 println!(" event: {ev}");
5576 }
5577 println!(" {} tier set in {}", style::ok("ok"), frontier.display());
5578 }
5579}
5580
5581#[allow(clippy::too_many_arguments)]
5583fn cmd_trajectory_create(
5584 frontier: &Path,
5585 deposited_by: &str,
5586 reason: &str,
5587 targets: Vec<String>,
5588 notes: &str,
5589 json: bool,
5590) {
5591 let report = state::create_trajectory(frontier, targets, deposited_by, notes, reason)
5592 .unwrap_or_else(|e| fail_return(&e));
5593
5594 if json {
5595 println!(
5596 "{}",
5597 serde_json::to_string_pretty(&report).expect("serialize trajectory report")
5598 );
5599 } else {
5600 println!();
5601 println!(
5602 " {}",
5603 format!("VELA · TRAJECTORY · {}", report.finding_id)
5604 .to_uppercase()
5605 .dimmed()
5606 );
5607 println!(" {}", style::tick_row(60));
5608 println!(" deposited_by: {deposited_by}");
5609 if let Some(ev) = &report.applied_event_id {
5610 println!(" event: {ev}");
5611 }
5612 println!(
5613 " {} trajectory opened in {}",
5614 style::ok("ok"),
5615 frontier.display()
5616 );
5617 }
5618}
5619
5620#[allow(clippy::too_many_arguments)]
5622fn cmd_trajectory_step(
5623 frontier: &Path,
5624 trajectory_id: &str,
5625 kind: &str,
5626 description: &str,
5627 actor: &str,
5628 reason: &str,
5629 references: Vec<String>,
5630 json: bool,
5631) {
5632 let parsed_kind = match kind {
5633 "hypothesis" => crate::bundle::TrajectoryStepKind::Hypothesis,
5634 "tried" => crate::bundle::TrajectoryStepKind::Tried,
5635 "ruled_out" => crate::bundle::TrajectoryStepKind::RuledOut,
5636 "observed" => crate::bundle::TrajectoryStepKind::Observed,
5637 "refined" => crate::bundle::TrajectoryStepKind::Refined,
5638 other => fail_return(&format!(
5639 "--kind must be one of hypothesis|tried|ruled_out|observed|refined, got '{other}'"
5640 )),
5641 };
5642 let report = state::append_trajectory_step(
5643 frontier,
5644 trajectory_id,
5645 parsed_kind,
5646 description,
5647 actor,
5648 references,
5649 reason,
5650 )
5651 .unwrap_or_else(|e| fail_return(&e));
5652
5653 if json {
5654 println!(
5655 "{}",
5656 serde_json::to_string_pretty(&report).expect("serialize step report")
5657 );
5658 } else {
5659 println!();
5660 println!(
5661 " {}",
5662 format!("VELA · STEP · {}", report.finding_id)
5663 .to_uppercase()
5664 .dimmed()
5665 );
5666 println!(" {}", style::tick_row(60));
5667 println!(" trajectory: {trajectory_id}");
5668 println!(" kind: {kind}");
5669 println!(" actor: {actor}");
5670 println!(
5671 " {} step appended in {}",
5672 style::ok("ok"),
5673 frontier.display()
5674 );
5675 }
5676}
5677
/// List trajectories in the frontier, optionally filtered to those targeting
/// a specific finding id.
///
/// With `json` set, emits a machine-readable envelope and returns. Otherwise
/// prints each trajectory with its retraction/review markers, target list,
/// and every step (description previews truncated to 80 chars).
fn cmd_trajectories(frontier: &Path, target: Option<&str>, json: bool) {
    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
    // With no --target the closure returns true for everything (no filtering).
    let filtered: Vec<&crate::bundle::Trajectory> = project
        .trajectories
        .iter()
        .filter(|t| {
            target
                .map(|tg| t.target_findings.iter().any(|f| f == tg))
                .unwrap_or(true)
        })
        .collect();

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "trajectories",
                "frontier": frontier.display().to_string(),
                "count": filtered.len(),
                "trajectories": filtered,
            }))
            .expect("serialize trajectories")
        );
        return;
    }

    if filtered.is_empty() {
        println!(" no trajectories in {}", frontier.display());
        return;
    }

    println!();
    println!(" {} ({})", "VELA · TRAJECTORIES".dimmed(), filtered.len());
    println!(" {}", style::tick_row(60));
    for t in &filtered {
        let retracted = if t.retracted { " [retracted]" } else { "" };
        // Debug-formatted review state, or empty when unreviewed.
        let review = t
            .review_state
            .as_ref()
            .map(|s| format!(" [{s:?}]"))
            .unwrap_or_default();
        println!(" {}{}{}", t.id, retracted, review);
        println!(
            " {} step(s){}",
            t.steps.len(),
            if t.target_findings.is_empty() {
                String::new()
            } else {
                format!(" · targets: {}", t.target_findings.join(", "))
            }
        );
        for step in &t.steps {
            // Human-readable label for the step kind enum.
            let label = match step.kind {
                crate::bundle::TrajectoryStepKind::Hypothesis => "hypothesis",
                crate::bundle::TrajectoryStepKind::Tried => "tried",
                crate::bundle::TrajectoryStepKind::RuledOut => "ruled_out",
                crate::bundle::TrajectoryStepKind::Observed => "observed",
                crate::bundle::TrajectoryStepKind::Refined => "refined",
            };
            // Char-based truncation keeps multi-byte text safe.
            let preview: String = step.description.chars().take(80).collect();
            println!(" [{label}] {preview}");
        }
    }
    println!();
}
5745
5746fn cmd_datasets(frontier: &Path, json: bool) {
5748 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
5749 if json {
5750 println!(
5751 "{}",
5752 serde_json::to_string_pretty(&json!({
5753 "ok": true,
5754 "command": "datasets",
5755 "frontier": frontier.display().to_string(),
5756 "count": project.datasets.len(),
5757 "datasets": project.datasets,
5758 }))
5759 .expect("serialize datasets")
5760 );
5761 return;
5762 }
5763 println!();
5764 println!(
5765 " {}",
5766 format!("VELA · DATASETS · {}", frontier.display())
5767 .to_uppercase()
5768 .dimmed()
5769 );
5770 println!(" {}", style::tick_row(60));
5771 if project.datasets.is_empty() {
5772 println!(" (no datasets registered)");
5773 return;
5774 }
5775 for ds in &project.datasets {
5776 let v = ds
5777 .version
5778 .as_deref()
5779 .map(|s| format!("@{s}"))
5780 .unwrap_or_default();
5781 println!(" · {} {}{}", ds.id.dimmed(), ds.name, v);
5782 if let Some(u) = &ds.url {
5783 println!(" url: {}", truncate(u, 80));
5784 }
5785 println!(" hash: {}", truncate(&ds.content_hash, 80));
5786 }
5787}
5788
5789#[allow(clippy::too_many_arguments)]
5791fn cmd_code_add(
5792 frontier: &Path,
5793 language: &str,
5794 repo_url: Option<&str>,
5795 commit: Option<&str>,
5796 path: &str,
5797 content_hash: &str,
5798 line_start: Option<u32>,
5799 line_end: Option<u32>,
5800 entry_point: Option<&str>,
5801 json: bool,
5802) {
5803 let mut project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
5804
5805 let line_range = match (line_start, line_end) {
5806 (Some(a), Some(b)) => Some((a, b)),
5807 (Some(a), None) => Some((a, a)),
5808 _ => None,
5809 };
5810
5811 let artifact = crate::bundle::CodeArtifact::new(
5812 language.to_string(),
5813 repo_url.map(|s| s.to_string()),
5814 commit.map(|s| s.to_string()),
5815 path.to_string(),
5816 line_range,
5817 content_hash.to_string(),
5818 entry_point.map(|s| s.to_string()),
5819 );
5820
5821 if project.code_artifacts.iter().any(|c| c.id == artifact.id) {
5822 if json {
5823 println!(
5824 "{}",
5825 serde_json::to_string_pretty(&json!({
5826 "ok": false,
5827 "command": "code.add",
5828 "reason": "artifact_already_exists",
5829 "id": artifact.id,
5830 }))
5831 .expect("serialize")
5832 );
5833 } else {
5834 println!(
5835 "{} code artifact {} already exists in {}; skipping.",
5836 style::warn("code"),
5837 artifact.id,
5838 frontier.display()
5839 );
5840 }
5841 return;
5842 }
5843
5844 let new_id = artifact.id.clone();
5845 project.code_artifacts.push(artifact);
5846 repo::save_to_path(frontier, &project).unwrap_or_else(|e| fail_return(&e));
5847
5848 if json {
5849 println!(
5850 "{}",
5851 serde_json::to_string_pretty(&json!({
5852 "ok": true,
5853 "command": "code.add",
5854 "id": new_id,
5855 "language": language,
5856 "path": path,
5857 "frontier": frontier.display().to_string(),
5858 }))
5859 .expect("failed to serialize code.add result")
5860 );
5861 } else {
5862 println!();
5863 println!(
5864 " {}",
5865 format!("VELA · CODE · {}", new_id).to_uppercase().dimmed()
5866 );
5867 println!(" {}", style::tick_row(60));
5868 println!(" language: {language}");
5869 if let Some(r) = repo_url {
5870 println!(" repo: {r}");
5871 }
5872 if let Some(c) = commit {
5873 println!(" commit: {c}");
5874 }
5875 println!(" path: {path}");
5876 if let Some((a, b)) = line_range {
5877 println!(" lines: {a}-{b}");
5878 }
5879 println!(" content_hash: {content_hash}");
5880 println!();
5881 println!(
5882 " {} code artifact recorded in {}",
5883 style::ok("ok"),
5884 frontier.display()
5885 );
5886 }
5887}
5888
5889fn cmd_code_artifacts(frontier: &Path, json: bool) {
5891 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
5892 if json {
5893 println!(
5894 "{}",
5895 serde_json::to_string_pretty(&json!({
5896 "ok": true,
5897 "command": "code-artifacts",
5898 "frontier": frontier.display().to_string(),
5899 "count": project.code_artifacts.len(),
5900 "code_artifacts": project.code_artifacts,
5901 }))
5902 .expect("serialize code-artifacts")
5903 );
5904 return;
5905 }
5906 println!();
5907 println!(
5908 " {}",
5909 format!("VELA · CODE · {}", frontier.display())
5910 .to_uppercase()
5911 .dimmed()
5912 );
5913 println!(" {}", style::tick_row(60));
5914 if project.code_artifacts.is_empty() {
5915 println!(" (no code artifacts registered)");
5916 return;
5917 }
5918 for c in &project.code_artifacts {
5919 let lr = c
5920 .line_range
5921 .map(|(a, b)| format!(":{a}-{b}"))
5922 .unwrap_or_default();
5923 println!(" · {} {} {}{}", c.id.dimmed(), c.language, c.path, lr);
5924 if let Some(r) = &c.repo_url {
5925 println!(" repo: {}", truncate(r, 80));
5926 }
5927 if let Some(g) = &c.git_commit {
5928 println!(" commit: {g}");
5929 }
5930 }
5931}
5932
5933fn sha256_for_bytes(bytes: &[u8]) -> String {
5934 format!("sha256:{}", hex::encode(Sha256::digest(bytes)))
5935}
5936
/// Return the hex portion of a `sha256:<hex>` hash; strings without the
/// `sha256:` prefix are returned unchanged.
fn sha256_hex_part(content_hash: &str) -> &str {
    match content_hash.strip_prefix("sha256:") {
        Some(hex_part) => hex_part,
        None => content_hash,
    }
}
5940
5941fn artifact_blob_locator(frontier: &Path, content_hash: &str, bytes: &[u8]) -> Option<String> {
5942 let Ok(repo::VelaSource::VelaRepo(root)) = repo::detect(frontier) else {
5943 return None;
5944 };
5945 let hex = sha256_hex_part(content_hash);
5946 let rel = format!(".vela/artifact-blobs/sha256/{hex}");
5947 let path = root.join(&rel);
5948 if let Some(parent) = path.parent() {
5949 std::fs::create_dir_all(parent).unwrap_or_else(|e| {
5950 fail(&format!(
5951 "Failed to create artifact blob directory {}: {e}",
5952 parent.display()
5953 ))
5954 });
5955 }
5956 if !path.is_file() {
5957 std::fs::write(&path, bytes)
5958 .unwrap_or_else(|e| fail(&format!("Failed to write artifact blob: {e}")));
5959 }
5960 Some(rel)
5961}
5962
5963fn parse_metadata_pairs(pairs: Vec<String>) -> BTreeMap<String, Value> {
5964 let mut out = BTreeMap::new();
5965 for pair in pairs {
5966 let Some((key, value)) = pair.split_once('=') else {
5967 fail(&format!("--metadata must be key=value, got {pair:?}"));
5968 };
5969 let key = key.trim();
5970 if key.is_empty() {
5971 fail("--metadata key must be non-empty");
5972 }
5973 out.insert(key.to_string(), Value::String(value.trim().to_string()));
5974 }
5975 out
5976}
5977
/// Map an artifact `kind` keyword to the provenance `source_type` label.
/// Unknown kinds fall back to `"database_record"`.
fn artifact_source_type(kind: &str) -> &'static str {
    if kind == "clinical_trial_record" || kind == "protocol" {
        return "clinical_trial";
    }
    match kind {
        "dataset" => "data_release",
        "model_output" => "model_output",
        "registry_record" => "database_record",
        "lab_file" => "lab_notebook",
        _ => "database_record",
    }
}
5988
/// Build a `Provenance` record for a manually deposited artifact.
///
/// `source_type` is derived from the artifact `kind` via
/// `artifact_source_type`. Bibliographic fields the CLI does not collect
/// (authors, year, journal, publisher, funders, ids) are left empty/None.
/// The extraction block records this binary's version and the current UTC
/// timestamp, with method `"artifact_deposit"`.
fn artifact_provenance(
    kind: &str,
    title: &str,
    url: Option<&str>,
    doi: Option<&str>,
    license: Option<&str>,
) -> crate::bundle::Provenance {
    crate::bundle::Provenance {
        source_type: artifact_source_type(kind).to_string(),
        doi: doi.map(str::to_string),
        pmid: None,
        pmc: None,
        openalex_id: None,
        url: url.map(str::to_string),
        title: title.to_string(),
        authors: Vec::new(),
        year: None,
        journal: None,
        license: license.map(str::to_string),
        publisher: None,
        funders: Vec::new(),
        extraction: crate::bundle::Extraction {
            method: "artifact_deposit".to_string(),
            model: None,
            model_version: None,
            extracted_at: chrono::Utc::now().to_rfc3339(),
            extractor_version: env!("CARGO_PKG_VERSION").to_string(),
        },
        review: None,
        citation_count: None,
    }
}
6021
/// Deposit an artifact into the frontier from a local file, a URL, or a bare
/// content hash.
///
/// Storage mode resolution:
/// * `--file` present: bytes are read and hashed; an optional `--content-hash`
///   is verified against the actual bytes. The blob is written into the repo's
///   blob store when possible (`local_blob`), otherwise the file path itself
///   is recorded (`local_file`).
/// * no `--file` but `--url`: recorded as `remote`.
/// * neither: recorded as `pointer`; `--content-hash` is then mandatory.
///
/// Aborts the process on read/hash/validation errors; prints a JSON envelope
/// or a human-readable summary on success.
#[allow(clippy::too_many_arguments)]
fn cmd_artifact_add(
    frontier: &Path,
    kind: &str,
    name: &str,
    file: Option<&Path>,
    url: Option<&str>,
    content_hash: Option<&str>,
    media_type: Option<&str>,
    license: Option<&str>,
    source_title: Option<&str>,
    source_url: Option<&str>,
    doi: Option<&str>,
    target: Vec<String>,
    metadata: Vec<String>,
    access_tier: &str,
    deposited_by: &str,
    reason: &str,
    json_out: bool,
) {
    // Validate the tier string before doing any I/O.
    let tier =
        crate::access_tier::AccessTier::parse(access_tier).unwrap_or_else(|e| fail_return(&e));
    let mut size_bytes = None;
    let mut storage_mode = "pointer".to_string();
    let mut locator = url.map(str::to_string);
    let mut computed_hash = content_hash.map(str::to_string);

    if let Some(path) = file {
        let bytes = std::fs::read(path)
            .unwrap_or_else(|e| fail(&format!("Failed to read artifact file: {e}")));
        let actual_hash = sha256_for_bytes(&bytes);
        // If the caller also supplied a hash, it must match the file bytes
        // (hex comparison is case-insensitive, prefix-agnostic).
        if let Some(expected) = content_hash {
            let expected_hex = sha256_hex_part(expected);
            let actual_hex = sha256_hex_part(&actual_hash);
            if !expected_hex.eq_ignore_ascii_case(actual_hex) {
                fail(&format!(
                    "--content-hash does not match file bytes: expected {expected}, got {actual_hash}"
                ));
            }
        }
        size_bytes = Some(bytes.len() as u64);
        computed_hash = Some(actual_hash.clone());
        // Prefer the repo blob store; fall back to recording the local path.
        if let Some(rel) = artifact_blob_locator(frontier, &actual_hash, &bytes) {
            storage_mode = "local_blob".to_string();
            locator = Some(rel);
        } else {
            storage_mode = "local_file".to_string();
            locator = Some(path.display().to_string());
        }
    }

    // Without a file, the hash must have been supplied explicitly.
    let Some(content_hash) = computed_hash else {
        fail("Provide --content-hash unless --file is present.");
    };
    let content_hash_for_print = content_hash.clone();
    if file.is_none() && url.is_some() {
        storage_mode = "remote".to_string();
    }

    // --source-url falls back to --url; --source-title falls back to the name.
    let source_url_effective = source_url.or(url);
    let source_title = source_title.unwrap_or(name);
    let provenance = artifact_provenance(kind, source_title, source_url_effective, doi, license);
    let metadata = parse_metadata_pairs(metadata);
    let artifact = crate::bundle::Artifact::new(
        kind.to_string(),
        name.to_string(),
        content_hash,
        size_bytes,
        media_type.map(str::to_string),
        storage_mode,
        locator,
        source_url_effective.map(str::to_string),
        license.map(str::to_string),
        target,
        provenance,
        metadata,
        tier,
    )
    .unwrap_or_else(|e| fail_return(&e));

    let artifact_id = artifact.id.clone();
    let report = state::add_artifact(frontier, artifact, deposited_by, reason)
        .unwrap_or_else(|e| fail_return(&e));

    if json_out {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "artifact.add",
                "id": artifact_id,
                "frontier": frontier.display().to_string(),
                "event": report.applied_event_id,
            }))
            .expect("serialize artifact.add")
        );
    } else {
        println!();
        println!(
            " {}",
            format!("VELA · ARTIFACT · {}", artifact_id)
                .to_uppercase()
                .dimmed()
        );
        println!(" {}", style::tick_row(60));
        println!(" kind: {kind}");
        println!(" name: {name}");
        println!(" hash: {content_hash_for_print}");
        println!(
            " {} artifact recorded in {}",
            style::ok("ok"),
            frontier.display()
        );
    }
}
6137
6138fn cmd_artifacts(frontier: &Path, target: Option<&str>, json_out: bool) {
6139 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
6140 let filtered: Vec<&crate::bundle::Artifact> = project
6141 .artifacts
6142 .iter()
6143 .filter(|artifact| {
6144 target
6145 .map(|t| artifact.target_findings.iter().any(|f| f == t))
6146 .unwrap_or(true)
6147 })
6148 .collect();
6149
6150 if json_out {
6151 println!(
6152 "{}",
6153 serde_json::to_string_pretty(&json!({
6154 "ok": true,
6155 "command": "artifacts",
6156 "frontier": frontier.display().to_string(),
6157 "count": filtered.len(),
6158 "artifacts": filtered,
6159 }))
6160 .expect("serialize artifacts")
6161 );
6162 return;
6163 }
6164
6165 println!();
6166 println!(
6167 " {}",
6168 format!("VELA · ARTIFACTS · {}", frontier.display())
6169 .to_uppercase()
6170 .dimmed()
6171 );
6172 println!(" {}", style::tick_row(60));
6173 if filtered.is_empty() {
6174 println!(" (no artifacts registered)");
6175 return;
6176 }
6177 for artifact in filtered {
6178 println!(
6179 " · {} {} · {}",
6180 artifact.id.dimmed(),
6181 artifact.kind,
6182 artifact.name
6183 );
6184 if let Some(locator) = &artifact.locator {
6185 println!(" locator: {}", truncate(locator, 88));
6186 }
6187 if !artifact.target_findings.is_empty() {
6188 println!(" targets: {}", artifact.target_findings.join(", "));
6189 }
6190 }
6191}
6192
/// Run the artifact audit (blob presence checks, byte totals, per-kind
/// counts) and print the results; exits with status 1 when the audit fails.
fn cmd_artifact_audit(frontier: &Path, json_out: bool) {
    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
    let audit = crate::artifact_audit::audit_artifacts(frontier, &project);
    if json_out {
        println!(
            "{}",
            serde_json::to_string_pretty(&audit).expect("serialize artifact audit")
        );
        // JSON consumers still get a non-zero exit code on failure.
        if !audit.ok {
            std::process::exit(1);
        }
        return;
    }

    println!();
    println!(
        " {}",
        format!("VELA · ARTIFACT AUDIT · {}", frontier.display())
            .to_uppercase()
            .dimmed()
    );
    println!(" {}", style::tick_row(60));
    println!(" artifacts: {}", audit.artifact_count);
    println!(" checked local blobs: {}", audit.checked_local_blobs);
    println!(" local blob bytes: {}", audit.local_blob_bytes);
    if !audit.by_kind.is_empty() {
        // Render per-kind counts as comma-separated "kind:count" pairs.
        let kinds = audit
            .by_kind
            .iter()
            .map(|(kind, count)| format!("{kind}:{count}"))
            .collect::<Vec<_>>()
            .join(", ");
        println!(" kinds: {kinds}");
    }
    if audit.ok {
        println!(" {} artifact audit passed.", style::ok("ok"));
        return;
    }
    // One line per problem, then a failing exit status.
    for issue in &audit.issues {
        println!(
            " {} {} {}: {}",
            style::lost("invalid"),
            issue.id,
            issue.field,
            issue.message
        );
    }
    std::process::exit(1);
}
6242
/// Print the decision-brief projection: per-question short answers, caveats,
/// supporting/tension/gap findings, attached artifacts, and what would change
/// each answer. Exits with status 1 when the projection fails validation.
fn cmd_decision_brief(frontier: &Path, json_out: bool) {
    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
    let report = decision::load_decision_brief(frontier, &project);
    if json_out {
        println!(
            "{}",
            serde_json::to_string_pretty(&report).expect("serialize decision brief report")
        );
        if !report.ok {
            std::process::exit(1);
        }
        return;
    }
    println!();
    println!(
        " {}",
        format!("VELA · DECISION BRIEF · {}", project.project.name)
            .to_uppercase()
            .dimmed()
    );
    println!(" {}", style::tick_row(60));
    if !report.ok {
        print_projection_issues(&report.issues, report.error.as_deref());
        std::process::exit(1);
    }
    // Invariant: an `ok` report always carries a projection.
    let brief = report
        .projection
        .as_ref()
        .expect("ok decision report carries projection");
    for question in &brief.questions {
        println!(" · {} · {}", question.id.dimmed(), question.title);
        println!(" answer: {}", wrap_line(&question.short_answer, 82));
        println!(" caveat: {}", wrap_line(&question.caveat, 82));
        println!(" support: {}", question.supporting_findings.join(", "));
        if !question.tension_findings.is_empty() {
            println!(" tensions: {}", question.tension_findings.join(", "));
        }
        if !question.gap_findings.is_empty() {
            println!(" gaps: {}", question.gap_findings.join(", "));
        }
        if !question.artifact_ids.is_empty() {
            println!(" artifacts: {}", question.artifact_ids.join(", "));
        }
        println!(
            " would change: {}",
            wrap_line(&question.what_would_change_this_answer, 82)
        );
    }
}
6292
/// Print the trial-outcomes projection: one entry per trial row with
/// population, endpoint, cognition/biomarker/safety results, and regulatory
/// status. Exits with status 1 when the projection fails validation.
fn cmd_trial_summary(frontier: &Path, json_out: bool) {
    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
    let report = decision::load_trial_outcomes(frontier, &project);
    if json_out {
        println!(
            "{}",
            serde_json::to_string_pretty(&report).expect("serialize trial summary report")
        );
        if !report.ok {
            std::process::exit(1);
        }
        return;
    }
    println!();
    println!(
        " {}",
        format!("VELA · TRIAL SUMMARY · {}", project.project.name)
            .to_uppercase()
            .dimmed()
    );
    println!(" {}", style::tick_row(60));
    if !report.ok {
        print_projection_issues(&report.issues, report.error.as_deref());
        std::process::exit(1);
    }
    // Invariant: an `ok` report always carries a projection.
    let outcomes = report
        .projection
        .as_ref()
        .expect("ok trial report carries projection");
    for row in &outcomes.rows {
        println!(" · {} · {} ({})", row.id.dimmed(), row.program, row.drug);
        println!(" population: {}", wrap_line(&row.population, 82));
        println!(" endpoint: {}", wrap_line(&row.primary_endpoint, 82));
        println!(" cognition: {}", wrap_line(&row.cognitive_result, 82));
        println!(" biomarker: {}", wrap_line(&row.biomarker_result, 82));
        println!(" risk: {}", wrap_line(&row.aria_or_safety_result, 82));
        println!(" status: {}", wrap_line(&row.regulatory_status, 82));
        if !row.finding_ids.is_empty() {
            println!(" findings: {}", row.finding_ids.join(", "));
        }
        if !row.artifact_ids.is_empty() {
            println!(" artifacts: {}", row.artifact_ids.join(", "));
        }
    }
}
6338
6339fn cmd_source_verification(frontier: &Path, json_out: bool) {
6340 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
6341 let report = decision::load_source_verification(frontier, &project);
6342 if json_out {
6343 println!(
6344 "{}",
6345 serde_json::to_string_pretty(&report).expect("serialize source verification report")
6346 );
6347 if !report.ok {
6348 std::process::exit(1);
6349 }
6350 return;
6351 }
6352 println!();
6353 println!(
6354 " {}",
6355 format!("VELA · SOURCE VERIFICATION · {}", project.project.name)
6356 .to_uppercase()
6357 .dimmed()
6358 );
6359 println!(" {}", style::tick_row(60));
6360 if !report.ok {
6361 print_projection_issues(&report.issues, report.error.as_deref());
6362 std::process::exit(1);
6363 }
6364 let verification = report
6365 .projection
6366 .as_ref()
6367 .expect("ok source verification report carries projection");
6368 println!(" verified_at: {}", verification.verified_at);
6369 for source in &verification.sources {
6370 println!(" · {} · {}", source.id.dimmed(), source.title);
6371 println!(" agency: {}", source.agency);
6372 println!(" url: {}", truncate(&source.url, 88));
6373 println!(" status: {}", wrap_line(&source.current_status, 82));
6374 }
6375}
6376
/// Print the source-ingest-plan projection: one entry per planned source with
/// category, priority, ingest status, locator, intended use, and any already
/// ingested artifact. Exits with status 1 when the projection fails
/// validation.
fn cmd_source_ingest_plan(frontier: &Path, json_out: bool) {
    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
    let report = decision::load_source_ingest_plan(frontier, &project);
    if json_out {
        println!(
            "{}",
            serde_json::to_string_pretty(&report).expect("serialize source ingest plan report")
        );
        if !report.ok {
            std::process::exit(1);
        }
        return;
    }
    println!();
    println!(
        " {}",
        format!("VELA · SOURCE INGEST PLAN · {}", project.project.name)
            .to_uppercase()
            .dimmed()
    );
    println!(" {}", style::tick_row(60));
    if !report.ok {
        print_projection_issues(&report.issues, report.error.as_deref());
        std::process::exit(1);
    }
    // Invariant: an `ok` report always carries a projection.
    let plan = report
        .projection
        .as_ref()
        .expect("ok source ingest plan report carries projection");
    println!(" verified_at: {}", plan.verified_at);
    println!(" entries: {}", plan.entries.len());
    for entry in &plan.entries {
        println!(
            " · {} · {} · {} · {}",
            entry.id.dimmed(),
            entry.category,
            entry.priority,
            entry.ingest_status
        );
        println!(" name: {}", wrap_line(&entry.name, 82));
        println!(" locator: {}", truncate(&entry.locator, 88));
        println!(" use: {}", wrap_line(&entry.target_use, 82));
        if let Some(id) = &entry.current_frontier_artifact_id {
            println!(" artifact: {id}");
        }
        if !entry.target_findings.is_empty() {
            println!(" findings: {}", entry.target_findings.join(", "));
        }
    }
}
6427
6428fn print_projection_issues(issues: &[decision::ProjectionIssue], error: Option<&str>) {
6429 if let Some(error) = error {
6430 println!(" {} {error}", style::lost("unavailable"));
6431 }
6432 for issue in issues {
6433 println!(
6434 " {} {}: {}",
6435 style::lost("invalid"),
6436 issue.path,
6437 issue.message
6438 );
6439 }
6440}
6441
/// Greedy word-wrap for CLI detail lines.
///
/// Returns `text` unchanged when it already fits within `max_chars` (counted
/// in chars, not bytes). Otherwise re-flows it word by word, prefixing each
/// continuation line with a fixed hanging indent; the indent is not counted
/// against `max_chars`. Note the wrapped path collapses runs of whitespace,
/// since it rebuilds the string from `split_whitespace`.
fn wrap_line(text: &str, max_chars: usize) -> String {
    if text.chars().count() <= max_chars {
        return text.to_string();
    }
    let mut wrapped = String::new();
    let mut current_len = 0usize;
    for word in text.split_whitespace() {
        let word_len = word.chars().count();
        // A word fits if the line is empty, or if it plus a separating
        // space stays within the budget.
        let fits = current_len == 0 || current_len + 1 + word_len <= max_chars;
        if fits {
            if current_len > 0 {
                wrapped.push(' ');
                current_len += 1;
            }
            wrapped.push_str(word);
            current_len += word_len;
        } else {
            wrapped.push('\n');
            wrapped.push_str("              ");
            wrapped.push_str(word);
            current_len = word_len;
        }
    }
    wrapped
}
6466
6467fn clinical_str<'a>(study: &'a Value, pointer: &str) -> Option<&'a str> {
6468 study.pointer(pointer).and_then(Value::as_str)
6469}
6470
6471fn clinical_string_array(study: &Value, pointer: &str) -> Vec<String> {
6472 study
6473 .pointer(pointer)
6474 .and_then(Value::as_array)
6475 .map(|items| {
6476 items
6477 .iter()
6478 .filter_map(Value::as_str)
6479 .map(str::to_string)
6480 .collect()
6481 })
6482 .unwrap_or_default()
6483}
6484
6485fn clinical_named_array(study: &Value, pointer: &str, field: &str) -> Vec<String> {
6486 study
6487 .pointer(pointer)
6488 .and_then(Value::as_array)
6489 .map(|items| {
6490 items
6491 .iter()
6492 .filter_map(|item| item.get(field).and_then(Value::as_str))
6493 .map(str::to_string)
6494 .collect()
6495 })
6496 .unwrap_or_default()
6497}
6498
6499fn insert_string_vec_metadata(
6500 metadata: &mut BTreeMap<String, Value>,
6501 key: &str,
6502 values: Vec<String>,
6503) {
6504 if values.is_empty() {
6505 return;
6506 }
6507 metadata.insert(
6508 key.to_string(),
6509 Value::Array(values.into_iter().map(Value::String).collect()),
6510 );
6511}
6512
/// Imports a ClinicalTrials.gov study into the frontier as a content-addressed
/// artifact.
///
/// The raw v2 API payload is either read from `input_json` (a pre-downloaded
/// file) or fetched live for `nct_id`. The JSON is canonicalized and hashed,
/// stored as a local blob when possible (falling back to a remote API-URL
/// reference), enriched with trial metadata pulled from well-known JSON
/// pointers, and committed to frontier state via `state::add_artifact`.
/// Output is JSON or a styled terminal summary depending on `json_out`.
/// Every failure path terminates the process through `fail`.
async fn cmd_clinical_trial_import(
    frontier: &Path,
    nct_id: &str,
    input_json: Option<&Path>,
    target: Vec<String>,
    deposited_by: &str,
    reason: &str,
    license: &str,
    json_out: bool,
) {
    let api_url = format!("https://clinicaltrials.gov/api/v2/studies/{nct_id}");
    // Prefer a pre-downloaded payload on disk; otherwise fetch from the live API.
    let raw = if let Some(path) = input_json {
        std::fs::read_to_string(path)
            .unwrap_or_else(|e| fail(&format!("Failed to read ClinicalTrials.gov JSON: {e}")))
    } else {
        let response = reqwest::get(&api_url).await.unwrap_or_else(|e| {
            fail(&format!(
                "Failed to fetch ClinicalTrials.gov record {nct_id}: {e}"
            ))
        });
        // Treat HTTP error statuses (404, 5xx, ...) as fetch failures too.
        let response = response.error_for_status().unwrap_or_else(|e| {
            fail(&format!(
                "Failed to fetch ClinicalTrials.gov record {nct_id}: {e}"
            ))
        });
        response.text().await.unwrap_or_else(|e| {
            fail(&format!(
                "Failed to read ClinicalTrials.gov record {nct_id}: {e}"
            ))
        })
    };
    let study: Value = serde_json::from_str(&raw)
        .unwrap_or_else(|e| fail(&format!("Failed to parse ClinicalTrials.gov JSON: {e}")));
    // Canonical bytes give a content hash that is stable across key ordering.
    let canonical_bytes = crate::canonical::to_canonical_bytes(&study)
        .unwrap_or_else(|e| fail(&format!("Failed to canonicalize trial JSON: {e}")));
    let content_hash = sha256_for_bytes(&canonical_bytes);
    // Try to persist the blob under the frontier; fall back to the API URL.
    let locator = artifact_blob_locator(frontier, &content_hash, &canonical_bytes)
        .unwrap_or_else(|| api_url.clone());
    let storage_mode = if locator.starts_with(".vela/") {
        "local_blob"
    } else {
        "remote"
    };

    // Prefer the NCT id reported inside the record; fall back to the CLI arg.
    let parsed_nct = clinical_str(&study, "/protocolSection/identificationModule/nctId")
        .unwrap_or(nct_id)
        .to_string();
    // Title preference: brief title, then official title, then the NCT id itself.
    let title = clinical_str(&study, "/protocolSection/identificationModule/briefTitle")
        .or_else(|| {
            clinical_str(
                &study,
                "/protocolSection/identificationModule/officialTitle",
            )
        })
        .unwrap_or(nct_id);
    let public_url = format!("https://clinicaltrials.gov/study/{parsed_nct}");
    let mut metadata = BTreeMap::new();
    metadata.insert("nct_id".to_string(), Value::String(parsed_nct.clone()));
    metadata.insert(
        "source_api".to_string(),
        Value::String("clinicaltrials.gov-v2".to_string()),
    );
    metadata.insert(
        "retrieved_at".to_string(),
        Value::String(chrono::Utc::now().to_rfc3339()),
    );
    // Scalar status/date fields, copied only when present in the record.
    for (key, pointer) in [
        (
            "overall_status",
            "/protocolSection/statusModule/overallStatus",
        ),
        (
            "start_date",
            "/protocolSection/statusModule/startDateStruct/date",
        ),
        (
            "completion_date",
            "/protocolSection/statusModule/completionDateStruct/date",
        ),
    ] {
        if let Some(value) = clinical_str(&study, pointer) {
            metadata.insert(key.to_string(), Value::String(value.to_string()));
        }
    }
    // Array-valued metadata; empty arrays are omitted by the helper.
    insert_string_vec_metadata(
        &mut metadata,
        "phases",
        clinical_string_array(&study, "/protocolSection/designModule/phases"),
    );
    insert_string_vec_metadata(
        &mut metadata,
        "conditions",
        clinical_string_array(&study, "/protocolSection/conditionsModule/conditions"),
    );
    insert_string_vec_metadata(
        &mut metadata,
        "interventions",
        clinical_named_array(
            &study,
            "/protocolSection/armsInterventionsModule/interventions",
            "name",
        ),
    );
    insert_string_vec_metadata(
        &mut metadata,
        "primary_outcomes",
        clinical_named_array(
            &study,
            "/protocolSection/outcomesModule/primaryOutcomes",
            "measure",
        ),
    );
    if let Some(has_results) = study.get("hasResults").and_then(Value::as_bool) {
        metadata.insert("has_results".to_string(), Value::Bool(has_results));
    }

    let provenance = artifact_provenance(
        "clinical_trial_record",
        title,
        Some(&public_url),
        None,
        Some(license),
    );
    let artifact = crate::bundle::Artifact::new(
        "clinical_trial_record",
        title.to_string(),
        content_hash,
        Some(canonical_bytes.len() as u64),
        Some("application/json".to_string()),
        storage_mode.to_string(),
        Some(locator),
        Some(public_url.clone()),
        Some(license.to_string()),
        target,
        provenance,
        metadata,
        crate::access_tier::AccessTier::Public,
    )
    .unwrap_or_else(|e| fail_return(&e));
    let artifact_id = artifact.id.clone();
    // Commit the artifact into frontier state; the report carries the event id.
    let report = state::add_artifact(frontier, artifact, deposited_by, reason)
        .unwrap_or_else(|e| fail_return(&e));

    if json_out {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "clinical-trial-import",
                "nct_id": parsed_nct,
                "id": artifact_id,
                "frontier": frontier.display().to_string(),
                "event": report.applied_event_id,
                "source_url": public_url,
            }))
            .expect("serialize clinical-trial-import")
        );
    } else {
        println!();
        println!(
            " {}",
            format!("VELA · CLINICAL TRIAL · {}", artifact_id)
                .to_uppercase()
                .dimmed()
        );
        println!(" {}", style::tick_row(60));
        println!(" nct_id: {parsed_nct}");
        println!(" title: {}", truncate(title, 96));
        println!(" source: {public_url}");
        println!(
            " {} trial record imported into {}",
            style::ok("ok"),
            frontier.display()
        );
    }
}
6689
/// Records a replication attempt against an existing finding and (by default)
/// cascades the outcome to dependent findings.
///
/// Validates `outcome` against `VALID_REPLICATION_OUTCOMES` and requires a
/// `vf_`-prefixed target finding already present in the frontier. Conditions
/// flags (in vitro / in vivo / human / clinical trial) are inferred with
/// keyword heuristics on the free-text `conditions_text`. The command is
/// idempotent on the replication id: an existing id is reported and skipped.
/// Unless `no_cascade` is set, the outcome is propagated via
/// `propagate::propagate_correction` and stats are recomputed before saving.
#[allow(clippy::too_many_arguments)]
fn cmd_replicate(
    frontier: &Path,
    target: &str,
    outcome: &str,
    attempted_by: &str,
    conditions_text: &str,
    source_title: &str,
    doi: Option<&str>,
    pmid: Option<&str>,
    sample_size: Option<&str>,
    note: &str,
    previous_attempt: Option<&str>,
    no_cascade: bool,
    json: bool,
) {
    if !crate::bundle::VALID_REPLICATION_OUTCOMES.contains(&outcome) {
        fail(&format!(
            "invalid outcome '{outcome}'; valid: {:?}",
            crate::bundle::VALID_REPLICATION_OUTCOMES
        ));
    }
    if !target.starts_with("vf_") {
        fail(&format!("target '{target}' is not a vf_ finding id"));
    }

    let mut project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));

    if !project.findings.iter().any(|f| f.id == target) {
        fail(&format!(
            "target finding '{target}' not present in frontier '{}'",
            frontier.display()
        ));
    }

    // Keyword heuristics over the lowercased conditions text; the raw text is
    // kept verbatim so a reviewer can correct misclassifications later.
    let lower = conditions_text.to_lowercase();
    let conditions = crate::bundle::Conditions {
        text: conditions_text.to_string(),
        species_verified: Vec::new(),
        species_unverified: Vec::new(),
        in_vitro: lower.contains("in vitro") || lower.contains("ipsc"),
        in_vivo: lower.contains("in vivo") || lower.contains("mouse") || lower.contains("rat"),
        human_data: lower.contains("human")
            || lower.contains("clinical")
            || lower.contains("patient"),
        clinical_trial: lower.contains("clinical trial") || lower.contains("phase "),
        concentration_range: None,
        duration: None,
        age_group: None,
        cell_type: None,
    };

    let evidence = crate::bundle::Evidence {
        evidence_type: "experimental".to_string(),
        model_system: String::new(),
        species: None,
        method: "replication_attempt".to_string(),
        sample_size: sample_size.map(|s| s.to_string()),
        effect_size: None,
        p_value: None,
        replicated: outcome == "replicated",
        replication_count: None,
        evidence_spans: Vec::new(),
    };

    let provenance = crate::bundle::Provenance {
        source_type: "published_paper".to_string(),
        doi: doi.map(|s| s.to_string()),
        pmid: pmid.map(|s| s.to_string()),
        pmc: None,
        openalex_id: None,
        url: None,
        title: source_title.to_string(),
        authors: Vec::new(),
        year: None,
        journal: None,
        license: None,
        publisher: None,
        funders: Vec::new(),
        extraction: crate::bundle::Extraction {
            method: "manual_curation".to_string(),
            model: None,
            model_version: None,
            extracted_at: chrono::Utc::now().to_rfc3339(),
            extractor_version: env!("CARGO_PKG_VERSION").to_string(),
        },
        review: None,
        citation_count: None,
    };

    let mut rep = crate::bundle::Replication::new(
        target.to_string(),
        attempted_by.to_string(),
        outcome.to_string(),
        evidence,
        conditions,
        provenance,
        note.to_string(),
    );
    rep.previous_attempt = previous_attempt.map(|s| s.to_string());

    // Skip if a replication with this id already exists (duplicate submission).
    if project.replications.iter().any(|r| r.id == rep.id) {
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "ok": false,
                    "command": "replicate",
                    "reason": "replication_already_exists",
                    "id": rep.id,
                }))
                .expect("serialize")
            );
        } else {
            println!(
                "{} replication {} already exists in {}; skipping.",
                style::warn("replicate"),
                rep.id,
                frontier.display()
            );
        }
        return;
    }

    let new_id = rep.id.clone();
    project.replications.push(rep);

    // Unless suppressed, propagate the outcome to dependents, record the
    // resulting review events, and refresh project stats before saving.
    let cascade_result = if no_cascade {
        None
    } else {
        let result = propagate::propagate_correction(
            &mut project,
            target,
            propagate::PropagationAction::ReplicationOutcome {
                outcome: outcome.to_string(),
                vrep_id: new_id.clone(),
            },
        );
        project.review_events.extend(result.events.clone());
        project::recompute_stats(&mut project);
        Some(result)
    };

    repo::save_to_path(frontier, &project).unwrap_or_else(|e| fail_return(&e));

    if json {
        let cascade_json = cascade_result.as_ref().map(|r| {
            json!({
                "affected": r.affected,
                "events": r.events.len(),
            })
        });
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "replicate",
                "id": new_id,
                "target": target,
                "outcome": outcome,
                "attempted_by": attempted_by,
                "cascade": cascade_json,
                "frontier": frontier.display().to_string(),
            }))
            .expect("failed to serialize replicate result")
        );
    } else {
        println!();
        println!(
            " {}",
            format!("VELA · REPLICATE · {}", new_id)
                .to_uppercase()
                .dimmed()
        );
        println!(" {}", style::tick_row(60));
        println!(" target: {target}");
        println!(" outcome: {outcome}");
        println!(" attempted by: {attempted_by}");
        println!(" conditions: {conditions_text}");
        println!(" source: {source_title}");
        if let Some(d) = doi {
            println!(" doi: {d}");
        }
        println!();
        println!(
            " {} replication recorded in {}",
            style::ok("ok"),
            frontier.display()
        );
        if let Some(result) = cascade_result {
            println!(
                " {} cascade: {} dependent(s) flagged, {} review event(s) recorded",
                style::ok("ok"),
                result.affected,
                result.events.len()
            );
        } else {
            println!(" {} cascade skipped (--no-cascade)", style::warn("info"));
        }
    }
}
6911
6912fn cmd_replications(frontier: &Path, target: Option<&str>, json: bool) {
6914 let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
6915 let filtered: Vec<&crate::bundle::Replication> = project
6916 .replications
6917 .iter()
6918 .filter(|r| target.is_none_or(|t| r.target_finding == t))
6919 .collect();
6920
6921 if json {
6922 let payload = json!({
6923 "ok": true,
6924 "command": "replications",
6925 "frontier": frontier.display().to_string(),
6926 "filter_target": target,
6927 "count": filtered.len(),
6928 "replications": filtered,
6929 });
6930 println!(
6931 "{}",
6932 serde_json::to_string_pretty(&payload).expect("failed to serialize replications list")
6933 );
6934 return;
6935 }
6936
6937 println!();
6938 let header = match target {
6939 Some(t) => format!("VELA · REPLICATIONS · {t}"),
6940 None => format!("VELA · REPLICATIONS · {}", frontier.display()),
6941 };
6942 println!(" {}", header.to_uppercase().dimmed());
6943 println!(" {}", style::tick_row(60));
6944 if filtered.is_empty() {
6945 println!(" (no replications recorded)");
6946 return;
6947 }
6948 for rep in &filtered {
6949 let outcome_chip = match rep.outcome.as_str() {
6950 "replicated" => style::ok(&rep.outcome),
6951 "failed" => style::lost(&rep.outcome),
6952 "partial" => style::warn(&rep.outcome),
6953 _ => rep.outcome.clone().normal().to_string(),
6954 };
6955 println!(
6956 " · {} {} by {}",
6957 rep.id.dimmed(),
6958 outcome_chip,
6959 rep.attempted_by
6960 );
6961 println!(" target: {}", rep.target_finding);
6962 if !rep.conditions.text.is_empty() {
6963 println!(" conditions: {}", truncate(&rep.conditions.text, 80));
6964 }
6965 if !rep.provenance.title.is_empty() {
6966 println!(" source: {}", truncate(&rep.provenance.title, 80));
6967 }
6968 }
6969}
6970
/// Routes an input path or identifier URI to the appropriate ingestion
/// command.
///
/// `path` may be a `doi:` / `pmid:` / `nct:` URI (metadata fetch only — no
/// frontier state is written), a single file (.pdf, .md/.markdown, .csv/.tsv,
/// .json), or a directory. A directory is scanned one level deep: each
/// recognized file type is dispatched in sequence, while a directory with no
/// recognized files is treated as a code tree and sent to `cmd_compile_code`.
/// Nonexistent paths and unsupported extensions abort via `fail`.
async fn cmd_ingest(
    path: &str,
    frontier: &Path,
    backend: Option<&str>,
    actor: Option<&str>,
    dry_run: bool,
    json: bool,
) {
    let lowered = path.trim().to_lowercase();
    // Identifier URIs only fetch source metadata; advise the next step.
    if lowered.starts_with("doi:") || lowered.starts_with("pmid:") || lowered.starts_with("nct:") {
        cmd_source_fetch(path.trim(), None, None, false, json).await;
        if !json {
            eprintln!();
            eprintln!(
                " vela ingest · note: doi:/pmid:/nct: URIs only fetch metadata; no frontier state was written."
            );
            eprintln!(
                " next: turn this paper into a proposal with `vela finding add {} --assertion '...' --author 'reviewer:you' --apply`",
                frontier.display()
            );
        }
        return;
    }

    let p = std::path::PathBuf::from(path);
    if !p.exists() {
        fail(&format!(
            "ingest: path '{path}' does not exist (and is not a doi:/pmid:/nct: URI)"
        ));
    }

    // Case-insensitive extension used for single-file dispatch below.
    let ext = p
        .extension()
        .and_then(|s| s.to_str())
        .map(|s| s.to_ascii_lowercase());

    if p.is_file() {
        match ext.as_deref() {
            Some("pdf") => {
                cmd_scout(&p, frontier, backend, dry_run, json).await;
            }
            Some("md") | Some("markdown") => {
                cmd_compile_notes(&p, frontier, backend, None, None, dry_run, json).await;
            }
            Some("csv") | Some("tsv") => {
                cmd_compile_data(&p, frontier, backend, None, dry_run, json).await;
            }
            Some("json") => {
                let actor_id = actor.unwrap_or("agent:vela-ingest-bot");
                cmd_artifact_to_state(frontier, &p, actor_id, false, json);
            }
            other => {
                fail(&format!(
                    "ingest: unsupported file type '{}' (expected .pdf, .md, .csv, .tsv, .json, or a doi:/pmid:/nct: URI)",
                    other.unwrap_or("(none)")
                ));
            }
        }
        return;
    }

    if p.is_dir() {
        // First pass: census the directory so we know which handlers to run.
        let mut pdf_count = 0usize;
        let mut md_count = 0usize;
        let mut data_count = 0usize;
        let mut json_count = 0usize;
        let mut unhandled_exts: std::collections::BTreeSet<String> =
            std::collections::BTreeSet::new();
        if let Ok(entries) = std::fs::read_dir(&p) {
            for entry in entries.flatten() {
                let path = entry.path();
                if !path.is_file() {
                    continue;
                }
                if let Some(name) = entry.file_name().to_str()
                    && let Some(dot) = name.rfind('.')
                {
                    let ext = name[dot + 1..].to_ascii_lowercase();
                    match ext.as_str() {
                        "pdf" => pdf_count += 1,
                        "md" | "markdown" => md_count += 1,
                        "csv" | "tsv" => data_count += 1,
                        "json" => json_count += 1,
                        other => {
                            // Dotfiles are skipped silently; everything else
                            // is reported as unhandled at the end.
                            if !name.starts_with('.') {
                                unhandled_exts.insert(other.to_string());
                            }
                        }
                    }
                }
            }
        }

        let dispatched_types = (pdf_count > 0) as usize
            + (md_count > 0) as usize
            + (data_count > 0) as usize
            + (json_count > 0) as usize;

        if dispatched_types == 0 {
            // No recognized documents: treat the directory as a code tree.
            cmd_compile_code(&p, frontier, backend, None, dry_run, json).await;
            return;
        }

        if dispatched_types > 1 {
            eprintln!(
                " vela ingest · folder has multiple handlable types; running each in sequence"
            );
            eprintln!(
                " pdf:{pdf_count} md:{md_count} csv/tsv:{data_count} json:{json_count}"
            );
        }

        // Dispatch each handler once for the whole folder (PDF/notes/data);
        // JSON artifacts are imported file-by-file in a second pass.
        if pdf_count > 0 {
            cmd_scout(&p, frontier, backend, dry_run, json).await;
        }
        if md_count > 0 {
            cmd_compile_notes(&p, frontier, backend, None, None, dry_run, json).await;
        }
        if data_count > 0 {
            cmd_compile_data(&p, frontier, backend, None, dry_run, json).await;
        }
        if json_count > 0 {
            let actor_id = actor.unwrap_or("agent:vela-ingest-bot");
            if let Ok(entries) = std::fs::read_dir(&p) {
                for entry in entries.flatten() {
                    let path = entry.path();
                    if path.is_file()
                        && path
                            .extension()
                            .and_then(|s| s.to_str())
                            .map(|s| s.eq_ignore_ascii_case("json"))
                            .unwrap_or(false)
                    {
                        cmd_artifact_to_state(frontier, &path, actor_id, false, json);
                    }
                }
            }
        }

        if !unhandled_exts.is_empty() {
            let kinds: Vec<String> = unhandled_exts.into_iter().collect();
            eprintln!(
                " vela ingest · skipped {} file extension(s) with no handler: {}",
                kinds.len(),
                kinds.join(", ")
            );
        }
        return;
    }

    fail(&format!(
        "ingest: path '{path}' is neither a file nor a directory"
    ));
}
7170
7171#[allow(clippy::too_many_arguments)]
7172async fn cmd_compile_data(
7174 root: &Path,
7175 frontier: &Path,
7176 backend: Option<&str>,
7177 sample_rows: Option<usize>,
7178 dry_run: bool,
7179 json_out: bool,
7180) {
7181 match DATASETS_HANDLER.get() {
7182 Some(handler) => {
7183 handler(
7184 root.to_path_buf(),
7185 frontier.to_path_buf(),
7186 backend.map(String::from),
7187 sample_rows,
7188 dry_run,
7189 json_out,
7190 )
7191 .await;
7192 }
7193 None => {
7194 eprintln!(
7195 "{} `vela compile-data` requires the vela CLI binary; the library is unwired without a registered datasets handler.",
7196 style::err_prefix()
7197 );
7198 std::process::exit(1);
7199 }
7200 }
7201}
7202
7203async fn cmd_review_pending(
7206 frontier: &Path,
7207 backend: Option<&str>,
7208 max_proposals: Option<usize>,
7209 batch_size: usize,
7210 dry_run: bool,
7211 json_out: bool,
7212) {
7213 match REVIEWER_HANDLER.get() {
7214 Some(handler) => {
7215 handler(
7216 frontier.to_path_buf(),
7217 backend.map(String::from),
7218 max_proposals,
7219 batch_size,
7220 dry_run,
7221 json_out,
7222 )
7223 .await;
7224 }
7225 None => {
7226 eprintln!(
7227 "{} `vela review-pending` requires the vela CLI binary; the library is unwired without a registered reviewer handler.",
7228 style::err_prefix()
7229 );
7230 std::process::exit(1);
7231 }
7232 }
7233}
7234
7235async fn cmd_find_tensions(
7238 frontier: &Path,
7239 backend: Option<&str>,
7240 max_findings: Option<usize>,
7241 dry_run: bool,
7242 json_out: bool,
7243) {
7244 match TENSIONS_HANDLER.get() {
7245 Some(handler) => {
7246 handler(
7247 frontier.to_path_buf(),
7248 backend.map(String::from),
7249 max_findings,
7250 dry_run,
7251 json_out,
7252 )
7253 .await;
7254 }
7255 None => {
7256 eprintln!(
7257 "{} `vela find-tensions` requires the vela CLI binary; the library is unwired without a registered tensions handler.",
7258 style::err_prefix()
7259 );
7260 std::process::exit(1);
7261 }
7262 }
7263}
7264
7265async fn cmd_plan_experiments(
7268 frontier: &Path,
7269 backend: Option<&str>,
7270 max_findings: Option<usize>,
7271 dry_run: bool,
7272 json_out: bool,
7273) {
7274 match EXPERIMENTS_HANDLER.get() {
7275 Some(handler) => {
7276 handler(
7277 frontier.to_path_buf(),
7278 backend.map(String::from),
7279 max_findings,
7280 dry_run,
7281 json_out,
7282 )
7283 .await;
7284 }
7285 None => {
7286 eprintln!(
7287 "{} `vela plan-experiments` requires the vela CLI binary; the library is unwired without a registered experiments handler.",
7288 style::err_prefix()
7289 );
7290 std::process::exit(1);
7291 }
7292 }
7293}
7294
7295async fn cmd_compile_code(
7298 root: &Path,
7299 frontier: &Path,
7300 backend: Option<&str>,
7301 max_files: Option<usize>,
7302 dry_run: bool,
7303 json_out: bool,
7304) {
7305 match CODE_HANDLER.get() {
7306 Some(handler) => {
7307 handler(
7308 root.to_path_buf(),
7309 frontier.to_path_buf(),
7310 backend.map(String::from),
7311 max_files,
7312 dry_run,
7313 json_out,
7314 )
7315 .await;
7316 }
7317 None => {
7318 eprintln!(
7319 "{} `vela compile-code` requires the vela CLI binary; the library is unwired without a registered code handler.",
7320 style::err_prefix()
7321 );
7322 std::process::exit(1);
7323 }
7324 }
7325}
7326
7327async fn cmd_compile_notes(
7332 vault: &Path,
7333 frontier: &Path,
7334 backend: Option<&str>,
7335 max_files: Option<usize>,
7336 max_items_per_category: Option<usize>,
7337 dry_run: bool,
7338 json_out: bool,
7339) {
7340 match NOTES_HANDLER.get() {
7341 Some(handler) => {
7342 handler(
7343 vault.to_path_buf(),
7344 frontier.to_path_buf(),
7345 backend.map(String::from),
7346 max_files,
7347 max_items_per_category,
7348 dry_run,
7349 json_out,
7350 )
7351 .await;
7352 }
7353 None => {
7354 eprintln!(
7355 "{} `vela compile-notes` requires the vela CLI binary; the library is unwired without a registered notes handler.",
7356 style::err_prefix()
7357 );
7358 std::process::exit(1);
7359 }
7360 }
7361}
7362
7363async fn cmd_scout(
7370 folder: &Path,
7371 frontier: &Path,
7372 backend: Option<&str>,
7373 dry_run: bool,
7374 json_out: bool,
7375) {
7376 match SCOUT_HANDLER.get() {
7377 Some(handler) => {
7378 handler(
7379 folder.to_path_buf(),
7380 frontier.to_path_buf(),
7381 backend.map(String::from),
7382 dry_run,
7383 json_out,
7384 )
7385 .await;
7386 }
7387 None => {
7388 eprintln!(
7389 "{} `vela scout` requires the vela CLI binary; the library is unwired without a registered scout handler.",
7390 style::err_prefix()
7391 );
7392 std::process::exit(1);
7393 }
7394 }
7395}
7396
/// Runs the `vela check` command: schema validation, stats/lint, conformance.
///
/// With `json_output`, emits one machine-readable report from
/// `check_json_payload` and exits non-zero unless it reports ok. Otherwise,
/// checks run based on flags; when no specific check flag is given, all
/// checks run (`run_all`). In the stats phase, a failed event replay — or,
/// under `strict`, a non-empty review queue or non-ready proof state — exits
/// non-zero.
#[allow(clippy::too_many_arguments)]
fn cmd_check(
    source: Option<&Path>,
    schema: bool,
    stats: bool,
    conformance_flag: bool,
    conformance_dir: &Path,
    all: bool,
    schema_only: bool,
    strict: bool,
    fix: bool,
    json_output: bool,
) {
    if json_output {
        let Some(src) = source else {
            fail("--json requires a frontier source");
        };
        let payload = check_json_payload(src, schema_only, strict);
        println!(
            "{}",
            serde_json::to_string_pretty(&payload).expect("failed to serialize check report")
        );
        if payload.get("ok").and_then(Value::as_bool) != Some(true) {
            std::process::exit(1);
        }
        return;
    }

    // No explicit check flag selected means "run everything".
    let run_all = all || (!schema && !stats && !conformance_flag && !schema_only);
    if run_all || schema || schema_only {
        let Some(src) = source else {
            fail("check requires a frontier source");
        };
        validate::run(src);
    }
    if !schema_only && (run_all || stats) {
        let Some(src) = source else {
            fail("--stats requires a frontier source");
        };
        let frontier = load_frontier_or_fail(src);
        let report = lint::lint(&frontier, None, None);
        lint::print_report(&report);
        let replay_report = events::replay_report(&frontier);
        println!("event replay: {}", replay_report.status);
        if !replay_report.conflicts.is_empty() {
            for conflict in &replay_report.conflicts {
                println!(" - {conflict}");
            }
        }
        // Only print the signature summary when something is actually signed.
        if let Ok(signature_report) = sign::verify_frontier_data(&frontier, None)
            && signature_report.signed > 0
        {
            println!(
                "Signatures: {} valid / {} invalid / {} unsigned",
                signature_report.valid, signature_report.invalid, signature_report.unsigned
            );
        }
        let signal_report = signals::analyze(&frontier, &[]);
        print_signal_summary(&signal_report, strict);
        // Replay failure always fails; strict mode also fails on a pending
        // review queue or a proof state that is not "ready".
        if !replay_report.ok
            || (strict
                && (!signal_report.review_queue.is_empty()
                    || signal_report.proof_readiness.status != "ready"))
        {
            std::process::exit(1);
        }
    }
    if run_all || conformance_flag {
        conformance::run(conformance_dir);
    }
    // `--fix` is accepted for CLI compatibility but is a no-op in this path.
    let _ = fix;
}
7469
7470fn check_json_payload(src: &Path, schema_only: bool, strict: bool) -> Value {
7471 let report = validate::validate(src);
7472 let loaded = repo::load_from_path(src).ok();
7473 let (method_report, graph_report) = if schema_only {
7474 (None, None)
7475 } else if let Some(frontier) = loaded.as_ref() {
7476 (
7477 Some(lint::lint(frontier, None, None)),
7478 Some(lint::lint_frontier(frontier)),
7479 )
7480 } else {
7481 (None, None)
7482 };
7483 let source_hash = hash_path(src).unwrap_or_else(|_| "unavailable".to_string());
7484 let mut diagnostics = Vec::new();
7485 diagnostics.extend(report.errors.iter().map(|e| {
7486 json!({
7487 "severity": "error",
7488 "rule_id": "schema",
7489 "finding_id": null,
7490 "file": &e.file,
7491 "field_path": null,
7492 "message": &e.error,
7493 "suggestion": schema_error_suggestion(&e.error),
7494 "fixable": schema_error_fix(&e.error),
7495 "normalize_action": schema_error_action(&e.error),
7496 })
7497 }));
7498 for (check_id, lint_report) in [
7499 ("methodology", method_report.as_ref()),
7500 ("frontier_graph", graph_report.as_ref()),
7501 ] {
7502 if let Some(lint_report) = lint_report {
7503 diagnostics.extend(lint_report.diagnostics.iter().map(|d| {
7504 json!({
7505 "severity": d.severity.to_string(),
7506 "rule_id": &d.rule_id,
7507 "check": check_id,
7508 "finding_id": &d.finding_id,
7509 "field_path": null,
7510 "message": &d.message,
7511 "suggestion": &d.suggestion,
7512 "fixable": false,
7513 "normalize_action": null,
7514 })
7515 }));
7516 }
7517 }
7518 let method_errors = method_report.as_ref().map_or(0, |r| r.errors);
7519 let method_warnings = method_report.as_ref().map_or(0, |r| r.warnings);
7520 let method_infos = method_report.as_ref().map_or(0, |r| r.infos);
7521 let graph_errors = graph_report.as_ref().map_or(0, |r| r.errors);
7522 let graph_warnings = graph_report.as_ref().map_or(0, |r| r.warnings);
7523 let graph_infos = graph_report.as_ref().map_or(0, |r| r.infos);
7524 let replay_report = loaded.as_ref().map(events::replay_report);
7525 let state_integrity_report = if schema_only {
7526 loaded.as_ref().map(state_integrity::analyze)
7527 } else {
7528 state_integrity::analyze_path(src).ok()
7529 };
7530 if let Some(replay) = replay_report.as_ref()
7531 && !replay.ok
7532 {
7533 diagnostics.extend(replay.conflicts.iter().map(|conflict| {
7534 json!({
7535 "severity": "error",
7536 "rule_id": "event_replay",
7537 "check": "events",
7538 "finding_id": null,
7539 "field_path": null,
7540 "message": conflict,
7541 "suggestion": "Inspect canonical state events and repair the frontier event log before proof export.",
7542 "fixable": false,
7543 "normalize_action": null,
7544 })
7545 }));
7546 }
7547 let event_errors = replay_report
7548 .as_ref()
7549 .map_or(0, |replay| usize::from(!replay.ok));
7550 let state_integrity_errors = state_integrity_report
7551 .as_ref()
7552 .map_or(0, |report| report.structural_errors.len());
7553 let (source_registry, evidence_atoms, conditions, proposal_summary, proof_state) = loaded
7554 .as_ref()
7555 .map(|frontier| {
7556 (
7557 sources::source_summary(frontier),
7558 sources::evidence_summary(frontier),
7559 sources::condition_summary(frontier),
7560 proposals::summary(frontier),
7561 proposals::proof_state_json(&frontier.proof_state),
7562 )
7563 })
7564 .unwrap_or_else(|| {
7565 (
7566 sources::SourceRegistrySummary::default(),
7567 sources::EvidenceAtomSummary::default(),
7568 sources::ConditionSummary::default(),
7569 proposals::ProposalSummary::default(),
7570 Value::Null,
7571 )
7572 });
7573 let signature_report = loaded
7574 .as_ref()
7575 .and_then(|frontier| sign::verify_frontier_data(frontier, None).ok());
7576 if let Some(frontier) = loaded.as_ref()
7577 && !schema_only
7578 {
7579 let projection = sources::derive_projection(frontier);
7580 let existing_sources = frontier
7581 .sources
7582 .iter()
7583 .map(|source| source.id.as_str())
7584 .collect::<std::collections::BTreeSet<_>>();
7585 let existing_atoms = frontier
7586 .evidence_atoms
7587 .iter()
7588 .map(|atom| atom.id.as_str())
7589 .collect::<std::collections::BTreeSet<_>>();
7590 let existing_conditions = frontier
7591 .condition_records
7592 .iter()
7593 .map(|record| record.id.as_str())
7594 .collect::<std::collections::BTreeSet<_>>();
7595 for source in projection
7596 .sources
7597 .iter()
7598 .filter(|source| !existing_sources.contains(source.id.as_str()))
7599 {
7600 diagnostics.push(json!({
7601 "severity": "warning",
7602 "rule_id": "missing_source_record",
7603 "check": "source_registry",
7604 "finding_id": source.finding_ids.first(),
7605 "field_path": "sources",
7606 "message": format!("Source record {} is derivable but not materialized in frontier state.", source.id),
7607 "suggestion": "Run `vela normalize` to materialize source records before proof export.",
7608 "fixable": true,
7609 "normalize_action": "materialize_source_record",
7610 }));
7611 }
7612 for atom in projection
7613 .evidence_atoms
7614 .iter()
7615 .filter(|atom| !existing_atoms.contains(atom.id.as_str()))
7616 {
7617 diagnostics.push(json!({
7618 "severity": "warning",
7619 "rule_id": "missing_evidence_atom",
7620 "check": "evidence_atoms",
7621 "finding_id": atom.finding_id,
7622 "field_path": "evidence_atoms",
7623 "message": format!("Evidence atom {} is derivable but not materialized in frontier state.", atom.id),
7624 "suggestion": "Run `vela normalize` to materialize evidence atoms before proof export.",
7625 "fixable": true,
7626 "normalize_action": "materialize_evidence_atom",
7627 }));
7628 }
7629 for atom in projection
7630 .evidence_atoms
7631 .iter()
7632 .filter(|atom| atom.locator.is_none())
7633 {
7634 diagnostics.push(json!({
7635 "severity": "warning",
7636 "rule_id": "missing_evidence_locator",
7637 "check": "evidence_atoms",
7638 "finding_id": atom.finding_id,
7639 "field_path": "evidence_atoms[].locator",
7640 "message": format!("Evidence atom {} has no source locator.", atom.id),
7641 "suggestion": "Add or verify evidence spans, table rows, pages, sections, or run locators.",
7642 "fixable": false,
7643 "normalize_action": null,
7644 }));
7645 }
7646 for condition in projection
7647 .condition_records
7648 .iter()
7649 .filter(|condition| !existing_conditions.contains(condition.id.as_str()))
7650 {
7651 diagnostics.push(json!({
7652 "severity": "warning",
7653 "rule_id": "condition_record_missing",
7654 "check": "conditions",
7655 "finding_id": condition.finding_id,
7656 "field_path": "condition_records",
7657 "message": format!("Condition record {} is derivable but not materialized in frontier state.", condition.id),
7658 "suggestion": "Run `vela normalize` to materialize condition boundaries before proof export.",
7659 "fixable": true,
7660 "normalize_action": "materialize_condition_record",
7661 }));
7662 }
7663 for proposal in frontier.proposals.iter().filter(|proposal| {
7664 matches!(proposal.status.as_str(), "accepted" | "applied")
7665 && proposal
7666 .reviewed_by
7667 .as_deref()
7668 .is_none_or(proposals::is_placeholder_reviewer)
7669 }) {
7670 diagnostics.push(json!({
7671 "severity": "error",
7672 "rule_id": "reviewer_identity_missing",
7673 "check": "proposals",
7674 "finding_id": proposal.target.id,
7675 "field_path": "proposals[].reviewed_by",
7676 "message": format!("Accepted or applied proposal {} uses a missing or placeholder reviewer identity.", proposal.id),
7677 "suggestion": "Accept the proposal with a stable named reviewer id before strict proof use.",
7678 "fixable": false,
7679 "normalize_action": null,
7680 }));
7681 }
7682 }
7683 let signal_report = loaded
7684 .as_ref()
7685 .map(|frontier| signals::analyze(frontier, &diagnostics))
7686 .unwrap_or_else(empty_signal_report);
7687 let errors =
7688 report.errors.len() + method_errors + graph_errors + event_errors + state_integrity_errors;
7689 let warnings = method_warnings + graph_warnings + signal_report.proof_readiness.warnings;
7690 let infos = method_infos + graph_infos;
7691 let strict_blockers = signal_report
7692 .signals
7693 .iter()
7694 .filter(|signal| signal.blocks.iter().any(|block| block == "strict_check"))
7695 .count();
7696 let fixable = diagnostics
7697 .iter()
7698 .filter(|d| d.get("fixable").and_then(Value::as_bool).unwrap_or(false))
7699 .count();
7700 let ok = errors == 0 && (!strict || (warnings == 0 && strict_blockers == 0));
7701
7702 json!({
7703 "ok": ok,
7704 "command": "check",
7705 "schema_version": project::VELA_SCHEMA_VERSION,
7706 "source": {
7707 "path": src.display().to_string(),
7708 "hash": format!("sha256:{source_hash}"),
7709 },
7710 "summary": {
7711 "status": if ok { "pass" } else { "fail" },
7712 "checked_findings": report.total_files,
7713 "valid_findings": report.valid,
7714 "invalid_findings": report.invalid,
7715 "errors": errors,
7716 "warnings": warnings,
7717 "info": infos,
7718 "fixable": fixable,
7719 "strict": strict,
7720 "schema_only": schema_only,
7721 },
7722 "checks": [
7723 {
7724 "id": "schema",
7725 "status": if report.invalid == 0 { "pass" } else { "fail" },
7726 "checked": report.total_files,
7727 "failed": report.invalid,
7728 "errors": report.errors.iter().map(|e| json!({
7729 "file": e.file,
7730 "message": e.error,
7731 })).collect::<Vec<_>>(),
7732 },
7733 {
7734 "id": "methodology",
7735 "status": if method_errors == 0 { "pass" } else { "fail" },
7736 "checked": method_report.as_ref().map_or(0, |r| r.findings_checked),
7737 "failed": method_errors,
7738 "warnings": method_warnings,
7739 "info": method_infos,
7740 "skipped": schema_only,
7741 },
7742 {
7743 "id": "frontier_graph",
7744 "status": if graph_errors == 0 { "pass" } else { "fail" },
7745 "checked": graph_report.as_ref().map_or(0, |r| r.findings_checked),
7746 "failed": graph_errors,
7747 "warnings": graph_warnings,
7748 "info": graph_infos,
7749 "skipped": schema_only,
7750 },
7751 {
7752 "id": "signals",
7753 "status": if strict_blockers == 0 { "pass" } else { "fail" },
7754 "checked": signal_report.signals.len(),
7755 "failed": strict_blockers,
7756 "warnings": signal_report.proof_readiness.warnings,
7757 "skipped": loaded.is_none(),
7758 "blockers": signal_report.signals.iter()
7759 .filter(|s| s.blocks.iter().any(|b| b == "strict_check"))
7760 .map(|s| json!({
7761 "id": s.id,
7762 "kind": s.kind,
7763 "severity": s.severity,
7764 "reason": s.reason,
7765 }))
7766 .collect::<Vec<_>>(),
7767 },
7768 {
7769 "id": "events",
7770 "status": if replay_report.as_ref().is_none_or(|replay| replay.ok) { "pass" } else { "fail" },
7771 "checked": replay_report.as_ref().map_or(0, |replay| replay.event_log.count),
7772 "failed": event_errors,
7773 "skipped": schema_only || loaded.is_none(),
7774 },
7775 {
7776 "id": "state_integrity",
7777 "status": if state_integrity_report.as_ref().is_none_or(|report| report.status != "fail") { "pass" } else { "fail" },
7778 "checked": state_integrity_report.as_ref().map_or(0, |report| report.summary.get("events").copied().unwrap_or_default()),
7779 "failed": state_integrity_errors,
7780 "skipped": schema_only || loaded.is_none(),
7781 }
7782 ],
7783 "event_log": replay_report.as_ref().map(|replay| &replay.event_log),
7784 "replay": replay_report,
7785 "state_integrity": state_integrity_report,
7786 "source_registry": source_registry,
7787 "evidence_atoms": evidence_atoms,
7788 "conditions": conditions,
7789 "proposals": proposal_summary,
7790 "proof_state": proof_state,
7791 "signatures": signature_report,
7792 "diagnostics": diagnostics,
7793 "signals": signal_report.signals,
7794 "review_queue": signal_report.review_queue,
7795 "proof_readiness": signal_report.proof_readiness,
7796 "repair_plan": build_repair_plan(&diagnostics),
7797 })
7798}
7799
/// Normalize a frontier: canonicalize finding entity fields, recompute
/// confidence from replications, optionally rewrite finding IDs to their
/// content addresses, and materialize derived source/evidence/condition
/// records.
///
/// Exactly one output mode applies: `write` saves back to `source`, `out`
/// saves to a new path, and neither means a dry run that only reports the
/// planned changes. Invalid flag combinations, packet-directory sources, and
/// frontiers with canonical event history (for write modes) abort via `fail`.
#[allow(clippy::too_many_arguments)]
fn cmd_normalize(
    source: &Path,
    out: Option<&Path>,
    write: bool,
    dry_run: bool,
    rewrite_ids: bool,
    id_map: Option<&Path>,
    resync_provenance: bool,
    json_output: bool,
) {
    // Flag validation: the output modes are mutually exclusive, and an id-map
    // file only makes sense when IDs are actually being rewritten.
    if write && out.is_some() {
        fail("Use either --write or --out, not both.");
    }
    if dry_run && (write || out.is_some()) {
        fail("--dry-run cannot be combined with --write or --out.");
    }
    if id_map.is_some() && !rewrite_ids {
        fail("--id-map requires --rewrite-ids.");
    }

    let detected = repo::detect(source).unwrap_or_else(|e| {
        eprintln!("{e}");
        std::process::exit(1);
    });
    // Proof packets are immutable exports; normalizing one would desync it
    // from the frontier it was exported from.
    if matches!(detected, repo::VelaSource::PacketDir(_)) {
        fail(
            "Cannot normalize a proof packet directory. Export a new packet from frontier state instead.",
        );
    }
    let mut frontier = repo::load(&detected).unwrap_or_else(|e| fail_return(&e));
    // Any event beyond the initial "frontier.created" means the frontier has
    // a canonical history; rewriting state underneath it is refused for write
    // modes (dry runs remain allowed for inspection).
    let has_substantive_events = frontier
        .events
        .iter()
        .any(|event| event.kind != "frontier.created");
    if has_substantive_events && (write || out.is_some()) {
        fail(
            "Refusing to normalize a frontier with canonical events. Normalize before proposal-backed writes, or create a new reviewed transition for the intended change.",
        );
    }
    let source_hash = hash_path(source).unwrap_or_else(|_| "unavailable".to_string());
    // Snapshot stats before mutating so the report can say whether they changed.
    let before_stats = serde_json::to_value(&frontier.stats).unwrap_or(Value::Null);
    let (entity_type_fixes, entity_name_fixes) =
        normalize::normalize_findings(&mut frontier.findings);
    let confidence_updates =
        bundle::recompute_all_confidence(&mut frontier.findings, &frontier.replications);
    let provenance_resync_count = if resync_provenance {
        sources::resync_provenance_from_sources(&mut frontier)
    } else {
        0
    };
    // Record collection sizes before materialization so the deltas computed
    // below count only newly materialized records.
    let before_source_count = frontier.sources.len();
    let before_evidence_atom_count = frontier.evidence_atoms.len();
    let before_condition_record_count = frontier.condition_records.len();

    let mut id_rewrites = Vec::new();
    if rewrite_ids {
        // Map each stale finding ID to its expected content address.
        let mut id_map_values = std::collections::BTreeMap::<String, String>::new();
        for finding in &frontier.findings {
            let expected =
                bundle::FindingBundle::content_address(&finding.assertion, &finding.provenance);
            if expected != finding.id {
                id_map_values.insert(finding.id.clone(), expected);
            }
        }
        // Two findings collapsing onto the same content address would
        // silently merge them; abort instead.
        let new_ids = id_map_values
            .values()
            .map(String::as_str)
            .collect::<std::collections::HashSet<_>>();
        if new_ids.len() != id_map_values.len() {
            fail("Refusing to rewrite IDs because two findings map to the same content address.");
        }
        // Apply the rewrite, keeping the old ID as previous_version for audit.
        for finding in &mut frontier.findings {
            if let Some(new_id) = id_map_values.get(&finding.id) {
                id_rewrites.push(json!({"old": finding.id, "new": new_id}));
                finding.previous_version = Some(finding.id.clone());
                finding.id = new_id.clone();
            }
        }
        // Second pass: retarget links that pointed at a rewritten ID.
        for finding in &mut frontier.findings {
            for link in &mut finding.links {
                if let Some(new_target) = id_map_values.get(&link.target) {
                    link.target = new_target.clone();
                }
            }
        }
        // Optionally dump the old->new mapping for external consumers.
        if let Some(path) = id_map {
            std::fs::write(
                path,
                serde_json::to_string_pretty(&id_map_values)
                    .expect("failed to serialize normalize id map"),
            )
            .unwrap_or_else(|e| fail(&format!("Failed to write {}: {e}", path.display())));
        }
    }

    sources::materialize_project(&mut frontier);
    // Deltas versus the pre-materialization counts recorded above.
    let source_records_materialized = frontier.sources.len().saturating_sub(before_source_count);
    let evidence_atoms_materialized = frontier
        .evidence_atoms
        .len()
        .saturating_sub(before_evidence_atom_count);
    let condition_records_materialized = frontier
        .condition_records
        .len()
        .saturating_sub(before_condition_record_count);
    let after_stats = serde_json::to_value(&frontier.stats).unwrap_or(Value::Null);
    let id_rewrite_count = id_rewrites.len();
    // Persist according to the chosen output mode; None means dry run.
    let wrote_to = if write {
        repo::save(&detected, &frontier).unwrap_or_else(|e| fail(&e));
        Some(source.display().to_string())
    } else if let Some(out_path) = out {
        repo::save_to_path(out_path, &frontier).unwrap_or_else(|e| fail(&e));
        Some(out_path.display().to_string())
    } else {
        None
    };
    let wrote = wrote_to.is_some();
    let planned_changes = entity_type_fixes
        + entity_name_fixes
        + confidence_updates
        + id_rewrite_count
        + source_records_materialized
        + evidence_atoms_materialized
        + condition_records_materialized
        + provenance_resync_count;
    let payload = json!({
        "ok": true,
        "command": "normalize",
        "schema_version": project::VELA_SCHEMA_VERSION,
        "source": {
            "path": source.display().to_string(),
            "hash": format!("sha256:{source_hash}"),
        },
        "dry_run": wrote_to.is_none(),
        "wrote_to": wrote_to,
        "summary": {
            "planned": planned_changes,
            "safe": planned_changes,
            "unsafe": 0,
            "applied": if wrote { planned_changes } else { 0 },
        },
        "changes": {
            "entity_type_fixes": entity_type_fixes,
            "entity_name_fixes": entity_name_fixes,
            "confidence_updates": confidence_updates,
            "id_rewrites": id_rewrite_count,
            "source_records_materialized": source_records_materialized,
            "evidence_atoms_materialized": evidence_atoms_materialized,
            "condition_records_materialized": condition_records_materialized,
            "provenance_resyncs": provenance_resync_count,
            "stats_changed": before_stats != after_stats,
        },
        "id_rewrites": id_rewrites,
        "repair_plan": if wrote { Vec::<Value>::new() } else {
            vec![json!({
                "action": "apply_normalization",
                "command": "vela normalize <frontier> --out frontier.normalized.json"
            })]
        },
    });
    if json_output {
        println!(
            "{}",
            serde_json::to_string_pretty(&payload).expect("failed to serialize normalize report")
        );
    } else if let Some(path) = payload.get("wrote_to").and_then(Value::as_str) {
        println!("{} normalized frontier written to {path}", style::ok("ok"));
        println!(
            " entity type fixes: {}, entity name fixes: {}, confidence updates: {}, id rewrites: {}",
            entity_type_fixes, entity_name_fixes, confidence_updates, id_rewrite_count
        );
    } else {
        println!("normalize dry run for {}", source.display());
        println!(
            " would apply entity type fixes: {}, entity name fixes: {}, confidence updates: {}, id rewrites: {}",
            entity_type_fixes, entity_name_fixes, confidence_updates, id_rewrite_count
        );
    }
}
7987
/// Export a proof packet for `frontier` into `out`, optionally run a gold
/// benchmark suite, validate the resulting packet, and record the export in
/// the frontier's proof state (persisted back to disk only when
/// `record_proof_state` is set).
///
/// Only the "bbb-alzheimer" template is currently supported; any other
/// template, or a benchmark/validation failure, aborts the process via `fail`.
fn cmd_proof(
    frontier: &Path,
    out: &Path,
    template: &str,
    gold: Option<&Path>,
    record_proof_state: bool,
    json_output: bool,
) {
    if template != "bbb-alzheimer" {
        fail(&format!(
            "Unsupported proof template '{template}'. Supported: bbb-alzheimer"
        ));
    }
    let mut loaded = load_frontier_or_fail(frontier);
    let source_hash = hash_path_or_fail(frontier);
    let export_record = export::export_packet_with_source(&loaded, Some(frontier), out)
        .unwrap_or_else(|e| fail(&e));
    // Optional gold-standard benchmark: run it, embed the summary in the
    // packet, and abort if the suite did not report ok.
    let benchmark_summary = gold.map(|gold_path| {
        let summary = benchmark::run_suite(gold_path).unwrap_or_else(|e| {
            fail(&format!(
                "Failed to run proof benchmark '{}': {e}",
                gold_path.display()
            ))
        });
        append_packet_json_file(out, "benchmark-summary.json", &summary).unwrap_or_else(|e| {
            fail(&format!("Failed to write benchmark summary: {e}"));
        });
        if summary.get("ok").and_then(Value::as_bool) != Some(true) {
            fail(&format!(
                "Proof benchmark failed for {}",
                gold_path.display()
            ));
        }
        summary
    });
    // Validate the exported packet before recording anything about it.
    let validation_summary = packet::validate(out).unwrap_or_else(|e| {
        fail(&format!("Proof packet validation failed: {e}"));
    });
    // Record the export hashes in the in-memory frontier's proof state.
    proposals::record_proof_export(
        &mut loaded,
        proposals::ProofPacketRecord {
            generated_at: export_record.generated_at.clone(),
            snapshot_hash: export_record.snapshot_hash.clone(),
            event_log_hash: export_record.event_log_hash.clone(),
            packet_manifest_hash: export_record.packet_manifest_hash.clone(),
        },
    );
    project::recompute_stats(&mut loaded);
    // Persist the updated proof state back to the frontier only on request.
    if record_proof_state {
        repo::save_to_path(frontier, &loaded).unwrap_or_else(|e| fail(&e));
    }
    let signal_report = signals::analyze(&loaded, &[]);
    if json_output {
        let payload = json!({
            "ok": true,
            "command": "proof",
            "schema_version": project::VELA_SCHEMA_VERSION,
            "recorded_proof_state": record_proof_state,
            "frontier": {
                "name": &loaded.project.name,
                "source": frontier.display().to_string(),
                "hash": format!("sha256:{source_hash}"),
            },
            "template": template,
            "gold": gold.map(|p| p.display().to_string()),
            "benchmark": benchmark_summary,
            "output": out.display().to_string(),
            "packet": {
                "manifest_path": out.join("manifest.json").display().to_string(),
            },
            "validation": {
                "status": "ok",
                "summary": validation_summary,
            },
            "proposals": proposals::summary(&loaded),
            "proof_state": loaded.proof_state,
            "signals": signal_report.signals,
            "review_queue": signal_report.review_queue,
            "proof_readiness": signal_report.proof_readiness,
            "trace_path": out.join("proof-trace.json").display().to_string(),
        });
        println!(
            "{}",
            serde_json::to_string_pretty(&payload).expect("failed to serialize proof response")
        );
    } else {
        println!("vela proof");
        println!(" source: {}", frontier.display());
        println!(" template: {template}");
        println!(" output: {}", out.display());
        println!(" trace: {}", out.join("proof-trace.json").display());
        println!(
            " proof state: {}",
            if record_proof_state {
                "recorded"
            } else {
                "not recorded"
            }
        );
        println!();
        println!("{validation_summary}");
    }
}
8091
/// Print a top-level status dashboard for the frontier at `path`: counts,
/// pending-review inbox, causal audit summary, replication tallies, and
/// federation health. With `json`, emits one machine-readable object instead.
fn cmd_status(path: &Path, json: bool) {
    let project = repo::load_from_path(path).unwrap_or_else(|e| fail_return(&e));

    // Pending-review proposals: total and a per-kind breakdown.
    let mut pending_total = 0usize;
    let mut pending_by_kind: std::collections::BTreeMap<String, usize> =
        std::collections::BTreeMap::new();
    for p in &project.proposals {
        if p.status == "pending_review" {
            pending_total += 1;
            *pending_by_kind.entry(p.kind.clone()).or_insert(0) += 1;
        }
    }

    // Causal identifiability audit over the frontier's findings.
    let audit = crate::causal_reasoning::audit_frontier(&project);
    let audit_summary = crate::causal_reasoning::summarize_audit(&audit);

    // Federation health: most recent sync and conflict events, plus the
    // total number of conflict events on the canonical log.
    let mut last_sync: Option<&crate::events::StateEvent> = None;
    let mut last_conflict: Option<&crate::events::StateEvent> = None;
    let mut total_conflicts = 0usize;
    for e in &project.events {
        match e.kind.as_str() {
            "frontier.synced_with_peer" => {
                // Keep the event with the latest timestamp seen so far.
                if last_sync
                    .map(|prev| e.timestamp > prev.timestamp)
                    .unwrap_or(true)
                {
                    last_sync = Some(e);
                }
            }
            "frontier.conflict_detected" => {
                total_conflicts += 1;
                if last_conflict
                    .map(|prev| e.timestamp > prev.timestamp)
                    .unwrap_or(true)
                {
                    last_conflict = Some(e);
                }
            }
            _ => {}
        }
    }

    // Replication tallies: distinct findings with at least one successful
    // replication, and the count of failed replication records.
    let mut targets_with_success = std::collections::HashSet::new();
    let mut failed_replications = 0usize;
    for r in &project.replications {
        if r.outcome == "replicated" {
            targets_with_success.insert(r.target_finding.clone());
        } else if r.outcome == "failed" {
            failed_replications += 1;
        }
    }

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "ok": true,
                "command": "status",
                "frontier": frontier_label(&project),
                "vfr_id": project.frontier_id(),
                "findings": project.findings.len(),
                "events": project.events.len(),
                "actors": project.actors.len(),
                "peers": project.peers.len(),
                "inbox": {
                    "pending_total": pending_total,
                    "pending_by_kind": pending_by_kind,
                },
                "causal_audit": {
                    "identified": audit_summary.identified,
                    "conditional": audit_summary.conditional,
                    "underidentified": audit_summary.underidentified,
                    "underdetermined": audit_summary.underdetermined,
                },
                "replications": {
                    "total": project.replications.len(),
                    "findings_with_success": targets_with_success.len(),
                    "failed": failed_replications,
                },
                "federation": {
                    "peers": project.peers.len(),
                    "last_sync": last_sync.map(|e| e.timestamp.clone()),
                    "last_conflict": last_conflict.map(|e| e.timestamp.clone()),
                    "total_conflicts": total_conflicts,
                },
            }))
            .expect("serialize status")
        );
        return;
    }

    // Human-readable dashboard.
    println!();
    println!(
        " {}",
        format!("VELA · STATUS · {}", path.display())
            .to_uppercase()
            .dimmed()
    );
    println!(" {}", style::tick_row(60));
    println!();
    println!(" frontier: {}", frontier_label(&project));
    println!(" vfr_id: {}", project.frontier_id());
    println!(
        " findings: {} events: {} peers: {} actors: {}",
        project.findings.len(),
        project.events.len(),
        project.peers.len(),
        project.actors.len(),
    );
    println!();
    if pending_total > 0 {
        println!(
            " {} {pending_total} pending proposals",
            style::warn("inbox")
        );
        for (k, n) in &pending_by_kind {
            println!(" · {n:>3} {k}");
        }
    } else {
        println!(" {} inbox clean", style::ok("ok"));
    }
    println!();
    // Audit chip severity: underidentified findings escalate to "lost";
    // merely conditional ones get a warning chip.
    if audit_summary.underidentified > 0 || audit_summary.conditional > 0 {
        let chip = if audit_summary.underidentified > 0 {
            style::lost("audit")
        } else {
            style::warn("audit")
        };
        println!(
            " {} identified {} · conditional {} · underidentified {} · underdetermined {}",
            chip,
            audit_summary.identified,
            audit_summary.conditional,
            audit_summary.underidentified,
            audit_summary.underdetermined,
        );
        if audit_summary.underidentified > 0 {
            println!(
                " next: vela causal audit {} --problems-only",
                path.display()
            );
        }
    } else if audit_summary.underdetermined == 0 {
        println!(
            " {} causal audit: all {} identified",
            style::ok("ok"),
            audit_summary.identified
        );
    } else {
        println!(
            " {} causal audit: {} identified, {} ungraded",
            style::warn("audit"),
            audit_summary.identified,
            audit_summary.underdetermined,
        );
    }
    println!();
    if !project.replications.is_empty() {
        println!(
            " {} {} records · {} findings replicated · {} failed",
            style::ok("replications"),
            project.replications.len(),
            targets_with_success.len(),
            failed_replications,
        );
    }
    if project.peers.is_empty() {
        println!(
            " {} no federation peers registered",
            style::warn("federation")
        );
    } else {
        let last = last_sync
            .map(|e| fmt_timestamp(&e.timestamp))
            .unwrap_or_else(|| "never".to_string());
        let chip = if total_conflicts > 0 {
            style::warn("federation")
        } else {
            style::ok("federation")
        };
        println!(
            " {} {} peer(s) · last sync {} · {} conflict events",
            chip,
            project.peers.len(),
            last,
            total_conflicts,
        );
    }
    println!();
}
8289
8290fn cmd_log(path: &Path, limit: usize, kind_filter: Option<&str>, json: bool) {
8292 let project = repo::load_from_path(path).unwrap_or_else(|e| fail_return(&e));
8293 let mut events: Vec<&crate::events::StateEvent> = project
8294 .events
8295 .iter()
8296 .filter(|e| match kind_filter {
8297 Some(k) => e.kind.contains(k),
8298 None => true,
8299 })
8300 .collect();
8301 events.sort_by(|a, b| b.timestamp.cmp(&a.timestamp));
8302 events.truncate(limit);
8303
8304 if json {
8305 let payload: Vec<_> = events
8306 .iter()
8307 .map(|e| {
8308 json!({
8309 "id": e.id,
8310 "kind": e.kind,
8311 "actor": e.actor.id,
8312 "target": &e.target.id,
8313 "target_type": &e.target.r#type,
8314 "timestamp": e.timestamp,
8315 "reason": e.reason,
8316 })
8317 })
8318 .collect();
8319 println!(
8320 "{}",
8321 serde_json::to_string_pretty(&json!({
8322 "ok": true,
8323 "command": "log",
8324 "events": payload,
8325 }))
8326 .expect("serialize log")
8327 );
8328 return;
8329 }
8330
8331 println!();
8332 println!(
8333 " {}",
8334 format!("VELA · LOG · {} (latest {})", path.display(), events.len())
8335 .to_uppercase()
8336 .dimmed()
8337 );
8338 println!(" {}", style::tick_row(60));
8339 if events.is_empty() {
8340 println!(" (no events)");
8341 return;
8342 }
8343 for e in &events {
8344 let when = fmt_timestamp(&e.timestamp);
8345 let target_short = if e.target.id.len() > 22 {
8346 format!("{}…", &e.target.id[..21])
8347 } else {
8348 e.target.id.clone()
8349 };
8350 let reason: String = e.reason.chars().take(70).collect();
8351 println!(
8352 " {:<19} {:<32} {:<24} {}",
8353 when, e.kind, target_short, reason
8354 );
8355 }
8356 println!();
8357}
8358
8359fn cmd_inbox(path: &Path, kind_filter: Option<&str>, limit: usize, json: bool) {
8361 let project = repo::load_from_path(path).unwrap_or_else(|e| fail_return(&e));
8362
8363 let mut score_map: std::collections::HashMap<String, (f64, f64, f64, f64)> =
8366 std::collections::HashMap::new();
8367 for p in &project.proposals {
8368 if p.kind != "finding.note" {
8369 continue;
8370 }
8371 if p.actor.id != "agent:reviewer-agent" {
8372 continue;
8373 }
8374 let reason = &p.reason;
8375 let Some(target) = reason.split_whitespace().find(|s| s.starts_with("vpr_")) else {
8376 continue;
8377 };
8378 let text = p.payload.get("text").and_then(|v| v.as_str()).unwrap_or("");
8379 let extract = |k: &str| -> f64 {
8380 let pat = format!("{k} ");
8381 text.find(&pat)
8382 .and_then(|idx| text[idx + pat.len()..].split_whitespace().next())
8383 .and_then(|t| t.parse::<f64>().ok())
8384 .unwrap_or(0.0)
8385 };
8386 score_map.insert(
8387 target.to_string(),
8388 (
8389 extract("plausibility"),
8390 extract("evidence"),
8391 extract("scope"),
8392 extract("duplicate-risk"),
8393 ),
8394 );
8395 }
8396
8397 let mut pending: Vec<&crate::proposals::StateProposal> = project
8398 .proposals
8399 .iter()
8400 .filter(|p| {
8401 p.status == "pending_review"
8402 && match kind_filter {
8403 Some(k) => p.kind.contains(k),
8404 None => true,
8405 }
8406 })
8407 .collect();
8408 pending.sort_by(|a, b| {
8410 let sa = score_map
8411 .get(&a.id)
8412 .map(|(p, e, s, d)| 0.4 * p + 0.3 * e + 0.2 * s - 0.3 * d);
8413 let sb = score_map
8414 .get(&b.id)
8415 .map(|(p, e, s, d)| 0.4 * p + 0.3 * e + 0.2 * s - 0.3 * d);
8416 sb.partial_cmp(&sa).unwrap_or(std::cmp::Ordering::Equal)
8417 });
8418 pending.truncate(limit);
8419
8420 if json {
8421 let payload: Vec<_> = pending
8422 .iter()
8423 .map(|p| {
8424 let assertion_text = p
8425 .payload
8426 .get("finding")
8427 .and_then(|f| f.get("assertion"))
8428 .and_then(|a| a.get("text"))
8429 .and_then(|t| t.as_str());
8430 let assertion_type = p
8431 .payload
8432 .get("finding")
8433 .and_then(|f| f.get("assertion"))
8434 .and_then(|a| a.get("type"))
8435 .and_then(|t| t.as_str());
8436 let composite = score_map
8437 .get(&p.id)
8438 .map(|(pl, e, s, d)| 0.4 * pl + 0.3 * e + 0.2 * s - 0.3 * d);
8439 json!({
8440 "proposal_id": p.id,
8441 "kind": p.kind,
8442 "actor": p.actor,
8443 "reason": p.reason,
8444 "assertion_text": assertion_text,
8445 "assertion_type": assertion_type,
8446 "reviewer_composite": composite,
8447 })
8448 })
8449 .collect();
8450 println!(
8451 "{}",
8452 serde_json::to_string_pretty(&json!({
8453 "ok": true,
8454 "command": "inbox",
8455 "shown": pending.len(),
8456 "proposals": payload,
8457 }))
8458 .expect("serialize inbox")
8459 );
8460 return;
8461 }
8462
8463 println!();
8464 println!(
8465 " {}",
8466 format!(
8467 "VELA · INBOX · {} ({} pending shown)",
8468 path.display(),
8469 pending.len()
8470 )
8471 .to_uppercase()
8472 .dimmed()
8473 );
8474 println!(" {}", style::tick_row(60));
8475 if pending.is_empty() {
8476 println!(" (inbox clean)");
8477 return;
8478 }
8479 for p in &pending {
8480 let assertion_text = p
8481 .payload
8482 .get("finding")
8483 .and_then(|f| f.get("assertion"))
8484 .and_then(|a| a.get("text"))
8485 .and_then(|t| t.as_str())
8486 .unwrap_or("");
8487 let assertion_type = p
8488 .payload
8489 .get("finding")
8490 .and_then(|f| f.get("assertion"))
8491 .and_then(|a| a.get("type"))
8492 .and_then(|t| t.as_str())
8493 .unwrap_or("");
8494 let composite = score_map
8495 .get(&p.id)
8496 .map(|(pl, e, s, d)| 0.4 * pl + 0.3 * e + 0.2 * s - 0.3 * d);
8497 let score_str = composite
8498 .map(|c| format!("[{:.2}]", c))
8499 .unwrap_or_else(|| "[—] ".to_string());
8500 let kind_short = if p.kind.len() > 12 {
8501 format!("{}…", &p.kind[..11])
8502 } else {
8503 p.kind.clone()
8504 };
8505 let summary: String = if !assertion_text.is_empty() {
8506 assertion_text.chars().take(80).collect()
8507 } else {
8508 p.reason.chars().take(80).collect()
8509 };
8510 println!(
8511 " {} {} {:<13} {:<18} {}",
8512 score_str, p.id, kind_short, assertion_type, summary
8513 );
8514 }
8515 println!();
8516}
8517
8518fn cmd_ask(path: &Path, question: &str, json: bool) {
8523 let project = repo::load_from_path(path).unwrap_or_else(|e| fail_return(&e));
8524
8525 if question.trim().is_empty() {
8526 use std::io::{BufRead, Write};
8528 println!();
8529 println!(
8530 " {}",
8531 format!("VELA · ASK · {}", path.display())
8532 .to_uppercase()
8533 .dimmed()
8534 );
8535 println!(" {}", style::tick_row(60));
8536 println!(" Ask a question. Type `exit` to quit.");
8537 println!(" Examples:");
8538 println!(" · what's pending?");
8539 println!(" · what's underidentified?");
8540 println!(" · how many findings?");
8541 println!(" · what changed recently?");
8542 println!(" · who has what calibration?");
8543 println!();
8544 let stdin = std::io::stdin();
8545 let mut stdout = std::io::stdout();
8546 loop {
8547 print!(" ask> ");
8548 stdout.flush().ok();
8549 let mut line = String::new();
8550 if stdin.lock().read_line(&mut line).is_err() {
8551 break;
8552 }
8553 let q = line.trim();
8554 if q.is_empty() {
8555 continue;
8556 }
8557 if matches!(q, "exit" | "quit" | "q") {
8558 break;
8559 }
8560 answer(&project, q, false);
8561 }
8562 return;
8563 }
8564
8565 answer(&project, question, json);
8566}
8567
/// Route a free-text question `q` to a canned report over `project`.
///
/// Routing is keyword-based and first-match-wins, in this order: pending
/// inbox, causal audit, recent events, counts, calibration, federation.
/// Unrecognized questions get a hint listing the supported topics. Each
/// branch prints either JSON or human-formatted text depending on `json`.
fn answer(project: &crate::project::Project, q: &str, json: bool) {
    // Case-insensitive keyword matching.
    let lower = q.to_lowercase();

    // Pending-review proposals, grouped by kind.
    if lower.contains("pending")
        || lower.contains("inbox")
        || lower.contains("queue")
        || lower.contains("to review")
    {
        let pending: Vec<&crate::proposals::StateProposal> = project
            .proposals
            .iter()
            .filter(|p| p.status == "pending_review")
            .collect();
        let mut by_kind: std::collections::BTreeMap<String, usize> = Default::default();
        for p in &pending {
            *by_kind.entry(p.kind.clone()).or_insert(0) += 1;
        }
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "answer": "pending",
                    "total": pending.len(),
                    "by_kind": by_kind,
                }))
                .unwrap()
            );
        } else {
            println!(" {} pending proposals.", pending.len());
            for (k, n) in &by_kind {
                println!(" · {n:>3} {k}");
            }
            if pending.is_empty() {
                println!(" Inbox is clean.");
            } else {
                println!(" Run `vela inbox <frontier>` to triage.");
            }
        }
        return;
    }

    // Causal identifiability audit summary, listing up to 8 of the
    // underidentified findings as concrete review items.
    if lower.contains("underident")
        || lower.contains("audit")
        || lower.contains("identif")
        || lower.contains("causal")
    {
        let entries = crate::causal_reasoning::audit_frontier(project);
        let summary = crate::causal_reasoning::summarize_audit(&entries);
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "answer": "audit",
                    "summary": {
                        "identified": summary.identified,
                        "conditional": summary.conditional,
                        "underidentified": summary.underidentified,
                        "underdetermined": summary.underdetermined,
                    },
                }))
                .unwrap()
            );
        } else {
            println!(
                " Causal audit: {} identified · {} conditional · {} underidentified · {} underdetermined.",
                summary.identified,
                summary.conditional,
                summary.underidentified,
                summary.underdetermined,
            );
            if summary.underidentified > 0 {
                println!(
                    " The {} underidentified findings are concrete review items:",
                    summary.underidentified
                );
                for e in entries
                    .iter()
                    .filter(|e| {
                        matches!(
                            e.verdict,
                            crate::causal_reasoning::Identifiability::Underidentified
                        )
                    })
                    .take(8)
                {
                    let txt: String = e.assertion_text.chars().take(70).collect();
                    println!(" · {} {}", e.finding_id, txt);
                }
            }
        }
        return;
    }

    // Most recent 8 events, newest first.
    if lower.contains("recent")
        || lower.contains("changed")
        || lower.contains("latest")
        || lower.contains("happen")
    {
        let mut events: Vec<&crate::events::StateEvent> = project.events.iter().collect();
        events.sort_by(|a, b| b.timestamp.cmp(&a.timestamp));
        events.truncate(8);
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "answer": "recent_events",
                    "events": events.iter().map(|e| json!({
                        "id": e.id, "kind": e.kind, "timestamp": e.timestamp,
                        "actor": e.actor.id, "target": e.target.id,
                    })).collect::<Vec<_>>(),
                }))
                .unwrap()
            );
        } else {
            println!(" Most recent {} events:", events.len());
            for e in &events {
                let when = fmt_timestamp(&e.timestamp);
                println!(" · {when} {:<28} {}", e.kind, e.target.id);
            }
        }
        return;
    }

    // Headline counts across the frontier's collections.
    if lower.starts_with("how many") || lower.contains("count") || lower.contains("total") {
        let n = project.findings.len();
        let evs = project.events.len();
        let peers = project.peers.len();
        let actors = project.actors.len();
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "answer": "counts",
                    "findings": n,
                    "events": evs,
                    "peers": peers,
                    "actors": actors,
                    "replications": project.replications.len(),
                    "predictions": project.predictions.len(),
                }))
                .unwrap()
            );
        } else {
            println!(" {n} findings · {evs} events · {actors} actors · {peers} peers.");
            println!(
                " {} replications · {} predictions · {} datasets · {} code artifacts.",
                project.replications.len(),
                project.predictions.len(),
                project.datasets.len(),
                project.code_artifacts.len(),
            );
        }
        return;
    }

    // Per-actor calibration ledger (prediction counts and Brier scores).
    if lower.contains("calibration") || lower.contains("brier") || lower.contains("predict") {
        let records =
            crate::calibration::calibration_records(&project.predictions, &project.resolutions);
        if json {
            println!("{}", serde_json::to_string_pretty(&records).unwrap());
        } else if records.is_empty() {
            println!(" No predictions yet. The calibration ledger is empty.");
        } else {
            println!(" Calibration over {} actor(s):", records.len());
            for r in &records {
                let brier = r
                    .brier_score
                    .map(|b| format!("{:.3}", b))
                    .unwrap_or_else(|| "—".into());
                println!(
                    " · {:<28} predictions {} · resolved {} · expired {} · Brier {}",
                    r.actor, r.n_predictions, r.n_resolved, r.n_expired, brier
                );
            }
        }
        return;
    }

    // Federation peers and total conflict events on the canonical log.
    if lower.contains("peer")
        || lower.contains("federat")
        || lower.contains("sync")
        || lower.contains("conflict")
    {
        let mut total_conflicts = 0usize;
        for e in &project.events {
            if e.kind == "frontier.conflict_detected" {
                total_conflicts += 1;
            }
        }
        if json {
            println!(
                "{}",
                serde_json::to_string_pretty(&json!({
                    "answer": "federation",
                    "peers": project.peers.iter().map(|p| &p.id).collect::<Vec<_>>(),
                    "total_conflicts": total_conflicts,
                }))
                .unwrap()
            );
        } else {
            println!(" {} peer(s) registered:", project.peers.len());
            for p in &project.peers {
                println!(" · {:<24} {}", p.id, p.url);
            }
            println!(" {total_conflicts} conflict events on the canonical log.");
        }
        return;
    }

    // Fallback: no branch matched; point the user at the supported topics.
    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&json!({
                "answer": "unknown_question",
                "question": q,
                "hint": "Try: pending, audit, recent, how many, calibration, peers."
            }))
            .unwrap()
        );
    } else {
        println!(" Don't know how to route that question yet.");
        println!(" Try: pending · audit · recent · how many · calibration · peers");
    }
}
8799
8800fn frontier_label(p: &crate::project::Project) -> String {
8801 if p.project.name.trim().is_empty() {
8802 "(unnamed)".to_string()
8803 } else {
8804 p.project.name.clone()
8805 }
8806}
8807
8808fn fmt_timestamp(ts: &str) -> String {
8809 chrono::DateTime::parse_from_rfc3339(ts)
8812 .map(|dt| dt.format("%m-%d %H:%M").to_string())
8813 .unwrap_or_else(|_| ts.chars().take(16).collect())
8814}
8815
/// Print the human-readable stats panel for the frontier at `path`.
///
/// Loads the frontier (aborting the process on failure via
/// `load_frontier_or_fail`) and renders its cached `stats` struct, the
/// latest proof-packet status, and a per-category breakdown sorted by
/// descending count.
fn cmd_stats(path: &Path) {
    let frontier = load_frontier_or_fail(path);
    let s = &frontier.stats;
    println!();
    println!("  {}", "FRONTIER · V0.36.0".dimmed());
    println!("  {}", frontier.project.name.bold());
    println!("  {}", style::tick_row(60));
    println!("  id:              {}", frontier.frontier_id());
    println!("  compiled:        {}", frontier.project.compiled_at);
    println!("  papers:          {}", frontier.project.papers_processed);
    println!("  findings:        {}", s.findings);
    println!("  links:           {}", s.links);
    println!("  replicated:      {}", s.replicated);
    println!("  avg confidence:  {}", s.avg_confidence);
    println!("  gaps:            {}", s.gaps);
    println!("  contested:       {}", s.contested);
    println!("  reviewed:        {}", s.human_reviewed);
    println!("  proposals:       {}", s.proposal_count);
    println!(
        "  recorded proof:  {}",
        frontier.proof_state.latest_packet.status
    );
    // Only show the explanatory note once a packet has actually been exported.
    if frontier.proof_state.latest_packet.status != "never_exported" {
        println!(
            "  proof note:      recorded frontier metadata; packet files are checked by `vela packet validate`"
        );
    }
    if !s.categories.is_empty() {
        println!();
        println!("  {}", "categories".dimmed());
        // Sort categories by descending count; stable sort keeps the map's
        // original (key) order for ties.
        let mut categories = s.categories.iter().collect::<Vec<_>>();
        categories.sort_by(|a, b| b.1.cmp(a.1));
        for (category, count) in categories {
            println!("    {category}: {}", count);
        }
    }
    println!();
    println!("  {}", style::tick_row(60));
    println!();
}
8856
/// Handle every `vela proposals …` subcommand.
///
/// Each arm follows the same pattern: delegate to the `proposals` module,
/// build a JSON `payload` describing the result, then either pretty-print
/// that payload (`--json`) or emit a short console summary. Errors abort
/// the process through `fail_return`; `validate` additionally exits with
/// status 1 when any proposal is invalid.
fn cmd_proposals(action: ProposalAction) {
    match action {
        // List proposals, optionally filtered by status.
        ProposalAction::List {
            frontier,
            status,
            json,
        } => {
            let frontier_state =
                repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let proposals_list = proposals::list(&frontier_state, status.as_deref());
            let payload = json!({
                "ok": true,
                "command": "proposals.list",
                "frontier": frontier_state.project.name,
                "status_filter": status,
                "summary": proposals::summary(&frontier_state),
                "proposals": proposals_list,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposals list")
                );
            } else {
                println!("vela proposals list");
                println!("  frontier:  {}", frontier_state.project.name);
                println!(
                    "  proposals: {}",
                    payload["proposals"].as_array().map_or(0, Vec::len)
                );
            }
        }
        // Show one proposal by id.
        ProposalAction::Show {
            frontier,
            proposal_id,
            json,
        } => {
            let frontier_state =
                repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let proposal =
                proposals::show(&frontier_state, &proposal_id).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "proposals.show",
                "frontier": frontier_state.project.name,
                "proposal": proposal,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal show")
                );
            } else {
                println!("vela proposals show");
                println!("  frontier: {}", frontier_state.project.name);
                println!("  proposal: {}", proposal_id);
                println!("  kind:     {}", proposal.kind);
                println!("  status:   {}", proposal.status);
            }
        }
        // Dry-run a proposal: report before/after counts without applying it.
        ProposalAction::Preview {
            frontier,
            proposal_id,
            reviewer,
            json,
        } => {
            let preview = proposals::preview_at_path(&frontier, &proposal_id, &reviewer)
                .unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "proposals.preview",
                "frontier": frontier.display().to_string(),
                "preview": preview,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal preview")
                );
            } else {
                println!("vela proposals preview");
                println!("  proposal: {}", proposal_id);
                println!("  kind:     {}", preview.kind);
                println!(
                    "  findings:  {} -> {}",
                    preview.findings_before, preview.findings_after
                );
                println!(
                    "  artifacts: {} -> {}",
                    preview.artifacts_before, preview.artifacts_after
                );
                println!(
                    "  events:    {} -> {}",
                    preview.events_before, preview.events_after
                );
                // Detail lines are suppressed when the corresponding set is empty.
                if !preview.changed_findings.is_empty() {
                    println!(
                        "  findings changed:  {}",
                        preview.changed_findings.join(", ")
                    );
                }
                if !preview.changed_artifacts.is_empty() {
                    println!(
                        "  artifacts changed: {}",
                        preview.changed_artifacts.join(", ")
                    );
                }
                if !preview.event_kinds.is_empty() {
                    println!("  event kinds: {}", preview.event_kinds.join(", "));
                }
                println!("  event: {}", preview.applied_event_id);
            }
        }
        // Import proposals from an external source file/dir into the frontier.
        ProposalAction::Import {
            frontier,
            source,
            json,
        } => {
            let report =
                proposals::import_from_path(&frontier, &source).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "proposals.import",
                "frontier": frontier.display().to_string(),
                "source": source.display().to_string(),
                "summary": {
                    "imported": report.imported,
                    "applied": report.applied,
                    "rejected": report.rejected,
                    "duplicates": report.duplicates,
                },
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal import")
                );
            } else {
                println!(
                    "Imported {} proposals into {}",
                    report.imported, report.wrote_to
                );
            }
        }
        // Validate a proposal source without touching any frontier; exits 1
        // on invalid proposals so scripts can gate on it.
        ProposalAction::Validate { source, json } => {
            let report = proposals::validate_source(&source).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": report.ok,
                "command": "proposals.validate",
                "source": source.display().to_string(),
                "summary": {
                    "checked": report.checked,
                    "valid": report.valid,
                    "invalid": report.invalid,
                },
                "proposal_ids": report.proposal_ids,
                "errors": report.errors,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal validation")
                );
            } else if report.ok {
                println!("{} validated {} proposals", style::ok("ok"), report.valid);
            } else {
                println!(
                    "{} validated {} proposals, {} invalid",
                    style::lost("lost"),
                    report.valid,
                    report.invalid
                );
                for error in &report.errors {
                    println!("  · {error}");
                }
                std::process::exit(1);
            }
        }
        // Export proposals (optionally filtered by status) to an output path.
        ProposalAction::Export {
            frontier,
            output,
            status,
            json,
        } => {
            let count = proposals::export_to_path(&frontier, &output, status.as_deref())
                .unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "proposals.export",
                "frontier": frontier.display().to_string(),
                "output": output.display().to_string(),
                "status": status,
                "exported": count,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal export")
                );
            } else {
                println!("sealed · {count} proposals · {}", output.display());
            }
        }
        // Accept a proposal: applies it and records the resulting event id.
        ProposalAction::Accept {
            frontier,
            proposal_id,
            reviewer,
            reason,
            json,
        } => {
            let event_id = proposals::accept_at_path(&frontier, &proposal_id, &reviewer, &reason)
                .unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "proposals.accept",
                "frontier": frontier.display().to_string(),
                "proposal_id": proposal_id,
                "reviewer": reviewer,
                "applied_event_id": event_id,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal accept")
                );
            } else {
                println!(
                    "{} accepted and applied proposal {}",
                    style::ok("ok"),
                    proposal_id
                );
                println!("  event: {}", event_id);
            }
        }
        // Reject a proposal; unlike accept, no applied event id is produced.
        ProposalAction::Reject {
            frontier,
            proposal_id,
            reviewer,
            reason,
            json,
        } => {
            proposals::reject_at_path(&frontier, &proposal_id, &reviewer, &reason)
                .unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "proposals.reject",
                "frontier": frontier.display().to_string(),
                "proposal_id": proposal_id,
                "reviewer": reviewer,
                "status": "rejected",
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize proposal reject")
                );
            } else {
                println!(
                    "{} rejected proposal {}",
                    style::warn("rejected"),
                    proposal_id
                );
            }
        }
    }
}
9131
/// Import an artifact packet into the frontier's proposal/event state.
///
/// Delegates to `artifact_to_state::import_packet_at_path` (with
/// `apply_artifacts` controlling whether artifact events are applied
/// immediately) and renders the returned report as pretty JSON or a
/// console summary. Import errors abort the process via `fail_return`.
fn cmd_artifact_to_state(
    frontier: &Path,
    packet: &Path,
    actor: &str,
    apply_artifacts: bool,
    json: bool,
) {
    let report =
        crate::artifact_to_state::import_packet_at_path(frontier, packet, actor, apply_artifacts)
            .unwrap_or_else(|e| fail_return(&e));
    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&report)
                .expect("failed to serialize artifact-to-state report")
        );
    } else {
        println!("vela artifact-to-state");
        println!("  packet:              {}", report.packet_id);
        println!("  frontier:            {}", report.frontier);
        println!("  artifact proposals:  {}", report.artifact_proposals);
        println!("  finding proposals:   {}", report.finding_proposals);
        println!("  gap proposals:       {}", report.gap_proposals);
        println!(
            "  applied artifact events:  {}",
            report.applied_artifact_events
        );
        println!(
            "  pending truth proposals:  {}",
            report.pending_truth_proposals
        );
    }
}
9165
/// Handle `vela bridge-kit …` subcommands.
///
/// Currently only `validate`: checks every packet in a bridge-kit source,
/// prints a per-packet verdict (JSON or console), and exits with status 1
/// when the kit as a whole is invalid.
fn cmd_bridge_kit(action: BridgeKitAction) {
    match action {
        BridgeKitAction::Validate { source, json } => {
            let report = crate::artifact_to_state::validate_bridge_kit_path(&source);
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&report)
                        .expect("failed to serialize bridge-kit validation report")
                );
            } else {
                println!("vela bridge-kit validate");
                println!("  source:   {}", report.source);
                println!("  packets:  {}", report.packet_count);
                println!("  valid:    {}", report.valid_packet_count);
                println!("  invalid:  {}", report.invalid_packet_count);
                for packet in &report.packets {
                    if packet.ok {
                        println!(
                            "  ok: {} · {} artifacts · {} claims · {} needs",
                            packet
                                .packet_id
                                .as_deref()
                                .unwrap_or("packet id unavailable"),
                            packet.artifact_count,
                            packet.candidate_claim_count,
                            packet.open_need_count
                        );
                    } else {
                        println!("  invalid: {} · {}", packet.path, packet.errors.join("; "));
                    }
                }
                for error in &report.errors {
                    println!("  error: {error}");
                }
            }
            // Non-zero exit so scripts can gate on kit validity; runs after
            // the report has been printed in both output modes.
            if !report.ok {
                std::process::exit(1);
            }
        }
    }
}
9208
/// Handle `vela source-adapter …` subcommands.
///
/// `run` drives an async external-source adapter over the frontier: all CLI
/// flags are forwarded verbatim into `SourceAdapterRunOptions`, and the
/// resulting fetch/proposal/event counts are printed as pretty JSON or a
/// console summary. Adapter errors abort the process via `fail_return`.
async fn cmd_source_adapter(action: SourceAdapterAction) {
    match action {
        SourceAdapterAction::Run {
            frontier,
            adapter,
            actor,
            entries,
            priority,
            include_excluded,
            allow_partial,
            dry_run,
            input_dir,
            apply_artifacts,
            json,
        } => {
            let report = crate::source_adapters::run(
                &frontier,
                crate::source_adapters::SourceAdapterRunOptions {
                    adapter,
                    actor,
                    entries,
                    priority,
                    include_excluded,
                    allow_partial,
                    dry_run,
                    input_dir,
                    apply_artifacts,
                },
            )
            .await
            .unwrap_or_else(|e| fail_return(&e));
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&report)
                        .expect("failed to serialize source adapter report")
                );
            } else {
                println!("vela source-adapter run");
                println!("  adapter:           {}", report.adapter);
                println!("  run:               {}", report.run_id);
                println!("  frontier:          {}", report.frontier);
                println!("  selected entries:  {}", report.selected_entries);
                println!("  fetched records:   {}", report.fetched_records);
                println!("  changed records:   {}", report.changed_records);
                println!("  unchanged records: {}", report.unchanged_records);
                println!("  failed records:    {}", report.failed_records.len());
                // A packet id is only present when the run produced a packet.
                if let Some(packet_id) = report.packet_id {
                    println!("  packet:            {packet_id}");
                }
                println!("  artifact proposals: {}", report.artifact_proposals);
                println!("  review note proposals: {}", report.review_note_proposals);
                println!("  applied events:    {}", report.applied_event_ids.len());
            }
        }
    }
}
9266
/// Handle `vela runtime-adapter …` subcommands.
///
/// `run` executes a (synchronous) runtime adapter against the frontier and
/// prints the resulting proposal/event counts as pretty JSON or a console
/// summary. Adapter errors abort the process via `fail_return`.
fn cmd_runtime_adapter(action: RuntimeAdapterAction) {
    match action {
        RuntimeAdapterAction::Run {
            frontier,
            adapter,
            input,
            actor,
            dry_run,
            apply_artifacts,
            json,
        } => {
            let report = crate::runtime_adapters::run(
                &frontier,
                crate::runtime_adapters::RuntimeAdapterRunOptions {
                    adapter,
                    input,
                    actor,
                    dry_run,
                    apply_artifacts,
                },
            )
            .unwrap_or_else(|e| fail_return(&e));
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&report)
                        .expect("failed to serialize runtime adapter report")
                );
            } else {
                println!("vela runtime-adapter run");
                println!("  adapter:  {}", report.adapter);
                println!("  run:      {}", report.run_id);
                println!("  frontier: {}", report.frontier);
                // A packet id is only present when the run produced a packet.
                if let Some(packet_id) = report.packet_id {
                    println!("  packet:   {packet_id}");
                }
                println!("  artifact proposals:     {}", report.artifact_proposals);
                println!("  finding proposals:      {}", report.finding_proposals);
                println!("  gap proposals:          {}", report.gap_proposals);
                println!("  review note proposals:  {}", report.review_note_proposals);
                println!(
                    "  applied artifact events:  {}",
                    report.applied_artifact_events
                );
                println!(
                    "  pending truth proposals:  {}",
                    report.pending_truth_proposals
                );
            }
        }
    }
}
9319
/// Handle `vela sign …` subcommands: keypair generation, signing a
/// frontier, verifying signatures, and setting per-finding signature
/// thresholds. Each arm prints either pretty JSON (`--json`) or a console
/// summary; errors abort the process via `fail`/`fail_return`.
fn cmd_sign(action: SignAction) {
    match action {
        // Generate a new keypair under `out` and echo the public key.
        SignAction::GenerateKeypair { out, json } => {
            let public_key = sign::generate_keypair(&out).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "sign.generate-keypair",
                "output_dir": out.display().to_string(),
                "public_key": public_key,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize sign.generate-keypair")
                );
            } else {
                println!("{} keypair · {}", style::ok("generated"), out.display());
                println!("  public key: {public_key}");
            }
        }
        // Sign all findings in the frontier with the given private key.
        SignAction::Apply {
            frontier,
            private_key,
            json,
        } => {
            let count =
                sign::sign_frontier(&frontier, &private_key).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "sign.apply",
                "frontier": frontier.display().to_string(),
                "private_key": private_key.display().to_string(),
                "signed": count,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload).expect("failed to serialize sign.apply")
                );
            } else {
                println!(
                    "{} {count} findings in {}",
                    style::ok("signed"),
                    frontier.display()
                );
            }
        }
        // Verify signatures across the frontier, optionally pinned to a key.
        SignAction::Verify {
            frontier,
            public_key,
            json,
        } => {
            let report = sign::verify_frontier(&frontier, public_key.as_deref())
                .unwrap_or_else(|e| fail_return(&e));
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&report).expect("failed to serialize sign.verify")
                );
            } else {
                println!();
                println!(
                    "  {}",
                    format!("VELA · SIGN · VERIFY · {}", frontier.display())
                        .to_uppercase()
                        .dimmed()
                );
                println!("  {}", style::tick_row(60));
                println!("  total findings:  {}", report.total_findings);
                println!("  signed:          {}", report.signed);
                println!("  unsigned:        {}", report.unsigned);
                println!("  valid:           {}", report.valid);
                println!("  invalid:         {}", report.invalid);
                // Threshold lines only appear when thresholds are in use.
                if report.findings_with_threshold > 0 {
                    println!("  with threshold:  {}", report.findings_with_threshold);
                    println!("  jointly accepted: {}", report.jointly_accepted);
                }
            }
        }
        // Set a finding's required signature count and recompute whether it
        // is now jointly accepted, persisting the updated frontier.
        SignAction::ThresholdSet {
            frontier,
            finding_id,
            to,
            json,
        } => {
            // A threshold of zero would make every finding trivially accepted.
            if to == 0 {
                fail("--to must be >= 1");
            }
            let mut project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let Some(idx) = project.findings.iter().position(|f| f.id == finding_id) else {
                fail(&format!("finding '{finding_id}' not present in frontier"));
            };
            project.findings[idx].flags.signature_threshold = Some(to);
            // Recompute joint acceptance BEFORE reading the flag and saving,
            // so the persisted state and the reported `met` value agree.
            sign::refresh_jointly_accepted(&mut project);
            let met = project.findings[idx].flags.jointly_accepted;
            repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));

            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&json!({
                        "ok": true,
                        "command": "sign.threshold-set",
                        "finding_id": finding_id,
                        "threshold": to,
                        "jointly_accepted": met,
                        "frontier": frontier.display().to_string(),
                    }))
                    .expect("failed to serialize sign.threshold-set")
                );
            } else {
                println!(
                    "{} signature_threshold={to} on {finding_id} ({})",
                    style::ok("set"),
                    if met {
                        "jointly accepted"
                    } else {
                        "awaiting signatures"
                    }
                );
            }
        }
    }
}
9448
/// Handle `vela actor …` subcommands: register a new actor keypair record
/// in a frontier, or list the actors already registered. Validation or I/O
/// errors abort the process via `fail`/`fail_return`.
fn cmd_actor(action: ActorAction) {
    match action {
        ActorAction::Add {
            frontier,
            id,
            pubkey,
            tier,
            orcid,
            clearance,
            json,
        } => {
            // A 32-byte Ed25519 public key hex-encodes to exactly 64 chars;
            // the hex::decode call rejects non-hex characters.
            let trimmed = pubkey.trim();
            if trimmed.len() != 64 || hex::decode(trimmed).is_err() {
                fail("Public key must be 64 hex characters (32-byte Ed25519 pubkey).");
            }
            // Optional ORCID and clearance are validated/parsed up front so
            // we fail before touching the frontier on disk.
            let orcid_normalized = orcid
                .as_deref()
                .map(|s| sign::validate_orcid(s).unwrap_or_else(|e| fail_return(&e)));
            let clearance: Option<crate::access_tier::AccessTier> = clearance.as_deref().map(|s| {
                crate::access_tier::AccessTier::parse(s).unwrap_or_else(|e| fail_return(&e))
            });

            let mut project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            // Actor ids are unique per frontier.
            if project.actors.iter().any(|actor| actor.id == id) {
                fail(&format!(
                    "Actor '{id}' already registered in this frontier."
                ));
            }
            project.actors.push(sign::ActorRecord {
                id: id.clone(),
                public_key: trimmed.to_string(),
                algorithm: "ed25519".to_string(),
                created_at: chrono::Utc::now().to_rfc3339(),
                tier: tier.clone(),
                orcid: orcid_normalized.clone(),
                access_clearance: clearance,
            });
            repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "actor.add",
                "frontier": frontier.display().to_string(),
                "actor_id": id,
                "public_key": trimmed,
                "tier": tier,
                "orcid": orcid_normalized,
                "registered_count": project.actors.len(),
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload).expect("failed to serialize actor.add")
                );
            } else {
                let tier_suffix = tier
                    .as_deref()
                    .map_or_else(String::new, |t| format!(" tier={t}"));
                // Slicing [..16] is safe: the key was validated to be 64 chars.
                println!(
                    "{} actor {} (pubkey {}{tier_suffix})",
                    style::ok("registered"),
                    id,
                    &trimmed[..16]
                );
            }
        }
        ActorAction::List { frontier, json } => {
            let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            if json {
                let payload = json!({
                    "ok": true,
                    "command": "actor.list",
                    "frontier": frontier.display().to_string(),
                    "actors": project.actors,
                });
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload).expect("failed to serialize actor.list")
                );
            } else {
                println!();
                println!(
                    "  {}",
                    format!("VELA · ACTOR · LIST · {}", frontier.display())
                        .to_uppercase()
                        .dimmed()
                );
                println!("  {}", style::tick_row(60));
                if project.actors.is_empty() {
                    println!("  (no actors registered)");
                } else {
                    for actor in &project.actors {
                        // NOTE(review): assumes stored public keys are >= 16
                        // chars (true for keys added via `actor add`, which
                        // enforces 64) — a hand-edited shorter key would panic.
                        println!(
                            "  {:<28} {}… registered {}",
                            actor.id,
                            &actor.public_key[..16],
                            actor.created_at
                        );
                    }
                }
            }
        }
    }
}
9556
/// Handle `vela causal …` subcommands: identifiability audit, effect
/// identification, graph inspection, and counterfactual queries. Each arm
/// prints pretty JSON (`--json`) or a formatted console report; unknown
/// nodes and invalid interventions abort the process via `fail`.
fn cmd_causal(action: CausalAction) {
    use crate::causal_reasoning;

    match action {
        // Audit every finding's causal identifiability, optionally keeping
        // only entries that need reviewer attention.
        CausalAction::Audit {
            frontier,
            problems_only,
            json,
        } => {
            let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let mut entries = causal_reasoning::audit_frontier(&project);
            if problems_only {
                entries.retain(|e| e.verdict.needs_reviewer_attention());
            }
            // Summary is computed over the (possibly filtered) entries.
            let summary = causal_reasoning::summarize_audit(&entries);

            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&json!({
                        "ok": true,
                        "command": "causal.audit",
                        "frontier": frontier.display().to_string(),
                        "summary": summary,
                        "entries": entries,
                    }))
                    .expect("serialize causal.audit")
                );
                return;
            }

            println!();
            println!(
                "  {}",
                format!("VELA · CAUSAL · AUDIT · {}", frontier.display())
                    .to_uppercase()
                    .dimmed()
            );
            println!("  {}", style::tick_row(60));
            println!(
                "  total: {} identified: {} conditional: {} underidentified: {} underdetermined: {}",
                summary.total,
                summary.identified,
                summary.conditional,
                summary.underidentified,
                summary.underdetermined,
            );
            if entries.is_empty() {
                println!("  (no entries to report)");
                return;
            }
            for e in &entries {
                // Colored status chip per identifiability verdict.
                let chip = match e.verdict {
                    crate::causal_reasoning::Identifiability::Identified => style::ok("identified"),
                    crate::causal_reasoning::Identifiability::Conditional => {
                        style::warn("conditional")
                    }
                    crate::causal_reasoning::Identifiability::Underidentified => {
                        style::lost("underidentified")
                    }
                    crate::causal_reasoning::Identifiability::Underdetermined => {
                        style::warn("underdetermined")
                    }
                };
                // Debug-format the enum variants in lowercase, or "none".
                let claim = e
                    .causal_claim
                    .map_or("none".to_string(), |c| format!("{c:?}").to_lowercase());
                let grade = e
                    .causal_evidence_grade
                    .map_or("none".to_string(), |g| format!("{g:?}").to_lowercase());
                println!();
                println!("  {chip} {} ({}/{})", e.finding_id, claim, grade);
                // Truncate the assertion to 78 chars for the one-line preview.
                let assertion_short: String = e.assertion_text.chars().take(78).collect();
                println!("    {assertion_short}");
                println!("    {} {}", style::ok("why:"), e.rationale);
                // Show remediation for attention-worthy or underdetermined verdicts.
                if e.verdict.needs_reviewer_attention()
                    || matches!(
                        e.verdict,
                        crate::causal_reasoning::Identifiability::Underdetermined
                    )
                {
                    println!("    {} {}", style::ok("fix:"), e.remediation);
                }
            }
        }
        // Identify the causal effect of `source` on `target` via back-door /
        // front-door analysis of the frontier's link graph.
        CausalAction::Effect {
            frontier,
            source,
            on: target,
            json,
        } => {
            use crate::causal_graph::{CausalEffectVerdict, identify_effect};

            let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let verdict = identify_effect(&project, &source, &target);

            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&json!({
                        "ok": true,
                        "command": "causal.effect",
                        "frontier": frontier.display().to_string(),
                        "source": source,
                        "target": target,
                        "verdict": verdict,
                    }))
                    .expect("serialize causal.effect")
                );
                return;
            }

            println!();
            println!(
                "  {}",
                format!("VELA · CAUSAL · EFFECT · {} → {}", source, target)
                    .to_uppercase()
                    .dimmed()
            );
            println!("  {}", style::tick_row(60));
            match verdict {
                CausalEffectVerdict::Identified {
                    adjustment_set,
                    back_door_paths_considered,
                } => {
                    if adjustment_set.is_empty() {
                        println!(
                            "  {} no back-door adjustment needed",
                            style::ok("identified")
                        );
                    } else {
                        println!("  {} identified by adjusting on:", style::ok("identified"));
                        for z in &adjustment_set {
                            println!("    · {z}");
                        }
                    }
                    println!(
                        "  back-door paths considered: {}",
                        back_door_paths_considered
                    );
                }
                CausalEffectVerdict::IdentifiedByFrontDoor { mediator_set } => {
                    println!(
                        "  {} identified via front-door criterion (Pearl 1995 §3.3)",
                        style::ok("identified")
                    );
                    println!("  mediators that intercept all directed paths:");
                    for m in &mediator_set {
                        println!("    · {m}");
                    }
                    println!(
                        "  applies when source-target confounders are unobserved but the mediator chain is."
                    );
                }
                CausalEffectVerdict::NoCausalPath { reason } => {
                    println!("  {} no causal path: {reason}", style::warn("no_path"));
                }
                CausalEffectVerdict::Underidentified {
                    unblocked_back_door_paths,
                    candidates_tried,
                } => {
                    println!(
                        "  {} no observational adjustment set found ({} candidates tried)",
                        style::lost("underidentified"),
                        candidates_tried
                    );
                    println!("  open back-door paths:");
                    // Cap the listing at five paths to keep output readable.
                    for path in unblocked_back_door_paths.iter().take(5) {
                        println!("    · {}", path.join(" — "));
                    }
                    println!(
                        "  remediation: either intervene experimentally on {source}, or extend the link graph to make a confounder observable."
                    );
                }
                CausalEffectVerdict::UnknownNode { which } => {
                    fail(&which);
                }
            }
            println!();
        }
        // Dump parent/child adjacency for one node, or for all findings.
        CausalAction::Graph {
            frontier,
            node,
            json,
        } => {
            use crate::causal_graph::CausalGraph;
            let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let graph = CausalGraph::from_project(&project);

            // A named node narrows output to that node; otherwise all findings.
            let nodes: Vec<&str> = if let Some(n) = node.as_deref() {
                if !graph.contains(n) {
                    fail(&format!("node not in frontier: {n}"));
                }
                vec![n]
            } else {
                project.findings.iter().map(|f| f.id.as_str()).collect()
            };

            if json {
                let payload: Vec<_> = nodes
                    .iter()
                    .map(|n| {
                        let parents: Vec<&str> = graph.parents_of(n).collect();
                        let children: Vec<&str> = graph.children_of(n).collect();
                        json!({
                            "node": n,
                            "parents": parents,
                            "children": children,
                        })
                    })
                    .collect();
                println!(
                    "{}",
                    serde_json::to_string_pretty(&json!({
                        "ok": true,
                        "command": "causal.graph",
                        "node_count": graph.node_count(),
                        "edge_count": graph.edge_count(),
                        "nodes": payload,
                    }))
                    .expect("serialize causal.graph")
                );
                return;
            }

            println!();
            println!(
                "  {}",
                format!("VELA · CAUSAL · GRAPH · {}", frontier.display())
                    .to_uppercase()
                    .dimmed()
            );
            println!("  {}", style::tick_row(60));
            println!(
                "  {} nodes · {} edges",
                graph.node_count(),
                graph.edge_count()
            );
            println!();
            for n in &nodes {
                let parents: Vec<&str> = graph.parents_of(n).collect();
                let children: Vec<&str> = graph.children_of(n).collect();
                // Hide isolated nodes in the full listing, but always show an
                // explicitly requested node (nodes.len() == 1).
                if parents.is_empty() && children.is_empty() && nodes.len() > 1 {
                    continue; }
                println!("  {n}");
                if !parents.is_empty() {
                    println!("    parents: {}", parents.join(", "));
                }
                if !children.is_empty() {
                    println!("    children: {}", children.join(", "));
                }
            }
        }
        // Answer "what if node X's confidence had been v?" for a target node.
        CausalAction::Counterfactual {
            frontier,
            intervene_on,
            set_to,
            target,
            json,
        } => {
            use crate::counterfactual::{
                CounterfactualQuery, CounterfactualVerdict, answer_counterfactual,
            };

            let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let query = CounterfactualQuery {
                intervene_on: intervene_on.clone(),
                set_to,
                target: target.clone(),
            };
            let verdict = answer_counterfactual(&project, &query);

            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&json!({
                        "ok": true,
                        "command": "causal.counterfactual",
                        "frontier": frontier.display().to_string(),
                        "query": query,
                        "verdict": verdict,
                    }))
                    .expect("serialize causal.counterfactual")
                );
                return;
            }

            println!();
            println!(
                "  {}",
                format!(
                    "VELA · CAUSAL · COUNTERFACTUAL · do({intervene_on} := {set_to:.3}) → {target}"
                )
                .to_uppercase()
                .dimmed()
            );
            println!("  {}", style::tick_row(72));
            match verdict {
                CounterfactualVerdict::Resolved {
                    factual,
                    counterfactual,
                    delta,
                    paths_used,
                } => {
                    println!(
                        "  {} factual: {factual:.3} counterfactual: {counterfactual:.3} delta: {delta:+.3}",
                        style::ok("resolved")
                    );
                    println!(
                        "  twin-network propagation through {} causal path(s):",
                        paths_used.len()
                    );
                    for p in paths_used.iter().take(5) {
                        println!("    · {}", p.join(" → "));
                    }
                    println!(
                        "  reading: \"if {intervene_on}'s confidence had been {set_to:.3} \
                         instead of factual, {target}'s confidence would shift by {delta:+.3}.\""
                    );
                }
                CounterfactualVerdict::MechanismUnspecified { unspecified_edges } => {
                    println!(
                        "  {} causal path exists but {} edge(s) lack a mechanism annotation",
                        style::warn("mechanism_unspecified"),
                        unspecified_edges.len()
                    );
                    for (parent, child) in unspecified_edges.iter().take(8) {
                        println!("    · {parent} → {child}");
                    }
                    println!(
                        "  remediation: annotate one of the link mechanisms (linear / monotonic / threshold / saturating)."
                    );
                }
                CounterfactualVerdict::NoCausalPath { factual } => {
                    println!(
                        "  {} no directed path from {intervene_on} to {target}; counterfactual = factual = {factual:.3}",
                        style::warn("no_path")
                    );
                }
                CounterfactualVerdict::UnknownNode { which } => {
                    fail(&format!("node not in frontier: {which}"));
                }
                CounterfactualVerdict::InvalidIntervention { reason } => {
                    fail(&reason);
                }
            }
            println!();
        }
    }
}
9911
9912fn cmd_bridges(action: BridgesAction) {
9915 use crate::bridge::{Bridge, BridgeStatus, derive_bridges};
9916 use std::collections::HashMap;
9917
9918 fn bridges_dir(frontier: &Path) -> PathBuf {
9919 frontier.join(".vela/bridges")
9920 }
9921
9922 fn load_bridge(frontier: &Path, id: &str) -> Result<Bridge, String> {
9923 let path = bridges_dir(frontier).join(format!("{id}.json"));
9924 if !path.is_file() {
9925 return Err(format!("bridge not found: {id}"));
9926 }
9927 let data = std::fs::read_to_string(&path).map_err(|e| format!("read {id}: {e}"))?;
9928 serde_json::from_str(&data).map_err(|e| format!("parse {id}: {e}"))
9929 }
9930
9931 fn save_bridge(frontier: &Path, b: &Bridge) -> Result<(), String> {
9932 let dir = bridges_dir(frontier);
9933 std::fs::create_dir_all(&dir).map_err(|e| format!("mkdir bridges/: {e}"))?;
9934 let path = dir.join(format!("{}.json", b.id));
9935 let data = serde_json::to_string_pretty(b).map_err(|e| format!("serialize bridge: {e}"))?;
9936 std::fs::write(&path, format!("{data}\n")).map_err(|e| format!("write bridge: {e}"))
9937 }
9938
9939 fn default_reviewer_id() -> String {
9942 std::env::var("VELA_REVIEWER_ID").unwrap_or_else(|_| "reviewer:will-blair".to_string())
9943 }
9944
9945 fn emit_bridge_reviewed_event(
9956 frontier: &Path,
9957 bridge_id: &str,
9958 status: &str,
9959 reviewer_id: &str,
9960 note: Option<&str>,
9961 ) -> Result<(), String> {
9962 let mut payload = serde_json::json!({
9963 "bridge_id": bridge_id,
9964 "status": status,
9965 });
9966 if let Some(n) = note
9967 && !n.trim().is_empty()
9968 {
9969 payload["note"] = serde_json::Value::String(n.to_string());
9970 }
9971 let known_ids: Vec<String> = list_bridges(frontier)
9973 .unwrap_or_default()
9974 .into_iter()
9975 .map(|b| b.id)
9976 .collect();
9977 crate::events::validate_bridge_reviewed_against_state(&payload, &known_ids)?;
9978 let event = crate::events::new_bridge_reviewed_event(
9979 bridge_id,
9980 reviewer_id,
9981 "human",
9982 &format!("Bridge {status} by {reviewer_id}"),
9983 payload,
9984 Vec::new(),
9985 );
9986 let events_dir = frontier.join(".vela/events");
9987 std::fs::create_dir_all(&events_dir).map_err(|e| format!("mkdir .vela/events: {e}"))?;
9988 let event_path = events_dir.join(format!("{}.json", event.id));
9989 let data =
9990 serde_json::to_string_pretty(&event).map_err(|e| format!("serialize event: {e}"))?;
9991 std::fs::write(&event_path, format!("{data}\n")).map_err(|e| format!("write event: {e}"))
9992 }
9993
9994 fn list_bridges(frontier: &Path) -> Result<Vec<Bridge>, String> {
9995 let dir = bridges_dir(frontier);
9996 if !dir.is_dir() {
9997 return Ok(Vec::new());
9998 }
9999 let mut out = Vec::new();
10000 for entry in std::fs::read_dir(&dir).map_err(|e| format!("read bridges/: {e}"))? {
10001 let entry = entry.map_err(|e| format!("read entry: {e}"))?;
10002 let path = entry.path();
10003 if path.extension().and_then(|s| s.to_str()) != Some("json") {
10004 continue;
10005 }
10006 let data = std::fs::read_to_string(&path).map_err(|e| format!("read {path:?}: {e}"))?;
10007 let b: Bridge =
10008 serde_json::from_str(&data).map_err(|e| format!("parse {path:?}: {e}"))?;
10009 out.push(b);
10010 }
10011 out.sort_by(|a, b| {
10012 b.finding_refs
10013 .len()
10014 .cmp(&a.finding_refs.len())
10015 .then(a.entity_name.cmp(&b.entity_name))
10016 });
10017 Ok(out)
10018 }
10019
10020 match action {
10021 BridgesAction::Derive {
10022 frontier_a,
10023 label_a,
10024 frontier_b,
10025 label_b,
10026 json,
10027 } => {
10028 let a = repo::load_from_path(&frontier_a).unwrap_or_else(|e| fail_return(&e));
10029 let b = repo::load_from_path(&frontier_b).unwrap_or_else(|e| fail_return(&e));
10030 let now = chrono::Utc::now().to_rfc3339();
10031 let new_bridges =
10032 derive_bridges(&[(label_a.as_str(), &a), (label_b.as_str(), &b)], &now);
10033
10034 let existing = list_bridges(&frontier_a).unwrap_or_default();
10038 let existing_by_id: HashMap<String, Bridge> =
10039 existing.iter().map(|b| (b.id.clone(), b.clone())).collect();
10040 let mut written = 0;
10041 let mut preserved = 0;
10042 let mut new_ids = Vec::new();
10043 for mut bridge in new_bridges {
10044 if let Some(prev) = existing_by_id.get(&bridge.id)
10045 && prev.status != BridgeStatus::Derived
10046 {
10047 bridge.status = prev.status;
10049 bridge.derived_at = prev.derived_at.clone();
10050 preserved += 1;
10051 }
10052 save_bridge(&frontier_a, &bridge).unwrap_or_else(|e| fail_return(&e));
10053 new_ids.push(bridge.id.clone());
10054 written += 1;
10055 }
10056
10057 if json {
10058 println!(
10059 "{}",
10060 serde_json::to_string_pretty(&json!({
10061 "ok": true,
10062 "command": "bridges.derive",
10063 "frontier_a": frontier_a.display().to_string(),
10064 "frontier_b": frontier_b.display().to_string(),
10065 "bridges_written": written,
10066 "reviewer_judgments_preserved": preserved,
10067 "ids": new_ids,
10068 }))
10069 .expect("serialize bridges.derive")
10070 );
10071 return;
10072 }
10073
10074 println!();
10075 println!(
10076 " {}",
10077 format!("VELA · BRIDGES · DERIVE · {} ↔ {}", label_a, label_b)
10078 .to_uppercase()
10079 .dimmed()
10080 );
10081 println!(" {}", style::tick_row(60));
10082 println!(" {} {} bridge(s) materialized", style::ok("ok"), written);
10083 if preserved > 0 {
10084 println!(
10085 " {} {} reviewer judgment(s) preserved",
10086 style::ok("kept"),
10087 preserved
10088 );
10089 }
10090 for id in new_ids.iter().take(10) {
10091 println!(" · {id}");
10092 }
10093 if new_ids.len() > 10 {
10094 println!(" … and {} more", new_ids.len() - 10);
10095 }
10096 println!();
10097 }
10098 BridgesAction::List {
10099 frontier,
10100 status,
10101 json,
10102 } => {
10103 let mut bridges = list_bridges(&frontier).unwrap_or_else(|e| fail_return(&e));
10104 if let Some(s) = status.as_deref() {
10105 let want = match s.to_lowercase().as_str() {
10106 "derived" => BridgeStatus::Derived,
10107 "confirmed" => BridgeStatus::Confirmed,
10108 "refuted" => BridgeStatus::Refuted,
10109 other => fail_return(&format!(
10110 "unknown bridge status '{other}' (try derived|confirmed|refuted)"
10111 )),
10112 };
10113 bridges.retain(|b| b.status == want);
10114 }
10115 if json {
10116 println!(
10117 "{}",
10118 serde_json::to_string_pretty(&json!({
10119 "ok": true,
10120 "command": "bridges.list",
10121 "frontier": frontier.display().to_string(),
10122 "count": bridges.len(),
10123 "bridges": bridges,
10124 }))
10125 .expect("serialize bridges.list")
10126 );
10127 return;
10128 }
10129 println!();
10130 println!(
10131 " {}",
10132 format!("VELA · BRIDGES · LIST · {}", frontier.display())
10133 .to_uppercase()
10134 .dimmed()
10135 );
10136 println!(" {}", style::tick_row(60));
10137 println!(" {} bridge(s)", bridges.len());
10138 for b in &bridges {
10139 let chip = match b.status {
10140 BridgeStatus::Derived => style::warn("derived"),
10141 BridgeStatus::Confirmed => style::ok("confirmed"),
10142 BridgeStatus::Refuted => style::lost("refuted"),
10143 };
10144 println!();
10145 println!(
10146 " {chip} {} {} ↔ findings:{}",
10147 b.id,
10148 b.entity_name,
10149 b.finding_refs.len()
10150 );
10151 println!(" frontiers: {}", b.frontiers.join(", "));
10152 if let Some(t) = &b.tension {
10153 println!(" tension: {t}");
10154 }
10155 }
10156 println!();
10157 }
10158 BridgesAction::Show {
10159 frontier,
10160 bridge_id,
10161 json,
10162 } => {
10163 let b = load_bridge(&frontier, &bridge_id).unwrap_or_else(|e| fail_return(&e));
10164 if json {
10165 println!("{}", serde_json::to_string_pretty(&b).expect("serialize"));
10166 return;
10167 }
10168 println!();
10169 println!(
10170 " {}",
10171 format!("VELA · BRIDGES · SHOW · {}", b.id)
10172 .to_uppercase()
10173 .dimmed()
10174 );
10175 println!(" {}", style::tick_row(60));
10176 println!(" entity: {}", b.entity_name);
10177 println!(" status: {:?}", b.status);
10178 println!(" frontiers: {}", b.frontiers.join(", "));
10179 if !b.frontier_ids.is_empty() {
10180 println!(" frontier_ids: {}", b.frontier_ids.join(", "));
10181 }
10182 if let Some(t) = &b.tension {
10183 println!(" tension: {t}");
10184 }
10185 println!(" derived_at: {}", b.derived_at);
10186 println!(" finding refs ({}):", b.finding_refs.len());
10187 for r in &b.finding_refs {
10188 let dir = r.direction.as_deref().unwrap_or("—");
10189 let truncated: String = r.assertion_text.chars().take(72).collect();
10190 println!(
10191 " · [{}] {} (conf={:.2}, dir={})",
10192 r.frontier, r.finding_id, r.confidence, dir
10193 );
10194 println!(" {truncated}");
10195 }
10196 println!();
10197 }
10198 BridgesAction::Confirm {
10199 frontier,
10200 bridge_id,
10201 reviewer,
10202 note,
10203 json,
10204 } => {
10205 let mut b = load_bridge(&frontier, &bridge_id).unwrap_or_else(|e| fail_return(&e));
10206 let reviewer_id = reviewer.unwrap_or_else(default_reviewer_id);
10207 b.status = BridgeStatus::Confirmed;
10208 save_bridge(&frontier, &b).unwrap_or_else(|e| fail_return(&e));
10209 let _ = emit_bridge_reviewed_event(
10213 &frontier,
10214 &bridge_id,
10215 "confirmed",
10216 &reviewer_id,
10217 note.as_deref(),
10218 );
10219 if json {
10220 println!("{}", serde_json::to_string_pretty(&b).expect("serialize"));
10221 return;
10222 }
10223 println!();
10224 println!(" {} {} now confirmed", style::ok("confirmed"), b.id);
10225 println!();
10226 }
10227 BridgesAction::Refute {
10228 frontier,
10229 bridge_id,
10230 reviewer,
10231 note,
10232 json,
10233 } => {
10234 let mut b = load_bridge(&frontier, &bridge_id).unwrap_or_else(|e| fail_return(&e));
10235 let reviewer_id = reviewer.unwrap_or_else(default_reviewer_id);
10236 b.status = BridgeStatus::Refuted;
10237 save_bridge(&frontier, &b).unwrap_or_else(|e| fail_return(&e));
10238 let _ = emit_bridge_reviewed_event(
10239 &frontier,
10240 &bridge_id,
10241 "refuted",
10242 &reviewer_id,
10243 note.as_deref(),
10244 );
10245 if json {
10246 println!("{}", serde_json::to_string_pretty(&b).expect("serialize"));
10247 return;
10248 }
10249 println!();
10250 println!(" {} {} now refuted", style::lost("refuted"), b.id);
10251 println!();
10252 }
10253 }
10254}
10255
10256fn cmd_federation(action: FederationAction) {
10258 use crate::federation::PeerHub;
10259
10260 match action {
10261 FederationAction::PeerAdd {
10262 frontier,
10263 id,
10264 url,
10265 pubkey,
10266 note,
10267 json,
10268 } => {
10269 let peer = PeerHub {
10270 id: id.clone(),
10271 url: url.clone(),
10272 public_key: pubkey.trim().to_string(),
10273 added_at: chrono::Utc::now().to_rfc3339(),
10274 note: note.clone(),
10275 };
10276 peer.validate().unwrap_or_else(|e| fail_return(&e));
10277
10278 let mut project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
10279 if project.peers.iter().any(|p| p.id == id) {
10280 fail(&format!("peer '{id}' already in registry"));
10281 }
10282 project.peers.push(peer.clone());
10283 repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));
10284
10285 if json {
10286 println!(
10287 "{}",
10288 serde_json::to_string_pretty(&json!({
10289 "ok": true,
10290 "command": "federation.peer-add",
10291 "frontier": frontier.display().to_string(),
10292 "peer": peer,
10293 "registered_count": project.peers.len(),
10294 }))
10295 .expect("serialize federation.peer-add")
10296 );
10297 } else {
10298 println!(
10299 "{} peer {} (pubkey {}…) at {}",
10300 style::ok("registered"),
10301 id,
10302 &peer.public_key[..16],
10303 peer.url
10304 );
10305 }
10306 }
10307 FederationAction::PeerList { frontier, json } => {
10308 let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
10309 if json {
10310 println!(
10311 "{}",
10312 serde_json::to_string_pretty(&json!({
10313 "ok": true,
10314 "command": "federation.peer-list",
10315 "frontier": frontier.display().to_string(),
10316 "peers": project.peers,
10317 }))
10318 .expect("serialize federation.peer-list")
10319 );
10320 } else {
10321 println!();
10322 println!(
10323 " {}",
10324 format!("VELA · FEDERATION · PEERS · {}", frontier.display())
10325 .to_uppercase()
10326 .dimmed()
10327 );
10328 println!(" {}", style::tick_row(60));
10329 if project.peers.is_empty() {
10330 println!(" (no peers registered)");
10331 } else {
10332 for p in &project.peers {
10333 let note_suffix = if p.note.is_empty() {
10334 String::new()
10335 } else {
10336 format!(" · {}", p.note)
10337 };
10338 println!(
10339 " {:<24} {} {}…{note_suffix}",
10340 p.id,
10341 p.url,
10342 &p.public_key[..16]
10343 );
10344 }
10345 }
10346 }
10347 }
10348 FederationAction::Sync {
10349 frontier,
10350 peer_id,
10351 url,
10352 via_hub,
10353 vfr_id,
10354 allow_cross_vfr,
10355 dry_run,
10356 json,
10357 } => {
10358 use crate::federation::{self, DiscoveryResult};
10359
10360 let mut project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
10361 let Some(peer) = project.peers.iter().find(|p| p.id == peer_id).cloned() else {
10362 fail(&format!(
10363 "peer '{peer_id}' not in registry; run `vela federation peer add` first"
10364 ));
10365 };
10366 let local_frontier_id = project.frontier_id();
10367
10368 if via_hub
10375 && let Some(target) = vfr_id.as_deref()
10376 && target != local_frontier_id
10377 && !allow_cross_vfr
10378 {
10379 fail(&format!(
10380 "cross-vfr sync refused: --vfr-id {target} differs from local frontier_id {local_frontier_id}. \
10381 Pass --allow-cross-vfr to opt in (every peer-side finding will be recorded as a \
10382 missing_locally conflict). Or omit --vfr-id to default to the local frontier id."
10383 ));
10384 }
10385
10386 #[derive(Debug)]
10388 enum SyncOutcome {
10389 Resolved(crate::project::Project, String), BrokenLocator(String, String, u16), UnverifiedEntry(String, String), EntryNotFound(String, u16),
10393 }
10394
10395 let outcome = if via_hub {
10396 let target_vfr = vfr_id.clone().unwrap_or_else(|| local_frontier_id.clone());
10397 match federation::discover_peer_frontier(
10398 &peer.url,
10399 &target_vfr,
10400 Some(&peer.public_key),
10401 ) {
10402 DiscoveryResult::Resolved(p) => {
10403 let src =
10404 format!("{}/entries/{}", peer.url.trim_end_matches('/'), target_vfr);
10405 SyncOutcome::Resolved(p, src)
10406 }
10407 DiscoveryResult::BrokenLocator {
10408 vfr_id,
10409 locator,
10410 status,
10411 } => SyncOutcome::BrokenLocator(vfr_id, locator, status),
10412 DiscoveryResult::UnverifiedEntry { vfr_id, reason } => {
10413 SyncOutcome::UnverifiedEntry(vfr_id, reason)
10414 }
10415 DiscoveryResult::EntryNotFound { vfr_id, status } => {
10416 SyncOutcome::EntryNotFound(vfr_id, status)
10417 }
10418 DiscoveryResult::Unreachable { url, error } => {
10419 fail(&format!("peer hub unreachable ({url}): {error}"));
10420 }
10421 }
10422 } else {
10423 let resolved_url = url.unwrap_or_else(|| {
10424 let base = peer.url.trim_end_matches('/');
10425 format!("{base}/manifest/{local_frontier_id}.json")
10426 });
10427 match federation::fetch_peer_frontier(&resolved_url) {
10428 Ok(p) => SyncOutcome::Resolved(p, resolved_url),
10429 Err(e) => fail(&format!("direct fetch failed: {e}")),
10430 }
10431 };
10432
10433 let peer_source: String;
10436 let peer_state = match outcome {
10437 SyncOutcome::Resolved(p, src) => {
10438 if !json {
10439 println!(" · resolved via {src}");
10440 }
10441 peer_source = src;
10442 p
10443 }
10444 SyncOutcome::BrokenLocator(vfr, locator, status) => {
10445 if dry_run {
10446 if json {
10447 println!(
10448 "{}",
10449 serde_json::to_string_pretty(&json!({
10450 "ok": true,
10451 "command": "federation.sync",
10452 "dry_run": true,
10453 "outcome": "broken_locator",
10454 "vfr_id": vfr,
10455 "locator": locator,
10456 "http_status": status,
10457 }))
10458 .expect("serialize")
10459 );
10460 } else {
10461 println!(
10462 "{} dry-run: peer entry resolved but locator dead",
10463 style::warn("broken_locator")
10464 );
10465 println!(" vfr_id: {vfr}");
10466 println!(" locator: {locator} (HTTP {status})");
10467 }
10468 return;
10469 }
10470 let report = federation::record_locator_failure(
10471 &mut project,
10472 &peer_id,
10473 &vfr,
10474 &locator,
10475 status,
10476 );
10477 repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));
10478 if json {
10479 println!(
10480 "{}",
10481 serde_json::to_string_pretty(&json!({
10482 "ok": true,
10483 "command": "federation.sync",
10484 "outcome": "broken_locator",
10485 "report": report,
10486 }))
10487 .expect("serialize")
10488 );
10489 } else {
10490 println!(
10491 "{} sync recorded broken-locator conflict against {peer_id}",
10492 style::warn("broken_locator")
10493 );
10494 println!(" vfr_id: {vfr}");
10495 println!(" locator: {locator} (HTTP {status})");
10496 println!(" events appended: {}", report.events_appended);
10497 }
10498 return;
10499 }
10500 SyncOutcome::UnverifiedEntry(vfr, reason) => {
10501 if dry_run {
10502 if json {
10503 println!(
10504 "{}",
10505 serde_json::to_string_pretty(&json!({
10506 "ok": true,
10507 "command": "federation.sync",
10508 "dry_run": true,
10509 "outcome": "unverified_peer_entry",
10510 "vfr_id": vfr,
10511 "reason": reason,
10512 }))
10513 .expect("serialize")
10514 );
10515 } else {
10516 println!(
10517 "{} dry-run: peer entry signature did not verify",
10518 style::lost("unverified_peer_entry")
10519 );
10520 println!(" vfr_id: {vfr}");
10521 println!(" reason: {reason}");
10522 }
10523 return;
10524 }
10525 let report =
10526 federation::record_unverified_entry(&mut project, &peer_id, &vfr, &reason);
10527 repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));
10528 if json {
10529 println!(
10530 "{}",
10531 serde_json::to_string_pretty(&json!({
10532 "ok": true,
10533 "command": "federation.sync",
10534 "outcome": "unverified_peer_entry",
10535 "report": report,
10536 }))
10537 .expect("serialize")
10538 );
10539 } else {
10540 println!(
10541 "{} sync halted; peer's registry entry signature did not verify",
10542 style::lost("unverified_peer_entry")
10543 );
10544 println!(" vfr_id: {vfr}");
10545 println!(" reason: {reason}");
10546 }
10547 return;
10548 }
10549 SyncOutcome::EntryNotFound(vfr, status) => {
10550 if json {
10551 println!(
10552 "{}",
10553 serde_json::to_string_pretty(&json!({
10554 "ok": false,
10555 "command": "federation.sync",
10556 "outcome": "entry_not_found",
10557 "vfr_id": vfr,
10558 "http_status": status,
10559 }))
10560 .expect("serialize")
10561 );
10562 } else {
10563 println!(
10564 "{} peer's hub does not publish vfr_id {vfr} (HTTP {status})",
10565 style::warn("entry_not_found")
10566 );
10567 }
10568 return;
10569 }
10570 };
10571
10572 if dry_run {
10573 let conflicts = federation::diff_frontiers(&project, &peer_state);
10574 if json {
10575 println!(
10576 "{}",
10577 serde_json::to_string_pretty(&json!({
10578 "ok": true,
10579 "command": "federation.sync",
10580 "dry_run": true,
10581 "peer_id": peer_id,
10582 "peer_source": peer_source,
10583 "conflicts": conflicts,
10584 }))
10585 .expect("serialize federation.sync (dry-run)")
10586 );
10587 } else {
10588 println!(
10589 "{} dry-run vs {peer_id} ({}): {} conflict(s)",
10590 style::ok("ok"),
10591 peer_source,
10592 conflicts.len()
10593 );
10594 for c in &conflicts {
10595 println!(" · {} {} {}", c.kind.as_str(), c.finding_id, c.detail);
10596 }
10597 }
10598 return;
10599 }
10600
10601 let report = federation::sync_with_peer(&mut project, &peer_id, &peer_state);
10602 repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));
10603
10604 if json {
10605 println!(
10606 "{}",
10607 serde_json::to_string_pretty(&json!({
10608 "ok": true,
10609 "command": "federation.sync",
10610 "peer_id": peer_id,
10611 "peer_source": peer_source,
10612 "report": report,
10613 }))
10614 .expect("serialize federation.sync")
10615 );
10616 } else {
10617 println!(
10618 "{} synced with {} ({})",
10619 style::ok("ok"),
10620 peer_id,
10621 peer_source
10622 );
10623 println!(
10624 " our: {}",
10625 &report.our_snapshot_hash[..16.min(report.our_snapshot_hash.len())]
10626 );
10627 println!(
10628 " peer: {}",
10629 &report.peer_snapshot_hash[..16.min(report.peer_snapshot_hash.len())]
10630 );
10631 println!(
10632 " conflicts: {} events appended: {}",
10633 report.conflicts.len(),
10634 report.events_appended
10635 );
10636 for c in &report.conflicts {
10637 println!(" · {} {} {}", c.kind.as_str(), c.finding_id, c.detail);
10638 }
10639 }
10640 }
10641 FederationAction::PushResolution {
10642 frontier,
10643 conflict_event_id,
10644 to,
10645 key,
10646 vfr_id,
10647 json,
10648 } => {
10649 cmd_federation_push_resolution(frontier, conflict_event_id, to, key, vfr_id, json);
10650 }
10651 FederationAction::PeerRemove { frontier, id, json } => {
10652 let mut project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
10653 let before = project.peers.len();
10654 project.peers.retain(|p| p.id != id);
10655 if project.peers.len() == before {
10656 fail(&format!("peer '{id}' not found in registry"));
10657 }
10658 repo::save_to_path(&frontier, &project).unwrap_or_else(|e| fail_return(&e));
10659
10660 if json {
10661 println!(
10662 "{}",
10663 serde_json::to_string_pretty(&json!({
10664 "ok": true,
10665 "command": "federation.peer-remove",
10666 "frontier": frontier.display().to_string(),
10667 "removed": id,
10668 "remaining": project.peers.len(),
10669 }))
10670 .expect("serialize federation.peer-remove")
10671 );
10672 } else {
10673 println!(
10674 "{} peer {} ({} remaining)",
10675 style::ok("removed"),
10676 id,
10677 project.peers.len()
10678 );
10679 }
10680 }
10681 }
10682}
10683
10684fn cmd_federation_push_resolution(
10696 frontier: PathBuf,
10697 conflict_event_id: String,
10698 to: String,
10699 key: Option<PathBuf>,
10700 vfr_id: Option<String>,
10701 json: bool,
10702) {
10703 use crate::canonical;
10704 use crate::sign;
10705
10706 let project = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
10707
10708 let Some(peer) = project.peers.iter().find(|p| p.id == to).cloned() else {
10709 fail(&format!(
10710 "peer '{to}' not in registry; run `vela federation peer-add` first"
10711 ));
10712 };
10713
10714 let Some(resolution) = project
10716 .events
10717 .iter()
10718 .find(|e| {
10719 e.kind == "frontier.conflict_resolved"
10720 && e.payload.get("conflict_event_id").and_then(|v| v.as_str())
10721 == Some(conflict_event_id.as_str())
10722 })
10723 .cloned()
10724 else {
10725 fail(&format!(
10726 "no frontier.conflict_resolved event paired with conflict {conflict_event_id} in {}",
10727 frontier.display()
10728 ));
10729 };
10730
10731 let actor_id = resolution.actor.id.clone();
10734 let Some(actor) = project.actors.iter().find(|a| a.id == actor_id) else {
10735 fail(&format!(
10736 "resolution event's actor.id ({actor_id}) is not in the frontier's actor registry; \
10737 register the reviewer with `vela actor add` before pushing"
10738 ));
10739 };
10740
10741 let key_path = key.unwrap_or_else(|| {
10744 let home = std::env::var("HOME").unwrap_or_default();
10745 let base = PathBuf::from(home)
10746 .join(".config")
10747 .join("vela")
10748 .join("keys");
10749 let safe_id = actor.id.replace([':', '/'], "_");
10750 let by_actor = base.join(format!("{safe_id}.key"));
10751 if by_actor.exists() {
10752 by_actor
10753 } else {
10754 base.join("private.key")
10755 }
10756 });
10757
10758 let signing_key = sign::load_signing_key_from_path(&key_path).unwrap_or_else(|e| {
10759 fail_return(&format!(
10760 "load private key from {}: {e}",
10761 key_path.display()
10762 ))
10763 });
10764 let pubkey_hex = sign::pubkey_hex(&signing_key);
10765 if !pubkey_hex.eq_ignore_ascii_case(&actor.public_key) {
10766 fail(&format!(
10767 "private key at {} does not match actor {}'s registered public key. \
10768 Loaded pubkey {}, expected {}.",
10769 key_path.display(),
10770 actor.id,
10771 &pubkey_hex[..16],
10772 &actor.public_key[..16]
10773 ));
10774 }
10775
10776 let signature_hex = sign::sign_event(&resolution, &signing_key)
10779 .unwrap_or_else(|e| fail_return(&format!("sign event: {e}")));
10780
10781 let mut body = resolution.clone();
10786 body.signature = None;
10787 let body_value =
10788 serde_json::to_value(&body).unwrap_or_else(|e| fail_return(&format!("serialize: {e}")));
10789 let _canonical_check = canonical::to_canonical_bytes(&body_value)
10790 .unwrap_or_else(|e| fail_return(&format!("canonicalize: {e}")));
10791
10792 let target_vfr = vfr_id.unwrap_or_else(|| project.frontier_id());
10793 let url = format!(
10794 "{}/entries/{}/events",
10795 peer.url.trim_end_matches('/'),
10796 target_vfr
10797 );
10798
10799 let url_owned = url.clone();
10801 let pubkey_owned = pubkey_hex.clone();
10802 let signature_owned = signature_hex.clone();
10803 let body_owned = body_value.clone();
10804 let response: Result<(u16, String), String> = std::thread::spawn(move || {
10805 let client = reqwest::blocking::Client::new();
10806 let resp = client
10807 .post(&url_owned)
10808 .header("X-Vela-Signer-Pubkey", &pubkey_owned)
10809 .header("X-Vela-Signature", &signature_owned)
10810 .json(&body_owned)
10811 .send()
10812 .map_err(|e| format!("HTTP POST {url_owned}: {e}"))?;
10813 let status = resp.status().as_u16();
10814 let text = resp.text().unwrap_or_default();
10815 Ok((status, text))
10816 })
10817 .join()
10818 .map_err(|_| "push thread panicked".to_string())
10819 .unwrap_or_else(|e| fail_return(&e));
10820
10821 let (status, text) = response.unwrap_or_else(|e| fail_return(&e));
10822 let parsed: serde_json::Value =
10823 serde_json::from_str(&text).unwrap_or_else(|_| json!({ "raw": text }));
10824
10825 let accepted = matches!(status, 200..=202);
10826 if json {
10827 println!(
10828 "{}",
10829 serde_json::to_string_pretty(&json!({
10830 "ok": accepted,
10831 "command": "federation.push-resolution",
10832 "frontier": frontier.display().to_string(),
10833 "peer_id": to,
10834 "url": url,
10835 "conflict_event_id": conflict_event_id,
10836 "event_id": resolution.id,
10837 "actor_id": actor.id,
10838 "http_status": status,
10839 "response": parsed,
10840 }))
10841 .expect("serialize federation.push-resolution")
10842 );
10843 } else if accepted {
10844 println!(
10845 "{} resolution {} pushed to {} (HTTP {})",
10846 style::ok("ok"),
10847 &resolution.id[..16.min(resolution.id.len())],
10848 to,
10849 status
10850 );
10851 println!(" url: {url}");
10852 println!(" signer: {} (actor {})", &pubkey_hex[..16], actor.id);
10853 } else {
10854 println!("{} push refused (HTTP {})", style::lost("rejected"), status);
10855 println!(" url: {url}");
10856 println!(" response: {text}");
10857 std::process::exit(1);
10858 }
10859}
10860
10861fn cmd_queue(action: QueueAction) {
10866 use crate::queue;
10867 match action {
10868 QueueAction::List { queue_file, json } => {
10869 let path = queue_file.unwrap_or_else(queue::default_queue_path);
10870 let q = queue::load(&path).unwrap_or_else(|e| fail_return(&e));
10871 if json {
10872 let payload = json!({
10873 "ok": true,
10874 "command": "queue.list",
10875 "queue_file": path.display().to_string(),
10876 "schema": q.schema,
10877 "actions": q.actions,
10878 });
10879 println!(
10880 "{}",
10881 serde_json::to_string_pretty(&payload).expect("failed to serialize queue.list")
10882 );
10883 } else {
10884 println!();
10885 println!(
10886 " {}",
10887 format!("VELA · QUEUE · LIST · {}", path.display())
10888 .to_uppercase()
10889 .dimmed()
10890 );
10891 println!(" {}", style::tick_row(60));
10892 if q.actions.is_empty() {
10893 println!(" (queue is empty)");
10894 } else {
10895 for (idx, action) in q.actions.iter().enumerate() {
10896 println!(
10897 " [{idx}] {} → {} queued {}",
10898 action.kind,
10899 action.frontier.display(),
10900 action.queued_at
10901 );
10902 }
10903 }
10904 }
10905 }
10906 QueueAction::Clear { queue_file, json } => {
10907 let path = queue_file.unwrap_or_else(queue::default_queue_path);
10908 let dropped = queue::clear(&path).unwrap_or_else(|e| fail_return(&e));
10909 if json {
10910 let payload = json!({
10911 "ok": true,
10912 "command": "queue.clear",
10913 "queue_file": path.display().to_string(),
10914 "dropped": dropped,
10915 });
10916 println!(
10917 "{}",
10918 serde_json::to_string_pretty(&payload)
10919 .expect("failed to serialize queue.clear")
10920 );
10921 } else {
10922 println!("{} dropped {dropped} queued action(s)", style::ok("ok"));
10923 }
10924 }
10925 QueueAction::Sign {
10926 actor,
10927 key,
10928 queue_file,
10929 yes_to_all,
10930 json,
10931 } => {
10932 let path = queue_file.unwrap_or_else(queue::default_queue_path);
10933 let q = queue::load(&path).unwrap_or_else(|e| fail_return(&e));
10934 if q.actions.is_empty() {
10935 if json {
10936 println!("{}", json!({"ok": true, "signed": 0, "remaining": 0}));
10937 } else {
10938 println!("{} queue is empty", style::ok("ok"));
10939 }
10940 return;
10941 }
10942 let key_hex = std::fs::read_to_string(&key)
10943 .map(|s| s.trim().to_string())
10944 .unwrap_or_else(|e| fail_return(&format!("read key {}: {e}", key.display())));
10945 let signing_key = parse_signing_key(&key_hex);
10946 let mut signed_count = 0usize;
10947 let mut remaining = Vec::new();
10948 for action in q.actions.iter() {
10949 if !yes_to_all && !confirm_action(action) {
10950 remaining.push(action.clone());
10951 continue;
10952 }
10953 match sign_and_apply(&signing_key, &actor, action) {
10954 Ok(report) => {
10955 signed_count += 1;
10956 if !json {
10957 println!(
10958 "{} {} on {} → {}",
10959 style::ok("signed"),
10960 action.kind,
10961 action.frontier.display(),
10962 report
10963 );
10964 }
10965 }
10966 Err(error) => {
10967 remaining.push(action.clone());
10969 if !json {
10970 eprintln!(
10971 "{} {} on {}: {error}",
10972 style::warn("failed"),
10973 action.kind,
10974 action.frontier.display()
10975 );
10976 }
10977 }
10978 }
10979 }
10980 queue::replace_actions(&path, remaining.clone()).unwrap_or_else(|e| fail_return(&e));
10981 if json {
10982 let payload = json!({
10983 "ok": true,
10984 "command": "queue.sign",
10985 "signed": signed_count,
10986 "remaining": remaining.len(),
10987 });
10988 println!(
10989 "{}",
10990 serde_json::to_string_pretty(&payload).expect("failed to serialize queue.sign")
10991 );
10992 } else {
10993 println!(
10994 "{} signed {signed_count} action(s); {} remaining in queue",
10995 style::ok("ok"),
10996 remaining.len()
10997 );
10998 }
10999 }
11000 }
11001}
11002
11003fn parse_signing_key(hex_str: &str) -> ed25519_dalek::SigningKey {
11004 let bytes = hex::decode(hex_str)
11005 .unwrap_or_else(|e| fail_return(&format!("invalid private-key hex: {e}")));
11006 let key_bytes: [u8; 32] = bytes
11007 .try_into()
11008 .unwrap_or_else(|_| fail_return("private key must be 32 bytes"));
11009 ed25519_dalek::SigningKey::from_bytes(&key_bytes)
11010}
11011
11012fn confirm_action(action: &crate::queue::QueuedAction) -> bool {
11013 use std::io::{self, BufRead, Write};
11014 let mut stdout = io::stdout().lock();
11015 let _ = writeln!(
11016 stdout,
11017 " sign {} on {}? [y/N] ",
11018 action.kind,
11019 action.frontier.display()
11020 );
11021 let _ = stdout.flush();
11022 drop(stdout);
11023 let stdin = io::stdin();
11024 let mut line = String::new();
11025 if stdin.lock().read_line(&mut line).is_err() {
11026 return false;
11027 }
11028 matches!(line.trim().to_lowercase().as_str(), "y" | "yes")
11029}
11030
/// Sign and apply one queued action against its frontier.
///
/// Dispatches on the action's string `kind`:
/// - `propose_*` kinds build a proposal (review / note / confidence revision /
///   retraction) targeting a finding, sign it, and create-or-apply it.
/// - `accept_proposal` / `reject_proposal` sign a canonical review preimage
///   and then accept or reject the named proposal.
///
/// Returns a short human-readable report string on success, or an error
/// string (unknown kind, missing argument, or a failure from the proposal
/// layer). All required inputs are read from `action.args` (JSON).
fn sign_and_apply(
    signing_key: &ed25519_dalek::SigningKey,
    actor: &str,
    action: &crate::queue::QueuedAction,
) -> Result<String, String> {
    use crate::events::StateTarget;
    use crate::proposals;
    let args = &action.args;
    match action.kind.as_str() {
        "propose_review" | "propose_note" | "propose_revise_confidence" | "propose_retract" => {
            // Map the queued kind onto the proposal-event kind.
            let kind = match action.kind.as_str() {
                "propose_review" => "finding.review",
                "propose_note" => "finding.note",
                "propose_revise_confidence" => "finding.confidence_revise",
                "propose_retract" => "finding.retract",
                _ => unreachable!(),
            };
            let target_id = args
                .get("target_finding_id")
                .and_then(Value::as_str)
                .ok_or("target_finding_id missing")?;
            let reason = args
                .get("reason")
                .and_then(Value::as_str)
                .ok_or("reason missing")?;
            // Each kind carries a different required payload field.
            let payload = match action.kind.as_str() {
                "propose_review" => {
                    let status = args
                        .get("status")
                        .and_then(Value::as_str)
                        .ok_or("status missing")?;
                    json!({"status": status})
                }
                "propose_note" => {
                    let text = args
                        .get("text")
                        .and_then(Value::as_str)
                        .ok_or("text missing")?;
                    json!({"text": text})
                }
                "propose_revise_confidence" => {
                    let new_score = args
                        .get("new_score")
                        .and_then(Value::as_f64)
                        .ok_or("new_score missing")?;
                    json!({"new_score": new_score})
                }
                "propose_retract" => json!({}),
                _ => unreachable!(),
            };
            // Honor a queued timestamp so the proposal reflects when it was
            // staged, not when it was signed; fall back to "now".
            let created_at = args
                .get("created_at")
                .and_then(Value::as_str)
                .map(String::from)
                .unwrap_or_else(|| chrono::Utc::now().to_rfc3339());
            let mut proposal = proposals::new_proposal(
                kind,
                StateTarget {
                    r#type: "finding".to_string(),
                    id: target_id.to_string(),
                },
                actor,
                "human",
                reason,
                payload,
                Vec::new(),
                Vec::new(),
            );
            proposal.created_at = created_at;
            // Recompute the id after overriding created_at, since the id is
            // derived from the proposal's contents.
            proposal.id = proposals::proposal_id(&proposal);
            // NOTE(review): the returned signature is discarded — presumably
            // sign_proposal attaches it to `proposal` or its validation is the
            // point; confirm it is not meant to be passed along.
            let _signature = crate::sign::sign_proposal(&proposal, signing_key)?;
            let result = proposals::create_or_apply(&action.frontier, proposal, false)
                .map_err(|e| format!("create_or_apply: {e}"))?;
            Ok(format!("proposal {}", result.proposal_id))
        }
        "accept_proposal" | "reject_proposal" => {
            let proposal_id = args
                .get("proposal_id")
                .and_then(Value::as_str)
                .ok_or("proposal_id missing")?;
            let reason = args
                .get("reason")
                .and_then(Value::as_str)
                .ok_or("reason missing")?;
            let timestamp = args
                .get("timestamp")
                .and_then(Value::as_str)
                .map(String::from)
                .unwrap_or_else(|| chrono::Utc::now().to_rfc3339());
            // Canonical preimage of the review decision, signed below.
            let preimage = json!({
                "action": if action.kind == "accept_proposal" { "accept" } else { "reject" },
                "proposal_id": proposal_id,
                "reviewer_id": actor,
                "reason": reason,
                "timestamp": timestamp,
            });
            let bytes = crate::canonical::to_canonical_bytes(&preimage)?;
            use ed25519_dalek::Signer;
            // NOTE(review): this signature is also unused afterwards — the
            // accept/reject paths below do not take it. Verify whether it was
            // meant to be recorded with the event.
            let _signature = hex::encode(signing_key.sign(&bytes).to_bytes());
            if action.kind == "accept_proposal" {
                let event_id =
                    crate::proposals::accept_at_path(&action.frontier, proposal_id, actor, reason)
                        .map_err(|e| format!("accept_at_path: {e}"))?;
                Ok(format!("event {event_id}"))
            } else {
                crate::proposals::reject_at_path(&action.frontier, proposal_id, actor, reason)
                    .map_err(|e| format!("reject_at_path: {e}"))?;
                Ok(format!("rejected {proposal_id}"))
            }
        }
        other => Err(format!("unsupported queued action kind '{other}'")),
    }
}
11152
11153fn cmd_entity(action: EntityAction) {
11165 use crate::entity_resolve;
11166 match action {
11167 EntityAction::Resolve {
11168 frontier,
11169 force,
11170 json,
11171 } => {
11172 let mut p = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
11173 let report = entity_resolve::resolve_frontier(&mut p, force);
11174 repo::save_to_path(&frontier, &p).unwrap_or_else(|e| fail_return(&e));
11175 if json {
11176 println!(
11177 "{}",
11178 serde_json::to_string_pretty(&serde_json::json!({
11179 "ok": true,
11180 "command": "entity.resolve",
11181 "frontier_path": frontier.display().to_string(),
11182 "report": report,
11183 }))
11184 .expect("serialize")
11185 );
11186 } else {
11187 println!(
11188 "{} resolved {} of {} entities ({} already, {} unresolved) across {} findings",
11189 style::ok("entity"),
11190 report.resolved,
11191 report.total_entities,
11192 report.already_resolved,
11193 report.unresolved_count,
11194 report.findings_touched,
11195 );
11196 let unresolved_summary: std::collections::BTreeSet<&str> = report
11197 .per_finding
11198 .iter()
11199 .flat_map(|f| f.unresolved.iter().map(String::as_str))
11200 .collect();
11201 if !unresolved_summary.is_empty() {
11202 let take = unresolved_summary.iter().take(8).collect::<Vec<_>>();
11203 println!(
11204 " unresolved (first {}): {}",
11205 take.len(),
11206 take.iter().copied().cloned().collect::<Vec<_>>().join(", ")
11207 );
11208 }
11209 }
11210 }
11211 EntityAction::List { json } => {
11212 let entries: Vec<serde_json::Value> = entity_resolve::iter_bundled()
11213 .map(|(name, etype, source, id)| {
11214 serde_json::json!({
11215 "canonical_name": name,
11216 "entity_type": etype,
11217 "source": source,
11218 "id": id,
11219 })
11220 })
11221 .collect();
11222 if json {
11223 println!(
11224 "{}",
11225 serde_json::to_string_pretty(&serde_json::json!({
11226 "ok": true,
11227 "command": "entity.list",
11228 "count": entries.len(),
11229 "entries": entries,
11230 }))
11231 .expect("serialize")
11232 );
11233 } else {
11234 println!("{} {} bundled entries", style::ok("entity"), entries.len());
11235 for e in &entries {
11236 println!(
11237 " {:32} {:18} {} {}",
11238 e["canonical_name"].as_str().unwrap_or("?"),
11239 e["entity_type"].as_str().unwrap_or("?"),
11240 e["source"].as_str().unwrap_or("?"),
11241 e["id"].as_str().unwrap_or("?"),
11242 );
11243 }
11244 }
11245 }
11246 }
11247}
11248
/// Handles `vela link …`: adds a typed link from one finding to another,
/// either local (`vf_<hex>`) or cross-frontier (`vf_<hex>@vfr_<hex>`).
///
/// Validation order: link type enum, --inferred-by enum, --to syntax, source
/// finding existence, then local-target or declared-dep checks. For
/// cross-frontier targets with an http(s) locator, a best-effort fetch warns
/// about superseded or missing targets (skippable via --no-check-target).
fn cmd_link(action: LinkAction) {
    use crate::bundle::{Link, LinkRef};
    match action {
        LinkAction::Add {
            frontier,
            from,
            to,
            r#type,
            note,
            inferred_by,
            no_check_target,
            json,
        } => {
            validate_enum_arg("--type", &r#type, bundle::VALID_LINK_TYPES);
            if !["compiler", "reviewer", "author"].contains(&inferred_by.as_str()) {
                fail(&format!(
                    "invalid --inferred-by '{inferred_by}'. Valid: compiler, reviewer, author"
                ));
            }
            let parsed = LinkRef::parse(&to).unwrap_or_else(|e| {
                fail(&format!(
                    "invalid --to '{to}': {e}. Expected vf_<hex> or vf_<hex>@vfr_<hex>"
                ))
            });
            let mut p = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let source_idx = p
                .findings
                .iter()
                .position(|f| f.id == from)
                .unwrap_or_else(|| {
                    fail_return(&format!("--from finding '{from}' not in frontier"))
                });
            // A local target must already exist in this frontier.
            if let LinkRef::Local { vf_id } = &parsed
                && !p.findings.iter().any(|f| &f.id == vf_id)
            {
                fail(&format!(
                    "local --to target '{vf_id}' not in frontier; add the target finding first"
                ));
            }
            // A cross-frontier target requires a declared dependency.
            if let LinkRef::Cross { vfr_id, .. } = &parsed
                && p.dep_for_vfr(vfr_id).is_none()
            {
                fail(&format!(
                    "cross-frontier --to references vfr_id '{vfr_id}' but no matching dep is declared. Run `vela frontier add-dep {vfr_id} --locator <url> --snapshot <hash>` first."
                ));
            }

            // Best-effort remote check of the cross-frontier target: every
            // failure mode (client build, fetch, parse) silently skips the
            // check rather than blocking the link.
            let mut target_warning: Option<String> = None;
            if let LinkRef::Cross {
                vfr_id: target_vfr,
                vf_id: target_vf,
            } = &parsed
                && !no_check_target
                && let Some(dep) = p.dep_for_vfr(target_vfr)
                && let Some(locator) = dep.locator.as_deref()
                && (locator.starts_with("http://") || locator.starts_with("https://"))
            {
                let client = reqwest::blocking::Client::builder()
                    .timeout(std::time::Duration::from_secs(15))
                    .build()
                    .ok();
                if let Some(client) = client
                    && let Ok(resp) = client.get(locator).send()
                    && resp.status().is_success()
                    && let Ok(dep_project) = resp.json::<crate::project::Project>()
                {
                    if let Some(target_finding) =
                        dep_project.findings.iter().find(|f| &f.id == target_vf)
                    {
                        if target_finding.flags.superseded {
                            target_warning = Some(format!(
                                "warn · cross-frontier target '{target_vf}' in '{target_vfr}' has flags.superseded = true. \
You may be linking to outdated wording. Pull --transitive and inspect the supersedes chain to find the current finding. \
Use --no-check-target to skip this check."
                            ));
                        }
                    } else {
                        target_warning = Some(format!(
                            "warn · cross-frontier target '{target_vf}' not found in dep '{target_vfr}' (fetched from {locator}). \
The target may have been removed or never existed in the pinned snapshot."
                        ));
                    }
                }
            }

            // Append the link, refresh derived stats, and persist.
            let now = chrono::Utc::now().to_rfc3339_opts(chrono::SecondsFormat::Secs, true);
            let link = Link {
                target: to.clone(),
                link_type: r#type.clone(),
                note: note.clone(),
                inferred_by: inferred_by.clone(),
                created_at: now,
                mechanism: None,
            };
            p.findings[source_idx].links.push(link);
            project::recompute_stats(&mut p);
            repo::save_to_path(&frontier, &p).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "link.add",
                "frontier": frontier.display().to_string(),
                "from": from,
                "to": to,
                "type": r#type,
                "cross_frontier": parsed.is_cross_frontier(),
            });
            if json {
                // The warning (if any) is folded into the JSON payload.
                let mut p2 = payload.clone();
                if let Some(w) = &target_warning
                    && let serde_json::Value::Object(m) = &mut p2
                {
                    m.insert(
                        "target_warning".to_string(),
                        serde_json::Value::String(w.clone()),
                    );
                }
                println!(
                    "{}",
                    serde_json::to_string_pretty(&p2).expect("failed to serialize link.add")
                );
            } else {
                println!(
                    "{} {} --[{}]--> {}{}",
                    style::ok("link"),
                    from,
                    r#type,
                    to,
                    if parsed.is_cross_frontier() {
                        " (cross-frontier)"
                    } else {
                        ""
                    }
                );
                if let Some(w) = target_warning {
                    println!(" {w}");
                }
            }
        }
    }
}
11394
/// Handles `vela frontier …`: scaffolding a new frontier file, materializing
/// its repo layout, managing cross-frontier dependencies
/// (add/list/remove/refresh against a hub), and delegating weekly diffs.
fn cmd_frontier(action: FrontierAction) {
    use crate::project::ProjectDependency;
    use crate::repo;
    match action {
        FrontierAction::New {
            path,
            name,
            description,
            force,
            json,
        } => {
            // Refuse to clobber an existing file unless --force was given.
            if path.exists() && !force {
                fail(&format!(
                    "{} already exists; pass --force to overwrite",
                    path.display()
                ));
            }
            let now = chrono::Utc::now().to_rfc3339_opts(chrono::SecondsFormat::Secs, true);
            // Empty project skeleton: metadata filled in, every collection empty.
            let project = project::Project {
                vela_version: project::VELA_SCHEMA_VERSION.to_string(),
                schema: project::VELA_SCHEMA_URL.to_string(),
                frontier_id: None,
                project: project::ProjectMeta {
                    name: name.clone(),
                    description: description.clone(),
                    compiled_at: now,
                    compiler: project::VELA_COMPILER_VERSION.to_string(),
                    papers_processed: 0,
                    errors: 0,
                    dependencies: Vec::new(),
                },
                stats: project::ProjectStats::default(),
                findings: Vec::new(),
                sources: Vec::new(),
                evidence_atoms: Vec::new(),
                condition_records: Vec::new(),
                review_events: Vec::new(),
                confidence_updates: Vec::new(),
                events: Vec::new(),
                proposals: Vec::new(),
                proof_state: proposals::ProofState::default(),
                signatures: Vec::new(),
                actors: Vec::new(),
                replications: Vec::new(),
                datasets: Vec::new(),
                code_artifacts: Vec::new(),
                artifacts: Vec::new(),
                predictions: Vec::new(),
                resolutions: Vec::new(),
                peers: Vec::new(),
                negative_results: Vec::new(),
                trajectories: Vec::new(),
            };
            repo::save_to_path(&path, &project).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "frontier.new",
                "path": path.display().to_string(),
                "name": name,
                "schema": project::VELA_SCHEMA_URL,
                "vela_version": env!("CARGO_PKG_VERSION"),
                "next_steps": [
                    "vela finding add <path> --assertion '...' --author 'reviewer:you' --apply",
                    "vela sign generate-keypair --out keys",
                    "vela actor add <path> reviewer:you --pubkey \"$(cat keys/public.key)\"",
                    "vela registry publish <path> --owner reviewer:you --key keys/private.key --locator <url> --to https://vela-hub.fly.dev",
                ],
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize frontier.new")
                );
            } else {
                println!(
                    "{} scaffolded frontier '{name}' at {}",
                    style::ok("frontier"),
                    path.display()
                );
                println!(" next steps:");
                println!(
                    " 1. vela finding add {} --assertion '...' --author 'reviewer:you' --apply",
                    path.display()
                );
                println!(" 2. vela sign generate-keypair --out keys");
                println!(
                    " 3. vela actor add {} reviewer:you --pubkey \"$(cat keys/public.key)\"",
                    path.display()
                );
                println!(
                    " 4. vela registry publish {} --owner reviewer:you --key keys/private.key --locator <url> --to https://vela-hub.fly.dev",
                    path.display()
                );
            }
        }
        FrontierAction::Materialize { frontier, json } => {
            let payload = frontier_repo::materialize(&frontier).unwrap_or_else(|e| fail_return(&e));
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize frontier materialize")
                );
            } else {
                println!(
                    "{} materialized frontier repo at {}",
                    style::ok("frontier"),
                    frontier.display()
                );
            }
        }
        FrontierAction::AddDep {
            frontier,
            vfr_id,
            locator,
            snapshot,
            name,
            json,
        } => {
            let mut p = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            // Duplicate declarations are rejected rather than silently updated.
            if p.project
                .dependencies
                .iter()
                .any(|d| d.vfr_id.as_deref() == Some(&vfr_id))
            {
                fail(&format!(
                    "cross-frontier dependency '{vfr_id}' already declared; remove it first via `vela frontier remove-dep`"
                ));
            }
            let dep = ProjectDependency {
                name: name.unwrap_or_else(|| vfr_id.clone()),
                source: "vela.hub".into(),
                version: None,
                pinned_hash: None,
                vfr_id: Some(vfr_id.clone()),
                locator: Some(locator.clone()),
                pinned_snapshot_hash: Some(snapshot.clone()),
            };
            p.project.dependencies.push(dep);
            repo::save_to_path(&frontier, &p).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "frontier.add-dep",
                "frontier": frontier.display().to_string(),
                "vfr_id": vfr_id,
                "locator": locator,
                "pinned_snapshot_hash": snapshot,
                "declared_count": p.project.dependencies.len(),
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize frontier.add-dep")
                );
            } else {
                println!(
                    "{} declared cross-frontier dep {vfr_id}",
                    style::ok("frontier")
                );
                println!(" locator: {locator}");
                println!(" snapshot: {snapshot}");
            }
        }
        FrontierAction::ListDeps { frontier, json } => {
            let p = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let deps: Vec<&ProjectDependency> = p.project.dependencies.iter().collect();
            if json {
                let payload = json!({
                    "ok": true,
                    "command": "frontier.list-deps",
                    "frontier": frontier.display().to_string(),
                    "count": deps.len(),
                    "dependencies": deps,
                });
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize frontier.list-deps")
                );
            } else {
                println!();
                println!(
                    " {}",
                    format!("VELA · FRONTIER · LIST-DEPS · {}", frontier.display())
                        .to_uppercase()
                        .dimmed()
                );
                println!(" {}", style::tick_row(60));
                if deps.is_empty() {
                    println!(" (no dependencies declared)");
                } else {
                    for d in &deps {
                        // Deps with cross-frontier identity print extra rows.
                        let kind = if d.is_cross_frontier() {
                            "cross-frontier"
                        } else {
                            "compile-time"
                        };
                        println!(" · {} [{kind}]", d.name);
                        if let Some(v) = &d.vfr_id {
                            println!(" vfr_id: {v}");
                        }
                        if let Some(l) = &d.locator {
                            println!(" locator: {l}");
                        }
                        if let Some(s) = &d.pinned_snapshot_hash {
                            println!(" snapshot: {s}");
                        }
                    }
                }
            }
        }
        FrontierAction::RemoveDep {
            frontier,
            vfr_id,
            json,
        } => {
            let mut p = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            // Refuse removal while any finding still links into this dep.
            for f in &p.findings {
                for l in &f.links {
                    if let Ok(crate::bundle::LinkRef::Cross { vfr_id: ref v, .. }) =
                        crate::bundle::LinkRef::parse(&l.target)
                        && v == &vfr_id
                    {
                        fail(&format!(
                            "cannot remove dep '{vfr_id}': finding {} still links to it via {}",
                            f.id, l.target
                        ));
                    }
                }
            }
            let before = p.project.dependencies.len();
            p.project
                .dependencies
                .retain(|d| d.vfr_id.as_deref() != Some(&vfr_id));
            let removed = before - p.project.dependencies.len();
            if removed == 0 {
                fail(&format!("no cross-frontier dependency '{vfr_id}' found"));
            }
            repo::save_to_path(&frontier, &p).unwrap_or_else(|e| fail_return(&e));
            let payload = json!({
                "ok": true,
                "command": "frontier.remove-dep",
                "frontier": frontier.display().to_string(),
                "vfr_id": vfr_id,
                "removed": removed,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize frontier.remove-dep")
                );
            } else {
                println!(
                    "{} removed cross-frontier dep {vfr_id}",
                    style::ok("frontier")
                );
            }
        }
        FrontierAction::RefreshDeps {
            frontier,
            from,
            dry_run,
            json,
        } => {
            let mut p = repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));
            let cross_deps: Vec<String> = p
                .project
                .dependencies
                .iter()
                .filter_map(|d| d.vfr_id.clone())
                .collect();
            // Nothing to refresh: emit an empty summary and bail out early.
            if cross_deps.is_empty() {
                if json {
                    println!(
                        "{}",
                        serde_json::to_string_pretty(&json!({
                            "ok": true,
                            "command": "frontier.refresh-deps",
                            "frontier": frontier.display().to_string(),
                            "from": from,
                            "dry_run": dry_run,
                            "deps": [],
                            "summary": { "total": 0, "refreshed": 0, "unchanged": 0, "missing": 0, "unreachable": 0 },
                        })).expect("serialize")
                    );
                } else {
                    println!(
                        "{} no cross-frontier deps declared in {}",
                        style::ok("frontier"),
                        frontier.display()
                    );
                }
                return;
            }
            let client = reqwest::blocking::Client::builder()
                .timeout(std::time::Duration::from_secs(20))
                .build()
                .unwrap_or_else(|e| fail_return(&format!("http client init failed: {e}")));
            let base = from.trim_end_matches('/');
            // Minimal view of the hub entry document: only the pin we need.
            #[derive(serde::Deserialize)]
            struct HubEntry {
                latest_snapshot_hash: String,
            }
            let mut per_dep: Vec<serde_json::Value> = Vec::new();
            let (mut refreshed, mut unchanged, mut missing, mut unreachable) =
                (0u32, 0u32, 0u32, 0u32);
            for vfr in &cross_deps {
                let url = format!("{base}/entries/{vfr}");
                let resp = client.get(&url).send();
                // Classify each dep: missing (404), unreachable (any other
                // failure), unchanged (pin matches), or refreshed (pin moved).
                let outcome = match resp {
                    Ok(r) if r.status().as_u16() == 404 => {
                        missing += 1;
                        json!({ "vfr_id": vfr, "status": "missing", "url": url })
                    }
                    Ok(r) if !r.status().is_success() => {
                        unreachable += 1;
                        json!({ "vfr_id": vfr, "status": "unreachable", "http_status": r.status().as_u16() })
                    }
                    Err(e) => {
                        unreachable += 1;
                        json!({ "vfr_id": vfr, "status": "unreachable", "error": e.to_string() })
                    }
                    Ok(r) => match r.json::<HubEntry>() {
                        Err(e) => {
                            unreachable += 1;
                            json!({ "vfr_id": vfr, "status": "unreachable", "error": format!("invalid hub response: {e}") })
                        }
                        Ok(entry) => {
                            // Re-locate the dep by id each iteration because
                            // `p` is mutated inside the loop.
                            match p
                                .project
                                .dependencies
                                .iter()
                                .position(|d| d.vfr_id.as_deref() == Some(vfr.as_str()))
                            {
                                None => {
                                    unreachable += 1;
                                    json!({ "vfr_id": vfr, "status": "unreachable", "error": "dep disappeared mid-scan" })
                                }
                                Some(idx) => {
                                    let local_pin =
                                        p.project.dependencies[idx].pinned_snapshot_hash.clone();
                                    let new_pin = entry.latest_snapshot_hash;
                                    if local_pin.as_deref() == Some(new_pin.as_str()) {
                                        unchanged += 1;
                                        json!({ "vfr_id": vfr, "status": "unchanged", "snapshot": new_pin })
                                    } else {
                                        // --dry-run still counts the refresh
                                        // but leaves the pin untouched.
                                        if !dry_run {
                                            p.project.dependencies[idx].pinned_snapshot_hash =
                                                Some(new_pin.clone());
                                        }
                                        refreshed += 1;
                                        json!({
                                            "vfr_id": vfr,
                                            "status": "refreshed",
                                            "old_snapshot": local_pin,
                                            "new_snapshot": new_pin,
                                        })
                                    }
                                }
                            }
                        }
                    },
                };
                per_dep.push(outcome);
            }
            // Only write back when something actually changed.
            if !dry_run && refreshed > 0 {
                repo::save_to_path(&frontier, &p).unwrap_or_else(|e| fail_return(&e));
            }
            let payload = json!({
                "ok": true,
                "command": "frontier.refresh-deps",
                "frontier": frontier.display().to_string(),
                "from": from,
                "dry_run": dry_run,
                "deps": per_dep,
                "summary": {
                    "total": cross_deps.len(),
                    "refreshed": refreshed,
                    "unchanged": unchanged,
                    "missing": missing,
                    "unreachable": unreachable,
                },
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize frontier.refresh-deps")
                );
            } else {
                let mode = if dry_run { " (dry-run)" } else { "" };
                println!(
                    "{} refresh-deps{mode} · {} total · {refreshed} refreshed · {unchanged} unchanged · {missing} missing · {unreachable} unreachable",
                    style::ok("frontier"),
                    cross_deps.len()
                );
                for d in &per_dep {
                    let vfr = d["vfr_id"].as_str().unwrap_or("?");
                    let status = d["status"].as_str().unwrap_or("?");
                    match status {
                        // Snapshot hashes are truncated to 16 chars for display.
                        "refreshed" => println!(
                            " {vfr} refreshed {} → {}",
                            d["old_snapshot"]
                                .as_str()
                                .unwrap_or("(none)")
                                .chars()
                                .take(16)
                                .collect::<String>(),
                            d["new_snapshot"]
                                .as_str()
                                .unwrap_or("?")
                                .chars()
                                .take(16)
                                .collect::<String>(),
                        ),
                        "unchanged" => println!(" {vfr} unchanged"),
                        "missing" => println!(" {vfr} missing on hub"),
                        _ => println!(" {vfr} unreachable"),
                    }
                }
            }
        }
        FrontierAction::Diff {
            frontier,
            since,
            week,
            json,
        } => cmd_frontier_diff(&frontier, since.as_deref(), week.as_deref(), json),
    }
}
11830
11831fn cmd_repo(action: RepoAction) {
11832 match action {
11833 RepoAction::Status { frontier, json } => {
11834 let payload = frontier_repo::repo_status(&frontier).unwrap_or_else(|e| fail_return(&e));
11835 if json {
11836 println!(
11837 "{}",
11838 serde_json::to_string_pretty(&payload)
11839 .expect("failed to serialize repo status")
11840 );
11841 } else {
11842 let summary = payload.get("summary").unwrap_or(&Value::Null);
11843 let freshness = payload.get("freshness").unwrap_or(&Value::Null);
11844 println!("vela repo status");
11845 println!(" frontier: {}", frontier.display());
11846 println!(
11847 " events: {}",
11848 summary
11849 .get("accepted_events")
11850 .and_then(Value::as_u64)
11851 .unwrap_or_default()
11852 );
11853 println!(
11854 " open proposals: {}",
11855 summary
11856 .get("open_proposals")
11857 .and_then(Value::as_u64)
11858 .unwrap_or_default()
11859 );
11860 println!(
11861 " state: {}",
11862 freshness
11863 .get("materialized_state")
11864 .and_then(Value::as_str)
11865 .unwrap_or("unknown")
11866 );
11867 println!(
11868 " proof: {}",
11869 freshness
11870 .get("proof")
11871 .and_then(Value::as_str)
11872 .unwrap_or("unknown")
11873 );
11874 }
11875 }
11876 RepoAction::Doctor { frontier, json } => {
11877 let payload = frontier_repo::repo_doctor(&frontier).unwrap_or_else(|e| fail_return(&e));
11878 if json {
11879 println!(
11880 "{}",
11881 serde_json::to_string_pretty(&payload)
11882 .expect("failed to serialize repo doctor")
11883 );
11884 } else {
11885 let ok = payload.get("ok").and_then(Value::as_bool) == Some(true);
11886 let issues = payload
11887 .get("issues")
11888 .and_then(Value::as_array)
11889 .map_or(0, Vec::len);
11890 println!("vela repo doctor");
11891 println!(" frontier: {}", frontier.display());
11892 println!(" status: {}", if ok { "ok" } else { "needs attention" });
11893 println!(" issues: {issues}");
11894 }
11895 }
11896 }
11897}
11898
11899fn cmd_proof_verify(frontier: &Path, json_output: bool) {
11900 let payload = frontier_repo::proof_verify(frontier).unwrap_or_else(|e| fail_return(&e));
11901 if json_output {
11902 println!(
11903 "{}",
11904 serde_json::to_string_pretty(&payload).expect("failed to serialize proof verify")
11905 );
11906 if payload.get("ok").and_then(Value::as_bool) != Some(true) {
11907 std::process::exit(1);
11908 }
11909 } else {
11910 let ok = payload.get("ok").and_then(Value::as_bool) == Some(true);
11911 println!("vela proof verify");
11912 println!(" frontier: {}", frontier.display());
11913 println!(" status: {}", if ok { "ok" } else { "failed" });
11914 if let Some(issues) = payload.get("issues").and_then(Value::as_array) {
11915 for issue in issues {
11916 if let Some(message) = issue.get("message").and_then(Value::as_str) {
11917 println!(" issue: {message}");
11918 }
11919 }
11920 }
11921 if !ok {
11922 std::process::exit(1);
11923 }
11924 }
11925}
11926
11927fn cmd_proof_explain(frontier: &Path) {
11928 let text = frontier_repo::proof_explain(frontier).unwrap_or_else(|e| fail_return(&e));
11929 print!("{text}");
11930}
11931
/// Reports what changed in a frontier over a time window.
///
/// The window is either `[--since, now)` or an ISO week (`--week YYYY-Www`,
/// defaulting to the current week). Findings created in the window count as
/// "added" (and possibly "new contradictions"); findings merely touched in
/// the window count as "updated". A cumulative claim count (everything
/// created before the window's end) is also reported.
fn cmd_frontier_diff(frontier: &Path, since: Option<&str>, week: Option<&str>, json: bool) {
    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));

    let now = chrono::Utc::now();
    // Resolve the reporting window: --since wins over --week; with neither,
    // fall back to the ISO week containing "now".
    let (window_start, window_end, week_label): (
        chrono::DateTime<chrono::Utc>,
        chrono::DateTime<chrono::Utc>,
        Option<String>,
    ) = if let Some(s) = since {
        let parsed = chrono::DateTime::parse_from_rfc3339(s)
            .map(|d| d.with_timezone(&chrono::Utc))
            .unwrap_or_else(|e| fail_return(&format!("invalid --since timestamp '{s}': {e}")));
        (parsed, now, None)
    } else {
        let key = week
            .map(str::to_owned)
            .unwrap_or_else(|| iso_week_key_for(now.date_naive()));
        let (start, end) = iso_week_bounds(&key)
            .unwrap_or_else(|e| fail_return(&format!("invalid --week '{key}': {e}")));
        (start, end, Some(key))
    };

    let mut added: Vec<&crate::bundle::FindingBundle> = Vec::new();
    let mut updated: Vec<&crate::bundle::FindingBundle> = Vec::new();
    let mut new_contradictions: Vec<&crate::bundle::FindingBundle> = Vec::new();
    let mut cumulative: usize = 0;

    for f in &project.findings {
        // Unparseable timestamps are treated as absent: the finding is then
        // skipped by every window check below.
        let created = chrono::DateTime::parse_from_rfc3339(&f.created)
            .map(|d| d.with_timezone(&chrono::Utc))
            .ok();
        let updated_ts = f
            .updated
            .as_deref()
            .and_then(|u| chrono::DateTime::parse_from_rfc3339(u).ok())
            .map(|d| d.with_timezone(&chrono::Utc));

        // Everything created before the window's end counts cumulatively.
        if let Some(c) = created
            && c < window_end
        {
            cumulative += 1;
        }

        // "added" takes precedence: a finding created in the window is never
        // also counted as "updated" (note the continue).
        if let Some(c) = created
            && c >= window_start
            && c < window_end
        {
            added.push(f);
            let is_tension = f.flags.contested || f.assertion.assertion_type == "tension";
            if is_tension {
                new_contradictions.push(f);
            }
            continue;
        }
        if let Some(u) = updated_ts
            && u >= window_start
            && u < window_end
        {
            updated.push(f);
        }
    }

    // Condense each finding list into lightweight JSON summaries.
    let summary_for = |list: &[&crate::bundle::FindingBundle]| -> Vec<serde_json::Value> {
        list.iter()
            .map(|f| {
                json!({
                    "id": f.id,
                    "assertion": f.assertion.text,
                    "evidence_type": f.evidence.evidence_type,
                    "confidence": f.confidence.score,
                    "doi": f.provenance.doi,
                    "pmid": f.provenance.pmid,
                })
            })
            .collect()
    };

    let payload = json!({
        "ok": true,
        "command": "frontier.diff",
        "frontier": frontier.display().to_string(),
        "frontier_id": project.frontier_id,
        "window": {
            "start": window_start.to_rfc3339_opts(chrono::SecondsFormat::Secs, true),
            "end": window_end.to_rfc3339_opts(chrono::SecondsFormat::Secs, true),
            "iso_week": week_label,
        },
        "totals": {
            "added": added.len(),
            "updated": updated.len(),
            "new_contradictions": new_contradictions.len(),
            "cumulative_claims": cumulative,
        },
        "added": summary_for(&added),
        "updated": summary_for(&updated),
        "new_contradictions": summary_for(&new_contradictions),
    });

    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&payload).expect("failed to serialize frontier.diff")
        );
        return;
    }

    // Human-readable rendering below.
    let label = week_label
        .clone()
        .unwrap_or_else(|| format!("since {}", window_start.format("%Y-%m-%d %H:%M UTC")));
    println!();
    println!(
        " {}",
        format!("VELA · FRONTIER · DIFF · {label}")
            .to_uppercase()
            .dimmed()
    );
    println!(" {}", style::tick_row(60));
    println!(
        " range: {} → {}",
        window_start.format("%Y-%m-%d %H:%M"),
        window_end.format("%Y-%m-%d %H:%M")
    );
    println!(" added: {}", added.len());
    println!(" updated: {}", updated.len());
    println!(" contradictions: {}", new_contradictions.len());
    println!(" cumulative: {cumulative}");
    if added.is_empty() && updated.is_empty() {
        println!();
        println!(" (quiet window — no findings added or updated)");
    } else {
        println!();
        println!(" added:");
        for f in &added {
            println!(
                " · {} {}",
                f.id.dimmed(),
                truncate(&f.assertion.text, 88)
            );
        }
        if !updated.is_empty() {
            println!();
            println!(" updated:");
            for f in &updated {
                println!(
                    " · {} {}",
                    f.id.dimmed(),
                    truncate(&f.assertion.text, 88)
                );
            }
        }
    }
}
12095
/// Returns `s` unchanged if it is at most `n` characters long (counted in
/// `char`s, not bytes); otherwise keeps the first `n - 1` characters and
/// appends a single `…` so the result stays within `n` characters.
fn truncate(s: &str, n: usize) -> String {
    if s.chars().count() <= n {
        return s.to_owned();
    }
    let keep = n.saturating_sub(1);
    let mut shortened: String = s.chars().take(keep).collect();
    shortened.push('…');
    shortened
}
12105
12106fn iso_week_key_for(d: chrono::NaiveDate) -> String {
12108 use chrono::Datelike;
12109 let iso = d.iso_week();
12110 format!("{:04}-W{:02}", iso.year(), iso.week())
12111}
12112
12113fn iso_week_bounds(
12116 key: &str,
12117) -> Result<(chrono::DateTime<chrono::Utc>, chrono::DateTime<chrono::Utc>), String> {
12118 let (year_str, week_str) = key
12119 .split_once("-W")
12120 .ok_or_else(|| format!("expected YYYY-Www, got '{key}'"))?;
12121 let year: i32 = year_str
12122 .parse()
12123 .map_err(|e| format!("bad year in '{key}': {e}"))?;
12124 let week: u32 = week_str
12125 .parse()
12126 .map_err(|e| format!("bad week in '{key}': {e}"))?;
12127 let monday = chrono::NaiveDate::from_isoywd_opt(year, week, chrono::Weekday::Mon)
12128 .ok_or_else(|| format!("invalid ISO week: {key}"))?;
12129 let next_monday = monday + chrono::Duration::days(7);
12130 let start = monday.and_hms_opt(0, 0, 0).expect("00:00 valid").and_utc();
12131 let end = next_monday
12132 .and_hms_opt(0, 0, 0)
12133 .expect("00:00 valid")
12134 .and_utc();
12135 Ok((start, end))
12136}
12137
/// Dispatch a `vela registry …` subcommand.
///
/// Handles dependency queries (`depends-on`), hub-to-hub mirroring,
/// listing, signed publishing, and (optionally transitive) pulls.
/// All HTTP here is synchronous (`reqwest::blocking`) with a 30s timeout;
/// every failure path goes through `fail`/`fail_return`, which terminate
/// the process, so arms never return errors.
fn cmd_registry(action: RegistryAction) {
    use crate::registry;
    // Fallback registry location when no --to/--from is given:
    // $HOME/.vela/registry/entries.json ("." when HOME is unset).
    let default_registry = || -> PathBuf {
        let home = std::env::var("HOME").unwrap_or_else(|_| ".".to_string());
        PathBuf::from(home)
            .join(".vela")
            .join("registry")
            .join("entries.json")
    };
    match action {
        // GET <hub>/entries/<vfr_id>/depends-on and print the dependents.
        RegistryAction::DependsOn { vfr_id, from, json } => {
            let base = from.trim_end_matches('/');
            let url = format!("{base}/entries/{vfr_id}/depends-on");
            let client = reqwest::blocking::Client::builder()
                .timeout(std::time::Duration::from_secs(30))
                .build()
                .unwrap_or_else(|e| fail_return(&format!("http client init: {e}")));
            let resp = client
                .get(&url)
                .send()
                .unwrap_or_else(|e| fail_return(&format!("GET {url}: {e}")));
            if !resp.status().is_success() {
                fail(&format!("GET {url}: HTTP {}", resp.status()));
            }
            let body: serde_json::Value = resp
                .json()
                .unwrap_or_else(|e| fail_return(&format!("parse response: {e}")));
            if json {
                // --json: emit the hub response verbatim (pretty-printed).
                println!(
                    "{}",
                    serde_json::to_string_pretty(&body).expect("serialize")
                );
            } else {
                // Pretty mode: summarize the `dependents` array, tolerating
                // a missing/ill-typed field by treating it as empty.
                let dependents = body
                    .get("dependents")
                    .and_then(|v| v.as_array())
                    .cloned()
                    .unwrap_or_default();
                let count = dependents.len();
                println!(
                    "{} {count} {} on {vfr_id}",
                    style::ok("registry"),
                    if count == 1 {
                        "frontier depends"
                    } else {
                        "frontiers depend"
                    },
                );
                for e in &dependents {
                    // Each dependent is an object; "?" stands in for any
                    // missing field rather than aborting the listing.
                    let v = e.get("vfr_id").and_then(|v| v.as_str()).unwrap_or("?");
                    let n = e.get("name").and_then(|v| v.as_str()).unwrap_or("?");
                    let o = e
                        .get("owner_actor_id")
                        .and_then(|v| v.as_str())
                        .unwrap_or("?");
                    println!(" {v} {n} ({o})");
                }
            }
        }
        // Copy one entry from a source hub to a destination hub:
        // GET <from>/entries/<vfr_id>, then POST the payload unchanged
        // to <to>/entries.
        RegistryAction::Mirror {
            vfr_id,
            from,
            to,
            json,
        } => {
            let src_base = from.trim_end_matches('/');
            let dst_base = to.trim_end_matches('/');
            let src_url = format!("{src_base}/entries/{vfr_id}");
            let dst_url = format!("{dst_base}/entries");
            let client = reqwest::blocking::Client::builder()
                .timeout(std::time::Duration::from_secs(30))
                .build()
                .unwrap_or_else(|e| fail_return(&format!("http client init: {e}")));

            let entry: serde_json::Value = client
                .get(&src_url)
                .send()
                .unwrap_or_else(|e| fail_return(&format!("GET {src_url}: {e}")))
                .error_for_status()
                .unwrap_or_else(|e| fail_return(&format!("GET {src_url}: {e}")))
                .json()
                .unwrap_or_else(|e| fail_return(&format!("parse {src_url}: {e}")));

            let resp = client
                .post(&dst_url)
                .header("content-type", "application/json")
                .body(
                    serde_json::to_vec(&entry)
                        .unwrap_or_else(|e| fail_return(&format!("serialize: {e}"))),
                )
                .send()
                .unwrap_or_else(|e| fail_return(&format!("POST {dst_url}: {e}")));
            let status = resp.status();
            if !status.is_success() {
                // Include (a truncated slice of) the error body in the
                // failure message to aid debugging the destination hub.
                let body = resp.text().unwrap_or_default();
                fail(&format!(
                    "POST {dst_url}: HTTP {status}: {}",
                    body.chars().take(300).collect::<String>()
                ));
            }
            let body: serde_json::Value = resp
                .json()
                .unwrap_or_else(|e| fail_return(&format!("parse POST response: {e}")));
            // `duplicate` is set by the destination when it already holds
            // an entry with this signature; mirroring is then a no-op.
            let duplicate = body
                .get("duplicate")
                .and_then(serde_json::Value::as_bool)
                .unwrap_or(false);
            let payload = json!({
                "ok": true,
                "command": "registry.mirror",
                "vfr_id": vfr_id,
                "from": src_base,
                "to": dst_base,
                "duplicate_on_destination": duplicate,
                "destination_response": body,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload).expect("serialize")
                );
            } else {
                println!(
                    "{} mirrored {vfr_id} from {src_base} → {dst_base}{}",
                    style::ok("registry"),
                    if duplicate {
                        " (duplicate; signature already known)"
                    } else {
                        " (fresh insert)"
                    }
                );
            }
        }
        // List entries from a remote hub (http…), a local path, or the
        // default local registry.
        RegistryAction::List { from, json } => {
            let (label, registry_data) = match &from {
                Some(loc) if loc.starts_with("http") => (
                    loc.clone(),
                    registry::load_any(loc).unwrap_or_else(|e| fail_return(&e)),
                ),
                Some(loc) => {
                    let p = registry::resolve_local(loc).unwrap_or_else(|e| fail_return(&e));
                    (
                        p.display().to_string(),
                        registry::load_local(&p).unwrap_or_else(|e| fail_return(&e)),
                    )
                }
                None => {
                    let p = default_registry();
                    (
                        p.display().to_string(),
                        registry::load_local(&p).unwrap_or_else(|e| fail_return(&e)),
                    )
                }
            };
            let r = registry_data;
            let path_label = label;
            if json {
                let payload = json!({
                    "ok": true,
                    "command": "registry.list",
                    "registry": path_label,
                    "entry_count": r.entries.len(),
                    "entries": r.entries,
                });
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize registry.list")
                );
            } else {
                println!();
                println!(
                    " {}",
                    format!("VELA · REGISTRY · LIST · {}", path_label)
                        .to_uppercase()
                        .dimmed()
                );
                println!(" {}", style::tick_row(60));
                if r.entries.is_empty() {
                    println!(" (registry is empty)");
                } else {
                    for entry in &r.entries {
                        println!(
                            " {} {} ({}) by {} published {}",
                            entry.vfr_id,
                            entry.name,
                            entry.network_locator,
                            entry.owner_actor_id,
                            entry.signed_publish_at
                        );
                    }
                }
            }
        }
        // Sign a frontier's current snapshot/event-log hashes with the
        // owner's ed25519 key and publish the entry to a hub or local file.
        RegistryAction::Publish {
            frontier,
            owner,
            key,
            locator,
            to,
            json,
        } => {
            // Private key is stored as hex text on disk; trim whitespace.
            let key_hex = std::fs::read_to_string(&key)
                .map(|s| s.trim().to_string())
                .unwrap_or_else(|e| fail_return(&format!("read key {}: {e}", key.display())));
            let signing_key = parse_signing_key(&key_hex);
            let derived = hex::encode(signing_key.verifying_key().to_bytes());

            let mut frontier_data =
                repo::load_from_path(&frontier).unwrap_or_else(|e| fail_return(&e));

            // Resolve the owner's public key; if the actor is unknown,
            // auto-register it with the key derived from --key and save
            // the frontier back to disk.
            let pubkey = match frontier_data.actors.iter().find(|actor| actor.id == owner) {
                Some(actor) => actor.public_key.clone(),
                None => {
                    eprintln!(
                        " vela registry publish · auto-registering actor {owner} (derived pubkey {})",
                        &derived[..16]
                    );
                    frontier_data.actors.push(sign::ActorRecord {
                        id: owner.clone(),
                        public_key: derived.clone(),
                        algorithm: "ed25519".to_string(),
                        created_at: chrono::Utc::now().to_rfc3339(),
                        tier: None,
                        orcid: None,
                        access_clearance: None,
                    });
                    repo::save_to_path(&frontier, &frontier_data)
                        .unwrap_or_else(|e| fail_return(&format!("save actor: {e}")));
                    derived.clone()
                }
            };

            let snapshot_hash = events::snapshot_hash(&frontier_data);
            let event_log_hash = events::event_log_hash(&frontier_data.events);
            let vfr_id = frontier_data.frontier_id();
            let name = frontier_data.project.name.clone();

            // The provided private key must match the registered pubkey,
            // otherwise the signature would not verify for this owner.
            if derived != pubkey {
                fail(&format!(
                    "private key does not match registered pubkey for owner '{owner}'"
                ));
            }

            let to_is_remote = matches!(
                to.as_deref(),
                Some(loc) if loc.starts_with("http://") || loc.starts_with("https://")
            );
            // Locator resolution: explicit --locator wins; for remote hubs
            // default to <hub>/entries/<vfr_id>/snapshot; local publishes
            // must specify one explicitly.
            let resolved_locator = match locator {
                Some(l) => l,
                None => {
                    if to_is_remote {
                        let hub = to.as_deref().unwrap().trim_end_matches('/');
                        let hub_root = hub.trim_end_matches("/entries");
                        format!("{hub_root}/entries/{vfr_id}/snapshot")
                    } else {
                        fail_return(
                            "--locator is required for local publishes; pass e.g. \
                            --locator file:///path/to/frontier.json or an HTTPS URL.",
                        )
                    }
                }
            };

            // Build the entry with an empty signature first, then sign it;
            // sign_entry covers the entry contents sans the signature field.
            let mut entry = registry::RegistryEntry {
                schema: registry::ENTRY_SCHEMA.to_string(),
                vfr_id: vfr_id.clone(),
                name: name.clone(),
                owner_actor_id: owner.clone(),
                owner_pubkey: pubkey,
                latest_snapshot_hash: snapshot_hash,
                latest_event_log_hash: event_log_hash,
                network_locator: resolved_locator,
                signed_publish_at: chrono::Utc::now().to_rfc3339(),
                signature: String::new(),
            };
            entry.signature =
                registry::sign_entry(&entry, &signing_key).unwrap_or_else(|e| fail_return(&e));

            // Remote hubs report duplicates; local files never do here.
            let (registry_label, duplicate) = if to_is_remote {
                let hub_url = to.clone().unwrap();
                let resp = registry::publish_remote(&entry, &hub_url, Some(&frontier_data))
                    .unwrap_or_else(|e| fail_return(&e));
                (hub_url, resp.duplicate)
            } else {
                let registry_path = match &to {
                    Some(loc) => registry::resolve_local(loc).unwrap_or_else(|e| fail_return(&e)),
                    None => default_registry(),
                };
                registry::publish_entry(&registry_path, entry.clone())
                    .unwrap_or_else(|e| fail_return(&e));
                (registry_path.display().to_string(), false)
            };

            let payload = json!({
                "ok": true,
                "command": "registry.publish",
                "registry": registry_label,
                "vfr_id": vfr_id,
                "name": name,
                "owner": owner,
                "snapshot_hash": entry.latest_snapshot_hash,
                "event_log_hash": entry.latest_event_log_hash,
                "signed_publish_at": entry.signed_publish_at,
                "signature": entry.signature,
                "duplicate": duplicate,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize registry.publish")
                );
            } else {
                let dup_suffix = if duplicate { " (duplicate, no-op)" } else { "" };
                println!(
                    "{} published {vfr_id} → {}{}",
                    style::ok("registry"),
                    registry_label,
                    dup_suffix
                );
                println!(" snapshot: {}", entry.latest_snapshot_hash);
                println!(" event_log: {}", entry.latest_event_log_hash);
                println!(" signature: {}…", &entry.signature[..16]);
            }
        }
        // Fetch a frontier by vfr_id, verify hashes/signature against the
        // registry entry, optionally resolving pinned dependencies too.
        RegistryAction::Pull {
            vfr_id,
            from,
            out,
            transitive,
            depth,
            json,
        } => {
            let (registry_label, registry_data) = match &from {
                Some(loc) if loc.starts_with("http") => (
                    loc.clone(),
                    registry::load_any(loc).unwrap_or_else(|e| fail_return(&e)),
                ),
                Some(loc) => {
                    let p = registry::resolve_local(loc).unwrap_or_else(|e| fail_return(&e));
                    (
                        p.display().to_string(),
                        registry::load_local(&p).unwrap_or_else(|e| fail_return(&e)),
                    )
                }
                None => {
                    let p = default_registry();
                    (
                        p.display().to_string(),
                        registry::load_local(&p).unwrap_or_else(|e| fail_return(&e)),
                    )
                }
            };
            let entry = registry::find_latest(&registry_data, &vfr_id)
                .unwrap_or_else(|| fail_return(&format!("{vfr_id} not found in registry")));

            if transitive {
                // Transitive mode pulls the primary frontier plus its
                // pinned cross-frontier dependencies up to `depth`.
                let result = registry::pull_transitive(&registry_data, &vfr_id, &out, depth)
                    .unwrap_or_else(|e| fail_return(&format!("transitive pull failed: {e}")));

                let dep_paths_json: serde_json::Value = serde_json::Value::Object(
                    result
                        .deps
                        .iter()
                        .map(|(k, v)| (k.clone(), serde_json::json!(v.display().to_string())))
                        .collect(),
                );
                let payload = json!({
                    "ok": true,
                    "command": "registry.pull",
                    "registry": registry_label,
                    "vfr_id": vfr_id,
                    "transitive": true,
                    "depth": depth,
                    "out_dir": out.display().to_string(),
                    "primary": result.primary_path.display().to_string(),
                    "verified": result.verified,
                    "deps": dep_paths_json,
                });
                if json {
                    println!(
                        "{}",
                        serde_json::to_string_pretty(&payload)
                            .expect("failed to serialize registry.pull")
                    );
                } else {
                    println!(
                        "{} pulled {vfr_id} (transitive) → {}",
                        style::ok("registry"),
                        out.display()
                    );
                    println!(" verified {} frontier(s):", result.verified.len());
                    for v in &result.verified {
                        println!(" · {v}");
                    }
                    println!(" every cross-frontier dependency's pinned snapshot hash matched");
                }
                return;
            }

            registry::fetch_frontier_to_prefer_event_hub(&entry, from.as_deref(), &out)
                .unwrap_or_else(|e| fail_return(&format!("fetch frontier: {e}")));
            // Verification failed → remove the partially/incorrectly
            // fetched file so a bad frontier never lingers at `out`.
            registry::verify_pull(&entry, &out).unwrap_or_else(|e| {
                let _ = std::fs::remove_file(&out);
                fail_return(&format!("pull verification failed: {e}"))
            });

            let payload = json!({
                "ok": true,
                "command": "registry.pull",
                "registry": registry_label,
                "vfr_id": vfr_id,
                "out": out.display().to_string(),
                "snapshot_hash": entry.latest_snapshot_hash,
                "event_log_hash": entry.latest_event_log_hash,
                "verified": true,
            });
            if json {
                println!(
                    "{}",
                    serde_json::to_string_pretty(&payload)
                        .expect("failed to serialize registry.pull")
                );
            } else {
                println!(
                    "{} pulled {vfr_id} → {}",
                    style::ok("registry"),
                    out.display()
                );
                println!(" verified snapshot+event_log hashes match registry; signature ok");
            }
        }
    }
}
12611
12612fn print_stats_json(path: &Path) {
12613 let frontier = load_frontier_or_fail(path);
12614 let source_hash = hash_path_or_fail(path);
12615 let payload = json!({
12616 "ok": true,
12617 "command": "stats",
12618 "schema_version": project::VELA_SCHEMA_VERSION,
12619 "frontier": {
12620 "name": &frontier.project.name,
12621 "description": &frontier.project.description,
12622 "source": path.display().to_string(),
12623 "hash": format!("sha256:{source_hash}"),
12624 "compiled_at": &frontier.project.compiled_at,
12625 "compiler": &frontier.project.compiler,
12626 "papers_processed": frontier.project.papers_processed,
12627 "errors": frontier.project.errors,
12628 },
12629 "stats": frontier.stats,
12630 "proposals": proposals::summary(&frontier),
12631 "proof_state": frontier.proof_state,
12632 });
12633 println!(
12634 "{}",
12635 serde_json::to_string_pretty(&payload).expect("failed to serialize stats")
12636 );
12637}
12638
12639fn cmd_search(
12640 source: Option<&Path>,
12641 query: &str,
12642 entity: Option<&str>,
12643 assertion_type: Option<&str>,
12644 all: Option<&Path>,
12645 limit: usize,
12646 json_output: bool,
12647) {
12648 if let Some(dir) = all {
12649 search::run_all(dir, query, entity, assertion_type, limit);
12650 return;
12651 }
12652 let Some(src) = source else {
12653 fail("Provide --source <frontier> or --all <directory>.");
12654 };
12655 if json_output {
12656 let results = search::search(src, query, entity, assertion_type, limit);
12657 let loaded = load_frontier_or_fail(src);
12658 let source_hash = hash_path_or_fail(src);
12659 let payload = json!({
12660 "ok": true,
12661 "command": "search",
12662 "schema_version": project::VELA_SCHEMA_VERSION,
12663 "query": query,
12664 "frontier": {
12665 "name": &loaded.project.name,
12666 "source": src.display().to_string(),
12667 "hash": format!("sha256:{source_hash}"),
12668 },
12669 "filters": {
12670 "entity": entity,
12671 "assertion_type": assertion_type,
12672 "limit": limit,
12673 },
12674 "count": results.len(),
12675 "results": results.iter().map(|result| json!({
12676 "id": &result.id,
12677 "score": result.score,
12678 "assertion": &result.assertion,
12679 "assertion_type": &result.assertion_type,
12680 "confidence": result.confidence,
12681 "entities": &result.entities,
12682 "doi": &result.doi,
12683 })).collect::<Vec<_>>()
12684 });
12685 println!(
12686 "{}",
12687 serde_json::to_string_pretty(&payload).expect("failed to serialize search results")
12688 );
12689 } else {
12690 search::run(src, query, entity, assertion_type, limit);
12691 }
12692}
12693
12694fn cmd_tensions(source: &Path, both_high: bool, cross_domain: bool, top: usize, json_output: bool) {
12695 let frontier = load_frontier_or_fail(source);
12696 let result = tensions::analyze(&frontier, both_high, cross_domain, top);
12697 if json_output {
12698 let source_hash = hash_path_or_fail(source);
12699 let payload = json!({
12700 "ok": true,
12701 "command": "tensions",
12702 "schema_version": project::VELA_SCHEMA_VERSION,
12703 "frontier": {
12704 "name": &frontier.project.name,
12705 "source": source.display().to_string(),
12706 "hash": format!("sha256:{source_hash}"),
12707 },
12708 "filters": {
12709 "both_high": both_high,
12710 "cross_domain": cross_domain,
12711 "top": top,
12712 },
12713 "count": result.len(),
12714 "tensions": result.iter().map(|t| json!({
12715 "score": t.score,
12716 "resolved": t.resolved,
12717 "superseding_id": &t.superseding_id,
12718 "finding_a": {
12719 "id": &t.finding_a.id,
12720 "assertion": &t.finding_a.assertion,
12721 "confidence": t.finding_a.confidence,
12722 "assertion_type": &t.finding_a.assertion_type,
12723 "citation_count": t.finding_a.citation_count,
12724 "contradicts_count": t.finding_a.contradicts_count,
12725 },
12726 "finding_b": {
12727 "id": &t.finding_b.id,
12728 "assertion": &t.finding_b.assertion,
12729 "confidence": t.finding_b.confidence,
12730 "assertion_type": &t.finding_b.assertion_type,
12731 "citation_count": t.finding_b.citation_count,
12732 "contradicts_count": t.finding_b.contradicts_count,
12733 }
12734 })).collect::<Vec<_>>()
12735 });
12736 println!(
12737 "{}",
12738 serde_json::to_string_pretty(&payload).expect("failed to serialize tensions")
12739 );
12740 } else {
12741 tensions::print_tensions(&result);
12742 }
12743}
12744
12745fn cmd_gaps(action: GapsAction) {
12746 match action {
12747 GapsAction::Rank {
12748 frontier,
12749 top,
12750 domain,
12751 json,
12752 } => cmd_gap_rank(&frontier, top, domain.as_deref(), json),
12753 }
12754}
12755
12756fn cmd_gap_rank(frontier_path: &Path, top: usize, domain: Option<&str>, json_output: bool) {
12757 let frontier = load_frontier_or_fail(frontier_path);
12758 let mut ranked = frontier
12759 .findings
12760 .iter()
12761 .filter(|finding| finding.flags.gap || finding.flags.negative_space)
12762 .filter(|finding| {
12763 domain.is_none_or(|domain| {
12764 finding
12765 .assertion
12766 .text
12767 .to_lowercase()
12768 .contains(&domain.to_lowercase())
12769 || finding
12770 .assertion
12771 .entities
12772 .iter()
12773 .any(|entity| entity.name.to_lowercase().contains(&domain.to_lowercase()))
12774 })
12775 })
12776 .map(|finding| {
12777 let dependency_count = frontier
12778 .findings
12779 .iter()
12780 .flat_map(|candidate| candidate.links.iter())
12781 .filter(|link| link.target == finding.id)
12782 .count();
12783 let score = dependency_count as f64 + finding.confidence.score;
12784 json!({
12785 "id": &finding.id,
12786 "kind": "candidate_gap_review_lead",
12787 "assertion": &finding.assertion.text,
12788 "score": score,
12789 "dependency_count": dependency_count,
12790 "confidence": finding.confidence.score,
12791 "evidence_type": &finding.evidence.evidence_type,
12792 "entities": finding.assertion.entities.iter().map(|e| &e.name).collect::<Vec<_>>(),
12793 "recommended_action": "Review source scope and missing evidence before treating this as an experiment target.",
12794 "caveats": ["Candidate gap rankings are review leads, not guaranteed underexplored areas or experiment targets."],
12795 })
12796 })
12797 .collect::<Vec<_>>();
12798 ranked.sort_by(|a, b| {
12799 b.get("score")
12800 .and_then(Value::as_f64)
12801 .partial_cmp(&a.get("score").and_then(Value::as_f64))
12802 .unwrap_or(std::cmp::Ordering::Equal)
12803 });
12804 ranked.truncate(top);
12805 if json_output {
12806 let source_hash = hash_path_or_fail(frontier_path);
12807 let payload = json!({
12808 "ok": true,
12809 "command": "gaps rank",
12810 "schema_version": project::VELA_SCHEMA_VERSION,
12811 "frontier": {
12812 "name": &frontier.project.name,
12813 "source": frontier_path.display().to_string(),
12814 "hash": format!("sha256:{source_hash}"),
12815 },
12816 "filters": {
12817 "top": top,
12818 "domain": domain,
12819 },
12820 "count": ranked.len(),
12821 "ranking_label": "candidate gap review leads",
12822 "caveats": ["These rankings are navigation signals over flagged findings, not scientific conclusions."],
12823 "review_leads": ranked.clone(),
12824 "gaps": ranked,
12825 });
12826 println!(
12827 "{}",
12828 serde_json::to_string_pretty(&payload).expect("failed to serialize gap ranking")
12829 );
12830 } else {
12831 println!();
12832 println!(" {}", "CANDIDATE GAP REVIEW LEADS".dimmed());
12833 println!(" {}", style::tick_row(60));
12834 println!(" review source scope; these are not guaranteed experiment targets.");
12835 println!();
12836 for (idx, gap) in ranked.iter().enumerate() {
12837 println!(
12838 " {}. [{}] score={} {}",
12839 idx + 1,
12840 gap["id"].as_str().unwrap_or("?"),
12841 gap["score"].as_f64().unwrap_or(0.0),
12842 gap["assertion"].as_str().unwrap_or("")
12843 );
12844 }
12845 }
12846}
12847
/// Run the `bridge` command: load two or more frontiers, detect
/// cross-frontier entity bridges, optionally run rough PubMed prior-art
/// checks on the top `top_n` candidates, and print the report.
///
/// Requires at least 2 inputs (aborts via `fail` otherwise). Network
/// checks are rate-limited by a 350ms sleep between PubMed queries, and a
/// failed check is reported but does not abort the run.
async fn cmd_bridge(inputs: &[PathBuf], check_novelty: bool, top_n: usize) {
    if inputs.len() < 2 {
        fail("need at least 2 frontier files for bridge detection.");
    }
    println!();
    println!(" {}", "VELA · BRIDGE · V0.36.0".dimmed());
    println!(" {}", style::tick_row(60));
    println!(" loading {} frontiers...", inputs.len());
    // Each frontier is labeled by its file stem for the report.
    let mut named_projects = Vec::<(String, project::Project)>::new();
    let mut total_findings = 0;
    for path in inputs {
        let frontier = load_frontier_or_fail(path);
        let name = path
            .file_stem()
            .unwrap_or_default()
            .to_string_lossy()
            .to_string();
        println!(" {} · {} findings", name, frontier.stats.findings);
        total_findings += frontier.stats.findings;
        named_projects.push((name, frontier));
    }
    // detect_bridges takes borrowed (name, project) pairs.
    let refs = named_projects
        .iter()
        .map(|(name, frontier)| (name.as_str(), frontier))
        .collect::<Vec<_>>();
    let mut bridges = bridge::detect_bridges(&refs);
    if check_novelty && !bridges.is_empty() {
        let client = Client::new();
        let check_count = bridges.len().min(top_n);
        println!(" running rough PubMed prior-art checks for top {check_count} bridges...");
        for bridge_item in bridges.iter_mut().take(check_count) {
            let query = bridge::novelty_query(&bridge_item.entity_name, bridge_item);
            match bridge::check_novelty(&client, &query).await {
                // Record the hit count; on error, warn and keep going.
                Ok(count) => bridge_item.pubmed_count = Some(count),
                Err(e) => eprintln!(
                    " {} prior-art check failed for {}: {e}",
                    style::err_prefix(),
                    bridge_item.entity_name
                ),
            }
            // Be polite to the PubMed API between queries.
            tokio::time::sleep(std::time::Duration::from_millis(350)).await;
        }
    }
    print!("{}", bridge::format_report(&bridges, total_findings));
}
12893
/// Collected CLI arguments for the `bench` command (consumed by `cmd_bench`).
struct BenchArgs {
    /// Frontier under test; `cmd_bench` supplies a default when `None`.
    frontier: Option<PathBuf>,
    /// Gold file for finding-level comparison.
    gold: Option<PathBuf>,
    /// Gold file for entity-level comparison.
    entity_gold: Option<PathBuf>,
    /// Gold file for link-level comparison.
    link_gold: Option<PathBuf>,
    /// Benchmark suite definition; takes precedence over single-gold modes.
    suite: Option<PathBuf>,
    /// When set, only emit the suite readiness report and exit.
    suite_ready: bool,
    /// Minimum F1 threshold; `cmd_bench` defaults it unless disabled.
    min_f1: Option<f64>,
    /// Minimum precision threshold (no default).
    min_precision: Option<f64>,
    /// Minimum recall threshold (no default).
    min_recall: Option<f64>,
    /// Disable all pass/fail thresholds.
    no_thresholds: bool,
    /// Emit JSON instead of the pretty report.
    json: bool,
}
12907
12908fn cmd_agent_bench(
12913 gold: &Path,
12914 candidate: &Path,
12915 sources: Option<&Path>,
12916 threshold: Option<f64>,
12917 report_path: Option<&Path>,
12918 json_out: bool,
12919) {
12920 let input = crate::agent_bench::BenchInput {
12921 gold_path: gold.to_path_buf(),
12922 candidate_path: candidate.to_path_buf(),
12923 sources: sources.map(Path::to_path_buf),
12924 threshold: threshold.unwrap_or(0.0),
12925 };
12926 let report = match crate::agent_bench::run(input) {
12927 Ok(r) => r,
12928 Err(e) => {
12929 eprintln!("{} bench failed: {e}", style::err_prefix());
12930 std::process::exit(1);
12931 }
12932 };
12933
12934 let json = serde_json::to_string_pretty(&report).unwrap_or_default();
12935 if let Some(path) = report_path
12936 && let Err(e) = std::fs::write(path, &json)
12937 {
12938 eprintln!(
12939 "{} failed to write report to {}: {e}",
12940 style::err_prefix(),
12941 path.display()
12942 );
12943 }
12944
12945 if json_out {
12946 println!("{json}");
12947 } else {
12948 println!();
12949 println!(" {}", "VELA · BENCH · AGENT STATE-UPDATE".dimmed());
12950 println!(" {}", style::tick_row(60));
12951 print!("{}", crate::agent_bench::render_pretty(&report));
12952 println!();
12953 }
12954
12955 if !report.pass {
12956 std::process::exit(1);
12957 }
12958}
12959
/// Run the `bench` command.
///
/// Mode precedence: `--suite-ready` report → full `--suite` run →
/// single-gold comparison (`--link-gold`, then `--entity-gold`, then
/// `--gold`). A failing suite or readiness report exits with status 1.
fn cmd_bench(args: BenchArgs) {
    // --suite-ready: only report readiness of the suite file, then exit.
    if args.suite_ready {
        let suite_path = args
            .suite
            .unwrap_or_else(|| PathBuf::from("benchmarks/suites/bbb-core.json"));
        let payload =
            benchmark::suite_ready_report(&suite_path).unwrap_or_else(|e| fail_return(&e));
        println!(
            "{}",
            serde_json::to_string_pretty(&payload).expect("failed to serialize suite-ready report")
        );
        if payload.get("ok").and_then(Value::as_bool) != Some(true) {
            std::process::exit(1);
        }
        return;
    }
    // Full suite run: JSON or a short pretty summary, non-zero exit on fail.
    if let Some(suite_path) = args.suite {
        let payload = benchmark::run_suite(&suite_path).unwrap_or_else(|e| fail_return(&e));
        if args.json {
            println!(
                "{}",
                serde_json::to_string_pretty(&payload)
                    .expect("failed to serialize benchmark suite")
            );
        } else {
            let ok = payload.get("ok").and_then(Value::as_bool) == Some(true);
            let metrics = payload.get("metrics").unwrap_or(&Value::Null);
            println!();
            println!(" {}", "VELA · BENCH · SUITE".dimmed());
            println!(" {}", style::tick_row(60));
            println!(" suite: {}", suite_path.display());
            println!(
                " status: {}",
                if ok {
                    style::ok("pass")
                } else {
                    style::lost("fail")
                }
            );
            println!(
                " tasks: {}/{} passed",
                metrics
                    .get("tasks_passed")
                    .and_then(Value::as_u64)
                    .unwrap_or(0),
                metrics
                    .get("tasks_total")
                    .and_then(Value::as_u64)
                    .unwrap_or(0)
            );
        }
        // Exit code reflects suite outcome in both output modes.
        if payload.get("ok").and_then(Value::as_bool) != Some(true) {
            std::process::exit(1);
        }
        return;
    }

    // Single-gold modes operate on one frontier (with a built-in default).
    let frontier = args
        .frontier
        .unwrap_or_else(|| PathBuf::from("frontiers/bbb-alzheimer.json"));
    // Thresholds: --no-thresholds clears everything; otherwise min_f1
    // defaults to 0.05 while precision/recall have no default floor.
    let thresholds = benchmark::BenchmarkThresholds {
        min_f1: if args.no_thresholds {
            None
        } else {
            args.min_f1.or(Some(0.05))
        },
        min_precision: if args.no_thresholds {
            None
        } else {
            args.min_precision
        },
        min_recall: if args.no_thresholds {
            None
        } else {
            args.min_recall
        },
        ..Default::default()
    };
    // Precedence: link gold → entity gold → finding gold.
    if let Some(path) = args.link_gold {
        print_benchmark_or_exit(benchmark::task_envelope(
            &frontier,
            None,
            benchmark::BenchmarkMode::Link,
            Some(&path),
            &thresholds,
            None,
        ));
    } else if let Some(path) = args.entity_gold {
        print_benchmark_or_exit(benchmark::task_envelope(
            &frontier,
            None,
            benchmark::BenchmarkMode::Entity,
            Some(&path),
            &thresholds,
            None,
        ));
    } else if let Some(path) = args.gold {
        // Finding mode: JSON envelope when --json, legacy runner otherwise.
        if args.json {
            print_benchmark_or_exit(benchmark::task_envelope(
                &frontier,
                None,
                benchmark::BenchmarkMode::Finding,
                Some(&path),
                &thresholds,
                None,
            ));
        } else {
            benchmark::run(&frontier, &path, false);
        }
    } else {
        fail("Provide --suite, --gold, --entity-gold, or --link-gold.");
    }
}
13073
13074fn print_benchmark_or_exit(result: Result<Value, String>) {
13075 let payload = result.unwrap_or_else(|e| fail_return(&e));
13076 println!(
13077 "{}",
13078 serde_json::to_string_pretty(&payload).expect("failed to serialize benchmark report")
13079 );
13080 if payload.get("ok").and_then(Value::as_bool) != Some(true) {
13081 std::process::exit(1);
13082 }
13083}
13084
13085fn cmd_packet(action: PacketAction) {
13086 let (result, json_output) = match action {
13087 PacketAction::Inspect { path, json } => (packet::inspect(&path), json),
13088 PacketAction::Validate { path, json } => (packet::validate(&path), json),
13089 };
13090 match result {
13091 Ok(output) if json_output => {
13092 println!(
13093 "{}",
13094 serde_json::to_string_pretty(&json!({
13095 "ok": true,
13096 "command": "packet",
13097 "result": output,
13098 }))
13099 .expect("failed to serialize packet response")
13100 );
13101 }
13102 Ok(output) => println!("{output}"),
13103 Err(e) => fail(&e),
13104 }
13105}
13106
13107fn cmd_verify(path: &Path, json_output: bool) {
13112 let result = packet::validate(path);
13113 match result {
13114 Ok(output) if json_output => {
13115 println!(
13116 "{}",
13117 serde_json::to_string_pretty(&json!({
13118 "ok": true,
13119 "command": "verify",
13120 "result": output,
13121 }))
13122 .expect("failed to serialize verify response")
13123 );
13124 }
13125 Ok(output) => {
13126 println!("{output}");
13127 println!(
13128 "\nverify: ok\n every file in the manifest matched its claimed sha256.\n pull this packet on another machine, run the same command, see the same line."
13129 );
13130 }
13131 Err(e) => fail(&e),
13132 }
13133}
13134
13135fn cmd_init(path: &Path, name: &str, template: &str, initialize_git: bool, json_output: bool) {
13136 if path.join(".vela").exists() {
13137 fail(&format!(
13138 "already initialized: {} exists",
13139 path.join(".vela").display()
13140 ));
13141 }
13142 let payload = frontier_repo::initialize(
13143 path,
13144 frontier_repo::InitOptions {
13145 name,
13146 template,
13147 initialize_git,
13148 },
13149 )
13150 .unwrap_or_else(|e| fail_return(&e));
13151 if json_output {
13152 println!(
13153 "{}",
13154 serde_json::to_string_pretty(&payload).expect("failed to serialize init report")
13155 );
13156 } else {
13157 println!(
13158 "{} initialized frontier repository in {}",
13159 style::ok("ok"),
13160 path.display()
13161 );
13162 }
13163}
13164
13165fn cmd_import(frontier_path: &Path, into: Option<&Path>) {
13166 let frontier = repo::load_from_path(frontier_path).unwrap_or_else(|e| fail_return(&e));
13167 let target = into
13168 .map(Path::to_path_buf)
13169 .unwrap_or_else(|| PathBuf::from(frontier.project.name.replace(' ', "-").to_lowercase()));
13170 repo::init_repo(&target, &frontier).unwrap_or_else(|e| fail(&e));
13171 println!(
13172 "{} {} findings · {}",
13173 style::ok("imported"),
13174 frontier.findings.len(),
13175 target.display()
13176 );
13177}
13178
13179fn cmd_locator_repair(
13180 path: &Path,
13181 atom_id: &str,
13182 locator_override: Option<&str>,
13183 reviewer: &str,
13184 reason: &str,
13185 apply: bool,
13186 json_output: bool,
13187) {
13188 let report = state::repair_evidence_atom_locator(
13189 path,
13190 atom_id,
13191 locator_override,
13192 reviewer,
13193 reason,
13194 apply,
13195 )
13196 .unwrap_or_else(|e| fail_return(&e));
13197 print_state_report(&report, json_output);
13198}
13199
13200async fn cmd_source_fetch(
13205 identifier: &str,
13206 cache_root: Option<&Path>,
13207 out_path: Option<&Path>,
13208 refresh: bool,
13209 _json_output: bool,
13210) {
13211 use sha2::{Digest, Sha256};
13212
13213 let normalized = normalize_source_identifier(identifier);
13214 let cache_path = cache_root.map(|root| {
13215 let hash = format!("{:x}", Sha256::digest(normalized.as_bytes()));
13216 root.join("sources")
13217 .join("cache")
13218 .join(format!("{hash}.json"))
13219 });
13220
13221 if !refresh
13222 && let Some(p) = cache_path.as_ref()
13223 && p.is_file()
13224 {
13225 let body = std::fs::read_to_string(p)
13226 .unwrap_or_else(|e| fail_return(&format!("read cache {}: {e}", p.display())));
13227 emit_source_fetch_result(&body, out_path);
13228 return;
13229 }
13230
13231 let result = fetch_source_metadata(&normalized).await;
13232 let json = match result {
13233 Ok(value) => serde_json::to_string_pretty(&value)
13234 .unwrap_or_else(|e| fail_return(&format!("serialize fetched record: {e}"))),
13235 Err(e) => fail_return(&format!("source-fetch '{identifier}': {e}")),
13236 };
13237
13238 if let Some(p) = cache_path.as_ref() {
13239 if let Some(parent) = p.parent() {
13240 std::fs::create_dir_all(parent)
13241 .unwrap_or_else(|e| fail_return(&format!("mkdir {}: {e}", parent.display())));
13242 }
13243 std::fs::write(p, &json)
13244 .unwrap_or_else(|e| fail_return(&format!("write cache {}: {e}", p.display())));
13245 }
13246 emit_source_fetch_result(&json, out_path);
13247}
13248
13249fn emit_source_fetch_result(body: &str, out_path: Option<&Path>) {
13250 if let Some(p) = out_path {
13251 if let Some(parent) = p.parent() {
13252 let _ = std::fs::create_dir_all(parent);
13253 }
13254 std::fs::write(p, body)
13255 .unwrap_or_else(|e| fail_return(&format!("write {}: {e}", p.display())));
13256 } else {
13257 println!("{body}");
13258 }
13259}
13260
/// Normalize a raw source identifier into a prefixed canonical form.
///
/// Rules (first match wins):
/// - already prefixed (`doi:`, `pmid:`, `nct:`, `pmc:`) → returned as-is;
/// - starts with `10.` → `doi:<id>`;
/// - starts with `NCT`/`nct` → `nct:<digits>` (prefix stripped, uppercased);
/// - all ASCII digits (non-empty) → `pmid:<digits>`;
/// - anything else (including empty) → returned trimmed, unchanged.
///
/// Fixes two defects in the previous version:
/// - the NCT branch ended in `.split_at(0).0`, which always yields the
///   empty string, so every NCT id normalized to just `"nct:"`;
/// - an empty/whitespace-only input normalized to `"pmid:"` because
///   `all()` on an empty iterator is true.
fn normalize_source_identifier(raw: &str) -> String {
    let trimmed = raw.trim();
    if trimmed.starts_with("doi:")
        || trimmed.starts_with("pmid:")
        || trimmed.starts_with("nct:")
        || trimmed.starts_with("pmc:")
    {
        return trimmed.to_string();
    }
    if trimmed.starts_with("10.") {
        return format!("doi:{trimmed}");
    }
    if trimmed.starts_with("NCT") || trimmed.starts_with("nct") {
        // Uppercase first so a lowercase "nct" prefix is also stripped.
        return format!("nct:{}", trimmed.to_uppercase().trim_start_matches("NCT"));
    }
    if !trimmed.is_empty() && trimmed.chars().all(|c| c.is_ascii_digit()) {
        return format!("pmid:{trimmed}");
    }
    trimmed.to_string()
}
13289
13290async fn fetch_source_metadata(normalized: &str) -> Result<Value, String> {
13291 let client = Client::builder()
13292 .user_agent("vela/0.66.0 (vela-source-fetch; +https://vela-science.fly.dev)")
13293 .timeout(std::time::Duration::from_secs(30))
13294 .build()
13295 .map_err(|e| format!("client build: {e}"))?;
13296 if let Some(rest) = normalized.strip_prefix("doi:") {
13297 let mut record = fetch_via_crossref(&client, rest).await?;
13304 let crossref_abstract = record
13305 .get("abstract")
13306 .and_then(|v| v.as_str())
13307 .unwrap_or("");
13308 if crossref_abstract.is_empty()
13309 && let Some(pmid) = resolve_doi_to_pmid(&client, rest).await
13310 && let Ok(pubmed_record) = fetch_via_pubmed(&client, &pmid).await
13311 {
13312 let pubmed_abstract = pubmed_record
13313 .get("abstract")
13314 .and_then(|v| v.as_str())
13315 .unwrap_or("")
13316 .to_string();
13317 if !pubmed_abstract.is_empty()
13318 && let Some(obj) = record.as_object_mut()
13319 {
13320 obj.insert("abstract".to_string(), Value::String(pubmed_abstract));
13321 obj.insert(
13322 "abstract_source".to_string(),
13323 Value::String(format!("pubmed:{pmid}")),
13324 );
13325 }
13326 }
13327 return Ok(record);
13328 }
13329 if let Some(rest) = normalized.strip_prefix("pmid:") {
13330 return fetch_via_pubmed(&client, rest).await;
13331 }
13332 if let Some(rest) = normalized.strip_prefix("nct:") {
13333 return fetch_via_ctgov(&client, rest).await;
13334 }
13335 Err(format!(
13336 "unsupported source identifier '{normalized}'. Use doi:, pmid:, or nct: prefix."
13337 ))
13338}
13339
13340async fn resolve_doi_to_pmid(client: &Client, doi: &str) -> Option<String> {
13344 let url = format!(
13345 "https://eutils.ncbi.nlm.nih.gov/entrez/eutils/esearch.fcgi?db=pubmed&term={}[doi]&retmode=json",
13346 urlencoding::encode(doi)
13347 );
13348 let resp = client.get(&url).send().await.ok()?;
13349 if !resp.status().is_success() {
13350 return None;
13351 }
13352 let body: Value = resp.json().await.ok()?;
13353 let id_list = body.pointer("/esearchresult/idlist")?.as_array()?;
13354 if id_list.len() != 1 {
13355 return None;
13358 }
13359 id_list.first()?.as_str().map(|s| s.to_string())
13360}
13361
13362async fn fetch_via_crossref(client: &Client, doi: &str) -> Result<Value, String> {
13363 let url = format!("https://api.crossref.org/works/{doi}");
13364 let resp = client
13365 .get(&url)
13366 .send()
13367 .await
13368 .map_err(|e| format!("crossref get: {e}"))?;
13369 if !resp.status().is_success() {
13370 return Err(format!("crossref returned {}", resp.status()));
13371 }
13372 let body: Value = resp
13373 .json()
13374 .await
13375 .map_err(|e| format!("crossref json: {e}"))?;
13376 let work = body.get("message").cloned().unwrap_or(Value::Null);
13377 let title = work
13378 .get("title")
13379 .and_then(|v| v.as_array())
13380 .and_then(|a| a.first())
13381 .and_then(|v| v.as_str())
13382 .unwrap_or("")
13383 .to_string();
13384 let abstract_html = work
13385 .get("abstract")
13386 .and_then(|v| v.as_str())
13387 .unwrap_or("")
13388 .to_string();
13389 let abstract_text = strip_jats_tags(&abstract_html);
13390 let year = work
13391 .get("issued")
13392 .and_then(|v| v.get("date-parts"))
13393 .and_then(|v| v.as_array())
13394 .and_then(|a| a.first())
13395 .and_then(|v| v.as_array())
13396 .and_then(|a| a.first())
13397 .and_then(|v| v.as_i64());
13398 let journal = work
13399 .get("container-title")
13400 .and_then(|v| v.as_array())
13401 .and_then(|a| a.first())
13402 .and_then(|v| v.as_str())
13403 .unwrap_or("")
13404 .to_string();
13405 let authors = work
13406 .get("author")
13407 .and_then(|v| v.as_array())
13408 .map(|arr| {
13409 arr.iter()
13410 .filter_map(|a| {
13411 let given = a.get("given").and_then(|v| v.as_str()).unwrap_or("");
13412 let family = a.get("family").and_then(|v| v.as_str()).unwrap_or("");
13413 let combined = format!("{given} {family}").trim().to_string();
13414 if combined.is_empty() {
13415 None
13416 } else {
13417 Some(combined)
13418 }
13419 })
13420 .collect::<Vec<_>>()
13421 })
13422 .unwrap_or_default();
13423 Ok(json!({
13424 "schema": "vela.source_fetch.v0.1",
13425 "identifier": format!("doi:{doi}"),
13426 "source": "crossref",
13427 "title": title,
13428 "abstract": abstract_text,
13429 "year": year,
13430 "journal": journal,
13431 "authors": authors,
13432 "retrieved_at": chrono::Utc::now().to_rfc3339(),
13433 }))
13434}
13435
13436async fn fetch_via_pubmed(client: &Client, pmid: &str) -> Result<Value, String> {
13437 let url = format!(
13438 "https://eutils.ncbi.nlm.nih.gov/entrez/eutils/efetch.fcgi?db=pubmed&id={pmid}&retmode=xml"
13439 );
13440 let resp = client
13441 .get(&url)
13442 .send()
13443 .await
13444 .map_err(|e| format!("pubmed get: {e}"))?;
13445 if !resp.status().is_success() {
13446 return Err(format!("pubmed returned {}", resp.status()));
13447 }
13448 let xml = resp.text().await.map_err(|e| format!("pubmed text: {e}"))?;
13449 let title = extract_xml_text(&xml, "<ArticleTitle>", "</ArticleTitle>");
13450 let abstract_text = extract_xml_text(&xml, "<AbstractText>", "</AbstractText>");
13451 let year = extract_xml_text(&xml, "<Year>", "</Year>")
13452 .parse::<i64>()
13453 .ok();
13454 let journal = extract_xml_text(&xml, "<Title>", "</Title>");
13455 Ok(json!({
13456 "schema": "vela.source_fetch.v0.1",
13457 "identifier": format!("pmid:{pmid}"),
13458 "source": "pubmed",
13459 "title": title,
13460 "abstract": abstract_text,
13461 "year": year,
13462 "journal": journal,
13463 "authors": Vec::<String>::new(),
13464 "retrieved_at": chrono::Utc::now().to_rfc3339(),
13465 }))
13466}
13467
13468async fn fetch_via_ctgov(client: &Client, nct: &str) -> Result<Value, String> {
13469 let nct_clean = nct.trim();
13470 let nct_id = if nct_clean.starts_with("NCT") || nct_clean.starts_with("nct") {
13471 nct_clean.to_uppercase()
13472 } else {
13473 format!("NCT{nct_clean}")
13474 };
13475 let url = format!("https://clinicaltrials.gov/api/v2/studies/{nct_id}");
13476 let resp = client
13477 .get(&url)
13478 .send()
13479 .await
13480 .map_err(|e| format!("ctgov get: {e}"))?;
13481 if !resp.status().is_success() {
13482 return Err(format!("ctgov returned {}", resp.status()));
13483 }
13484 let body: Value = resp.json().await.map_err(|e| format!("ctgov json: {e}"))?;
13485 let title = body
13486 .pointer("/protocolSection/identificationModule/briefTitle")
13487 .and_then(|v| v.as_str())
13488 .unwrap_or("")
13489 .to_string();
13490 let abstract_text = body
13491 .pointer("/protocolSection/descriptionModule/briefSummary")
13492 .and_then(|v| v.as_str())
13493 .unwrap_or("")
13494 .to_string();
13495 let phase = body
13496 .pointer("/protocolSection/designModule/phases")
13497 .and_then(|v| v.as_array())
13498 .and_then(|a| a.first())
13499 .and_then(|v| v.as_str())
13500 .unwrap_or("")
13501 .to_string();
13502 Ok(json!({
13503 "schema": "vela.source_fetch.v0.1",
13504 "identifier": format!("nct:{nct_id}"),
13505 "source": "clinicaltrials.gov",
13506 "title": title,
13507 "abstract": abstract_text,
13508 "year": Value::Null,
13509 "journal": phase,
13510 "authors": Vec::<String>::new(),
13511 "retrieved_at": chrono::Utc::now().to_rfc3339(),
13512 }))
13513}
13514
/// Return the trimmed text between the first occurrence of `open` and the
/// next occurrence of `close`, or "" when either delimiter is absent.
fn extract_xml_text(xml: &str, open: &str, close: &str) -> String {
    xml.split_once(open)
        .and_then(|(_, tail)| tail.split_once(close))
        .map(|(inner, _)| inner.trim().to_string())
        .unwrap_or_default()
}
13524
/// Strip JATS/XML markup from a Crossref abstract and collapse all runs of
/// whitespace into single spaces. Not a real parser: any '<'…'>' span is
/// simply dropped.
fn strip_jats_tags(html: &str) -> String {
    let mut in_tag = false;
    let text: String = html
        .chars()
        .filter(|&c| match c {
            '<' => {
                in_tag = true;
                false
            }
            '>' => {
                in_tag = false;
                false
            }
            _ => !in_tag,
        })
        .collect();
    text.split_whitespace().collect::<Vec<_>>().join(" ")
}
13538
/// CLI handler for `span-repair`: delegate to `state::repair_finding_span`
/// and print the resulting report (JSON or human-readable).
/// `apply` is forwarded unchanged — presumably it controls whether the repair
/// is persisted vs. previewed; semantics live in `state`.
fn cmd_span_repair(
    path: &Path,
    finding_id: &str,
    section: &str,
    text: &str,
    reviewer: &str,
    reason: &str,
    apply: bool,
    json_output: bool,
) {
    let report =
        state::repair_finding_span(path, finding_id, section, text, reviewer, reason, apply)
            .unwrap_or_else(|e| fail_return(&e));
    print_state_report(&report, json_output);
}
13554
/// CLI handler for `entity-resolve`: delegate to
/// `state::resolve_finding_entity` with all flags passed through verbatim and
/// print the resulting report (JSON or human-readable).
#[allow(clippy::too_many_arguments)]
fn cmd_entity_resolve(
    path: &Path,
    finding_id: &str,
    entity_name: &str,
    source: &str,
    id: &str,
    confidence: f64,
    matched_name: Option<&str>,
    resolution_method: &str,
    reviewer: &str,
    reason: &str,
    apply: bool,
    json_output: bool,
) {
    let report = state::resolve_finding_entity(
        path,
        finding_id,
        entity_name,
        source,
        id,
        confidence,
        matched_name,
        resolution_method,
        reviewer,
        reason,
        apply,
    )
    .unwrap_or_else(|e| fail_return(&e));
    print_state_report(&report, json_output);
}
13586
13587fn cmd_propagate(
13588 path: &Path,
13589 retract: Option<String>,
13590 reduce_confidence: Option<String>,
13591 to: Option<f64>,
13592 output: Option<&Path>,
13593) {
13594 let mut frontier = load_frontier_or_fail(path);
13595 let (finding_id, action, label) = if let Some(id) = retract {
13596 (id, propagate::PropagationAction::Retracted, "retraction")
13597 } else if let Some(id) = reduce_confidence {
13598 let score = to.unwrap_or_else(|| fail_return("--reduce-confidence requires --to <score>"));
13599 if !(0.0..=1.0).contains(&score) {
13600 fail("--to must be between 0.0 and 1.0");
13601 }
13602 (
13603 id,
13604 propagate::PropagationAction::ConfidenceReduced { new_score: score },
13605 "confidence reduction",
13606 )
13607 } else {
13608 fail("specify --retract <id> or --reduce-confidence <id> --to <score>");
13609 };
13610 if !frontier.findings.iter().any(|f| f.id == finding_id) {
13611 fail(&format!("finding not found: {finding_id}"));
13612 }
13613 let result = propagate::propagate_correction(&mut frontier, &finding_id, action);
13614 frontier.review_events.extend(result.events.clone());
13619 project::recompute_stats(&mut frontier);
13620 propagate::print_result(&result, label, &finding_id);
13621 let out = output.unwrap_or(path);
13622 repo::save_to_path(out, &frontier).expect("Failed to save frontier");
13623 println!(" output: {}", out.display());
13624}
13625
/// CLI handler for `mcp-setup`: print a ready-to-paste MCP server
/// configuration snippet for the chosen frontier source.
fn cmd_mcp_setup(source: Option<&Path>, frontiers: Option<&Path>) {
    // `source` wins over `frontiers`; with neither, fall back to the
    // default frontier file.
    let (args, source_desc) = match (source, frontiers) {
        (Some(path), _) => (
            format!(r#""serve", "{}""#, path.display()),
            path.display().to_string(),
        ),
        (None, Some(path)) => (
            format!(r#""serve", "--frontiers", "{}""#, path.display()),
            path.display().to_string(),
        ),
        (None, None) => (
            r#""serve", "frontier.json""#.to_string(),
            "frontier.json".to_string(),
        ),
    };
    println!(
        r#"Add this MCP server configuration to your client:

{{
  "mcpServers": {{
    "vela": {{
      "command": "vela",
      "args": [{args}]
    }}
  }}
}}

Source: {source_desc}"#
    );
}
13653
13654fn parse_entities(input: &str) -> Vec<(String, String)> {
13655 if input.trim().is_empty() {
13656 return Vec::new();
13657 }
13658 input
13659 .split(',')
13660 .filter_map(|pair| {
13661 let parts = pair.trim().splitn(2, ':').collect::<Vec<_>>();
13662 if parts.len() == 2 {
13663 Some((parts[0].trim().to_string(), parts[1].trim().to_string()))
13664 } else {
13665 eprintln!(
13666 "{} skipping malformed entity '{}'",
13667 style::warn("warn"),
13668 pair.trim()
13669 );
13670 None
13671 }
13672 })
13673 .collect()
13674}
13675
13676fn parse_evidence_spans(inputs: &[String]) -> Vec<Value> {
13677 inputs
13678 .iter()
13679 .filter_map(|input| {
13680 let trimmed = input.trim();
13681 if trimmed.is_empty() {
13682 return None;
13683 }
13684 if trimmed.starts_with('{') {
13685 match serde_json::from_str::<Value>(trimmed) {
13686 Ok(value @ Value::Object(_)) => return Some(value),
13687 Ok(_) | Err(_) => {
13688 eprintln!(
13689 "{} evidence span JSON should be an object; storing as text",
13690 style::warn("warn")
13691 );
13692 }
13693 }
13694 }
13695 Some(json!({
13696 "section": "curator_source",
13697 "text": trimmed,
13698 }))
13699 })
13700 .collect()
13701}
13702
13703fn hash_path(path: &Path) -> Result<String, String> {
13704 let mut hasher = Sha256::new();
13705 if path.is_file() {
13706 let bytes = std::fs::read(path)
13707 .map_err(|e| format!("Failed to read {} for hashing: {e}", path.display()))?;
13708 hasher.update(&bytes);
13709 } else if path.is_dir() {
13710 let mut files = Vec::new();
13711 collect_hash_files(path, path, &mut files)?;
13712 files.sort();
13713 for rel in files {
13714 hasher.update(rel.to_string_lossy().as_bytes());
13715 let bytes = std::fs::read(path.join(&rel))
13716 .map_err(|e| format!("Failed to read {} for hashing: {e}", rel.display()))?;
13717 hasher.update(bytes);
13718 }
13719 } else {
13720 return Err(format!("Cannot hash missing path {}", path.display()));
13721 }
13722 Ok(format!("{:x}", hasher.finalize()))
13723}
13724
/// Load a frontier project from `path`, exiting via `fail_return` with a
/// contextual message on any load error.
fn load_frontier_or_fail(path: &Path) -> project::Project {
    repo::load_from_path(path).unwrap_or_else(|e| {
        fail_return(&format!(
            "Failed to load frontier '{}': {e}",
            path.display()
        ))
    })
}
13733
/// Hash a frontier path via `hash_path`, exiting via `fail_return` with a
/// contextual message on any hashing error.
fn hash_path_or_fail(path: &Path) -> String {
    hash_path(path).unwrap_or_else(|e| {
        fail_return(&format!(
            "Failed to hash frontier '{}': {e}",
            path.display()
        ))
    })
}
13742
/// Recursively gather the root-relative paths of every regular file under
/// `dir`, appending them to `files`. Order is directory-walk order; the
/// caller sorts before hashing.
fn collect_hash_files(root: &Path, dir: &Path, files: &mut Vec<PathBuf>) -> Result<(), String> {
    let entries =
        std::fs::read_dir(dir).map_err(|e| format!("Failed to read {}: {e}", dir.display()))?;
    for entry in entries {
        let path = entry
            .map_err(|e| format!("Failed to read directory entry: {e}"))?
            .path();
        if path.is_dir() {
            collect_hash_files(root, &path, files)?;
        } else if path.is_file() {
            let rel = path.strip_prefix(root).map_err(|e| e.to_string())?;
            files.push(rel.to_path_buf());
        }
    }
    Ok(())
}
13761
13762fn schema_error_suggestion(error: &str) -> &'static str {
13763 if schema_error_action(error).is_some() {
13764 "Run `vela normalize` to repair deterministic frontier state."
13765 } else {
13766 "Inspect and correct the referenced frontier field."
13767 }
13768}
13769
13770fn schema_error_fix(error: &str) -> bool {
13771 schema_error_action(error).is_some()
13772}
13773
/// Map a schema validation error message to the `vela normalize` action that
/// repairs it, or `None` when no deterministic repair exists.
fn schema_error_action(error: &str) -> Option<&'static str> {
    // Substrings that indicate repairable metadata/stats drift.
    const METADATA_MARKERS: [&str; 5] = [
        "stats.findings",
        "stats.links",
        "Invalid compiler",
        "Invalid vela_version",
        "Invalid schema",
    ];
    if METADATA_MARKERS.iter().any(|marker| error.contains(marker)) {
        Some("normalize_metadata_and_stats")
    } else if error.contains("does not match content-address") {
        Some("rewrite_ids")
    } else {
        None
    }
}
13788
13789fn build_repair_plan(diagnostics: &[Value]) -> Vec<Value> {
13790 let mut actions = std::collections::BTreeMap::<String, usize>::new();
13791 for diagnostic in diagnostics {
13792 if let Some(action) = diagnostic.get("normalize_action").and_then(Value::as_str) {
13793 *actions.entry(action.to_string()).or_default() += 1;
13794 }
13795 }
13796 actions
13797 .into_iter()
13798 .map(|(action, count)| {
13799 let command = if action == "rewrite_ids" {
13800 "vela normalize <frontier> --write --rewrite-ids --id-map id-map.json"
13801 } else {
13802 "vela normalize <frontier> --write"
13803 };
13804 json!({
13805 "action": action,
13806 "count": count,
13807 "command": command,
13808 })
13809 })
13810 .collect()
13811}
13812
/// CLI handler for `integrity`: run state-integrity analysis on a frontier
/// file and print the report — full JSON, or a human summary capped at the
/// first 8 structural errors.
fn cmd_integrity(frontier: &Path, json: bool) {
    let report = state_integrity::analyze_path(frontier).unwrap_or_else(|e| fail_return(&e));
    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&report).expect("failed to serialize integrity report")
        );
    } else {
        println!("vela integrity");
        println!("  frontier: {}", frontier.display());
        println!("  status: {}", report.status);
        println!("  proof freshness: {}", report.proof_freshness);
        println!("  structural errors: {}", report.structural_errors.len());
        // Cap detail lines so very broken frontiers stay readable.
        for error in report.structural_errors.iter().take(8) {
            println!("    - {}: {}", error.rule_id, error.message);
        }
    }
}
13831
/// CLI handler for `impact`: analyze downstream impact of one finding (to an
/// optional traversal depth) and print either the JSON report or a short
/// human summary of its counts.
fn cmd_impact(frontier: &Path, finding_id: &str, depth: Option<usize>, json: bool) {
    let report =
        impact::analyze_path(frontier, finding_id, depth).unwrap_or_else(|e| fail_return(&e));
    if json {
        println!(
            "{}",
            serde_json::to_string_pretty(&report).expect("failed to serialize impact report")
        );
    } else {
        println!("vela impact");
        println!("  finding: {}", report.target.id);
        println!("  frontier: {}", report.frontier.vfr_id);
        println!("  direct dependents: {}", report.summary.direct_dependents);
        println!("  downstream: {}", report.summary.total_downstream);
        println!("  open proposals: {}", report.summary.open_proposals);
        println!("  accepted events: {}", report.summary.accepted_events);
        println!("  proof: {}", report.summary.proof_status);
    }
}
13851
/// CLI handler for `discord`: compute the discord assignment for a frontier
/// and report which findings carry which discord kinds.
///
/// Output is either a JSON report or a human summary with a per-kind
/// histogram and up to 50 affected findings. `kind_filter` restricts the row
/// list (but not the histogram or support size) to findings carrying that
/// kind.
fn cmd_discord(frontier: &Path, json: bool, kind_filter: Option<&str>) {
    use crate::discord::DiscordKind;
    use crate::discord_compute::compute_discord_assignment;

    let project = repo::load_from_path(frontier).unwrap_or_else(|e| fail_return(&e));
    let assignment = compute_discord_assignment(&project);
    let support = assignment.frontier_support();

    // One row per finding in the support set: (finding id, discord kinds),
    // optionally filtered to a single kind.
    let mut rows: Vec<(String, Vec<String>)> = Vec::new();
    for context in support.iter() {
        let set = assignment.get(context);
        let kinds: Vec<String> = set.iter().map(|k| k.as_str().to_string()).collect();
        if let Some(filter) = kind_filter
            && !kinds.iter().any(|k| k == filter)
        {
            continue;
        }
        rows.push((context.clone(), kinds));
    }

    // Per-kind counts across the whole assignment (unfiltered); kinds with
    // zero occurrences are omitted.
    let mut histogram: std::collections::BTreeMap<&'static str, usize> =
        std::collections::BTreeMap::new();
    for kind in DiscordKind::ALL {
        let count = assignment
            .iter()
            .filter(|(_, set)| set.contains(*kind))
            .count();
        if count > 0 {
            histogram.insert(kind.as_str(), count);
        }
    }

    let total_findings = project.findings.len();
    let frontier_id = project
        .frontier_id
        .clone()
        .unwrap_or_else(|| String::from("<unknown>"));

    if json {
        let row_value = |row: &(String, Vec<String>)| {
            serde_json::json!({
                "finding_id": row.0,
                "discord_kinds": row.1,
            })
        };
        let report = serde_json::json!({
            "frontier_id": frontier_id,
            "total_findings": total_findings,
            "frontier_support_size": support.len(),
            "filtered_row_count": rows.len(),
            "filter_kind": kind_filter,
            "histogram": histogram,
            "rows": rows.iter().map(row_value).collect::<Vec<_>>(),
        });
        println!(
            "{}",
            serde_json::to_string_pretty(&report).expect("serialize discord report")
        );
        return;
    }

    println!("vela discord");
    println!("  frontier: {frontier_id}");
    println!("  total findings: {total_findings}");
    println!(
        "  frontier support (any discord): {} of {}",
        support.len(),
        total_findings
    );
    if let Some(k) = kind_filter {
        println!("  filter: kind = {k}");
    }
    println!();
    if histogram.is_empty() {
        println!("  no discord detected.");
    } else {
        println!("  discord histogram:");
        for (k, n) in &histogram {
            println!("    {n:>4}  {k}");
        }
    }
    if !rows.is_empty() {
        println!();
        println!("  findings with discord (showing up to 50):");
        for (fid, kinds) in rows.iter().take(50) {
            println!("    {fid} · {}", kinds.join(", "));
        }
        if rows.len() > 50 {
            println!("    ... and {} more", rows.len() - 50);
        }
    }
}
13949
/// Fallback signal report used when the frontier itself cannot be loaded:
/// every field is marked "unavailable" with a single explanatory caveat, so
/// downstream printing still has something well-formed to render.
fn empty_signal_report() -> signals::SignalReport {
    signals::SignalReport {
        schema: "vela.signals.v0".to_string(),
        frontier: "unavailable".to_string(),
        signals: Vec::new(),
        review_queue: Vec::new(),
        proof_readiness: signals::ProofReadiness {
            status: "unavailable".to_string(),
            blockers: 0,
            warnings: 0,
            caveats: vec!["Frontier could not be loaded for signal analysis.".to_string()],
        },
    }
}
13964
/// Print a short human-readable signal summary. Under `strict`, additionally
/// flag a non-"ready" proof readiness status as a failed strict check (this
/// only prints; exiting is the caller's responsibility).
fn print_signal_summary(report: &signals::SignalReport, strict: bool) {
    println!();
    println!("  {}", "SIGNALS".dimmed());
    println!("  {}", style::tick_row(60));
    println!("  total signals: {}", report.signals.len());
    println!("  proof readiness: {}", report.proof_readiness.status);
    if !report.review_queue.is_empty() {
        println!("  review queue: {} items", report.review_queue.len());
    }
    if strict && report.proof_readiness.status != "ready" {
        println!(
            "  {} proof readiness has blocking signals.",
            style::lost("strict check failed")
        );
    }
}
13981
/// Add (or replace) a JSON file inside an existing proof-packet directory and
/// keep both packet indexes consistent with the new content.
///
/// Steps:
/// 1. Serialize `value` and write it at `relative_path` under `packet_dir`.
/// 2. Record the file's path/sha256/byte-count in both `manifest.json`
///    (its `included_files` array) and `packet.lock.json` (its `files`
///    array), replacing any existing entry for the same path.
/// 3. Because step 2 rewrote `packet.lock.json`, re-hash the lock file and
///    refresh the lock's own entry inside `manifest.json` so the manifest's
///    digest of the lock stays accurate.
fn append_packet_json_file(
    packet_dir: &Path,
    relative_path: &str,
    value: &Value,
) -> Result<(), String> {
    let content = serde_json::to_vec_pretty(value)
        .map_err(|e| format!("Failed to serialize packet JSON file: {e}"))?;
    let path = packet_dir.join(relative_path);
    if let Some(parent) = path.parent() {
        std::fs::create_dir_all(parent)
            .map_err(|e| format!("Failed to create {}: {e}", parent.display()))?;
    }
    std::fs::write(&path, &content)
        .map_err(|e| format!("Failed to write {}: {e}", path.display()))?;
    // Index entry shared by both manifests for the freshly written file.
    let entry = json!({
        "path": relative_path,
        "sha256": hex::encode(Sha256::digest(&content)),
        "bytes": content.len(),
    });

    // Step 2: upsert the entry into both index files. The two use different
    // array keys for their file lists.
    for manifest_name in ["manifest.json", "packet.lock.json"] {
        let manifest_path = packet_dir.join(manifest_name);
        let data = std::fs::read_to_string(&manifest_path)
            .map_err(|e| format!("Failed to read {}: {e}", manifest_path.display()))?;
        let mut manifest: Value = serde_json::from_str(&data)
            .map_err(|e| format!("Failed to parse {}: {e}", manifest_path.display()))?;
        let array_key = if manifest_name == "manifest.json" {
            "included_files"
        } else {
            "files"
        };
        let files = manifest
            .get_mut(array_key)
            .and_then(Value::as_array_mut)
            .ok_or_else(|| format!("{} missing {array_key} array", manifest_path.display()))?;
        // Drop any stale entry for this path before appending the new one.
        files.retain(|file| {
            file.get("path")
                .and_then(Value::as_str)
                .is_none_or(|path| path != relative_path)
        });
        files.push(entry.clone());
        std::fs::write(
            &manifest_path,
            serde_json::to_vec_pretty(&manifest)
                .map_err(|e| format!("Failed to serialize {}: {e}", manifest_path.display()))?,
        )
        .map_err(|e| format!("Failed to write {}: {e}", manifest_path.display()))?;
    }

    // Step 3: the loop above rewrote packet.lock.json, invalidating the hash
    // manifest.json holds for it — recompute and upsert that entry.
    let lock_path = packet_dir.join("packet.lock.json");
    let lock_content = std::fs::read(&lock_path)
        .map_err(|e| format!("Failed to read {}: {e}", lock_path.display()))?;
    let lock_entry = json!({
        "path": "packet.lock.json",
        "sha256": hex::encode(Sha256::digest(&lock_content)),
        "bytes": lock_content.len(),
    });
    let manifest_path = packet_dir.join("manifest.json");
    let data = std::fs::read_to_string(&manifest_path)
        .map_err(|e| format!("Failed to read {}: {e}", manifest_path.display()))?;
    let mut manifest: Value = serde_json::from_str(&data)
        .map_err(|e| format!("Failed to parse {}: {e}", manifest_path.display()))?;
    let files = manifest
        .get_mut("included_files")
        .and_then(Value::as_array_mut)
        .ok_or_else(|| format!("{} missing included_files array", manifest_path.display()))?;
    files.retain(|file| {
        file.get("path")
            .and_then(Value::as_str)
            .is_none_or(|path| path != "packet.lock.json")
    });
    files.push(lock_entry);
    std::fs::write(
        &manifest_path,
        serde_json::to_vec_pretty(&manifest)
            .map_err(|e| format!("Failed to serialize {}: {e}", manifest_path.display()))?,
    )
    .map_err(|e| format!("Failed to write {}: {e}", manifest_path.display()))?;
    Ok(())
}
14062
/// Render a `serve check-tools` JSON report as a human-readable summary:
/// frontier name/finding count, pass/fail totals, the tool list, and one
/// ok/lost line per individual check. Missing or mistyped fields fall back
/// to "unknown"/zero rather than erroring.
fn print_tool_check_report(report: &Value) {
    let summary = report.get("summary").unwrap_or(&Value::Null);
    let frontier = report.get("frontier").unwrap_or(&Value::Null);
    println!();
    println!("  {}", "VELA · SERVE · CHECK-TOOLS".dimmed());
    println!("  {}", style::tick_row(60));
    println!(
        "frontier: {}",
        frontier
            .get("name")
            .and_then(Value::as_str)
            .unwrap_or("unknown")
    );
    println!(
        "findings: {}",
        frontier
            .get("findings")
            .and_then(Value::as_u64)
            .unwrap_or_default()
    );
    println!(
        "checks: {} passed, {} failed",
        summary
            .get("passed")
            .and_then(Value::as_u64)
            .unwrap_or_default(),
        summary
            .get("failed")
            .and_then(Value::as_u64)
            .unwrap_or_default()
    );
    if let Some(tools) = report.get("tools").and_then(Value::as_array) {
        let names = tools
            .iter()
            .filter_map(Value::as_str)
            .collect::<Vec<_>>()
            .join(", ");
        println!("tools: {names}");
    }
    if let Some(checks) = report.get("checks").and_then(Value::as_array) {
        for check in checks {
            // Anything other than an explicit `"ok": true` is shown as lost.
            let status = if check.get("ok").and_then(Value::as_bool) == Some(true) {
                style::ok("ok")
            } else {
                style::lost("lost")
            };
            println!(
                "  {} {}",
                status,
                check
                    .get("tool")
                    .and_then(Value::as_str)
                    .unwrap_or("unknown")
            );
        }
    }
}
14120
/// Print a state-command report either as pretty JSON or as a short
/// human-readable summary. The event line only appears when an event was
/// actually applied.
fn print_state_report(report: &state::StateCommandReport, json_output: bool) {
    if json_output {
        println!(
            "{}",
            serde_json::to_string_pretty(report).expect("failed to serialize state command report")
        );
    } else {
        println!("{}", report.message);
        println!("  frontier: {}", report.frontier);
        println!("  finding: {}", report.finding_id);
        println!("  proposal: {}", report.proposal_id);
        println!("  status: {}", report.proposal_status);
        if let Some(event_id) = &report.applied_event_id {
            println!("  event: {}", event_id);
        }
        println!("  wrote: {}", report.wrote_to);
    }
}
14139
/// Render a `history` payload for one finding: its identity and confidence,
/// counts of every associated record type, the latest proof-packet status if
/// present, and up to 8 review events. All fields are read defensively —
/// anything missing or mistyped degrades to a default rather than erroring.
fn print_history(payload: &Value) {
    let finding = payload.get("finding").unwrap_or(&Value::Null);
    println!("vela history");
    println!(
        "  finding: {}",
        finding
            .get("id")
            .and_then(Value::as_str)
            .unwrap_or("unknown")
    );
    println!(
        "  assertion: {}",
        finding
            .get("assertion")
            .and_then(Value::as_str)
            .unwrap_or("")
    );
    println!(
        "  confidence: {:.3}",
        finding
            .get("confidence")
            .and_then(Value::as_f64)
            .unwrap_or_default()
    );
    // Count every associated record type; missing arrays count as zero.
    let reviews = payload
        .get("review_events")
        .and_then(Value::as_array)
        .map_or(0, Vec::len);
    let updates = payload
        .get("confidence_updates")
        .and_then(Value::as_array)
        .map_or(0, Vec::len);
    let annotations = finding
        .get("annotations")
        .and_then(Value::as_array)
        .map_or(0, Vec::len);
    let sources = payload
        .get("sources")
        .and_then(Value::as_array)
        .map_or(0, Vec::len);
    let atoms = payload
        .get("evidence_atoms")
        .and_then(Value::as_array)
        .map_or(0, Vec::len);
    let conditions = payload
        .get("condition_records")
        .and_then(Value::as_array)
        .map_or(0, Vec::len);
    let proposals = payload
        .get("proposals")
        .and_then(Value::as_array)
        .map_or(0, Vec::len);
    let events = payload
        .get("events")
        .and_then(Value::as_array)
        .map_or(0, Vec::len);
    println!("  review events: {reviews}");
    println!("  confidence updates: {updates}");
    println!("  annotations: {annotations}");
    println!("  sources: {sources}");
    println!("  evidence atoms: {atoms}");
    println!("  condition records: {conditions}");
    println!("  proposals: {proposals}");
    println!("  canonical events: {events}");
    // Status of the most recent proof packet, when one exists.
    if let Some(status) = payload
        .get("proof_state")
        .and_then(|value| value.get("latest_packet"))
        .and_then(|value| value.get("status"))
        .and_then(Value::as_str)
    {
        println!("  proof state: {status}");
    }
    // Show at most 8 review events: timestamp, id, reason.
    if let Some(events) = payload.get("review_events").and_then(Value::as_array) {
        for event in events.iter().take(8) {
            println!(
                "    - {} {} {}",
                event
                    .get("reviewed_at")
                    .and_then(Value::as_str)
                    .unwrap_or(""),
                event.get("id").and_then(Value::as_str).unwrap_or(""),
                event.get("reason").and_then(Value::as_str).unwrap_or("")
            );
        }
    }
}
14226
/// Serialized record of a proof run: what was checked, against which source,
/// and the outcome. NOTE(review): field semantics below are inferred from
/// names — confirm against the code that populates this struct.
#[derive(Debug, Serialize)]
pub struct ProofTrace {
    /// Version tag for this trace format.
    pub trace_version: String,
    /// The CLI invocation that produced the trace.
    pub command: Vec<String>,
    /// Path or identifier of the frontier source that was checked.
    pub source: String,
    /// Content hash of the source (presumably from `hash_path`).
    pub source_hash: String,
    pub schema_version: String,
    /// Artifacts examined during the run.
    pub checked_artifacts: Vec<String>,
    /// Optional benchmark results attached to the run.
    pub benchmark: Option<Value>,
    pub packet_manifest: String,
    pub packet_validation: String,
    /// Caveats surfaced during the run.
    pub caveats: Vec<String>,
    /// Overall outcome of the proof run.
    pub status: String,
    /// Where this trace was written.
    pub trace_path: String,
}
14242
/// Every subcommand name belonging to vela's science CLI surface; consulted
/// by `is_science_subcommand` when classifying an invocation.
const SCIENCE_SUBCOMMANDS: &[&str] = &[
    "compile-notes",
    "compile-code",
    "compile-data",
    "review-pending",
    "find-tensions",
    "plan-experiments",
    "scout",
    "check",
    "normalize",
    "integrity",
    "impact",
    "discord",
    "proof",
    "repo",
    "serve",
    "stats",
    "search",
    "tensions",
    "gaps",
    "bridge",
    "export",
    "packet",
    "bench",
    "conformance",
    "version",
    "sign",
    "actor",
    "frontier",
    "queue",
    "registry",
    "init",
    "import",
    "diff",
    "proposals",
    "finding",
    "link",
    "entity",
    "review",
    "note",
    "caveat",
    "revise",
    "reject",
    "history",
    "import-events",
    "retract",
    "propagate",
    "replicate",
    "replications",
    "dataset-add",
    "datasets",
    "code-add",
    "code-artifacts",
    "artifact-add",
    "artifact-to-state",
    "bridge-kit",
    "source-adapter",
    "runtime-adapter",
    "artifacts",
    "artifact-audit",
    "decision-brief",
    "trial-summary",
    "source-verification",
    "source-ingest-plan",
    "clinical-trial-import",
    "negative-result-add",
    "negative-results",
    "trajectory-create",
    "trajectory-step",
    "trajectories",
    "tier-set",
    "locator-repair",
    "span-repair",
    "entity-resolve",
    "entity-add",
    "source-fetch",
    "predict",
    "resolve",
    "predictions",
    "predictions-expire",
    "calibration",
    "consensus",
    "federation",
    "causal",
    "status",
    "log",
    "inbox",
    "ask",
    "bridges",
    "workbench",
    "verify",
    "ingest",
    "propose",
    "accept",
    "attest",
    "lineage",
    "carina",
    "atlas",
    "constellation",
];
14375
14376pub fn is_science_subcommand(name: &str) -> bool {
14377 SCIENCE_SUBCOMMANDS.contains(&name)
14378}
14379
/// Print the full "advanced" help screen: the complete strict v0 command
/// surface plus quick-start recipes. Reached via `vela help advanced`
/// (see `run_from_args`). The entire body is one raw-string literal so the
/// layout is exactly what the user sees.
fn print_strict_help() {
    println!(
        r#"Vela {}
Version control for scientific state.

Usage:
  vela <COMMAND>

Core flow (v0.74):
  init            Initialize a split frontier repo
  ingest          Ingest a paper, dataset, or Carina packet (dispatches by file type)
  propose         Create a finding.review proposal
  diff            Preview a `vpr_*` proposal, or compare two frontier files
  accept          Apply a proposal under reviewer authority
  attest          Sign findings under your private key
  log             Recent canonical state events
  lineage         State-transition replay for one finding
  serve           Local Workbench (findings, evidence, diff, lineage)

Read-only inspection:
  check           Validate a frontier, repo, or proof packet
  integrity       Check accepted frontier state integrity
  impact          Report downstream finding impact
  normalize       Apply deterministic frontier-state repairs
  proof           Export and validate a proof packet
  repo            Inspect split frontier repository status and shape
  stats           Show frontier statistics
  search          Search findings
  tensions        List candidate contradictions and tensions
  gaps            Inspect and rank candidate gap review leads
  bridge          Find candidate cross-domain connections

Advanced (proposal-creation, agent inboxes, federation):
  scout           Run Literature Scout against a folder of PDFs (writes proposals)
  compile-notes   Run Notes Compiler against a Markdown vault (writes proposals)
  compile-code    Run Code & Notebook Analyst against a research repo (writes proposals)
  compile-data    Run Datasets agent against a folder of CSV/TSV data (writes proposals)
  review-pending  Run Reviewer Agent: score every pending proposal (writes notes)
  find-tensions   Run Contradiction Finder: surface real contradictions among findings
  plan-experiments Run Experiment Planner: propose experiments for open questions / hypotheses
  export          Export frontier artifacts
  packet          Inspect or validate proof packets
  bench           Run deterministic benchmark gates
  conformance     Run protocol conformance vectors
  sign            Optional signing and signature verification
  runtime-adapter
                  Normalize external runtime exports into reviewable proposals
  version         Show version information
  import          Import frontier.json into a .vela repo
  proposals       Inspect, validate, export, import, accept, or reject write proposals
  artifact-to-state
                  Import a Carina artifact packet as reviewable proposals
  bridge-kit
                  Validate Carina artifact packets before importing runtime output
  source-adapter
                  Run reviewed source adapters into artifact-to-state proposals
  finding         Add or manage finding bundles as frontier state
  link            Add typed links between findings (incl. cross-frontier vf_at-vfr targets)
  entity          Resolve unresolved entities against a bundled common-entity table (v0.19)
  frontier        Scaffold (`new`), materialize, and manage frontier metadata + deps
  actor           Register Ed25519 publisher identities in a frontier
  registry        Publish, list, or pull frontiers (open hub at https://vela-hub.fly.dev)
  review          Create a review proposal or review interactively
  note            Add a lightweight note to a finding
  caveat          Create an explicit caveat proposal
  revise          Create a confidence revision proposal
  reject          Create a rejection proposal
  history         Show state-transition history for one finding (v0.74 alias: `lineage`)
  import-events   Import review/state events from a packet or JSON file
  retract         Create a retraction proposal
  propagate       Simulate impact over declared dependency links
  artifact-add    Register a content-addressed artifact
  artifacts       List content-addressed artifacts
  artifact-audit  Audit artifact locators, hashes, references, and profiles
  decision-brief  Show the validated decision brief projection
  trial-summary   Show the validated trial outcome projection
  source-verification Show the validated source verification projection
  source-ingest-plan Show the validated source ingest plan
  clinical-trial-import Import a ClinicalTrials.gov record as an artifact
  locator-repair  Mechanically repair an evidence atom's missing source locator
  span-repair     Mechanically repair a finding's missing evidence span
  entity-resolve  Resolve a finding entity to a canonical id
  source-fetch    Fetch metadata + abstract for a doi:/pmid:/nct: source
  atlas           Compose multiple frontiers into a domain-level Atlas (vat_*) (v0.78+)
  constellation   Compose multiple Atlases into a cross-domain Constellation (vco_*) (v0.82+)

Quick start (the demo):
  vela init demo --name "Your bounded question"
  vela ingest paper.pdf --frontier demo
  vela propose demo <vf_id> --status accepted --reason "..." --reviewer reviewer:you --apply
  vela diff <vpr_id> --frontier demo
  vela accept demo <vpr_id> --reviewer reviewer:you --reason "applied"
  vela serve --path demo

Substrate health:
  vela frontier materialize my-frontier --json
  vela repo status my-frontier --json
  vela proof verify my-frontier --json
  vela check my-frontier --strict --json

Monolithic frontier file:
  vela frontier new frontier.json --name "Your bounded question"
  vela finding add frontier.json --assertion "..." --author "reviewer:demo" --apply
  vela check frontier.json --json
  FINDING_ID=$(jq -r '.findings[0].id' frontier.json)
  vela review frontier.json "$FINDING_ID" --status contested --reason "Mouse-only evidence" --reviewer reviewer:demo --apply

Publish your own frontier (see docs/PUBLISHING.md):
  vela frontier new ./frontier.json --name "Your bounded question"
  vela finding add ./frontier.json --assertion "..." --author "reviewer:you" --apply
  vela sign generate-keypair --out keys
  vela actor add ./frontier.json reviewer:you --pubkey "$(cat keys/public.key)"
  vela registry publish ./frontier.json --owner reviewer:you --key keys/private.key \
      --to https://vela-hub.fly.dev
"#,
        env!("CARGO_PKG_VERSION")
    );
}
14498
/// Signature of the async Literature Scout entry point. A plain `fn` pointer
/// returning a boxed future, so the implementation can be injected from the
/// binary crate without this module depending on it.
pub type ScoutHandler = fn(
    folder: PathBuf,
    frontier: PathBuf,
    backend: Option<String>,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

// Slot for the injected Scout implementation; empty until registered.
static SCOUT_HANDLER: OnceLock<ScoutHandler> = OnceLock::new();

/// Install the Scout implementation. The first registration wins; any
/// later call is a silent no-op.
pub fn register_scout_handler(handler: ScoutHandler) {
    SCOUT_HANDLER.set(handler).ok();
}
14523
/// Signature of the async `atlas init` implementation injected by the binary.
pub type AtlasInitHandler = fn(
    atlases_root: PathBuf,
    name: String,
    domain: String,
    scope_note: Option<String>,
    frontiers: Vec<PathBuf>,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

// Slot for the injected implementation; empty until registered.
static ATLAS_INIT_HANDLER: OnceLock<AtlasInitHandler> = OnceLock::new();

/// Install the `atlas init` implementation. First registration wins;
/// later calls are silent no-ops.
pub fn register_atlas_init_handler(handler: AtlasInitHandler) {
    ATLAS_INIT_HANDLER.set(handler).ok();
}
14541
/// Signature of the async `atlas materialize` implementation injected by the binary.
pub type AtlasMaterializeHandler =
    fn(atlases_root: PathBuf, name: String, json: bool) -> Pin<Box<dyn Future<Output = ()> + Send>>;

// Slot for the injected implementation; empty until registered.
static ATLAS_MATERIALIZE_HANDLER: OnceLock<AtlasMaterializeHandler> = OnceLock::new();

/// Install the `atlas materialize` implementation. First registration wins;
/// later calls are silent no-ops.
pub fn register_atlas_materialize_handler(handler: AtlasMaterializeHandler) {
    ATLAS_MATERIALIZE_HANDLER.set(handler).ok();
}
14551
/// Signature of the async `atlas serve` implementation injected by the binary.
pub type AtlasServeHandler = fn(
    atlases_root: PathBuf,
    name: String,
    port: u16,
    open_browser: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

// Slot for the injected implementation; empty until registered.
static ATLAS_SERVE_HANDLER: OnceLock<AtlasServeHandler> = OnceLock::new();

/// Install the `atlas serve` implementation. First registration wins;
/// later calls are silent no-ops.
pub fn register_atlas_serve_handler(handler: AtlasServeHandler) {
    ATLAS_SERVE_HANDLER.set(handler).ok();
}
14568
/// Signature of the async `atlas update` implementation injected by the binary.
pub type AtlasUpdateHandler = fn(
    atlases_root: PathBuf,
    name: String,
    add_frontier: Vec<PathBuf>,
    remove_vfr_id: Vec<String>,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

// Slot for the injected implementation; empty until registered.
static ATLAS_UPDATE_HANDLER: OnceLock<AtlasUpdateHandler> = OnceLock::new();

/// Install the `atlas update` implementation. First registration wins;
/// later calls are silent no-ops.
pub fn register_atlas_update_handler(handler: AtlasUpdateHandler) {
    ATLAS_UPDATE_HANDLER.set(handler).ok();
}
14586
/// Signature of the async `constellation init` implementation injected by the binary.
pub type ConstellationInitHandler = fn(
    constellations_root: PathBuf,
    name: String,
    scope_note: Option<String>,
    atlases: Vec<PathBuf>,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

// Slot for the injected implementation; empty until registered.
static CONSTELLATION_INIT_HANDLER: OnceLock<ConstellationInitHandler> = OnceLock::new();

/// Install the `constellation init` implementation. First registration wins;
/// later calls are silent no-ops.
pub fn register_constellation_init_handler(handler: ConstellationInitHandler) {
    CONSTELLATION_INIT_HANDLER.set(handler).ok();
}
14603
/// Signature of the async `constellation materialize` implementation injected by the binary.
pub type ConstellationMaterializeHandler = fn(
    constellations_root: PathBuf,
    name: String,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

// Slot for the injected implementation; empty until registered.
static CONSTELLATION_MATERIALIZE_HANDLER: OnceLock<ConstellationMaterializeHandler> =
    OnceLock::new();

/// Install the `constellation materialize` implementation. First registration
/// wins; later calls are silent no-ops.
pub fn register_constellation_materialize_handler(handler: ConstellationMaterializeHandler) {
    CONSTELLATION_MATERIALIZE_HANDLER.set(handler).ok();
}
14616
/// Signature of the async `constellation serve` implementation injected by the binary.
pub type ConstellationServeHandler = fn(
    constellations_root: PathBuf,
    name: String,
    port: u16,
    open_browser: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

// Slot for the injected implementation; empty until registered.
static CONSTELLATION_SERVE_HANDLER: OnceLock<ConstellationServeHandler> = OnceLock::new();

/// Install the `constellation serve` implementation. First registration wins;
/// later calls are silent no-ops.
pub fn register_constellation_serve_handler(handler: ConstellationServeHandler) {
    CONSTELLATION_SERVE_HANDLER.set(handler).ok();
}
14629
/// Signature of the async Notes Compiler (`compile-notes`) implementation
/// injected by the binary.
pub type NotesHandler = fn(
    vault: PathBuf,
    frontier: PathBuf,
    backend: Option<String>,
    max_files: Option<usize>,
    max_items_per_category: Option<usize>,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

// Slot for the injected implementation; empty until registered.
static NOTES_HANDLER: OnceLock<NotesHandler> = OnceLock::new();

/// Install the Notes Compiler implementation. First registration wins;
/// later calls are silent no-ops.
pub fn register_notes_handler(handler: NotesHandler) {
    NOTES_HANDLER.set(handler).ok();
}
14649
/// Signature of the async Code & Notebook Analyst (`compile-code`)
/// implementation injected by the binary.
pub type CodeHandler = fn(
    root: PathBuf,
    frontier: PathBuf,
    backend: Option<String>,
    max_files: Option<usize>,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

// Slot for the injected implementation; empty until registered.
static CODE_HANDLER: OnceLock<CodeHandler> = OnceLock::new();

/// Install the Code Analyst implementation. First registration wins;
/// later calls are silent no-ops.
pub fn register_code_handler(handler: CodeHandler) {
    CODE_HANDLER.set(handler).ok();
}
14666
/// Signature of the async Datasets agent (`compile-data`) implementation
/// injected by the binary.
pub type DatasetsHandler = fn(
    root: PathBuf,
    frontier: PathBuf,
    backend: Option<String>,
    sample_rows: Option<usize>,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

// Slot for the injected implementation; empty until registered.
static DATASETS_HANDLER: OnceLock<DatasetsHandler> = OnceLock::new();

/// Install the Datasets agent implementation. First registration wins;
/// later calls are silent no-ops.
pub fn register_datasets_handler(handler: DatasetsHandler) {
    DATASETS_HANDLER.set(handler).ok();
}
14683
/// Signature of the async Reviewer Agent (`review-pending`) implementation
/// injected by the binary.
pub type ReviewerHandler = fn(
    frontier: PathBuf,
    backend: Option<String>,
    max_proposals: Option<usize>,
    batch_size: usize,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

// Slot for the injected implementation; empty until registered.
static REVIEWER_HANDLER: OnceLock<ReviewerHandler> = OnceLock::new();

/// Install the Reviewer Agent implementation. First registration wins;
/// later calls are silent no-ops.
pub fn register_reviewer_handler(handler: ReviewerHandler) {
    REVIEWER_HANDLER.set(handler).ok();
}
14700
/// Signature of the async Contradiction Finder (`find-tensions`)
/// implementation injected by the binary.
pub type TensionsHandler = fn(
    frontier: PathBuf,
    backend: Option<String>,
    max_findings: Option<usize>,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

// Slot for the injected implementation; empty until registered.
static TENSIONS_HANDLER: OnceLock<TensionsHandler> = OnceLock::new();

/// Install the Contradiction Finder implementation. First registration wins;
/// later calls are silent no-ops.
pub fn register_tensions_handler(handler: TensionsHandler) {
    TENSIONS_HANDLER.set(handler).ok();
}
14716
/// Signature of the async Experiment Planner (`plan-experiments`)
/// implementation injected by the binary.
pub type ExperimentsHandler = fn(
    frontier: PathBuf,
    backend: Option<String>,
    max_findings: Option<usize>,
    dry_run: bool,
    json: bool,
) -> Pin<Box<dyn Future<Output = ()> + Send>>;

// Slot for the injected implementation; empty until registered.
static EXPERIMENTS_HANDLER: OnceLock<ExperimentsHandler> = OnceLock::new();

/// Install the Experiment Planner implementation. First registration wins;
/// later calls are silent no-ops.
pub fn register_experiments_handler(handler: ExperimentsHandler) {
    EXPERIMENTS_HANDLER.set(handler).ok();
}
14732
/// Walk from the current working directory toward the filesystem root and
/// return the first directory that contains a `.vela/` subdirectory, or
/// `None` when no such ancestor exists (or the cwd cannot be determined).
fn find_vela_repo() -> Option<PathBuf> {
    let start = std::env::current_dir().ok()?;
    start
        .ancestors()
        .find(|dir| dir.join(".vela").is_dir())
        .map(Path::to_path_buf)
}
14759
/// Print the short, session-oriented help screen (the default for
/// `vela help` / `-h`). Summarizes the core flow and points at
/// `vela help advanced` for the full command list.
fn print_session_help() {
    println!();
    println!(
        "  Vela {} · Version control for scientific state.",
        env!("CARGO_PKG_VERSION")
    );
    println!();
    println!("  USAGE");
    println!("    vela                 Open a session against the nearest .vela/ repo");
    println!("    vela <command>       Run a specific subcommand");
    println!("    vela help advanced   Full subcommand list (30+ commands)");
    println!();
    println!("  CORE FLOW (v0.74)");
    println!("    init                 Initialize a split frontier repo");
    println!("    ingest <path>        Ingest a paper, dataset, or Carina packet");
    println!("    propose              Create a finding.review proposal");
    println!("    diff <vpr_id>        Preview a pending proposal vs current frontier");
    println!("    accept <vpr_id>      Apply a proposal under reviewer authority");
    println!("    attest               Sign findings under your private key");
    println!("    log                  Recent canonical state events");
    println!("    lineage <vf_id>      State-transition replay for one finding");
    println!("    serve                Local Workbench (find, evidence, diff, lineage)");
    println!();
    println!("  DAILY ALSO-RANS");
    println!("    status               One-screen frontier health");
    println!("    inbox                Pending review proposals");
    println!("    review               Review a proposal interactively");
    println!("    ask <question>       Plain-text query against the frontier");
    println!();
    println!("  REASONING (Pearl 1 → 2 → 3)");
    println!("    causal audit                    Per-finding identifiability");
    println!("    causal effect <src> --on <tgt>  Pairwise back-door / front-door");
    println!("    causal counterfactual <src> --target <tgt> --set-to <0..1>");
    println!();
    println!("  COMPOSITION");
    println!("    bridge <a> <b>       Cross-frontier hypotheses");
    println!("    consensus <vf>       Field consensus over similar claims");
    println!();
    println!("  PUBLISH");
    println!("    registry publish     Push a signed manifest to the hub");
    println!("    federation peer-add  Federate with another hub");
    println!();
    println!("  In session, type a single letter for a quick verb, or any");
    println!("  question in plain text. `q` or `exit` quits.");
    println!();
}
14806
/// Render the one-screen session dashboard for `project`: header with the
/// abbreviated frontier id, finding/event/pending counts, a pending-proposal
/// inbox bucketed by kind, a causal-audit summary, bridge and replication
/// rollups (read from disk under `repo_path`), and the single-letter verb menu.
fn print_session_dashboard(project: &crate::project::Project, repo_path: &Path) {
    use crate::causal_reasoning::{audit_frontier, summarize_audit};

    let label = frontier_label(project);
    let vfr = project.frontier_id();
    // Abbreviate the frontier id to 16 chars for the header line.
    let vfr_short = vfr.chars().take(16).collect::<String>();

    // Count pending proposals and bucket them by proposal kind for the inbox line.
    let mut pending = 0usize;
    let mut by_kind: std::collections::BTreeMap<String, usize> = std::collections::BTreeMap::new();
    for p in &project.proposals {
        if p.status == "pending_review" {
            pending += 1;
            *by_kind.entry(p.kind.clone()).or_insert(0) += 1;
        }
    }

    let audit = audit_frontier(project);
    let audit_summary = summarize_audit(&audit);

    // Tally bridge records stored as individual JSON files under .vela/bridges.
    let bridges_dir = repo_path.join(".vela/bridges");
    let mut bridge_total = 0usize;
    let mut bridge_confirmed = 0usize;
    let mut bridge_derived = 0usize;
    if bridges_dir.is_dir()
        && let Ok(entries) = std::fs::read_dir(&bridges_dir)
    {
        for entry in entries.flatten() {
            let path = entry.path();
            if path.extension().and_then(|s| s.to_str()) != Some("json") {
                continue;
            }
            // Every .json file counts toward the total, even if it fails to parse.
            bridge_total += 1;
            if let Ok(data) = std::fs::read_to_string(&path)
                && let Ok(b) = serde_json::from_str::<crate::bridge::Bridge>(&data)
            {
                match b.status {
                    crate::bridge::BridgeStatus::Confirmed => bridge_confirmed += 1,
                    crate::bridge::BridgeStatus::Derived => bridge_derived += 1,
                    _ => {}
                }
            }
        }
    }

    // Replication rollup: distinct findings with at least one successful
    // replication, plus a count of failed replication records.
    let mut targets_with_success = std::collections::HashSet::new();
    let mut failed_replications = 0usize;
    for r in &project.replications {
        if r.outcome == "replicated" {
            targets_with_success.insert(r.target_finding.clone());
        } else if r.outcome == "failed" {
            failed_replications += 1;
        }
    }

    println!();
    // Display the compiler version without its "vela/" prefix.
    let version = crate::project::VELA_COMPILER_VERSION
        .strip_prefix("vela/")
        .unwrap_or(crate::project::VELA_COMPILER_VERSION);
    println!(
        "  {}",
        format!("VELA · {version} · {label}")
            .to_uppercase()
            .dimmed()
    );
    println!("  {}", style::tick_row(60));
    println!(
        "  vfr_id    {}…    repo  {}",
        vfr_short,
        repo_path.display()
    );
    println!(
        "  findings  {:>4}    events  {}    proposals pending  {}",
        project.findings.len(),
        project.events.len(),
        pending
    );

    // Conditional status lines: only shown when there is something to report.
    if pending > 0 {
        let parts: Vec<String> = by_kind.iter().map(|(k, n)| format!("{n} {k}")).collect();
        println!("  {}  · {}", style::warn("inbox"), parts.join("  "));
    }
    if audit_summary.underidentified > 0 || audit_summary.conditional > 0 {
        println!(
            "  {}  · {} underidentified · {} conditional",
            // Underidentified findings escalate the audit line from warn to lost.
            if audit_summary.underidentified > 0 {
                style::lost("audit")
            } else {
                style::warn("audit")
            },
            audit_summary.underidentified,
            audit_summary.conditional,
        );
    }
    if bridge_total > 0 {
        println!(
            "  {}  · {} total · {} confirmed · {} awaiting review",
            style::ok("bridges"),
            bridge_total,
            bridge_confirmed,
            bridge_derived
        );
    }
    if !project.replications.is_empty() {
        println!(
            "  {}  · {} records · {} findings replicated · {} failed",
            style::ok("replications"),
            project.replications.len(),
            targets_with_success.len(),
            failed_replications,
        );
    }

    // Verb menu consumed by run_session_verb.
    println!();
    println!("  type a verb or ask anything:");
    println!("    a  audit problems    i  inbox (pending)    b  bridges");
    println!("    g  causal graph      l  log (recent)       c  counterfactuals");
    println!("    s  refresh status    h  help (more verbs)  q  quit");
    println!();
}
14926
/// Try to interpret `verb` as one of the single-letter/word session shortcuts.
/// Returns `true` when the input was handled (even if the underlying command
/// reported an error), `false` when it should fall through to free-text Q&A.
fn run_session_verb(verb: &str, repo_path: &Path) -> bool {
    match verb {
        "a" | "audit" => {
            // Causal audit limited to problem findings, human-readable output.
            let action = CausalAction::Audit {
                frontier: repo_path.to_path_buf(),
                problems_only: true,
                json: false,
            };
            cmd_causal(action);
            true
        }
        "i" | "inbox" => {
            // List only proposals still awaiting review.
            let action = ProposalAction::List {
                frontier: repo_path.to_path_buf(),
                status: Some("pending_review".into()),
                json: false,
            };
            cmd_proposals(action);
            true
        }
        "b" | "bridges" => {
            let action = BridgesAction::List {
                frontier: repo_path.to_path_buf(),
                status: None,
                json: false,
            };
            cmd_bridges(action);
            true
        }
        "g" | "graph" => {
            let action = CausalAction::Graph {
                frontier: repo_path.to_path_buf(),
                node: None,
                json: false,
            };
            cmd_causal(action);
            true
        }
        "l" | "log" => {
            // Show the 10 most recent events.
            cmd_log(repo_path, 10, None, false);
            true
        }
        "c" | "counterfactual" | "counterfactuals" => {
            let project = match repo::load_from_path(repo_path) {
                Ok(p) => p,
                Err(e) => {
                    // Still counts as handled: report the load error, stay in session.
                    eprintln!("{} {e}", style::err_prefix());
                    return true;
                }
            };
            println!();
            println!("  {}", "VELA · COUNTERFACTUAL · LIVE PAIRS".dimmed());
            println!("  {}", style::tick_row(60));
            // A "live pair" is a depends/supports link carrying a mechanism
            // annotation; only those support counterfactual queries.
            let mut pairs = 0usize;
            for child in &project.findings {
                for link in &child.links {
                    if !matches!(link.link_type.as_str(), "depends" | "supports") {
                        continue;
                    }
                    if link.mechanism.is_none() {
                        continue;
                    }
                    // Strip any "<prefix>:" from the link target to get the bare id.
                    let parent = link
                        .target
                        .split_once(':')
                        .map_or(link.target.as_str(), |(_, r)| r);
                    pairs += 1;
                    // Print at most 10 pairs but keep counting the rest.
                    if pairs <= 10 {
                        println!("    · do({parent}) → {}", child.id);
                    }
                }
            }
            if pairs == 0 {
                println!("    no mechanism-annotated edges found.");
                println!("    add a mechanism via the link's `mechanism` field; see /counterfactual");
            } else {
                println!();
                println!("    {pairs} live pair(s). Run with:");
                println!("    vela causal counterfactual <repo> <src> --target <tgt> --set-to 0.5");
            }
            println!();
            true
        }
        "s" | "status" | "refresh" => {
            // Reload from disk so the dashboard reflects external changes.
            match repo::load_from_path(repo_path) {
                Ok(p) => print_session_dashboard(&p, repo_path),
                Err(e) => eprintln!("{} {e}", style::err_prefix()),
            }
            true
        }
        "h" | "help" | "?" => {
            print_session_help();
            true
        }
        // Unknown input: let the caller treat it as a free-text question.
        _ => false,
    }
}
15031
15032fn run_session() {
15033 let repo_path = match find_vela_repo() {
15034 Some(p) => p,
15035 None => {
15036 println!();
15037 println!(
15038 " {}",
15039 "VELA · NO FRONTIER FOUND IN CWD OR ANY PARENT".dimmed()
15040 );
15041 println!(" {}", style::tick_row(60));
15042 println!(" Run `vela init` here to create a frontier, or cd into one.");
15043 println!(" Or run `vela help` for the command list.");
15044 println!();
15045 return;
15046 }
15047 };
15048
15049 let project = match repo::load_from_path(&repo_path) {
15050 Ok(p) => p,
15051 Err(e) => {
15052 eprintln!("{} failed to load .vela/ repo: {e}", style::err_prefix());
15053 std::process::exit(1);
15054 }
15055 };
15056
15057 print_session_dashboard(&project, &repo_path);
15058
15059 use std::io::{BufRead, Write};
15060 let stdin = std::io::stdin();
15061 let mut stdout = std::io::stdout();
15062 loop {
15063 print!(" > ");
15064 stdout.flush().ok();
15065 let mut line = String::new();
15066 if stdin.lock().read_line(&mut line).is_err() {
15067 break;
15068 }
15069 let input = line.trim();
15070 if input.is_empty() {
15071 continue;
15072 }
15073 if matches!(input, "q" | "quit" | "exit") {
15074 break;
15075 }
15076 if run_session_verb(input, &repo_path) {
15077 continue;
15078 }
15079 let project = match repo::load_from_path(&repo_path) {
15081 Ok(p) => p,
15082 Err(e) => {
15083 eprintln!("{} {e}", style::err_prefix());
15084 continue;
15085 }
15086 };
15087 answer(&project, input, false);
15088 }
15089}
15090
/// Binary entry point (called from `main`). Handles the argv special cases —
/// no arguments → interactive session; help/version; `proof verify|explain`
/// fast paths that bypass clap; rejection of commands not on the strict
/// release surface — then hands off to the async clap-based `run_command`
/// dispatcher on a fresh tokio runtime.
pub fn run_from_args() {
    style::init();
    let args = std::env::args().collect::<Vec<_>>();
    match args.get(1).map(String::as_str) {
        None => {
            // Bare `vela`: open the interactive session.
            run_session();
            return;
        }
        Some("-h" | "--help" | "help") => {
            // `vela help advanced` shows the full strict surface; anything
            // else gets the short session-oriented help.
            if args.get(2).map(String::as_str) == Some("advanced") {
                print_strict_help();
            } else {
                print_session_help();
            }
            return;
        }
        Some("-V" | "--version" | "version") => {
            println!("vela {}", env!("CARGO_PKG_VERSION"));
            return;
        }
        Some("proof") if args.get(2).map(String::as_str) == Some("verify") => {
            // Fast path: `vela proof verify <repo> [--json]` without clap.
            let json = args.iter().any(|arg| arg == "--json");
            // First non-flag argument after `proof verify` is the repo path;
            // missing path is a usage error (exit code 2).
            let frontier = args
                .iter()
                .skip(3)
                .find(|arg| !arg.starts_with('-'))
                .map(PathBuf::from)
                .unwrap_or_else(|| {
                    eprintln!(
                        "{} proof verify requires a frontier repo",
                        style::err_prefix()
                    );
                    std::process::exit(2);
                });
            cmd_proof_verify(&frontier, json);
            return;
        }
        Some("proof") if args.get(2).map(String::as_str) == Some("explain") => {
            // Fast path: `vela proof explain <repo>` without clap.
            let frontier = args
                .iter()
                .skip(3)
                .find(|arg| !arg.starts_with('-'))
                .map(PathBuf::from)
                .unwrap_or_else(|| {
                    eprintln!(
                        "{} proof explain requires a frontier repo",
                        style::err_prefix()
                    );
                    std::process::exit(2);
                });
            cmd_proof_explain(&frontier);
            return;
        }
        Some(cmd) if !is_science_subcommand(cmd) => {
            // Unknown or non-release command: refuse with exit code 2.
            eprintln!(
                "{} unknown or non-release command: {cmd}",
                style::err_prefix()
            );
            eprintln!("run `vela --help` for the strict v0 command surface.");
            std::process::exit(2);
        }
        // Recognized subcommand: fall through to the clap dispatcher below.
        Some(_) => {}
    }
    let runtime = tokio::runtime::Runtime::new().expect("failed to create tokio runtime");
    runtime.block_on(run_command());
}
15162
15163fn fail(message: &str) -> ! {
15164 eprintln!("{} {message}", style::err_prefix());
15165 std::process::exit(1);
15166}
15167
15168fn validate_enum_arg(flag: &str, value: &str, valid: &[&str]) {
15173 if !valid.contains(&value) {
15174 fail(&format!(
15175 "invalid {flag} '{value}'. Valid: {}",
15176 valid.join(", ")
15177 ));
15178 }
15179}
15180
/// Generic-returning wrapper around [`fail`], usable where a value of type `T`
/// is syntactically required (e.g. inside `unwrap_or_else`). Never actually
/// returns — the `!` from `fail` coerces to any `T`.
fn fail_return<T>(message: &str) -> T {
    fail(message)
}